syntax: Split parsing and highlighting

pull/1563/head^2
Blaž Hrastnik 3 years ago
parent 83bde1004d
commit 6728e44490

Cargo.lock generated

@@ -381,6 +381,7 @@ dependencies = [
 "serde",
 "serde_json",
 "similar",
+"slotmap",
 "smallvec",
 "tendril",
 "toml",

@ -22,6 +22,7 @@ unicode-segmentation = "1.8"
unicode-width = "0.1" unicode-width = "0.1"
unicode-general-category = "0.4" unicode-general-category = "0.4"
# slab = "0.4.2" # slab = "0.4.2"
slotmap = "1.0"
tree-sitter = "0.20" tree-sitter = "0.20"
once_cell = "1.9" once_cell = "1.9"
arc-swap = "1" arc-swap = "1"

@@ -454,7 +454,7 @@ where
    let language_config = loader.language_config_for_scope("source.rust").unwrap();
    let highlight_config = language_config.highlight_config(&[]).unwrap();
-    let syntax = Syntax::new(&doc, highlight_config.clone());
+    let syntax = Syntax::new(&doc, highlight_config.clone(), std::sync::Arc::new(loader));
    let text = doc.slice(..);
    let tab_width = 4;

@@ -9,6 +9,7 @@ use crate::{
pub use helix_syntax::get_language;

use arc_swap::ArcSwap;
+use slotmap::{DefaultKey as LayerId, HopSlotMap};

use std::{
    borrow::Cow,
@@ -388,9 +389,9 @@ thread_local! {
#[derive(Debug)]
pub struct Syntax {
-    config: Arc<HighlightConfiguration>,
-    root_layer: LanguageLayer,
+    layers: HopSlotMap<LayerId, LanguageLayer>,
+    root: LayerId,
+    loader: Arc<Loader>,
}

fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<str> {
@@ -400,38 +401,36 @@ fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<str> {
}

impl Syntax {
-    // buffer, grammar, config, grammars, sync_timeout?
-    pub fn new(
-        /*language: Lang,*/ source: &Rope,
-        config: Arc<HighlightConfiguration>,
-    ) -> Self {
-        let root_layer = LanguageLayer { tree: None };
+    pub fn new(source: &Rope, config: Arc<HighlightConfiguration>, loader: Arc<Loader>) -> Self {
+        let root_layer = LanguageLayer {
+            tree: None,
+            config,
+            depth: 0,
+            ranges: vec![Range {
+                start_byte: 0,
+                end_byte: usize::MAX,
+                start_point: Point::new(0, 0),
+                end_point: Point::new(usize::MAX, usize::MAX),
+            }],
+        };

        // track markers of injections
        // track scope_descriptor: a Vec of scopes for item in tree

+        let mut layers = HopSlotMap::default();
+        let root = layers.insert(root_layer);
+
        let mut syntax = Self {
            // grammar,
-            config,
-            root_layer,
+            root,
+            layers,
+            loader,
        };

-        // update root layer
-        PARSER.with(|ts_parser| {
-            // TODO: handle the returned `Result` properly.
-            let _ = syntax.root_layer.parse(
-                &mut ts_parser.borrow_mut(),
-                &syntax.config,
-                source,
-                0,
-                vec![Range {
-                    start_byte: 0,
-                    end_byte: usize::MAX,
-                    start_point: Point::new(0, 0),
-                    end_point: Point::new(usize::MAX, usize::MAX),
-                }],
-            );
-        });
+        syntax
+            .update(source, source, &ChangeSet::new(&source))
+            .unwrap();

        syntax
    }
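
Condensed paraphrase of the constructor above, with hypothetical simplified types (the real code uses Rope, ChangeSet and HighlightConfiguration): Syntax::new no longer parses directly, it inserts the root layer into the slotmap and funnels the initial parse through the same update() path used for edits.

use slotmap::{DefaultKey as LayerId, HopSlotMap};

struct Layer {
    depth: usize,
    parsed: bool,
}

struct Syntax {
    layers: HopSlotMap<LayerId, Layer>,
    root: LayerId,
}

impl Syntax {
    fn new(source: &str) -> Self {
        let mut layers = HopSlotMap::default();
        let root = layers.insert(Layer { depth: 0, parsed: false });
        let mut syntax = Syntax { layers, root };
        // An "empty changeset" update performs the first full parse.
        syntax.update(source);
        syntax
    }

    fn update(&mut self, _source: &str) {
        // Re-parse the root layer; the real code also walks injections here.
        self.layers[self.root].parsed = true;
    }
}

fn main() {
    let syntax = Syntax::new("fn main() {}");
    assert!(syntax.layers[syntax.root].parsed);
}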
@@ -441,30 +440,197 @@ impl Syntax {
        source: &Rope,
        changeset: &ChangeSet,
    ) -> Result<(), Error> {
-        PARSER.with(|ts_parser| {
-            self.root_layer.update(
-                &mut ts_parser.borrow_mut(),
-                &self.config,
-                old_source,
-                source,
-                changeset,
-            )
-        })
+        use std::collections::VecDeque;
+        let mut queue = VecDeque::new();
+        // let source = source.slice(..);
+
+        let injection_callback = |language: &str| {
+            self.loader
+                .language_configuration_for_injection_string(language)
+                .and_then(|language_config| {
+                    // TODO: get these theme.scopes from somewhere, probably make them settable on Loader
+                    let scopes = &[
+                        "attribute",
+                        "constant",
+                        "function.builtin",
+                        "function",
+                        "keyword",
+                        "operator",
+                        "property",
+                        "punctuation",
+                        "punctuation.bracket",
+                        "punctuation.delimiter",
+                        "string",
+                        "string.special",
+                        "tag",
+                        "type",
+                        "type.builtin",
+                        "variable",
+                        "variable.builtin",
+                        "variable.parameter",
+                    ];
+                    language_config.highlight_config(
+                        &scopes
+                            .iter()
+                            .map(|scope| scope.to_string())
+                            .collect::<Vec<_>>(),
+                    )
+                })
+        };
queue.push_back(self.root);
// HAXX: for now, clear all layers except root so they get re-parsed
self.layers.retain(|id, _| id == self.root);
// Workaround for Syntax::new() with empty changeset
if !changeset.is_empty() {
// TODO: do this in a recursive way
// Notify the tree about all the changes
let edits = generate_edits(old_source.slice(..), changeset);
let tree = self.layers[self.root].tree.as_mut().unwrap();
for edit in edits.iter().rev() {
// apply the edits in reverse. If we applied them in order then edit 1 would disrupt
// the positioning of edit 2
tree.edit(edit);
}
}
PARSER.with(|ts_parser| {
let ts_parser = &mut ts_parser.borrow_mut();
let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new);
// TODO: might need to set cursor range
while let Some(layer_id) = queue.pop_front() {
// Re-parse the tree.
self.layers[layer_id].parse(ts_parser, source)?;
let source = source.slice(..);
let layer = &self.layers[layer_id];
// Process injections.
let matches = cursor.matches(
&layer.config.injections_query,
layer.tree().root_node(),
RopeProvider(source),
);
let mut injections = Vec::new();
for mat in matches {
let (language_name, content_node, include_children) = injection_for_match(
&layer.config,
&layer.config.injections_query,
&mat,
source,
);
-    // TODO: deal with injections and update them too
// Explicitly remove this match so that none of its other captures will remain
// in the stream of captures.
mat.remove(); // TODO: is this still necessary?
// If a language is found with the given name, then add a new language layer
// to the highlighted document.
if let (Some(language_name), Some(content_node)) = (language_name, content_node)
{
if let Some(config) = (injection_callback)(&language_name) {
let ranges =
intersect_ranges(&layer.ranges, &[content_node], include_children);
if !ranges.is_empty() {
log::info!("{} {:?}", language_name, ranges);
injections.push((config, ranges));
}
}
}
-    }
}
-    // fn buffer_changed -> call layer.update(range, new_text) on root layer and then all marker layers
// Process combined injections.
if let Some(combined_injections_query) = &layer.config.combined_injections_query {
let mut injections_by_pattern_index =
vec![(None, Vec::new(), false); combined_injections_query.pattern_count()];
let matches = cursor.matches(
combined_injections_query,
layer.tree().root_node(),
RopeProvider(source),
);
for mat in matches {
let entry = &mut injections_by_pattern_index[mat.pattern_index];
let (language_name, content_node, include_children) = injection_for_match(
&layer.config,
combined_injections_query,
&mat,
source,
);
if language_name.is_some() {
entry.0 = language_name;
}
if let Some(content_node) = content_node {
entry.1.push(content_node);
}
entry.2 = include_children;
}
for (lang_name, content_nodes, includes_children) in injections_by_pattern_index
{
if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) {
if let Some(config) = (injection_callback)(&lang_name) {
let ranges = intersect_ranges(
&layer.ranges,
&content_nodes,
includes_children,
);
if !ranges.is_empty() {
injections.push((config, ranges));
}
}
}
}
}
let depth = layer.depth + 1;
// TODO: can't inline this since matches borrows self.layers
for (config, ranges) in injections {
let layer_id = self.layers.insert(LanguageLayer {
tree: None,
config,
depth,
ranges,
});
queue.push_back(layer_id);
}
}
// Return the cursor back in the pool.
ts_parser.cursors.push(cursor);
Ok(()) // so we can use the try operator
})?;
Ok(())
}
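
Why the loop above walks edits.iter().rev(): edits are recorded against the old text, so applying them back-to-front keeps earlier byte offsets valid. A self-contained illustration on a plain String (not the tree-sitter API used above):

fn apply_edits(text: &mut String, edits: &[(usize, usize, &str)]) {
    // `edits` are (start, end, replacement) positions in the *old* text,
    // sorted ascending. Applying them in reverse means no edit shifts the
    // offsets of an edit that has not been applied yet.
    for &(start, end, replacement) in edits.iter().rev() {
        text.replace_range(start..end, replacement);
    }
}

fn main() {
    let mut s = String::from("fn foo() {}");
    // Rename `foo` and insert a parameter, both described against the old text.
    apply_edits(&mut s, &[(3, 6, "barbaz"), (7, 7, "x: u32")]);
    assert_eq!(s, "fn barbaz(x: u32) {}");
    // Applied in forward order, the insertion at byte 7 would land inside the
    // new, longer identifier instead of inside the parentheses.
}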
// fn buffer_changed -> call layer.update(range, new_text) on root layer and then all marker layers
// call this on transaction.apply() -> buffer_changed(changes) // call this on transaction.apply() -> buffer_changed(changes)
//
// fn parse(language, old_tree, ranges)
//
pub fn tree(&self) -> &Tree { pub fn tree(&self) -> &Tree {
self.root_layer.tree() self.layers[self.root].tree()
} }
-    // root: Tree
-    // injections: Vec<(Tree, Range marker)>
-    // handle updates that go over a part of the layer by truncating them to start/end appropriately
-    // injections tracked by marker:
-    //  if marker areas match it's fine and update
-    //  if not found add new layer
-    //  if length 0 then area got removed, clean up the layer
    //
-    // <!--update_for_injection(grammar)-->
+    // layer update:
+    //   if range.len = 0 then remove the layer
+    //   for change in changes { tree.edit(change) }
+    //   tree = parser.parse(.., tree, ..)
+    //   calculate affected range and update injections
+    //   injection update:
+    //     look for existing injections
+    //     if present, range = (first injection start, last injection end)

    // Highlighting
@@ -474,31 +640,37 @@ impl Syntax {
        source: RopeSlice<'a>,
        range: Option<std::ops::Range<usize>>,
        cancellation_flag: Option<&'a AtomicUsize>,
-        injection_callback: impl FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
    ) -> impl Iterator<Item = Result<HighlightEvent, Error>> + 'a {
-        // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
-        // prevents them from being moved. But both of these values are really just
-        // pointers, so it's actually ok to move them.
-
-        // reuse a cursor from the pool if possible
-        let mut cursor = PARSER.with(|ts_parser| {
-            let highlighter = &mut ts_parser.borrow_mut();
-            highlighter.cursors.pop().unwrap_or_else(QueryCursor::new)
-        });
-        let tree_ref = self.tree();
-        let cursor_ref = unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
-        let query_ref = &self.config.query;
-        let config_ref = self.config.as_ref();
+        let mut layers = self
+            .layers
+            .iter()
+            .map(|(_, layer)| {
+                // Reuse a cursor from the pool if available.
+                let mut cursor = PARSER.with(|ts_parser| {
+                    let highlighter = &mut ts_parser.borrow_mut();
+                    highlighter.cursors.pop().unwrap_or_else(QueryCursor::new)
+                });
+
+                // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
+                // prevents them from being moved. But both of these values are really just
+                // pointers, so it's actually ok to move them.
+                let cursor_ref =
+                    unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };

-        // if reusing cursors & no range this resets to whole range
-        cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
+                // if reusing cursors & no range this resets to whole range
+                // TODO: handle intersect (range & layer.range)
+                // cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
+                cursor_ref.set_byte_range(0..usize::MAX);

-        let captures = cursor_ref
-            .captures(query_ref, tree_ref.root_node(), RopeProvider(source))
-            .peekable();
+                let captures = cursor_ref
+                    .captures(
+                        &layer.config.query,
+                        layer.tree().root_node(),
+                        RopeProvider(source),
+                    )
+                    .peekable();

-        // manually craft the root layer based on the existing tree
-        let layer = HighlightIterLayer {
-            highlight_end_stack: Vec::new(),
-            scope_stack: vec![LocalScope {
-                inherits: false,
+                HighlightIterLayer {
+                    highlight_end_stack: Vec::new(),
+                    scope_stack: vec![LocalScope {
+                        inherits: false,
@@ -506,29 +678,38 @@ impl Syntax {
                        local_defs: Vec::new(),
                    }],
                    cursor,
-            depth: 0,
-            _tree: None,
-            captures,
-            config: config_ref,
-            ranges: vec![Range {
-                start_byte: 0,
-                end_byte: usize::MAX,
-                start_point: Point::new(0, 0),
-                end_point: Point::new(usize::MAX, usize::MAX),
-            }],
-        };
+                    _tree: None,
+                    captures,
+                    config: layer.config.as_ref(), // TODO: just reuse
+                    depth: layer.depth,            // TODO: just reuse
+                    ranges: layer.ranges.clone(),
+                }
+            })
+            .collect::<Vec<_>>();
+
+        log::info!("--");
+        // HAXX: arrange layers by byte range, with deeper layers positioned first
+        layers.sort_by_key(|layer| {
+            (
+                layer.ranges.first().cloned(),
+                std::cmp::Reverse(layer.depth),
+            )
+        });

        let mut result = HighlightIter {
            source,
            byte_offset: range.map_or(0, |r| r.start), // TODO: simplify
-            injection_callback,
            cancellation_flag,
            iter_count: 0,
-            layers: vec![layer],
+            layers,
            next_event: None,
            last_highlight_range: None,
        };
        result.sort_layers();
+        for layer in &result.layers {
+            log::info!("> {:?} {:?}", layer.depth, layer.ranges); // <- for some reason layers are reversed here
+        }
        result
    }

    // on_tokenize
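
The HAXX sort above keys each layer on (first range, Reverse(depth)). A tiny standalone model of that ordering, using plain tuples instead of HighlightIterLayer:

use std::cmp::Reverse;

fn main() {
    // (start byte of the layer's first range, depth)
    let mut layers = vec![(Some(0usize), 0usize), (Some(10), 1), (Some(10), 2)];
    layers.sort_by_key(|&(start, depth)| (start, Reverse(depth)));
    // Layers are ordered by byte position; at the same position, the deeper
    // (more nested) layer comes first.
    assert_eq!(layers, vec![(Some(0), 0), (Some(10), 2), (Some(10), 1)]);
}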
@@ -556,32 +737,24 @@ impl Syntax {
pub struct LanguageLayer {
    // mode
    // grammar
-    // depth
+    pub config: Arc<HighlightConfiguration>,
    pub(crate) tree: Option<Tree>,
+    pub ranges: Vec<Range>,
+    pub depth: usize,
}

impl LanguageLayer {
-    // pub fn new() -> Self {
-    //     Self { tree: None }
-    // }
-
    pub fn tree(&self) -> &Tree {
        // TODO: no unwrap
        self.tree.as_ref().unwrap()
    }

-    fn parse(
-        &mut self,
-        ts_parser: &mut TsParser,
-        config: &HighlightConfiguration,
-        source: &Rope,
-        _depth: usize,
-        ranges: Vec<Range>,
-    ) -> Result<(), Error> {
-        if ts_parser.parser.set_included_ranges(&ranges).is_ok() {
-            ts_parser
-                .parser
-                .set_language(config.language)
-                .map_err(|_| Error::InvalidLanguage)?;
+    fn parse(&mut self, ts_parser: &mut TsParser, source: &Rope) -> Result<(), Error> {
+        ts_parser.parser.set_included_ranges(&self.ranges).unwrap();
+
+        ts_parser
+            .parser
+            .set_language(self.config.language)
+            .map_err(|_| Error::InvalidLanguage)?;

        // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) };
@@ -600,11 +773,11 @@ impl LanguageLayer {
                self.tree.as_ref(),
            )
            .ok_or(Error::Cancelled)?;
        // unsafe { ts_parser.parser.set_cancellation_flag(None) };
-            self.tree = Some(tree)
-        }
+        self.tree = Some(tree);
+
        Ok(())
    }
+}

pub(crate) fn generate_edits(
    old_text: RopeSlice,
@@ -713,77 +886,6 @@ impl LanguageLayer {
    edits
}
fn update(
&mut self,
ts_parser: &mut TsParser,
config: &HighlightConfiguration,
old_source: &Rope,
source: &Rope,
changeset: &ChangeSet,
) -> Result<(), Error> {
if changeset.is_empty() {
return Ok(());
}
let edits = Self::generate_edits(old_source.slice(..), changeset);
// Notify the tree about all the changes
for edit in edits.iter().rev() {
// apply the edits in reverse. If we applied them in order then edit 1 would disrupt
// the positioning of edit 2
self.tree.as_mut().unwrap().edit(edit);
}
self.parse(
ts_parser,
config,
source,
0,
// TODO: what to do about this range on update
vec![Range {
start_byte: 0,
end_byte: usize::MAX,
start_point: Point::new(0, 0),
end_point: Point::new(usize::MAX, usize::MAX),
}],
)
}
// fn highlight_iter() -> same as Mode but for this layer. Mode composits these
// fn buffer_changed
// fn update(range)
// fn update_injections()
}
// -- refactored from tree-sitter-highlight to be able to retain state
// TODO: add seek() to iter
// problem: any time a layer is updated it must update it's injections on the parent (potentially
// removing some from use)
// can't modify to vec and exist in it at the same time since that would violate borrows
// maybe we can do with an arena
// maybe just caching on the top layer and nevermind the injections for now?
//
// Grammar {
// layers: Vec<Box<Layer>> to prevent memory moves when vec is modified
// }
// injections tracked by marker:
// if marker areas match it's fine and update
// if not found add new layer
// if length 0 then area got removed, clean up the layer
//
// layer update:
// if range.len = 0 then remove the layer
// for change in changes { tree.edit(change) }
// tree = parser.parse(.., tree, ..)
// calculate affected range and update injections
// injection update:
// look for existing injections
// if present, range = (first injection start, last injection end)
//
// For now cheat and just throw out non-root layers if they exist. This should still improve
// parsing in majority of cases.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize};
use tree_sitter::{
@@ -820,8 +922,8 @@ pub enum HighlightEvent {
pub struct HighlightConfiguration {
    pub language: Grammar,
    pub query: Query,
+    injections_query: Query,
    combined_injections_query: Option<Query>,
-    locals_pattern_index: usize,
    highlights_pattern_index: usize,
    highlight_indices: ArcSwap<Vec<Option<Highlight>>>,
    non_local_variable_patterns: Vec<bool>,
@ -848,13 +950,9 @@ struct LocalScope<'a> {
} }
#[derive(Debug)] #[derive(Debug)]
struct HighlightIter<'a, F> struct HighlightIter<'a> {
where
F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
{
source: RopeSlice<'a>, source: RopeSlice<'a>,
byte_offset: usize, byte_offset: usize,
injection_callback: F,
cancellation_flag: Option<&'a AtomicUsize>, cancellation_flag: Option<&'a AtomicUsize>,
layers: Vec<HighlightIterLayer<'a>>, layers: Vec<HighlightIterLayer<'a>>,
iter_count: usize, iter_count: usize,
@ -894,8 +992,8 @@ struct HighlightIterLayer<'a> {
config: &'a HighlightConfiguration, config: &'a HighlightConfiguration,
highlight_end_stack: Vec<usize>, highlight_end_stack: Vec<usize>,
scope_stack: Vec<LocalScope<'a>>, scope_stack: Vec<LocalScope<'a>>,
ranges: Vec<Range>,
depth: usize, depth: usize,
ranges: Vec<Range>, // TEMP
} }
impl<'a> fmt::Debug for HighlightIterLayer<'a> { impl<'a> fmt::Debug for HighlightIterLayer<'a> {
@@ -927,38 +1025,32 @@ impl HighlightConfiguration {
    ) -> Result<Self, QueryError> {
        // Concatenate the query strings, keeping track of the start offset of each section.
        let mut query_source = String::new();
-        query_source.push_str(injection_query);
-        let locals_query_offset = query_source.len();
        query_source.push_str(locals_query);
        let highlights_query_offset = query_source.len();
        query_source.push_str(highlights_query);

        // Construct a single query by concatenating the three query strings, but record the
        // range of pattern indices that belong to each individual string.
-        let mut query = Query::new(language, &query_source)?;
-        let mut locals_pattern_index = 0;
+        let query = Query::new(language, &query_source)?;
        let mut highlights_pattern_index = 0;
        for i in 0..(query.pattern_count()) {
            let pattern_offset = query.start_byte_for_pattern(i);
            if pattern_offset < highlights_query_offset {
-                if pattern_offset < highlights_query_offset {
-                    highlights_pattern_index += 1;
-                }
-                if pattern_offset < locals_query_offset {
-                    locals_pattern_index += 1;
-                }
+                highlights_pattern_index += 1;
            }
        }

+        let mut injections_query = Query::new(language, injection_query)?;
+
        // Construct a separate query just for dealing with the 'combined injections'.
        // Disable the combined injection patterns in the main query.
        let mut combined_injections_query = Query::new(language, injection_query)?;
        let mut has_combined_queries = false;
-        for pattern_index in 0..locals_pattern_index {
-            let settings = query.property_settings(pattern_index);
+        for pattern_index in 0..injections_query.pattern_count() {
+            let settings = injections_query.property_settings(pattern_index);
            if settings.iter().any(|s| &*s.key == "injection.combined") {
                has_combined_queries = true;
-                query.disable_pattern(pattern_index);
+                injections_query.disable_pattern(pattern_index);
            } else {
                combined_injections_query.disable_pattern(pattern_index);
            }
@@ -990,8 +1082,6 @@ impl HighlightConfiguration {
        for (i, name) in query.capture_names().iter().enumerate() {
            let i = Some(i as u32);
            match name.as_str() {
-                "injection.content" => injection_content_capture_index = i,
-                "injection.language" => injection_language_capture_index = i,
                "local.definition" => local_def_capture_index = i,
                "local.definition-value" => local_def_value_capture_index = i,
                "local.reference" => local_ref_capture_index = i,
@@ -1000,12 +1090,21 @@ impl HighlightConfiguration {
            }
        }

+        for (i, name) in injections_query.capture_names().iter().enumerate() {
+            let i = Some(i as u32);
+            match name.as_str() {
+                "injection.content" => injection_content_capture_index = i,
+                "injection.language" => injection_language_capture_index = i,
+                _ => {}
+            }
+        }
+
        let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]);
        Ok(Self {
            language,
            query,
+            injections_query,
            combined_injections_query,
-            locals_pattern_index,
            highlights_pattern_index,
            highlight_indices,
            non_local_variable_patterns,
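
The constructor change above splits the old three-way concatenated query: injections now get their own Query, and "injection.combined" patterns are mirrored into a second query and disabled in the first. A hedged standalone sketch of that split (assumes tree-sitter 0.20; the Language comes from elsewhere):

use tree_sitter::{Language, Query, QueryError};

fn build_injection_queries(
    language: Language,
    injection_query: &str,
) -> Result<(Query, Option<Query>), QueryError> {
    let mut injections_query = Query::new(language, injection_query)?;
    let mut combined_injections_query = Query::new(language, injection_query)?;
    let mut has_combined = false;
    for pattern_index in 0..injections_query.pattern_count() {
        let settings = injections_query.property_settings(pattern_index);
        if settings.iter().any(|s| &*s.key == "injection.combined") {
            // Combined patterns are handled by the dedicated query only.
            has_combined = true;
            injections_query.disable_pattern(pattern_index);
        } else {
            combined_injections_query.disable_pattern(pattern_index);
        }
    }
    Ok((injections_query, has_combined.then(|| combined_injections_query)))
}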
@ -1070,142 +1169,29 @@ impl HighlightConfiguration {
} }
impl<'a> HighlightIterLayer<'a> { impl<'a> HighlightIterLayer<'a> {
/// Create a new 'layer' of highlighting for this document. // First, sort scope boundaries by their byte offset in the document. At a
/// // given position, emit scope endings before scope beginnings. Finally, emit
/// In the even that the new layer contains "combined injections" (injections where multiple // scope boundaries from deeper layers first.
/// disjoint ranges are parsed as one syntax tree), these will be eagerly processed and fn sort_key(&mut self) -> Option<(usize, bool, isize)> {
/// added to the returned vector. let depth = -(self.depth as isize);
fn new<F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a>( let next_start = self
source: RopeSlice<'a>, .captures
cancellation_flag: Option<&'a AtomicUsize>, .peek()
injection_callback: &mut F, .map(|(m, i)| m.captures[*i].node.start_byte());
mut config: &'a HighlightConfiguration, let next_end = self.highlight_end_stack.last().cloned();
mut depth: usize, match (next_start, next_end) {
mut ranges: Vec<Range>, (Some(start), Some(end)) => {
) -> Result<Vec<Self>, Error> { if start < end {
let mut result = Vec::with_capacity(1); Some((start, true, depth))
let mut queue = Vec::new();
loop {
// --> Tree parsing part
PARSER.with(|ts_parser| {
let highlighter = &mut ts_parser.borrow_mut();
if highlighter.parser.set_included_ranges(&ranges).is_ok() {
highlighter
.parser
.set_language(config.language)
.map_err(|_| Error::InvalidLanguage)?;
unsafe { highlighter.parser.set_cancellation_flag(cancellation_flag) };
let tree = highlighter
.parser
.parse_with(
&mut |byte, _| {
if byte <= source.len_bytes() {
let (chunk, start_byte, _, _) = source.chunk_at_byte(byte);
chunk[byte - start_byte..].as_bytes()
} else {
// out of range
&[]
}
},
None,
)
.ok_or(Error::Cancelled)?;
unsafe { highlighter.parser.set_cancellation_flag(None) };
let mut cursor = highlighter.cursors.pop().unwrap_or_else(QueryCursor::new);
// Process combined injections.
if let Some(combined_injections_query) = &config.combined_injections_query {
let mut injections_by_pattern_index = vec![
(None, Vec::new(), false);
combined_injections_query
.pattern_count()
];
let matches = cursor.matches(
combined_injections_query,
tree.root_node(),
RopeProvider(source),
);
for mat in matches {
let entry = &mut injections_by_pattern_index[mat.pattern_index];
let (language_name, content_node, include_children) =
injection_for_match(
config,
combined_injections_query,
&mat,
source,
);
if language_name.is_some() {
entry.0 = language_name;
}
if let Some(content_node) = content_node {
entry.1.push(content_node);
}
entry.2 = include_children;
}
for (lang_name, content_nodes, includes_children) in
injections_by_pattern_index
{
if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty())
{
if let Some(next_config) = (injection_callback)(&lang_name) {
let ranges = Self::intersect_ranges(
&ranges,
&content_nodes,
includes_children,
);
if !ranges.is_empty() {
queue.push((next_config, depth + 1, ranges));
}
}
}
}
}

// --> Highlighting query part
// The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
// prevents them from being moved. But both of these values are really just
// pointers, so it's actually ok to move them.
let tree_ref = unsafe { mem::transmute::<_, &'static Tree>(&tree) };
let cursor_ref =
unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
let captures = cursor_ref
.captures(&config.query, tree_ref.root_node(), RopeProvider(source))
.peekable();
result.push(HighlightIterLayer {
highlight_end_stack: Vec::new(),
scope_stack: vec![LocalScope {
inherits: false,
range: 0..usize::MAX,
local_defs: Vec::new(),
}],
cursor,
depth,
_tree: Some(tree),
captures,
config,
ranges,
});
}
Ok(()) // so we can use the try operator
})?;
if queue.is_empty() {
break;
}
let (next_config, next_depth, next_ranges) = queue.remove(0);
config = next_config;
depth = next_depth;
ranges = next_ranges;
}
Ok(result)
}
// Compute the ranges that should be included when parsing an injection. // Compute the ranges that should be included when parsing an injection.
@ -1302,35 +1288,7 @@ impl<'a> HighlightIterLayer<'a> {
result result
} }
// First, sort scope boundaries by their byte offset in the document. At a impl<'a> HighlightIter<'a> {
// given position, emit scope endings before scope beginnings. Finally, emit
// scope boundaries from deeper layers first.
fn sort_key(&mut self) -> Option<(usize, bool, isize)> {
let depth = -(self.depth as isize);
let next_start = self
.captures
.peek()
.map(|(m, i)| m.captures[*i].node.start_byte());
let next_end = self.highlight_end_stack.last().cloned();
match (next_start, next_end) {
(Some(start), Some(end)) => {
if start < end {
Some((start, true, depth))
} else {
Some((end, false, depth))
}
}
(Some(i), None) => Some((i, true, depth)),
(None, Some(j)) => Some((j, false, depth)),
_ => None,
}
}
}
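
The (usize, bool, isize) sort key above relies on plain tuple ordering: at the same offset, end events (false) sort before start events (true), and deeper layers (stored as a negated depth) sort first. A standalone check:

fn main() {
    let mut keys: Vec<(usize, bool, isize)> = vec![
        (10, true, 0),  // start event at byte 10, root layer (depth 0)
        (10, false, 0), // end event at byte 10, root layer
        (10, true, -1), // start event at byte 10, injected layer (depth 1, negated)
    ];
    keys.sort();
    // false < true, so ends come before starts; -1 < 0, so deeper layers first.
    assert_eq!(keys, vec![(10, false, 0), (10, true, -1), (10, true, 0)]);
}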
impl<'a, F> HighlightIter<'a, F>
where
F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
{
    fn emit_event(
        &mut self,
        offset: usize,
@@ -1361,6 +1319,12 @@ where
                        i += 1;
                        continue;
                    }
+                } else {
+                    let layer = self.layers.remove(i + 1);
+                    PARSER.with(|ts_parser| {
+                        let highlighter = &mut ts_parser.borrow_mut();
+                        highlighter.cursors.push(layer.cursor);
+                    });
                }
                break;
            }
@@ -1377,30 +1341,9 @@
            }
        }
    }
fn insert_layer(&mut self, mut layer: HighlightIterLayer<'a>) {
if let Some(sort_key) = layer.sort_key() {
let mut i = 1;
while i < self.layers.len() {
if let Some(sort_key_i) = self.layers[i].sort_key() {
if sort_key_i > sort_key {
self.layers.insert(i, layer);
return;
}
i += 1;
} else {
self.layers.remove(i);
}
}
self.layers.push(layer);
}
}
} }
impl<'a, F> Iterator for HighlightIter<'a, F> impl<'a> Iterator for HighlightIter<'a> {
where
F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
{
type Item = Result<HighlightEvent, Error>; type Item = Result<HighlightEvent, Error>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
@@ -1460,55 +1403,12 @@ where
                layer.highlight_end_stack.pop();
                return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd));
            } else {
-                // return self.emit_event(self.source.len(), None);
-                return None;
+                return self.emit_event(self.source.len_bytes(), None);
            };

            let (mut match_, capture_index) = layer.captures.next().unwrap();
            let mut capture = match_.captures[capture_index];
// If this capture represents an injection, then process the injection.
if match_.pattern_index < layer.config.locals_pattern_index {
let (language_name, content_node, include_children) =
injection_for_match(layer.config, &layer.config.query, &match_, self.source);
// Explicitly remove this match so that none of its other captures will remain
// in the stream of captures.
match_.remove();
// If a language is found with the given name, then add a new language layer
// to the highlighted document.
if let (Some(language_name), Some(content_node)) = (language_name, content_node) {
if let Some(config) = (self.injection_callback)(&language_name) {
let ranges = HighlightIterLayer::intersect_ranges(
&self.layers[0].ranges,
&[content_node],
include_children,
);
if !ranges.is_empty() {
match HighlightIterLayer::new(
self.source,
self.cancellation_flag,
&mut self.injection_callback,
config,
self.layers[0].depth + 1,
ranges,
) {
Ok(layers) => {
for layer in layers {
self.insert_layer(layer);
}
}
Err(e) => return Some(Err(e)),
}
}
}
}
self.sort_layers();
continue 'main;
}
            // Remove from the local scope stack any local scopes that have already ended.
            while range.start > layer.scope_stack.last().unwrap().range.end {
                layer.scope_stack.pop();
@ -1703,14 +1603,6 @@ fn injection_for_match<'a>(
(language_name, content_node, include_children) (language_name, content_node, include_children)
} }
// fn shrink_and_clear<T>(vec: &mut Vec<T>, capacity: usize) {
// if vec.len() > capacity {
// vec.truncate(capacity);
// vec.shrink_to_fit();
// }
// vec.clear();
// }
pub struct Merge<I> { pub struct Merge<I> {
iter: I, iter: I,
spans: Box<dyn Iterator<Item = (usize, std::ops::Range<usize>)>>, spans: Box<dyn Iterator<Item = (usize, std::ops::Range<usize>)>>,
@@ -1877,6 +1769,8 @@ mod test {
            .map(String::from)
            .collect();

+        let loader = Loader::new(Configuration { language: vec![] });
+
        let language = get_language(&crate::RUNTIME_DIR, "Rust").unwrap();
        let config = HighlightConfiguration::new(
            language,
@@ -1899,7 +1793,7 @@ mod test {
            fn main() {}
            ",
        );
-        let syntax = Syntax::new(&source, Arc::new(config));
+        let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader));
        let tree = syntax.tree();
        let root = tree.root_node();
        assert_eq!(root.kind(), "source_file");
@@ -1926,7 +1820,7 @@ mod test {
            &doc,
            vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(),
        );
-        let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes());
+        let edits = generate_edits(doc.slice(..), transaction.changes());
        // transaction.apply(&mut state);

        assert_eq!(
@@ -1955,7 +1849,7 @@ mod test {
        let mut doc = Rope::from("fn test() {}");
        let transaction =
            Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter());
-        let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes());
+        let edits = generate_edits(doc.slice(..), transaction.changes());
        transaction.apply(&mut doc);

        assert_eq!(doc, "fn test(a: u32) {}");

@@ -68,13 +68,12 @@ impl EditorView {
        surface: &mut Surface,
        theme: &Theme,
        is_focused: bool,
-        loader: &syntax::Loader,
        config: &helix_view::editor::Config,
    ) {
        let inner = view.inner_area();
        let area = view.area;

-        let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, loader);
+        let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme);
        let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme));
        let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused {
            Box::new(syntax::merge(
@@ -121,8 +120,7 @@ impl EditorView {
        doc: &'doc Document,
        offset: Position,
        height: u16,
-        theme: &Theme,
-        loader: &syntax::Loader,
+        _theme: &Theme,
    ) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> {
        let text = doc.text().slice(..);
        let last_line = std::cmp::min(
@@ -142,25 +140,8 @@ impl EditorView {
        // TODO: range doesn't actually restrict source, just highlight range
        let highlights = match doc.syntax() {
            Some(syntax) => {
-                let scopes = theme.scopes();
                syntax
-                    .highlight_iter(text.slice(..), Some(range), None, |language| {
+                    .highlight_iter(text.slice(..), Some(range), None)
loader.language_configuration_for_injection_string(language)
.and_then(|language_config| {
let config = language_config.highlight_config(scopes)?;
let config_ref = config.as_ref();
// SAFETY: the referenced `HighlightConfiguration` behind
// the `Arc` is guaranteed to remain valid throughout the
// duration of the highlight.
let config_ref = unsafe {
std::mem::transmute::<
_,
&'static syntax::HighlightConfiguration,
>(config_ref)
};
Some(config_ref)
})
})
                    .map(|event| event.unwrap())
                    .collect() // TODO: we collect here to avoid holding the lock, fix later
            }
@@ -1070,7 +1051,6 @@ impl Component for EditorView {
        for (view, is_focused) in cx.editor.tree.views() {
            let doc = cx.editor.document(view.doc).unwrap();
-            let loader = &cx.editor.syn_loader;
            self.render_view(
                doc,
                view,
@@ -1078,7 +1058,6 @@ impl Component for EditorView {
                surface,
                &cx.editor.theme,
                is_focused,
-                loader,
                &cx.editor.config,
            );
        }

@@ -38,7 +38,7 @@ impl Markdown {
fn parse<'a>(
    contents: &'a str,
    theme: Option<&Theme>,
-    loader: &syntax::Loader,
+    loader: Arc<syntax::Loader>,
) -> tui::text::Text<'a> {
// // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}}
// let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect<B: FromIterator<Self::Item>>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result<Collection<T>, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec<i32> = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec<i32>` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque<T>`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect<B: FromIterator<Self::Item>>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator 
into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result<Collection<T>, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec<i32> = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec<i32>` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque<T>`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```";
@ -98,14 +98,13 @@ fn parse<'a>(
let syntax = loader let syntax = loader
.language_configuration_for_injection_string(language) .language_configuration_for_injection_string(language)
.and_then(|config| config.highlight_config(theme.scopes())) .and_then(|config| config.highlight_config(theme.scopes()))
.map(|config| Syntax::new(&rope, config)); .map(|config| Syntax::new(&rope, config, loader.clone()));
if let Some(syntax) = syntax { if let Some(syntax) = syntax {
// if we have a syntax available, highlight_iter and generate spans // if we have a syntax available, highlight_iter and generate spans
let mut highlights = Vec::new(); let mut highlights = Vec::new();
for event in syntax.highlight_iter(rope.slice(..), None, None, |_| None) for event in syntax.highlight_iter(rope.slice(..), None, None) {
{
match event.unwrap() { match event.unwrap() {
HighlightEvent::HighlightStart(span) => { HighlightEvent::HighlightStart(span) => {
highlights.push(span); highlights.push(span);
@@ -211,7 +210,11 @@ impl Component for Markdown {
    fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
        use tui::widgets::{Paragraph, Widget, Wrap};

-        let text = parse(&self.contents, Some(&cx.editor.theme), &self.config_loader);
+        let text = parse(
+            &self.contents,
+            Some(&cx.editor.theme),
+            self.config_loader.clone(),
+        );

        let par = Paragraph::new(text)
            .wrap(Wrap { trim: false })
@@ -229,7 +232,7 @@ impl Component for Markdown {
        if padding >= viewport.1 || padding >= viewport.0 {
            return None;
        }
-        let contents = parse(&self.contents, None, &self.config_loader);
+        let contents = parse(&self.contents, None, self.config_loader.clone());
        // TODO: account for tab width
        let max_text_width = (viewport.0 - padding).min(120);
        let mut text_width = 0;
let mut text_width = 0; let mut text_width = 0;

@ -221,13 +221,8 @@ impl<T: 'static> Component for FilePicker<T> {
let offset = Position::new(first_line, 0); let offset = Position::new(first_line, 0);
let highlights = EditorView::doc_syntax_highlights( let highlights =
doc, EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme);
offset,
area.height,
&cx.editor.theme,
&cx.editor.syn_loader,
);
EditorView::render_text_highlights( EditorView::render_text_highlights(
doc, doc,
offset, offset,

@@ -359,7 +359,7 @@ impl Document {
        path: &Path,
        encoding: Option<&'static encoding::Encoding>,
        theme: Option<&Theme>,
-        config_loader: Option<&syntax::Loader>,
+        config_loader: Option<Arc<syntax::Loader>>,
    ) -> Result<Self, Error> {
        // Open the file if it exists, otherwise assume it is a new file (and thus empty).
        let (rope, encoding) = if path.exists() {
@@ -498,12 +498,12 @@ impl Document {
    }

    /// Detect the programming language based on the file type.
-    pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: &syntax::Loader) {
+    pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: Arc<syntax::Loader>) {
        if let Some(path) = &self.path {
            let language_config = config_loader
                .language_config_for_file_name(path)
                .or_else(|| config_loader.language_config_for_shebang(self.text()));
-            self.set_language(theme, language_config);
+            self.set_language(theme, language_config, Some(config_loader));
        }
    }
@@ -579,11 +579,12 @@ impl Document {
        &mut self,
        theme: Option<&Theme>,
        language_config: Option<Arc<helix_core::syntax::LanguageConfiguration>>,
+        loader: Option<Arc<helix_core::syntax::Loader>>,
    ) {
-        if let Some(language_config) = language_config {
+        if let (Some(language_config), Some(loader)) = (language_config, loader) {
            let scopes = theme.map(|theme| theme.scopes()).unwrap_or(&[]);
            if let Some(highlight_config) = language_config.highlight_config(scopes) {
-                let syntax = Syntax::new(&self.text, highlight_config);
+                let syntax = Syntax::new(&self.text, highlight_config, loader);
                self.syntax = Some(syntax);
                // TODO: config.configure(scopes) is now delayed, is that ok?
            }
@@ -605,7 +606,7 @@ impl Document {
    ) {
        let language_config = config_loader.language_config_for_scope(scope);

-        self.set_language(theme, language_config);
+        self.set_language(theme, language_config, Some(config_loader));
    }

    /// Set the LSP.

@@ -283,7 +283,7 @@ impl Editor {
    /// Refreshes the language server for a given document
    pub fn refresh_language_server(&mut self, doc_id: DocumentId) -> Option<()> {
        let doc = self.documents.get_mut(&doc_id)?;
-        doc.detect_language(Some(&self.theme), &self.syn_loader);
+        doc.detect_language(Some(&self.theme), self.syn_loader.clone());
        Self::launch_language_server(&mut self.language_servers, doc)
    }
@@ -462,7 +462,12 @@ impl Editor {
        let id = if let Some(id) = id {
            id
        } else {
-            let mut doc = Document::open(&path, None, Some(&self.theme), Some(&self.syn_loader))?;
+            let mut doc = Document::open(
+                &path,
+                None,
+                Some(&self.theme),
+                Some(self.syn_loader.clone()),
+            )?;

            let _ = Self::launch_language_server(&mut self.language_servers, &mut doc);
