mirror of https://github.com/helix-editor/helix
commit 08ee8b9443
@@ -0,0 +1,122 @@
use std::path::{Path, PathBuf};

/// A generic pointer to a file location.
///
/// Currently this type only supports paths to local files.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[non_exhaustive]
pub enum Uri {
    File(PathBuf),
}

impl Uri {
    // This clippy allow mirrors url::Url::from_file_path
    #[allow(clippy::result_unit_err)]
    pub fn to_url(&self) -> Result<url::Url, ()> {
        match self {
            Uri::File(path) => url::Url::from_file_path(path),
        }
    }

    pub fn as_path(&self) -> Option<&Path> {
        match self {
            Self::File(path) => Some(path),
        }
    }

    pub fn as_path_buf(self) -> Option<PathBuf> {
        match self {
            Self::File(path) => Some(path),
        }
    }
}

impl From<PathBuf> for Uri {
    fn from(path: PathBuf) -> Self {
        Self::File(path)
    }
}

impl TryFrom<Uri> for PathBuf {
    type Error = ();

    fn try_from(uri: Uri) -> Result<Self, Self::Error> {
        match uri {
            Uri::File(path) => Ok(path),
        }
    }
}

#[derive(Debug)]
pub struct UrlConversionError {
    source: url::Url,
    kind: UrlConversionErrorKind,
}

#[derive(Debug)]
pub enum UrlConversionErrorKind {
    UnsupportedScheme,
    UnableToConvert,
}

impl std::fmt::Display for UrlConversionError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.kind {
            UrlConversionErrorKind::UnsupportedScheme => {
                write!(f, "unsupported scheme in URL: {}", self.source.scheme())
            }
            UrlConversionErrorKind::UnableToConvert => {
                write!(f, "unable to convert URL to file path: {}", self.source)
            }
        }
    }
}

impl std::error::Error for UrlConversionError {}

fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
    if url.scheme() == "file" {
        url.to_file_path()
            .map(|path| Uri::File(helix_stdx::path::normalize(path)))
            .map_err(|_| UrlConversionErrorKind::UnableToConvert)
    } else {
        Err(UrlConversionErrorKind::UnsupportedScheme)
    }
}

impl TryFrom<url::Url> for Uri {
    type Error = UrlConversionError;

    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
        convert_url_to_uri(&url).map_err(|kind| Self::Error { source: url, kind })
    }
}

impl TryFrom<&url::Url> for Uri {
    type Error = UrlConversionError;

    fn try_from(url: &url::Url) -> Result<Self, Self::Error> {
        convert_url_to_uri(url).map_err(|kind| Self::Error {
            source: url.clone(),
            kind,
        })
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use url::Url;

    #[test]
    fn unknown_scheme() {
        let url = Url::parse("csharp:/metadata/foo/bar/Baz.cs").unwrap();
        assert!(matches!(
            Uri::try_from(url),
            Err(UrlConversionError {
                kind: UrlConversionErrorKind::UnsupportedScheme,
                ..
            })
        ));
    }
}
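For orientation, here is a minimal sketch of the conversion round trip at a call site. It assumes only the `url` crate already used above; `open_file_uri` is an illustrative name, not part of this commit:

// Hypothetical helper, for illustration only.
fn open_file_uri(raw: &str) -> Result<std::path::PathBuf, Box<dyn std::error::Error>> {
    let url = url::Url::parse(raw)?; // e.g. "file:///tmp/example.rs"
    let uri = Uri::try_from(url)?; // UnsupportedScheme for e.g. "csharp:/..."
    // Borrow the path without consuming the Uri; File is currently the only variant.
    Ok(uri.as_path().expect("Uri::File holds a path").to_path_buf())
}

Note that the `TryFrom<url::Url>` impl consumes the URL so the error can carry it back in `UrlConversionError::source`, while the `TryFrom<&url::Url>` impl clones it only on the error path.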
(File diff suppressed because it is too large.)
@@ -0,0 +1,182 @@
use std::{
    path::Path,
    sync::{atomic, Arc},
    time::Duration,
};

use helix_event::AsyncHook;
use tokio::time::Instant;

use crate::{job, ui::overlay::Overlay};

use super::{CachedPreview, DynQueryCallback, Picker};

pub(super) struct PreviewHighlightHandler<T: 'static + Send + Sync, D: 'static + Send + Sync> {
    trigger: Option<Arc<Path>>,
    phantom_data: std::marker::PhantomData<(T, D)>,
}

impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Default for PreviewHighlightHandler<T, D> {
    fn default() -> Self {
        Self {
            trigger: None,
            phantom_data: Default::default(),
        }
    }
}

impl<T: 'static + Send + Sync, D: 'static + Send + Sync> AsyncHook
    for PreviewHighlightHandler<T, D>
{
    type Event = Arc<Path>;

    fn handle_event(
        &mut self,
        path: Self::Event,
        timeout: Option<tokio::time::Instant>,
    ) -> Option<tokio::time::Instant> {
        if self
            .trigger
            .as_ref()
            .is_some_and(|trigger| trigger == &path)
        {
            // If the path hasn't changed, don't reset the debounce
            timeout
        } else {
            self.trigger = Some(path);
            Some(Instant::now() + Duration::from_millis(150))
        }
    }

    fn finish_debounce(&mut self) {
        let Some(path) = self.trigger.take() else {
            return;
        };

        job::dispatch_blocking(move |editor, compositor| {
            let Some(Overlay {
                content: picker, ..
            }) = compositor.find::<Overlay<Picker<T, D>>>()
            else {
                return;
            };

            let Some(CachedPreview::Document(ref mut doc)) = picker.preview_cache.get_mut(&path)
            else {
                return;
            };

            if doc.language_config().is_some() {
                return;
            }

            let Some(language_config) = doc.detect_language_config(&editor.syn_loader.load())
            else {
                return;
            };
            doc.language = Some(language_config.clone());
            let text = doc.text().clone();
            let loader = editor.syn_loader.clone();

            tokio::task::spawn_blocking(move || {
                let Some(syntax) = language_config
                    .highlight_config(&loader.load().scopes())
                    .and_then(|highlight_config| {
                        helix_core::Syntax::new(text.slice(..), highlight_config, loader)
                    })
                else {
                    log::info!("highlighting picker item failed");
                    return;
                };

                job::dispatch_blocking(move |editor, compositor| {
                    let Some(Overlay {
                        content: picker, ..
                    }) = compositor.find::<Overlay<Picker<T, D>>>()
                    else {
                        log::info!("picker closed before syntax highlighting finished");
                        return;
                    };
                    let Some(CachedPreview::Document(ref mut doc)) =
                        picker.preview_cache.get_mut(&path)
                    else {
                        return;
                    };
                    let diagnostics = helix_view::Editor::doc_diagnostics(
                        &editor.language_servers,
                        &editor.diagnostics,
                        doc,
                    );
                    doc.replace_diagnostics(diagnostics, &[], None);
                    doc.syntax = Some(syntax);
                });
            });
        });
    }
}

pub(super) struct DynamicQueryHandler<T: 'static + Send + Sync, D: 'static + Send + Sync> {
    callback: Arc<DynQueryCallback<T, D>>,
    // Duration used as a debounce.
    // Defaults to 100ms if not provided via `Picker::with_dynamic_query`. Callers may want to set
    // this higher if the dynamic query is expensive - for example global search.
    debounce: Duration,
    last_query: Arc<str>,
    query: Option<Arc<str>>,
}

impl<T: 'static + Send + Sync, D: 'static + Send + Sync> DynamicQueryHandler<T, D> {
    pub(super) fn new(callback: DynQueryCallback<T, D>, duration_ms: Option<u64>) -> Self {
        Self {
            callback: Arc::new(callback),
            debounce: Duration::from_millis(duration_ms.unwrap_or(100)),
            last_query: "".into(),
            query: None,
        }
    }
}

impl<T: 'static + Send + Sync, D: 'static + Send + Sync> AsyncHook for DynamicQueryHandler<T, D> {
    type Event = Arc<str>;

    fn handle_event(&mut self, query: Self::Event, _timeout: Option<Instant>) -> Option<Instant> {
        if query == self.last_query {
            // If the search query reverts to the last one we requested, no need to
            // make a new request.
            self.query = None;
            None
        } else {
            self.query = Some(query);
            Some(Instant::now() + self.debounce)
        }
    }

    fn finish_debounce(&mut self) {
        let Some(query) = self.query.take() else {
            return;
        };
        self.last_query = query.clone();
        let callback = self.callback.clone();

        job::dispatch_blocking(move |editor, compositor| {
            let Some(Overlay {
                content: picker, ..
            }) = compositor.find::<Overlay<Picker<T, D>>>()
            else {
                return;
            };
            // Increment the version number to cancel any ongoing requests.
            picker.version.fetch_add(1, atomic::Ordering::Relaxed);
            picker.matcher.restart(false);
            let injector = picker.injector();
            let get_options = (callback)(&query, editor, picker.editor_data.clone(), &injector);
            tokio::spawn(async move {
                if let Err(err) = get_options.await {
                    log::info!("Dynamic request failed: {err}");
                }
                // NOTE: the Drop implementation of Injector will request a redraw when the
                // injector falls out of scope here, clearing the "running" indicator.
            });
        })
    }
}
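Both handlers implement the same two-phase `AsyncHook` contract: `handle_event` decides which deadline (if any) to wait for, and `finish_debounce` fires only if no new event arrives before that deadline. A self-contained sketch of the pattern as `PreviewHighlightHandler` uses it, with illustrative names and no helix types (an assumption-laden sketch, not the real API):

use std::time::{Duration, Instant};

// Minimal debounce state machine mirroring the shape above.
struct Debounce<E: PartialEq> {
    pending: Option<E>,
    delay: Duration,
}

impl<E: PartialEq> Debounce<E> {
    // Like AsyncHook::handle_event: decide which deadline to wait for.
    fn handle_event(&mut self, event: E, deadline: Option<Instant>) -> Option<Instant> {
        if self.pending.as_ref() == Some(&event) {
            deadline // unchanged event: keep the running deadline
        } else {
            self.pending = Some(event); // new event: restart the timer
            Some(Instant::now() + self.delay)
        }
    }

    // Like AsyncHook::finish_debounce: consume the pending event once quiet.
    fn finish_debounce(&mut self) -> Option<E> {
        self.pending.take()
    }
}

`DynamicQueryHandler` varies the first phase: when the incoming query equals the last one already requested, it clears its pending state and returns `None`, cancelling the debounce instead of keeping it.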
@@ -0,0 +1,368 @@
use std::{collections::HashMap, mem, ops::Range, sync::Arc};

#[derive(Debug)]
pub(super) struct PickerQuery {
    /// The column names of the picker.
    column_names: Box<[Arc<str>]>,
    /// The index of the primary column in `column_names`.
    /// The primary column is selected by default unless another
    /// field is specified explicitly with `%fieldname`.
    primary_column: usize,
    /// The mapping between column names and input in the query
    /// for those columns.
    inner: HashMap<Arc<str>, Arc<str>>,
    /// The byte ranges of the input text which are used as input for each column.
    /// This is calculated at parsing time for use in [Self::active_column].
    /// This Vec is naturally sorted in ascending order and ranges do not overlap.
    column_ranges: Vec<(Range<usize>, Option<Arc<str>>)>,
}

impl PartialEq<HashMap<Arc<str>, Arc<str>>> for PickerQuery {
    fn eq(&self, other: &HashMap<Arc<str>, Arc<str>>) -> bool {
        self.inner.eq(other)
    }
}

impl PickerQuery {
    pub(super) fn new<I: Iterator<Item = Arc<str>>>(
        column_names: I,
        primary_column: usize,
    ) -> Self {
        let column_names: Box<[_]> = column_names.collect();
        let inner = HashMap::with_capacity(column_names.len());
        let column_ranges = vec![(0..usize::MAX, Some(column_names[primary_column].clone()))];
        Self {
            column_names,
            primary_column,
            inner,
            column_ranges,
        }
    }

    pub(super) fn get(&self, column: &str) -> Option<&Arc<str>> {
        self.inner.get(column)
    }

    pub(super) fn parse(&mut self, input: &str) -> HashMap<Arc<str>, Arc<str>> {
        let mut fields: HashMap<Arc<str>, String> = HashMap::new();
        let primary_field = &self.column_names[self.primary_column];
        let mut escaped = false;
        let mut in_field = false;
        let mut field = None;
        let mut text = String::new();
        self.column_ranges.clear();
        self.column_ranges
            .push((0..usize::MAX, Some(primary_field.clone())));

        macro_rules! finish_field {
            () => {
                let key = field.take().unwrap_or(primary_field);

                if let Some(pattern) = fields.get_mut(key) {
                    pattern.push(' ');
                    pattern.push_str(text.trim());
                } else {
                    fields.insert(key.clone(), text.trim().to_string());
                }
                text.clear();
            };
        }

        for (idx, ch) in input.char_indices() {
            match ch {
                // Backslash escaping
                _ if escaped => {
                    // '%' is the only character that is special cased.
                    // You can escape it to prevent parsing the text that
                    // follows it as a field name.
                    if ch != '%' {
                        text.push('\\');
                    }
                    text.push(ch);
                    escaped = false;
                }
                '\\' => escaped = !escaped,
                '%' => {
                    if !text.is_empty() {
                        finish_field!();
                    }
                    let (range, _field) = self
                        .column_ranges
                        .last_mut()
                        .expect("column_ranges is non-empty");
                    range.end = idx;
                    in_field = true;
                }
                ' ' if in_field => {
                    text.clear();
                    in_field = false;
                }
                _ if in_field => {
                    text.push(ch);
                    // Go over all columns and their indices, find all that starts with field key,
                    // select a column that fits key the most.
                    field = self
                        .column_names
                        .iter()
                        .filter(|col| col.starts_with(&text))
                        // select "fittest" column
                        .min_by_key(|col| col.len());

                    // Update the column range for this column.
                    if let Some((_range, current_field)) = self
                        .column_ranges
                        .last_mut()
                        .filter(|(range, _)| range.end == usize::MAX)
                    {
                        *current_field = field.cloned();
                    } else {
                        self.column_ranges.push((idx..usize::MAX, field.cloned()));
                    }
                }
                _ => text.push(ch),
            }
        }

        if !in_field && !text.is_empty() {
            finish_field!();
        }

        let new_inner: HashMap<_, _> = fields
            .into_iter()
            .map(|(field, query)| (field, query.as_str().into()))
            .collect();

        mem::replace(&mut self.inner, new_inner)
    }

    /// Finds the column which the cursor is 'within' in the last parse.
    ///
    /// The cursor is considered to be within a column when it is placed within any
    /// of a column's text. See the `active_column_test` unit test below for examples.
    ///
    /// `cursor` is a byte index that represents the location of the prompt's cursor.
    pub fn active_column(&self, cursor: usize) -> Option<&Arc<str>> {
        let point = self
            .column_ranges
            .partition_point(|(range, _field)| cursor > range.end);

        self.column_ranges
            .get(point)
            .filter(|(range, _field)| cursor >= range.start && cursor <= range.end)
            .and_then(|(_range, field)| field.as_ref())
    }
}

#[cfg(test)]
mod test {
    use helix_core::hashmap;

    use super::*;

    #[test]
    fn parse_query_test() {
        let mut query = PickerQuery::new(
            [
                "primary".into(),
                "field1".into(),
                "field2".into(),
                "another".into(),
                "anode".into(),
            ]
            .into_iter(),
            0,
        );

        // Basic field splitting
        query.parse("hello world");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello world".into(),
            )
        );
        query.parse("hello %field1 world %field2 !");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "field1".into() => "world".into(),
                "field2".into() => "!".into(),
            )
        );
        query.parse("%field1 abc %field2 def xyz");
        assert_eq!(
            query,
            hashmap!(
                "field1".into() => "abc".into(),
                "field2".into() => "def xyz".into(),
            )
        );

        // Trailing space is trimmed
        query.parse("hello ");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
            )
        );

        // Unknown fields are trimmed.
        query.parse("hello %foo");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
            )
        );

        // Multiple words in a field
        query.parse("hello %field1 a b c");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "field1".into() => "a b c".into(),
            )
        );

        // Escaping
        query.parse(r#"hello\ world"#);
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => r#"hello\ world"#.into(),
            )
        );
        query.parse(r#"hello \%field1 world"#);
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello %field1 world".into(),
            )
        );
        query.parse(r#"%field1 hello\ world"#);
        assert_eq!(
            query,
            hashmap!(
                "field1".into() => r#"hello\ world"#.into(),
            )
        );
        query.parse(r#"hello %field1 a\"b"#);
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "field1".into() => r#"a\"b"#.into(),
            )
        );
        query.parse(r#"%field1 hello\ world"#);
        assert_eq!(
            query,
            hashmap!(
                "field1".into() => r#"hello\ world"#.into(),
            )
        );
        query.parse(r#"\bfoo\b"#);
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => r#"\bfoo\b"#.into(),
            )
        );
        query.parse(r#"\\n"#);
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => r#"\\n"#.into(),
            )
        );

        // Only the prefix of a field is required.
        query.parse("hello %anot abc");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "another".into() => "abc".into(),
            )
        );
        // The shortest matching the prefix is selected.
        query.parse("hello %ano abc");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "anode".into() => "abc".into()
            )
        );
        // Multiple uses of a column are concatenated with space separators.
        query.parse("hello %field1 xyz %fie abc");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "field1".into() => "xyz abc".into()
            )
        );
        query.parse("hello %fie abc");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello".into(),
                "field1".into() => "abc".into()
            )
        );
        // The primary column can be explicitly qualified.
        query.parse("hello %fie abc %prim world");
        assert_eq!(
            query,
            hashmap!(
                "primary".into() => "hello world".into(),
                "field1".into() => "abc".into()
            )
        );
    }

    #[test]
    fn active_column_test() {
        fn active_column<'a>(query: &'a mut PickerQuery, input: &str) -> Option<&'a str> {
            let cursor = input.find('|').expect("cursor must be indicated with '|'");
            let input = input.replace('|', "");
            query.parse(&input);
            query.active_column(cursor).map(AsRef::as_ref)
        }

        let mut query = PickerQuery::new(
            ["primary".into(), "foo".into(), "bar".into()].into_iter(),
            0,
        );

        assert_eq!(active_column(&mut query, "|"), Some("primary"));
        assert_eq!(active_column(&mut query, "hello| world"), Some("primary"));
        assert_eq!(active_column(&mut query, "|%foo hello"), Some("primary"));
        assert_eq!(active_column(&mut query, "%foo|"), Some("foo"));
        assert_eq!(active_column(&mut query, "%|"), None);
        assert_eq!(active_column(&mut query, "%baz|"), None);
        assert_eq!(active_column(&mut query, "%quiz%|"), None);
        assert_eq!(active_column(&mut query, "%foo hello| world"), Some("foo"));
        assert_eq!(active_column(&mut query, "%foo hello world|"), Some("foo"));
        assert_eq!(active_column(&mut query, "%foo| hello world"), Some("foo"));
        assert_eq!(active_column(&mut query, "%|foo hello world"), Some("foo"));
        assert_eq!(active_column(&mut query, "%f|oo hello world"), Some("foo"));
        assert_eq!(active_column(&mut query, "hello %f|oo world"), Some("foo"));
        assert_eq!(
            active_column(&mut query, "hello %f|oo world %bar !"),
            Some("foo")
        );
        assert_eq!(
            active_column(&mut query, "hello %foo wo|rld %bar !"),
            Some("foo")
        );
        assert_eq!(
            active_column(&mut query, "hello %foo world %bar !|"),
            Some("bar")
        );
    }
}
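To make the parsing rules concrete, a short hedged sketch of driving `PickerQuery` directly. Since the type is `pub(super)`, this would have to live inside the picker module; the column names "path" and "type" are invented for the example:

// Illustrative demo, not part of this commit.
fn picker_query_demo() {
    let mut query = PickerQuery::new(["path".into(), "type".into()].into_iter(), 0);
    query.parse("main %type rust");
    // Unqualified text goes to the primary column (index 0, "path" here):
    assert_eq!(query.get("path").map(AsRef::as_ref), Some("main"));
    assert_eq!(query.get("type").map(AsRef::as_ref), Some("rust"));
    // With the prompt cursor at byte 0 (inside "main"), the primary column is active:
    assert_eq!(query.active_column(0).map(AsRef::as_ref), Some("path"));
}

`parse` also returns the previous field map via `mem::replace`, which a caller can use to compare the old and new queries, for example to decide whether a new search request is needed.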