mirror of https://github.com/helix-editor/helix
Merge branch 'master' into situational-lsp
commit 5f6a511120
@@ -1,3 +1,17 @@
+# we use tokio_unstable to enable runtime::Handle::id so we can separate
+# globals from multiple parallel tests. If that function ever does get removed
+# it's possible to replace (with some additional overhead and effort)
+# Annoyingly build.rustflags doesn't work here because it gets overwritten
+# if people have their own global target.<..> config (for example to enable mold)
+# specifying flags this way is more robust as they get merged
+# This still gets overwritten by RUST_FLAGS though, luckily it shouldn't be necessary
+# to set those most of the time. If downstream does overwrite this it's not a huge
+# deal since it will only break tests anyway
+[target."cfg(all())"]
+rustflags = ["--cfg", "tokio_unstable", "-C", "target-feature=-crt-static"]
+
 [alias]
 xtask = "run --package xtask --"
 integration-test = "test --features integration --profile integration --workspace --test integration"
@@ -1,5 +1,6 @@
 watch_file shell.nix
 watch_file flake.lock
+watch_file rust-toolchain.toml
 
 # try to use flakes, if it fails use normal nix (ie. shell.nix)
 use flake || use nix
@@ -1,2 +0,0 @@
-# Things that we don't want ripgrep to search that we do want in git
-# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md#automatic-filtering
File diff suppressed because it is too large
Binary file not shown (image added, 264 KiB)
@@ -1,25 +1,27 @@
 [package]
 name = "helix-dap"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2018"
-license = "MPL-2.0"
 description = "DAP client implementation for Helix project"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-helix-core = { version = "0.6", path = "../helix-core" }
+helix-stdx = { path = "../helix-stdx" }
+helix-core = { path = "../helix-core" }
 
 anyhow = "1.0"
 log = "0.4"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 thiserror = "1.0"
 tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
-which = "4.4"
 
 [dev-dependencies]
 fern = "0.6"
@@ -1,15 +1,29 @@
 [package]
 name = "helix-event"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2021"
-license = "MPL-2.0"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot"] }
-parking_lot = { version = "0.12", features = ["send_guard"] }
+ahash = "0.8.3"
+hashbrown = "0.14.0"
+tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
+# the event registry is essentially read only but must be an rwlock so we can
+# setup new events on initialization, hardware-lock-elision hugely benefits this case
+# as it essentially makes the lock entirely free as long as there are no writes
+parking_lot = { version = "0.12", features = ["hardware-lock-elision"] }
+once_cell = "1.18"
+
+anyhow = "1"
+log = "0.4"
+futures-executor = "0.3.28"
+
+[features]
+integration_test = []
@@ -0,0 +1,19 @@
use std::future::Future;

pub use oneshot::channel as cancelation;
use tokio::sync::oneshot;

pub type CancelTx = oneshot::Sender<()>;
pub type CancelRx = oneshot::Receiver<()>;

pub async fn cancelable_future<T>(future: impl Future<Output = T>, cancel: CancelRx) -> Option<T> {
    tokio::select! {
        biased;
        _ = cancel => {
            None
        }
        res = future => {
            Some(res)
        }
    }
}
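
As a usage illustration (not part of this commit): a minimal sketch that pairs `cancelable_future` with the handle returned by `cancelation()`. The stand-in work is a `sleep`; a tokio runtime and the `log` crate are assumed.

    use std::time::Duration;

    use helix_event::{cancelable_future, cancelation, CancelTx};

    fn spawn_cancelable_work() -> CancelTx {
        let (cancel_tx, cancel_rx) = cancelation();
        tokio::spawn(async move {
            // stand-in for a long-running request
            let work = tokio::time::sleep(Duration::from_secs(5));
            // resolves to None as soon as `cancel_tx` is triggered or dropped
            if cancelable_future(work, cancel_rx).await.is_none() {
                log::info!("work was canceled before it finished");
            }
        });
        // keeping the returned sender alive keeps the work alive; dropping it cancels
        cancel_tx
    }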
@@ -0,0 +1,67 @@
//! Utilities for declaring an async (usually debounced) hook

use std::time::Duration;

use futures_executor::block_on;
use tokio::sync::mpsc::{self, error::TrySendError, Sender};
use tokio::time::Instant;

/// Async hooks provide a convenient framework for implementing (debounced)
/// async event handlers. Most synchronous event hooks will likely need to
/// debounce their events, coordinate multiple different hooks and potentially
/// track some state. `AsyncHooks` facilitate these use cases by running as
/// a background tokio task that waits for events (usually an enum) to be
/// sent through a channel.
pub trait AsyncHook: Sync + Send + 'static + Sized {
    type Event: Sync + Send + 'static;

    /// Called immediately whenever an event is received, this function can
    /// consume the event immediately or debounce it. In case of debouncing,
    /// it can either define a new debounce timeout or continue the current one
    fn handle_event(&mut self, event: Self::Event, timeout: Option<Instant>) -> Option<Instant>;

    /// Called whenever the debounce timeout is reached
    fn finish_debounce(&mut self);

    fn spawn(self) -> mpsc::Sender<Self::Event> {
        // the capacity doesn't matter too much here, unless the cpu is totally overwhelmed
        // the cap will never be reached since we always immediately drain the channel
        // so it should only be reached in case of total CPU overload.
        // However, a bounded channel is much more efficient so it's nice to use here
        let (tx, rx) = mpsc::channel(128);
        tokio::spawn(run(self, rx));
        tx
    }
}

async fn run<Hook: AsyncHook>(mut hook: Hook, mut rx: mpsc::Receiver<Hook::Event>) {
    let mut deadline = None;
    loop {
        let event = match deadline {
            Some(deadline_) => {
                let res = tokio::time::timeout_at(deadline_, rx.recv()).await;
                match res {
                    Ok(event) => event,
                    Err(_) => {
                        hook.finish_debounce();
                        deadline = None;
                        continue;
                    }
                }
            }
            None => rx.recv().await,
        };
        let Some(event) = event else {
            break;
        };
        deadline = hook.handle_event(event, deadline);
    }
}

pub fn send_blocking<T>(tx: &Sender<T>, data: T) {
    // block_on has some overhead and in practice the channel should basically
    // never be full anyway so first try sending without blocking
    if let Err(TrySendError::Full(data)) = tx.try_send(data) {
        // set a timeout so that we just drop a message instead of freezing the editor in the worst case
        let _ = block_on(tx.send_timeout(data, Duration::from_millis(10)));
    }
}
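
For illustration only, a sketch of a downstream type implementing the `AsyncHook` trait above; the `SaveNotifier` handler and its 150ms debounce window are hypothetical, and `spawn` must be called from inside a tokio runtime.

    use std::time::Duration;

    use helix_event::{send_blocking, AsyncHook};
    use tokio::time::Instant;

    // hypothetical handler that batches "document saved" notifications
    struct SaveNotifier {
        pending: usize,
    }

    impl AsyncHook for SaveNotifier {
        type Event = ();

        fn handle_event(&mut self, _event: Self::Event, timeout: Option<Instant>) -> Option<Instant> {
            self.pending += 1;
            // keep an already running debounce window, otherwise start a new 150ms one
            Some(timeout.unwrap_or_else(|| Instant::now() + Duration::from_millis(150)))
        }

        fn finish_debounce(&mut self) {
            log::info!("{} documents saved", self.pending);
            self.pending = 0;
        }
    }

    fn install() {
        // runs the hook as a background task and hands back its event channel
        let tx = SaveNotifier { pending: 0 }.spawn();
        // synchronous hooks forward events through the channel without blocking the editor
        send_blocking(&tx, ());
    }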
@@ -0,0 +1,91 @@
//! rust dynamic dispatch is extremely limited so we have to build our
//! own vtable implementation. Otherwise implementing the event system would not be possible.
//! A nice bonus of this approach is that we can optimize the vtable a bit more. Normally
//! a dyn Trait fat pointer contains two pointers: A pointer to the data itself and a
//! pointer to a global (static) vtable entry which itself contains multiple other pointers
//! (the various functions of the trait, drop, size and align). That makes dynamic
//! dispatch pretty slow (double pointer indirections). However, we only have a single function
//! in the hook trait and don't need a drop implementation (event system is global anyway
//! and never dropped) so we can just store the entire vtable inline.

use anyhow::Result;
use std::ptr::{self, NonNull};

use crate::Event;

/// Opaque handle type that represents an erased type parameter.
///
/// If extern types were stable, this could be implemented as `extern { pub type Opaque; }` but
/// until then we can use this.
///
/// Care should be taken that we don't use a concrete instance of this. It should only be used
/// through a reference, so we can maintain something else's lifetime.
struct Opaque(());

pub(crate) struct ErasedHook {
    data: NonNull<Opaque>,
    call: unsafe fn(NonNull<Opaque>, NonNull<Opaque>, NonNull<Opaque>),
}

impl ErasedHook {
    pub(crate) fn new_dynamic<H: Fn() -> Result<()> + 'static + Send + Sync>(
        hook: H,
    ) -> ErasedHook {
        unsafe fn call<F: Fn() -> Result<()> + 'static + Send + Sync>(
            hook: NonNull<Opaque>,
            _event: NonNull<Opaque>,
            result: NonNull<Opaque>,
        ) {
            let hook: NonNull<F> = hook.cast();
            let result: NonNull<Result<()>> = result.cast();
            let hook: &F = hook.as_ref();
            let res = hook();
            ptr::write(result.as_ptr(), res)
        }

        unsafe {
            ErasedHook {
                data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque),
                call: call::<H>,
            }
        }
    }

    pub(crate) fn new<E: Event, F: Fn(&mut E) -> Result<()>>(hook: F) -> ErasedHook {
        unsafe fn call<E: Event, F: Fn(&mut E) -> Result<()>>(
            hook: NonNull<Opaque>,
            event: NonNull<Opaque>,
            result: NonNull<Opaque>,
        ) {
            let hook: NonNull<F> = hook.cast();
            let mut event: NonNull<E> = event.cast();
            let result: NonNull<Result<()>> = result.cast();
            let hook: &F = hook.as_ref();
            let res = hook(event.as_mut());
            ptr::write(result.as_ptr(), res)
        }

        unsafe {
            ErasedHook {
                data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque),
                call: call::<E, F>,
            }
        }
    }

    pub(crate) unsafe fn call<E: Event>(&self, event: &mut E) -> Result<()> {
        let mut res = Ok(());

        unsafe {
            (self.call)(
                self.data,
                NonNull::from(event).cast(),
                NonNull::from(&mut res).cast(),
            );
        }
        res
    }
}

unsafe impl Sync for ErasedHook {}
unsafe impl Send for ErasedHook {}
@@ -1,8 +1,203 @@
 //! `helix-event` contains systems that allow (often async) communication between
-//! different editor components without strongly coupling them. Currently this
-//! crate only contains some smaller facilities but the intend is to add more
-//! functionality in the future ( like a generic hook system)
+//! different editor components without strongly coupling them. Specifically
+//! it allows defining synchronous hooks that run when certain editor events
+//! occur.
+//!
+//! The core of the event system is hook callbacks and the [`Event`] trait. A
+//! hook is essentially just a closure `Fn(event: &mut impl Event) -> Result<()>`
+//! that gets called every time an appropriate event is dispatched. The implementation
+//! details of the [`Event`] trait are considered private. The [`events`] macro is
+//! provided which automatically declares event types. Similarly the `register_hook`
+//! macro should be used to (safely) declare event hooks.
+//!
+//! Hooks run synchronously which can be advantageous since they can modify the
+//! current editor state right away (for example to immediately hide the completion
+//! popup). However, they can not contain their own state without locking since
+//! they only receive immutable references. For handlers that want to track state, do
+//! expensive background computations or debounce events, an [`AsyncHook`] is preferable.
+//! Async hooks are based around channels that receive events specific to
+//! that `AsyncHook` (usually an enum). These events can be sent by synchronous
+//! hooks. Due to some limitations around tokio channels the [`send_blocking`]
+//! function exported in this crate should be used instead of the builtin
+//! `blocking_send`.
+//!
+//! In addition to the core event system, this crate contains some message queues
+//! that allow transfer of data back to the main event loop from async hooks and
+//! hooks that may not have access to all application data (for example in helix-view).
+//! This includes the ability to control rendering ([`lock_frame`], [`request_redraw`]) and
+//! to display status messages ([`status`]).
+//!
+//! Hooks declared in helix-term can furthermore dispatch synchronous jobs to be run on the
+//! main loop (including access to the compositor). Ideally that queue will be moved
+//! to helix-view in the future if we manage to detach the compositor from its rendering backend.
 
+use anyhow::Result;
+pub use cancel::{cancelable_future, cancelation, CancelRx, CancelTx};
+pub use debounce::{send_blocking, AsyncHook};
 pub use redraw::{lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard};
+pub use registry::Event;
 
+mod cancel;
+mod debounce;
+mod hook;
 mod redraw;
+mod registry;
+#[doc(hidden)]
+pub mod runtime;
+pub mod status;
+
+#[cfg(test)]
+mod test;
+
+pub fn register_event<E: Event + 'static>() {
+    registry::with_mut(|registry| registry.register_event::<E>())
+}
+
+/// Registers a hook that will be called when an event of type `E` is dispatched.
+/// This function should usually not be used directly, use the [`register_hook`]
+/// macro instead.
+///
+/// # Safety
+///
+/// `hook` must be totally generic over all lifetime parameters of `E`. For
+/// example if `E` was a known type `Foo<'a, 'b>`, then the correct trait bound
+/// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)`, but there is no way to
+/// express that kind of constraint for a generic type with the Rust type system
+/// as of this writing.
+pub unsafe fn register_hook_raw<E: Event>(
+    hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
+) {
+    registry::with_mut(|registry| registry.register_hook(hook))
+}
+
+/// Register a hook solely by event name
+pub fn register_dynamic_hook(
+    hook: impl Fn() -> Result<()> + 'static + Send + Sync,
+    id: &str,
+) -> Result<()> {
+    registry::with_mut(|reg| reg.register_dynamic_hook(hook, id))
+}
+
+pub fn dispatch(e: impl Event) {
+    registry::with(|registry| registry.dispatch(e));
+}
+
+/// Macro to declare events
+///
+/// # Examples
+///
+/// ``` no-compile
+/// events! {
+///     FileWrite(&Path)
+///     ViewScrolled{ view: View, new_pos: ViewOffset }
+///     DocumentChanged<'a> { old_doc: &'a Rope, doc: &'a mut Document, changes: &'a ChangeSet }
+/// }
+///
+/// fn init() {
+///     register_event::<FileWrite>();
+///     register_event::<ViewScrolled>();
+///     register_event::<DocumentChanged>();
+/// }
+///
+/// fn save(path: &Path, content: &str){
+///     std::fs::write(path, content);
+///     dispatch(FileWrite(path));
+/// }
+/// ```
+#[macro_export]
+macro_rules! events {
+    ($name: ident<$($lt: lifetime),*> { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => {
+        pub struct $name<$($lt),*> { $(pub $data: $data_ty),* }
+        unsafe impl<$($lt),*> $crate::Event for $name<$($lt),*> {
+            const ID: &'static str = stringify!($name);
+            const LIFETIMES: usize = $crate::events!(@sum $(1, $lt),*);
+            type Static = $crate::events!(@replace_lt $name, $('static, $lt),*);
+        }
+        $crate::events!{ $($rem)* }
+    };
+    ($name: ident { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => {
+        pub struct $name { $(pub $data: $data_ty),* }
+        unsafe impl $crate::Event for $name {
+            const ID: &'static str = stringify!($name);
+            const LIFETIMES: usize = 0;
+            type Static = Self;
+        }
+        $crate::events!{ $($rem)* }
+    };
+    () => {};
+    (@replace_lt $name: ident, $($lt1: lifetime, $lt2: lifetime),* ) => {$name<$($lt1),*>};
+    (@sum $($val: expr, $lt1: lifetime),* ) => {0 $(+ $val)*};
+}
+
+/// Safely register statically typed event hooks
+#[macro_export]
+macro_rules! register_hook {
+    // Safety: this is safe because we fully control the type of the event here and
+    // ensure all lifetime arguments are fully generic and the correct number of lifetime arguments
+    // is present
+    (move |$event:ident: &mut $event_ty: ident<$($lt: lifetime),*>| $body: expr) => {
+        let val = move |$event: &mut $event_ty<$($lt),*>| $body;
+        unsafe {
+            // Lifetimes are a bit of a pain. We want to allow events being
+            // non-static. Lifetimes don't actually exist at runtime so it's
+            // fine to essentially transmute the lifetimes as long as we can
+            // prove soundness. The hook must therefore accept any combination
+            // of lifetimes. In other words fn(&'_ mut Event<'_, '_>) is ok
+            // but examples like fn(&'_ mut Event<'_, 'static>) or fn<'a>(&'a
+            // mut Event<'a, 'a>) are not. To make this safe we use a macro to
+            // forbid the user from specifying lifetimes manually (all lifetimes
+            // specified are always function generics and passed to the event so
+            // lifetimes can't be used multiple times and using 'static causes a
+            // syntax error).
+            //
+            // There is one soundness hole though: type aliases allow
+            // "accidentally" creating these problems. For example:
+            //
+            // type Event2 = Event<'static>.
+            // type Event2<'a> = Event<'a, 'a>.
+            //
+            // These cases can be caught by counting the number of lifetime
+            // parameters at the parameter declaration site and then at the hook
+            // declaration site. By asserting the number of lifetime parameters
+            // are equal we can catch all bad type aliases under one assumption:
+            // There are no unused lifetime parameters. Introducing a 'static
+            // would reduce the number of arguments of the alias by one; in the
+            // above example Event2 has zero lifetime arguments while the original
+            // event has one lifetime argument. Similar logic applies to using
+            // a lifetime argument multiple times. The ASSERT below performs a
+            // compile time assertion to ensure exactly this property.
+            //
+            // With unused lifetime arguments there is still one way to cause unsound code:
+            //
+            // type Event2<'a, 'b> = Event<'a, 'a>;
+            //
+            // However, this case will always emit a compiler warning/cause CI
+            // failures so a user would have to introduce #[allow(unused)] which
+            // is easily caught in review (and a very theoretical case anyway).
+            // If we want to be pedantic we can simply compile helix with
+            // forbid(unused). All of this is just a safety net to prevent
+            // very theoretical misuse. This won't come up in real code (and is
+            // easily caught in review).
+            #[allow(unused)]
+            const ASSERT: () = {
+                if <$event_ty as $crate::Event>::LIFETIMES != 0 + $crate::events!(@sum $(1, $lt),*){
+                    panic!("invalid type alias");
+                }
+            };
+            $crate::register_hook_raw::<$crate::events!(@replace_lt $event_ty, $('static, $lt),*)>(val);
+        }
+    };
+    (move |$event:ident: &mut $event_ty: ident| $body: expr) => {
+        let val = move |$event: &mut $event_ty| $body;
+        unsafe {
+            #[allow(unused)]
+            const ASSERT: () = {
+                if <$event_ty as $crate::Event>::LIFETIMES != 0{
+                    panic!("invalid type alias");
+                }
+            };
+            $crate::register_hook_raw::<$event_ty>(val);
+        }
+    };
+}
@@ -0,0 +1,131 @@
//! A global registry where events are registered and can be
//! subscribed to by registering hooks. The registry identifies event
//! types using their type name so multiple events with the same type name
//! may not be registered (will cause a panic to ensure soundness)

use std::any::TypeId;

use anyhow::{bail, Result};
use hashbrown::hash_map::Entry;
use hashbrown::HashMap;
use parking_lot::RwLock;

use crate::hook::ErasedHook;
use crate::runtime_local;

pub struct Registry {
    events: HashMap<&'static str, TypeId, ahash::RandomState>,
    handlers: HashMap<&'static str, Vec<ErasedHook>, ahash::RandomState>,
}

impl Registry {
    pub fn register_event<E: Event + 'static>(&mut self) {
        let ty = TypeId::of::<E>();
        assert_eq!(ty, TypeId::of::<E::Static>());
        match self.events.entry(E::ID) {
            Entry::Occupied(entry) => {
                if entry.get() == &ty {
                    // don't warn during tests to avoid log spam
                    #[cfg(not(feature = "integration_test"))]
                    panic!("Event {} was registered multiple times", E::ID);
                } else {
                    panic!("Multiple events with ID {} were registered", E::ID);
                }
            }
            Entry::Vacant(ent) => {
                ent.insert(ty);
                self.handlers.insert(E::ID, Vec::new());
            }
        }
    }

    /// # Safety
    ///
    /// `hook` must be totally generic over all lifetime parameters of `E`. For
    /// example if `E` was a known type `Foo<'a, 'b>` then the correct trait bound
    /// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)` but there is no way to
    /// express that kind of constraint for a generic type with the Rust type system
    /// right now.
    pub unsafe fn register_hook<E: Event>(
        &mut self,
        hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
    ) {
        // ensure event type ids match so we can rely on them always matching
        let id = E::ID;
        let Some(&event_id) = self.events.get(id) else {
            panic!("Tried to register handler for unknown event {id}");
        };
        assert!(
            TypeId::of::<E::Static>() == event_id,
            "Tried to register invalid hook for event {id}"
        );
        let hook = ErasedHook::new(hook);
        self.handlers.get_mut(id).unwrap().push(hook);
    }

    pub fn register_dynamic_hook(
        &mut self,
        hook: impl Fn() -> Result<()> + 'static + Send + Sync,
        id: &str,
    ) -> Result<()> {
        // ensure event type ids match so we can rely on them always matching
        if self.events.get(id).is_none() {
            bail!("Tried to register handler for unknown event {id}");
        };
        let hook = ErasedHook::new_dynamic(hook);
        self.handlers.get_mut(id).unwrap().push(hook);
        Ok(())
    }

    pub fn dispatch<E: Event>(&self, mut event: E) {
        let Some(hooks) = self.handlers.get(E::ID) else {
            log::error!("Dispatched unknown event {}", E::ID);
            return;
        };
        let event_id = self.events[E::ID];

        assert_eq!(
            TypeId::of::<E::Static>(),
            event_id,
            "Tried to dispatch invalid event {}",
            E::ID
        );

        for hook in hooks {
            // safety: event type is the same
            if let Err(err) = unsafe { hook.call(&mut event) } {
                log::error!("{} hook failed: {err:#?}", E::ID);
                crate::status::report_blocking(err);
            }
        }
    }
}

runtime_local! {
    static REGISTRY: RwLock<Registry> = RwLock::new(Registry {
        // hardcoded random number is good enough here we don't care about DOS resistance
        // and avoids the additional complexity of `Option<Registry>`
        events: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 9978, 38322, 3280080)),
        handlers: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 99078, 382322, 3282938)),
    });
}

pub(crate) fn with<T>(f: impl FnOnce(&Registry) -> T) -> T {
    f(&REGISTRY.read())
}

pub(crate) fn with_mut<T>(f: impl FnOnce(&mut Registry) -> T) -> T {
    f(&mut REGISTRY.write())
}

/// # Safety
/// The number of specified lifetimes and the static type *must* be correct.
/// This is ensured automatically by the [`events`](crate::events)
/// macro.
pub unsafe trait Event: Sized {
    /// Globally unique (case sensitive) string that identifies this type.
    /// A good candidate is the event's type name
    const ID: &'static str;
    const LIFETIMES: usize;
    type Static: Event + 'static;
}
@@ -0,0 +1,88 @@
//! The event system makes use of globals to decouple different systems.
//! However, this can cause problems for the integration test system because
//! it runs multiple helix applications in parallel. Making the globals
//! thread-local does not work because an application can/does have multiple
//! runtime threads. Instead this crate implements a similar notion to a thread
//! local but instead of being local to a single thread, the statics are local to
//! a single tokio-runtime. The implementation requires locking so it's not exactly efficient.
//!
//! Therefore this functionality is only enabled during integration tests and behaves like
//! a normal static otherwise. I would prefer this module to be fully private and to only
//! export the macro but the macro still needs to construct these internals so it's marked
//! `doc(hidden)` instead

use std::ops::Deref;

#[cfg(not(feature = "integration_test"))]
pub struct RuntimeLocal<T: 'static> {
    /// inner API used in the macro, not part of public API
    #[doc(hidden)]
    pub __data: T,
}

#[cfg(not(feature = "integration_test"))]
impl<T> Deref for RuntimeLocal<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.__data
    }
}

#[cfg(not(feature = "integration_test"))]
#[macro_export]
macro_rules! runtime_local {
    ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => {
        $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal {
            __data: $init
        };)*
    };
}

#[cfg(feature = "integration_test")]
pub struct RuntimeLocal<T: 'static> {
    data:
        parking_lot::RwLock<hashbrown::HashMap<tokio::runtime::Id, &'static T, ahash::RandomState>>,
    init: fn() -> T,
}

#[cfg(feature = "integration_test")]
impl<T> RuntimeLocal<T> {
    /// inner API used in the macro, not part of public API
    #[doc(hidden)]
    pub const fn __new(init: fn() -> T) -> Self {
        Self {
            data: parking_lot::RwLock::new(hashbrown::HashMap::with_hasher(
                ahash::RandomState::with_seeds(423, 9978, 38322, 3280080),
            )),
            init,
        }
    }
}

#[cfg(feature = "integration_test")]
impl<T> Deref for RuntimeLocal<T> {
    type Target = T;
    fn deref(&self) -> &T {
        let id = tokio::runtime::Handle::current().id();
        let guard = self.data.read();
        match guard.get(&id) {
            Some(res) => res,
            None => {
                drop(guard);
                let data = Box::leak(Box::new((self.init)()));
                let mut guard = self.data.write();
                guard.insert(id, data);
                data
            }
        }
    }
}

#[cfg(feature = "integration_test")]
#[macro_export]
macro_rules! runtime_local {
    ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => {
        $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal::__new(|| $init);)*
    };
}
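
A small usage sketch of the `runtime_local!` macro declared above (the counter is hypothetical, not part of this commit). With the `integration_test` feature each tokio runtime sees its own instance; otherwise this behaves like an ordinary static.

    use std::sync::atomic::{AtomicUsize, Ordering};

    use helix_event::runtime_local;

    runtime_local! {
        // one counter per tokio runtime under integration tests, a single global otherwise
        static DISPATCH_COUNT: AtomicUsize = AtomicUsize::new(0);
    }

    fn bump() -> usize {
        // Deref on RuntimeLocal hands out the per-runtime (or global) value
        DISPATCH_COUNT.fetch_add(1, Ordering::Relaxed)
    }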
@@ -0,0 +1,68 @@
//! A queue of async messages/errors that will be shown in the editor

use std::borrow::Cow;
use std::time::Duration;

use crate::{runtime_local, send_blocking};
use once_cell::sync::OnceCell;
use tokio::sync::mpsc::{Receiver, Sender};

/// Describes the severity level of a [`StatusMessage`].
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord)]
pub enum Severity {
    Hint,
    Info,
    Warning,
    Error,
}

pub struct StatusMessage {
    pub severity: Severity,
    pub message: Cow<'static, str>,
}

impl From<anyhow::Error> for StatusMessage {
    fn from(err: anyhow::Error) -> Self {
        StatusMessage {
            severity: Severity::Error,
            message: err.to_string().into(),
        }
    }
}

impl From<&'static str> for StatusMessage {
    fn from(msg: &'static str) -> Self {
        StatusMessage {
            severity: Severity::Info,
            message: msg.into(),
        }
    }
}

runtime_local! {
    static MESSAGES: OnceCell<Sender<StatusMessage>> = OnceCell::new();
}

pub async fn report(msg: impl Into<StatusMessage>) {
    // if the error channel overflows just ignore it
    let _ = MESSAGES
        .wait()
        .send_timeout(msg.into(), Duration::from_millis(10))
        .await;
}

pub fn report_blocking(msg: impl Into<StatusMessage>) {
    let messages = MESSAGES.wait();
    send_blocking(messages, msg.into())
}

/// Must be called exactly once during editor startup
/// before any of the messages in this module can be used
///
/// # Panics
/// If called multiple times
pub fn setup() -> Receiver<StatusMessage> {
    let (tx, rx) = tokio::sync::mpsc::channel(128);
    let _ = MESSAGES.set(tx);
    rx
}
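
An illustrative sketch (not from this commit) of the intended flow: the editor calls `setup()` once at startup and drains the receiver, while hooks enqueue messages through `report_blocking`. The spawned consumer below is only a stand-in for helix's main event loop.

    use helix_event::status;

    fn init_status_queue() {
        let mut rx = status::setup();
        // stand-in consumer; helix drains this receiver from its main loop instead
        tokio::spawn(async move {
            while let Some(msg) = rx.recv().await {
                eprintln!("[{:?}] {}", msg.severity, msg.message);
            }
        });
        // any hook can now report a message without awaiting
        status::report_blocking("status queue ready");
    }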
@@ -0,0 +1,90 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;

use parking_lot::Mutex;

use crate::{dispatch, events, register_dynamic_hook, register_event, register_hook};
#[test]
fn smoke_test() {
    events! {
        Event1 { content: String }
        Event2 { content: usize }
    }
    register_event::<Event1>();
    register_event::<Event2>();

    // setup hooks
    let res1: Arc<Mutex<String>> = Arc::default();
    let acc = Arc::clone(&res1);
    register_hook!(move |event: &mut Event1| {
        acc.lock().push_str(&event.content);
        Ok(())
    });
    let res2: Arc<AtomicUsize> = Arc::default();
    let acc = Arc::clone(&res2);
    register_hook!(move |event: &mut Event2| {
        acc.fetch_add(event.content, Ordering::Relaxed);
        Ok(())
    });

    // triggers events
    let thread = std::thread::spawn(|| {
        for i in 0..1000 {
            dispatch(Event2 { content: i });
        }
    });
    std::thread::sleep(Duration::from_millis(1));
    dispatch(Event1 {
        content: "foo".to_owned(),
    });
    dispatch(Event2 { content: 42 });
    dispatch(Event1 {
        content: "bar".to_owned(),
    });
    dispatch(Event1 {
        content: "hello world".to_owned(),
    });
    thread.join().unwrap();

    // check output
    assert_eq!(&**res1.lock(), "foobarhello world");
    assert_eq!(
        res2.load(Ordering::Relaxed),
        42 + (0..1000usize).sum::<usize>()
    );
}

#[test]
fn dynamic() {
    events! {
        Event3 {}
        Event4 { count: usize }
    };
    register_event::<Event3>();
    register_event::<Event4>();

    let count = Arc::new(AtomicUsize::new(0));
    let count1 = count.clone();
    let count2 = count.clone();
    register_dynamic_hook(
        move || {
            count1.fetch_add(2, Ordering::Relaxed);
            Ok(())
        },
        "Event3",
    )
    .unwrap();
    register_dynamic_hook(
        move || {
            count2.fetch_add(3, Ordering::Relaxed);
            Ok(())
        },
        "Event4",
    )
    .unwrap();
    dispatch(Event3 {});
    dispatch(Event4 { count: 0 });
    dispatch(Event3 {});
    assert_eq!(count.load(Ordering::Relaxed), 7)
}
@@ -1,31 +1,32 @@
 [package]
 name = "helix-lsp"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2021"
-license = "MPL-2.0"
 description = "LSP client implementation for Helix project"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-helix-core = { version = "0.6", path = "../helix-core" }
-helix-loader = { version = "0.6", path = "../helix-loader" }
-helix-parsec = { version = "0.6", path = "../helix-parsec" }
+helix-stdx = { path = "../helix-stdx" }
+helix-core = { path = "../helix-core" }
+helix-loader = { path = "../helix-loader" }
+helix-parsec = { path = "../helix-parsec" }
 
 anyhow = "1.0"
 futures-executor = "0.3"
 futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
-globset = "0.4.13"
+globset = "0.4.14"
 log = "0.4"
-lsp-types = { version = "0.94" }
+lsp-types = { version = "0.95" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 thiserror = "1.0"
-tokio = { version = "1.33", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
+tokio = { version = "1.35", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
 tokio-stream = "0.1.14"
-which = "4.4"
 parking_lot = "0.12.1"
@@ -0,0 +1,105 @@
use std::path::Path;

use globset::{GlobBuilder, GlobSet};

use crate::lsp;

#[derive(Default, Debug)]
pub(crate) struct FileOperationFilter {
    dir_globs: GlobSet,
    file_globs: GlobSet,
}

impl FileOperationFilter {
    fn new(capability: Option<&lsp::FileOperationRegistrationOptions>) -> FileOperationFilter {
        let Some(cap) = capability else {
            return FileOperationFilter::default();
        };
        let mut dir_globs = GlobSet::builder();
        let mut file_globs = GlobSet::builder();
        for filter in &cap.filters {
            // TODO: support other url schemes
            let is_non_file_schema = filter
                .scheme
                .as_ref()
                .is_some_and(|schema| schema != "file");
            if is_non_file_schema {
                continue;
            }
            let ignore_case = filter
                .pattern
                .options
                .as_ref()
                .and_then(|opts| opts.ignore_case)
                .unwrap_or(false);
            let mut glob_builder = GlobBuilder::new(&filter.pattern.glob);
            glob_builder.case_insensitive(ignore_case);
            let glob = match glob_builder.build() {
                Ok(glob) => glob,
                Err(err) => {
                    log::error!("invalid glob sent by LS: {err}");
                    continue;
                }
            };
            match filter.pattern.matches {
                Some(lsp::FileOperationPatternKind::File) => {
                    file_globs.add(glob);
                }
                Some(lsp::FileOperationPatternKind::Folder) => {
                    dir_globs.add(glob);
                }
                None => {
                    file_globs.add(glob.clone());
                    dir_globs.add(glob);
                }
            };
        }
        let file_globs = file_globs.build().unwrap_or_else(|err| {
            log::error!("invalid globs sent by LS: {err}");
            GlobSet::empty()
        });
        let dir_globs = dir_globs.build().unwrap_or_else(|err| {
            log::error!("invalid globs sent by LS: {err}");
            GlobSet::empty()
        });
        FileOperationFilter {
            dir_globs,
            file_globs,
        }
    }

    pub(crate) fn has_interest(&self, path: &Path, is_dir: bool) -> bool {
        if is_dir {
            self.dir_globs.is_match(path)
        } else {
            self.file_globs.is_match(path)
        }
    }
}

#[derive(Default, Debug)]
pub(crate) struct FileOperationsInterest {
    // TODO: support other notifications
    // did_create: FileOperationFilter,
    // will_create: FileOperationFilter,
    pub did_rename: FileOperationFilter,
    pub will_rename: FileOperationFilter,
    // did_delete: FileOperationFilter,
    // will_delete: FileOperationFilter,
}

impl FileOperationsInterest {
    pub fn new(capabilities: &lsp::ServerCapabilities) -> FileOperationsInterest {
        let capabilities = capabilities
            .workspace
            .as_ref()
            .and_then(|capabilities| capabilities.file_operations.as_ref());
        let Some(capabilities) = capabilities else {
            return FileOperationsInterest::default();
        };
        FileOperationsInterest {
            did_rename: FileOperationFilter::new(capabilities.did_rename.as_ref()),
            will_rename: FileOperationFilter::new(capabilities.will_rename.as_ref()),
        }
    }
}
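
A sketch of how these filters are meant to be consulted (hypothetical call site; `FileOperationsInterest` is crate-private, so this only makes sense inside helix-lsp and the module paths are assumed): build the interest table once from the server capabilities and query it before announcing renames.

    use std::path::Path;

    use crate::lsp;

    // hypothetical helper: should a rename of `path` be announced via workspace/willRenameFiles?
    fn wants_will_rename(caps: &lsp::ServerCapabilities, path: &Path, is_dir: bool) -> bool {
        let interest = FileOperationsInterest::new(caps);
        interest.will_rename.has_interest(path, is_dir)
    }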
@@ -1,13 +1,14 @@
 [package]
 name = "helix-parsec"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2021"
-license = "MPL-2.0"
 description = "Parser combinators for Helix"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
 include = ["src/**/*", "README.md"]
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true
 
 [dependencies]
@@ -0,0 +1,21 @@
[package]
name = "helix-stdx"
description = "Standard library extensions"
include = ["src/**/*", "README.md"]
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
rust-version.workspace = true
categories.workspace = true
repository.workspace = true
homepage.workspace = true

[dependencies]
dunce = "1.0"
etcetera = "0.8"
ropey = { version = "1.6.1", default-features = false }
which = "6.0"

[dev-dependencies]
tempfile = "3.9"
@@ -0,0 +1,80 @@
use std::{
    ffi::OsStr,
    path::{Path, PathBuf},
    sync::RwLock,
};

static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);

// Get the current working directory.
// This information is managed internally as the call to std::env::current_dir
// might fail if the cwd has been deleted.
pub fn current_working_dir() -> PathBuf {
    if let Some(path) = &*CWD.read().unwrap() {
        return path.clone();
    }

    let path = std::env::current_dir()
        .map(crate::path::normalize)
        .expect("Couldn't determine current working directory");
    let mut cwd = CWD.write().unwrap();
    *cwd = Some(path.clone());

    path
}

pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
    let path = crate::path::canonicalize(path);
    std::env::set_current_dir(&path)?;
    let mut cwd = CWD.write().unwrap();
    *cwd = Some(path);
    Ok(())
}

pub fn env_var_is_set(env_var_name: &str) -> bool {
    std::env::var_os(env_var_name).is_some()
}

pub fn binary_exists<T: AsRef<OsStr>>(binary_name: T) -> bool {
    which::which(binary_name).is_ok()
}

pub fn which<T: AsRef<OsStr>>(
    binary_name: T,
) -> Result<std::path::PathBuf, ExecutableNotFoundError> {
    which::which(binary_name.as_ref()).map_err(|err| ExecutableNotFoundError {
        command: binary_name.as_ref().to_string_lossy().into_owned(),
        inner: err,
    })
}

#[derive(Debug)]
pub struct ExecutableNotFoundError {
    command: String,
    inner: which::Error,
}

impl std::fmt::Display for ExecutableNotFoundError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "command '{}' not found: {}", self.command, self.inner)
    }
}

impl std::error::Error for ExecutableNotFoundError {}

#[cfg(test)]
mod tests {
    use super::{current_working_dir, set_current_working_dir};

    #[test]
    fn current_dir_is_set() {
        let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
        let cwd = current_working_dir();
        assert_ne!(cwd, new_path);

        set_current_working_dir(&new_path).expect("Couldn't set new path");

        let cwd = current_working_dir();
        assert_eq!(cwd, new_path);
    }
}
@@ -0,0 +1,3 @@
pub mod env;
pub mod path;
pub mod rope;
@@ -0,0 +1,26 @@
use ropey::RopeSlice;

pub trait RopeSliceExt: Sized {
    fn ends_with(self, text: &str) -> bool;
    fn starts_with(self, text: &str) -> bool;
}

impl RopeSliceExt for RopeSlice<'_> {
    fn ends_with(self, text: &str) -> bool {
        let len = self.len_bytes();
        if len < text.len() {
            return false;
        }
        self.get_byte_slice(len - text.len()..)
            .map_or(false, |end| end == text)
    }

    fn starts_with(self, text: &str) -> bool {
        let len = self.len_bytes();
        if len < text.len() {
            return false;
        }
        self.get_byte_slice(..text.len())
            .map_or(false, |start| start == text)
    }
}
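
A quick usage sketch of the extension trait (not part of the commit): prefix and suffix checks run directly on a `RopeSlice`, with no intermediate `String` allocation.

    use helix_stdx::rope::RopeSliceExt;
    use ropey::Rope;

    fn main() {
        let doc = Rope::from("fn main() {}\n");
        let slice = doc.slice(..);
        assert!(slice.starts_with("fn "));
        assert!(slice.ends_with("{}\n"));
    }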
@@ -0,0 +1,124 @@
#![cfg(windows)]

use std::{
    env::set_current_dir,
    error::Error,
    path::{Component, Path, PathBuf},
};

use helix_stdx::path;
use tempfile::Builder;

// Paths on Windows are almost always case-insensitive.
// Normalization should return the original path.
// E.g. mkdir `CaSe`, normalize(`case`) = `CaSe`.
#[test]
fn test_case_folding_windows() -> Result<(), Box<dyn Error>> {
    // tmp/root/case
    let tmp_prefix = std::env::temp_dir();
    set_current_dir(&tmp_prefix)?;

    let root = Builder::new().prefix("root-").tempdir()?;
    let case = Builder::new().prefix("CaSe-").tempdir_in(&root)?;

    let root_without_prefix = root.path().strip_prefix(&tmp_prefix)?;

    let lowercase_case = format!(
        "case-{}",
        case.path()
            .file_name()
            .unwrap()
            .to_string_lossy()
            .split_at(5)
            .1
    );
    let test_path = root_without_prefix.join(lowercase_case);
    assert_eq!(
        path::normalize(&test_path),
        case.path().strip_prefix(&tmp_prefix)?
    );

    Ok(())
}

#[test]
fn test_normalize_path() -> Result<(), Box<dyn Error>> {
    /*
    tmp/root/
    ├── link -> dir1/orig_file
    ├── dir1/
    │   └── orig_file
    └── dir2/
        └── dir_link -> ../dir1/
    */

    let tmp_prefix = std::env::temp_dir();
    set_current_dir(&tmp_prefix)?;

    // Create a tree structure as shown above
    let root = Builder::new().prefix("root-").tempdir()?;
    let dir1 = Builder::new().prefix("dir1-").tempdir_in(&root)?;
    let orig_file = Builder::new().prefix("orig_file-").tempfile_in(&dir1)?;
    let dir2 = Builder::new().prefix("dir2-").tempdir_in(&root)?;

    // Create path and delete existing file
    let dir_link = Builder::new()
        .prefix("dir_link-")
        .tempfile_in(&dir2)?
        .path()
        .to_owned();
    let link = Builder::new()
        .prefix("link-")
        .tempfile_in(&root)?
        .path()
        .to_owned();

    use std::os::windows;
    windows::fs::symlink_dir(&dir1, &dir_link)?;
    windows::fs::symlink_file(&orig_file, &link)?;

    // root/link
    let path = link.strip_prefix(&tmp_prefix)?;
    assert_eq!(
        path::normalize(path),
        path,
        "input {:?} and symlink last component shouldn't be resolved",
        path
    );

    // root/dir2/dir_link/orig_file/../..
    let path = dir_link
        .strip_prefix(&tmp_prefix)
        .unwrap()
        .join(orig_file.path().file_name().unwrap())
        .join(Component::ParentDir)
        .join(Component::ParentDir);
    let expected = dir_link
        .strip_prefix(&tmp_prefix)
        .unwrap()
        .join(Component::ParentDir);
    assert_eq!(
        path::normalize(&path),
        expected,
        "input {:?} and \"..\" should not erase the symlink that goes ahead",
        &path
    );

    // root/link/.././../dir2/../
    let path = link
        .strip_prefix(&tmp_prefix)
        .unwrap()
        .join(Component::ParentDir)
        .join(Component::CurDir)
        .join(Component::ParentDir)
        .join(dir2.path().file_name().unwrap())
        .join(Component::ParentDir);
    let expected = link
        .strip_prefix(&tmp_prefix)
        .unwrap()
        .join(Component::ParentDir)
        .join(Component::ParentDir);
    assert_eq!(path::normalize(&path), expected, "input {:?}", &path);

    Ok(())
}
@@ -0,0 +1,20 @@
use helix_event::{events, register_event};
use helix_view::document::Mode;
use helix_view::events::{DocumentDidChange, SelectionDidChange};

use crate::commands;
use crate::keymap::MappableCommand;

events! {
    OnModeSwitch<'a, 'cx> { old_mode: Mode, new_mode: Mode, cx: &'a mut commands::Context<'cx> }
    PostInsertChar<'a, 'cx> { c: char, cx: &'a mut commands::Context<'cx> }
    PostCommand<'a, 'cx> { command: &'a MappableCommand, cx: &'a mut commands::Context<'cx> }
}

pub fn register() {
    register_event::<OnModeSwitch>();
    register_event::<PostInsertChar>();
    register_event::<PostCommand>();
    register_event::<DocumentDidChange>();
    register_event::<SelectionDidChange>();
}
@@ -0,0 +1,30 @@
use std::sync::Arc;

use arc_swap::ArcSwap;
use helix_event::AsyncHook;

use crate::config::Config;
use crate::events;
use crate::handlers::completion::CompletionHandler;
use crate::handlers::signature_help::SignatureHelpHandler;

pub use completion::trigger_auto_completion;
pub use helix_view::handlers::lsp::SignatureHelpInvoked;
pub use helix_view::handlers::Handlers;

mod completion;
mod signature_help;

pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
    events::register();

    let completions = CompletionHandler::new(config).spawn();
    let signature_hints = SignatureHelpHandler::new().spawn();
    let handlers = Handlers {
        completions,
        signature_hints,
    };
    completion::register_hooks(&handlers);
    signature_help::register_hooks(&handlers);
    handlers
}
@ -0,0 +1,465 @@
use std::collections::HashSet;
use std::sync::Arc;
use std::time::Duration;

use arc_swap::ArcSwap;
use futures_util::stream::FuturesUnordered;
use helix_core::chars::char_is_word;
use helix_core::syntax::LanguageServerFeature;
use helix_event::{
    cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx,
};
use helix_lsp::lsp;
use helix_lsp::util::pos_to_lsp_pos;
use helix_stdx::rope::RopeSliceExt;
use helix_view::document::{Mode, SavePoint};
use helix_view::handlers::lsp::CompletionEvent;
use helix_view::{DocumentId, Editor, ViewId};
use tokio::sync::mpsc::Sender;
use tokio::time::Instant;
use tokio_stream::StreamExt;

use crate::commands;
use crate::compositor::Compositor;
use crate::config::Config;
use crate::events::{OnModeSwitch, PostCommand, PostInsertChar};
use crate::job::{dispatch, dispatch_blocking};
use crate::keymap::MappableCommand;
use crate::ui::editor::InsertEvent;
use crate::ui::lsp::SignatureHelp;
use crate::ui::{self, CompletionItem, Popup};

use super::Handlers;

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum TriggerKind {
    Auto,
    TriggerChar,
    Manual,
}

#[derive(Debug, Clone, Copy)]
struct Trigger {
    pos: usize,
    view: ViewId,
    doc: DocumentId,
    kind: TriggerKind,
}

#[derive(Debug)]
pub(super) struct CompletionHandler {
    /// currently active trigger which will cause a
    /// completion request after the timeout
    trigger: Option<Trigger>,
    /// A handle for the currently active completion request.
    /// This can be used to determine whether the current
    /// request is still active (and new triggers should be
    /// ignored) and can also be used to abort the current
    /// request (by dropping the handle)
    request: Option<CancelTx>,
    config: Arc<ArcSwap<Config>>,
}

impl CompletionHandler {
    pub fn new(config: Arc<ArcSwap<Config>>) -> CompletionHandler {
        Self {
            config,
            request: None,
            trigger: None,
        }
    }
}

impl helix_event::AsyncHook for CompletionHandler {
    type Event = CompletionEvent;

    fn handle_event(
        &mut self,
        event: Self::Event,
        _old_timeout: Option<Instant>,
    ) -> Option<Instant> {
        match event {
            CompletionEvent::AutoTrigger {
                cursor: trigger_pos,
                doc,
                view,
            } => {
                // technically it shouldn't be possible to switch views/documents in insert mode
                // but people may create weird keymaps/use the mouse so let's be extra careful
                if self
                    .trigger
                    .as_ref()
                    .map_or(true, |trigger| trigger.doc != doc || trigger.view != view)
                {
                    self.trigger = Some(Trigger {
                        pos: trigger_pos,
                        view,
                        doc,
                        kind: TriggerKind::Auto,
                    });
                }
            }
            CompletionEvent::TriggerChar { cursor, doc, view } => {
                // immediately request completions and drop all auto completion requests
                self.request = None;
                self.trigger = Some(Trigger {
                    pos: cursor,
                    view,
                    doc,
                    kind: TriggerKind::TriggerChar,
                });
            }
            CompletionEvent::ManualTrigger { cursor, doc, view } => {
                // immediately request completions and drop all auto completion requests
                self.request = None;
                self.trigger = Some(Trigger {
                    pos: cursor,
                    view,
                    doc,
                    kind: TriggerKind::Manual,
                });
                // stop debouncing immediately and request the completion
                self.finish_debounce();
                return None;
            }
            CompletionEvent::Cancel => {
                self.trigger = None;
                self.request = None;
            }
            CompletionEvent::DeleteText { cursor } => {
                // if we deleted the original trigger, abort the completion
                if matches!(self.trigger, Some(Trigger { pos, .. }) if cursor < pos) {
                    self.trigger = None;
                    self.request = None;
                }
            }
        }
        self.trigger.map(|trigger| {
            // if the current request was closed, forget about it;
            // otherwise immediately restart the completion request
            let cancel = self.request.take().map_or(false, |req| !req.is_closed());
            let timeout = if trigger.kind == TriggerKind::Auto && !cancel {
                self.config.load().editor.completion_timeout
            } else {
                // we want almost instant completions for trigger chars
                // and restarting completion requests. The small timeout here mainly
                // serves to better handle cases where the completion handler
                // may fall behind (so multiple events in the channel) and macros
                Duration::from_millis(5)
            };
            Instant::now() + timeout
        })
    }

    fn finish_debounce(&mut self) {
        let trigger = self.trigger.take().expect("debounce always has a trigger");
        let (tx, rx) = cancelation();
        self.request = Some(tx);
        dispatch_blocking(move |editor, compositor| {
            request_completion(trigger, rx, editor, compositor)
        });
    }
}

fn request_completion(
    mut trigger: Trigger,
    cancel: CancelRx,
    editor: &mut Editor,
    compositor: &mut Compositor,
) {
    let (view, doc) = current!(editor);

    if compositor
        .find::<ui::EditorView>()
        .unwrap()
        .completion
        .is_some()
        || editor.mode != Mode::Insert
    {
        return;
    }

    let text = doc.text();
    let cursor = doc.selection(view.id).primary().cursor(text.slice(..));
    if trigger.view != view.id || trigger.doc != doc.id() || cursor < trigger.pos {
        return;
    }
    // this looks odd... Why are we not using the trigger position from
    // the `trigger` here? Won't that mean that the trigger char doesn't get
    // sent to the LS if we type fast enough? Yes, that is true, but it's
    // not actually a problem. The LSP will resolve the completion to the identifier
    // anyway (in fact sending the later position is necessary to get the right results
    // from LSPs that provide incomplete completion lists). We rely on trigger offset
    // and primary cursor matching for multi-cursor completions so this is definitely
    // necessary from our side too.
    trigger.pos = cursor;
    let trigger_text = text.slice(..cursor);

    let mut seen_language_servers = HashSet::new();
    let mut futures: FuturesUnordered<_> = doc
        .language_servers_with_feature(LanguageServerFeature::Completion)
        .filter(|ls| seen_language_servers.insert(ls.id()))
        .map(|ls| {
            let language_server_id = ls.id();
            let offset_encoding = ls.offset_encoding();
            let pos = pos_to_lsp_pos(text, cursor, offset_encoding);
            let doc_id = doc.identifier();
            let context = if trigger.kind == TriggerKind::Manual {
                lsp::CompletionContext {
                    trigger_kind: lsp::CompletionTriggerKind::INVOKED,
                    trigger_character: None,
                }
            } else {
                let trigger_char =
                    ls.capabilities()
                        .completion_provider
                        .as_ref()
                        .and_then(|provider| {
                            provider
                                .trigger_characters
                                .as_deref()?
                                .iter()
                                .find(|&trigger| trigger_text.ends_with(trigger))
                        });
                lsp::CompletionContext {
                    trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER,
                    trigger_character: trigger_char.cloned(),
                }
            };

            let completion_response = ls.completion(doc_id, pos, None, context).unwrap();
            async move {
                let json = completion_response.await?;
                let response: Option<lsp::CompletionResponse> = serde_json::from_value(json)?;
                let items = match response {
                    Some(lsp::CompletionResponse::Array(items)) => items,
                    // TODO: do something with is_incomplete
                    Some(lsp::CompletionResponse::List(lsp::CompletionList {
                        is_incomplete: _is_incomplete,
                        items,
                    })) => items,
                    None => Vec::new(),
                }
                .into_iter()
                .map(|item| CompletionItem {
                    item,
                    language_server_id,
                    resolved: false,
                })
                .collect();
                anyhow::Ok(items)
            }
        })
        .collect();

    let future = async move {
        let mut items = Vec::new();
        while let Some(lsp_items) = futures.next().await {
            match lsp_items {
                Ok(mut lsp_items) => items.append(&mut lsp_items),
                Err(err) => {
                    log::debug!("completion request failed: {err:?}");
                }
            };
        }
        items
    };

    let savepoint = doc.savepoint(view);

    let ui = compositor.find::<ui::EditorView>().unwrap();
    ui.last_insert.1.push(InsertEvent::RequestCompletion);
    tokio::spawn(async move {
        let items = cancelable_future(future, cancel).await.unwrap_or_default();
        if items.is_empty() {
            return;
        }
        dispatch(move |editor, compositor| {
            show_completion(editor, compositor, items, trigger, savepoint)
        })
        .await
    });
}

fn show_completion(
    editor: &mut Editor,
    compositor: &mut Compositor,
    items: Vec<CompletionItem>,
    trigger: Trigger,
    savepoint: Arc<SavePoint>,
) {
    let (view, doc) = current_ref!(editor);
    // check if the completion request is stale.
    //
    // Completions are completed asynchronously and therefore the user could
    // switch document/view or leave insert mode. In all of those cases the
    // completion should be discarded.
    if editor.mode != Mode::Insert || view.id != trigger.view || doc.id() != trigger.doc {
        return;
    }

    let size = compositor.size();
    let ui = compositor.find::<ui::EditorView>().unwrap();
    if ui.completion.is_some() {
        return;
    }

    let completion_area = ui.set_completion(editor, savepoint, items, trigger.pos, size);
    let signature_help_area = compositor
        .find_id::<Popup<SignatureHelp>>(SignatureHelp::ID)
        .map(|signature_help| signature_help.area(size, editor));
    // Delete the signature help popup if they intersect.
    if matches!((completion_area, signature_help_area), (Some(a), Some(b)) if a.intersects(b)) {
        compositor.remove(SignatureHelp::ID);
    }
}

pub fn trigger_auto_completion(
    tx: &Sender<CompletionEvent>,
    editor: &Editor,
    trigger_char_only: bool,
) {
    let config = editor.config.load();
    if !config.auto_completion {
        return;
    }
    let (view, doc): (&helix_view::View, &helix_view::Document) = current_ref!(editor);
    let mut text = doc.text().slice(..);
    let cursor = doc.selection(view.id).primary().cursor(text);
    text = doc.text().slice(..cursor);

    let is_trigger_char = doc
        .language_servers_with_feature(LanguageServerFeature::Completion)
        .any(|ls| {
            matches!(&ls.capabilities().completion_provider, Some(lsp::CompletionOptions {
                trigger_characters: Some(triggers),
                ..
            }) if triggers.iter().any(|trigger| text.ends_with(trigger)))
        });
    if is_trigger_char {
        send_blocking(
            tx,
            CompletionEvent::TriggerChar {
                cursor,
                doc: doc.id(),
                view: view.id,
            },
        );
        return;
    }

    let is_auto_trigger = !trigger_char_only
        && doc
            .text()
            .chars_at(cursor)
            .reversed()
            .take(config.completion_trigger_len as usize)
            .all(char_is_word);

    if is_auto_trigger {
        send_blocking(
            tx,
            CompletionEvent::AutoTrigger {
                cursor,
                doc: doc.id(),
                view: view.id,
            },
        );
    }
}

fn update_completions(cx: &mut commands::Context, c: Option<char>) {
    cx.callback.push(Box::new(move |compositor, cx| {
        let editor_view = compositor.find::<ui::EditorView>().unwrap();
        if let Some(completion) = &mut editor_view.completion {
            completion.update_filter(c);
            if completion.is_empty() {
                editor_view.clear_completion(cx.editor);
                // clearing completions might mean we want to immediately rerequest them (usually
                // this occurs if typing a trigger char)
                if c.is_some() {
                    trigger_auto_completion(&cx.editor.handlers.completions, cx.editor, false);
                }
            }
        }
    }))
}

fn clear_completions(cx: &mut commands::Context) {
    cx.callback.push(Box::new(|compositor, cx| {
        let editor_view = compositor.find::<ui::EditorView>().unwrap();
        editor_view.clear_completion(cx.editor);
    }))
}

fn completion_post_command_hook(
    tx: &Sender<CompletionEvent>,
    PostCommand { command, cx }: &mut PostCommand<'_, '_>,
) -> anyhow::Result<()> {
    if cx.editor.mode == Mode::Insert {
        if cx.editor.last_completion.is_some() {
            match command {
                MappableCommand::Static {
                    name: "delete_word_forward" | "delete_char_forward" | "completion",
                    ..
                } => (),
                MappableCommand::Static {
                    name: "delete_char_backward",
                    ..
                } => update_completions(cx, None),
                _ => clear_completions(cx),
            }
        } else {
            let event = match command {
                MappableCommand::Static {
                    name: "delete_char_backward" | "delete_word_forward" | "delete_char_forward",
                    ..
                } => {
                    let (view, doc) = current!(cx.editor);
                    let primary_cursor = doc
                        .selection(view.id)
                        .primary()
                        .cursor(doc.text().slice(..));
                    CompletionEvent::DeleteText {
                        cursor: primary_cursor,
                    }
                }
                // hacks: some commands are handled elsewhere and we don't want to
                // cancel in that case
                MappableCommand::Static {
                    name: "completion" | "insert_mode" | "append_mode",
                    ..
                } => return Ok(()),
                _ => CompletionEvent::Cancel,
            };
            send_blocking(tx, event);
        }
    }
    Ok(())
}

pub(super) fn register_hooks(handlers: &Handlers) {
    let tx = handlers.completions.clone();
    register_hook!(move |event: &mut PostCommand<'_, '_>| completion_post_command_hook(&tx, event));

    let tx = handlers.completions.clone();
    register_hook!(move |event: &mut OnModeSwitch<'_, '_>| {
        if event.old_mode == Mode::Insert {
            send_blocking(&tx, CompletionEvent::Cancel);
            clear_completions(event.cx);
        } else if event.new_mode == Mode::Insert {
            trigger_auto_completion(&tx, event.cx.editor, false)
        }
        Ok(())
    });

    let tx = handlers.completions.clone();
    register_hook!(move |event: &mut PostInsertChar<'_, '_>| {
        if event.cx.editor.last_completion.is_some() {
            update_completions(event.cx, Some(event.c))
        } else {
            trigger_auto_completion(&tx, event.cx.editor, false);
        }
        Ok(())
    });
}
@ -0,0 +1,335 @@
use std::sync::Arc;
use std::time::Duration;

use helix_core::syntax::LanguageServerFeature;
use helix_event::{
    cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx,
};
use helix_lsp::lsp;
use helix_stdx::rope::RopeSliceExt;
use helix_view::document::Mode;
use helix_view::events::{DocumentDidChange, SelectionDidChange};
use helix_view::handlers::lsp::{SignatureHelpEvent, SignatureHelpInvoked};
use helix_view::Editor;
use tokio::sync::mpsc::Sender;
use tokio::time::Instant;

use crate::commands::Open;
use crate::compositor::Compositor;
use crate::events::{OnModeSwitch, PostInsertChar};
use crate::handlers::Handlers;
use crate::ui::lsp::SignatureHelp;
use crate::ui::Popup;
use crate::{job, ui};

#[derive(Debug)]
enum State {
    Open,
    Closed,
    Pending { request: CancelTx },
}

/// debounce timeout in ms, value taken from VSCode
/// TODO: make this configurable?
const TIMEOUT: u64 = 120;

#[derive(Debug)]
pub(super) struct SignatureHelpHandler {
    trigger: Option<SignatureHelpInvoked>,
    state: State,
}

impl SignatureHelpHandler {
    pub fn new() -> SignatureHelpHandler {
        SignatureHelpHandler {
            trigger: None,
            state: State::Closed,
        }
    }
}

impl helix_event::AsyncHook for SignatureHelpHandler {
    type Event = SignatureHelpEvent;

    fn handle_event(
        &mut self,
        event: Self::Event,
        timeout: Option<tokio::time::Instant>,
    ) -> Option<Instant> {
        match event {
            SignatureHelpEvent::Invoked => {
                self.trigger = Some(SignatureHelpInvoked::Manual);
                self.state = State::Closed;
                self.finish_debounce();
                return None;
            }
            SignatureHelpEvent::Trigger => {}
            SignatureHelpEvent::ReTrigger => {
                // don't retrigger if we aren't open/pending yet
                if matches!(self.state, State::Closed) {
                    return timeout;
                }
            }
            SignatureHelpEvent::Cancel => {
                self.state = State::Closed;
                return None;
            }
            SignatureHelpEvent::RequestComplete { open } => {
                // don't cancel a rerequest that was already triggered
                if let State::Pending { request } = &self.state {
                    if !request.is_closed() {
                        return timeout;
                    }
                }
                self.state = if open { State::Open } else { State::Closed };
                return timeout;
            }
        }
        if self.trigger.is_none() {
            self.trigger = Some(SignatureHelpInvoked::Automatic)
        }
        Some(Instant::now() + Duration::from_millis(TIMEOUT))
    }

    fn finish_debounce(&mut self) {
        let invocation = self.trigger.take().unwrap();
        let (tx, rx) = cancelation();
        self.state = State::Pending { request: tx };
        job::dispatch_blocking(move |editor, _| request_signature_help(editor, invocation, rx))
    }
}

pub fn request_signature_help(
    editor: &mut Editor,
    invoked: SignatureHelpInvoked,
    cancel: CancelRx,
) {
    let (view, doc) = current!(editor);

    // TODO merge multiple language server signature help into one instead of just taking the first language server that supports it
    let future = doc
        .language_servers_with_feature(LanguageServerFeature::SignatureHelp)
        .find_map(|language_server| {
            let pos = doc.position(view.id, language_server.offset_encoding());
            language_server.text_document_signature_help(doc.identifier(), pos, None)
        });

    let Some(future) = future else {
        // Do not show the message if signature help was invoked
        // automatically on backspace, trigger characters, etc.
        if invoked == SignatureHelpInvoked::Manual {
            editor.set_error("No configured language server supports signature-help");
        }
        return;
    };

    tokio::spawn(async move {
        match cancelable_future(future, cancel).await {
            Some(Ok(res)) => {
                job::dispatch(move |editor, compositor| {
                    show_signature_help(editor, compositor, invoked, res)
                })
                .await
            }
            Some(Err(err)) => log::error!("signature help request failed: {err}"),
            None => (),
        }
    });
}

pub fn show_signature_help(
    editor: &mut Editor,
    compositor: &mut Compositor,
    invoked: SignatureHelpInvoked,
    response: Option<lsp::SignatureHelp>,
) {
    let config = &editor.config();

    if !(config.lsp.auto_signature_help
        || SignatureHelp::visible_popup(compositor).is_some()
        || invoked == SignatureHelpInvoked::Manual)
    {
        return;
    }

    // If the signature help invocation is automatic, don't show it outside of Insert Mode:
    // it very probably means the server was a little slow to respond and the user has
    // already moved on to something else, so a signature help popup would just be an
    // annoyance, see https://github.com/helix-editor/helix/issues/3112
    // For the most part this should not be needed as the request gets canceled automatically now
    // but it's technically possible for the mode change to just preempt this callback so better safe than sorry
    if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert {
        return;
    }

    let response = match response {
        // According to the spec the response should be None if there
        // are no signatures, but some servers don't follow this.
        Some(s) if !s.signatures.is_empty() => s,
        _ => {
            send_blocking(
                &editor.handlers.signature_hints,
                SignatureHelpEvent::RequestComplete { open: false },
            );
            compositor.remove(SignatureHelp::ID);
            return;
        }
    };
    send_blocking(
        &editor.handlers.signature_hints,
        SignatureHelpEvent::RequestComplete { open: true },
    );

    let doc = doc!(editor);
    let language = doc.language_name().unwrap_or("");

    let signature = match response
        .signatures
        .get(response.active_signature.unwrap_or(0) as usize)
    {
        Some(s) => s,
        None => return,
    };
    let mut contents = SignatureHelp::new(
        signature.label.clone(),
        language.to_string(),
        Arc::clone(&editor.syn_loader),
    );

    let signature_doc = if config.lsp.display_signature_help_docs {
        signature.documentation.as_ref().map(|doc| match doc {
            lsp::Documentation::String(s) => s.clone(),
            lsp::Documentation::MarkupContent(markup) => markup.value.clone(),
        })
    } else {
        None
    };

    contents.set_signature_doc(signature_doc);

    let active_param_range = || -> Option<(usize, usize)> {
        let param_idx = signature
            .active_parameter
            .or(response.active_parameter)
            .unwrap_or(0) as usize;
        let param = signature.parameters.as_ref()?.get(param_idx)?;
        match &param.label {
            lsp::ParameterLabel::Simple(string) => {
                let start = signature.label.find(string.as_str())?;
                Some((start, start + string.len()))
            }
            lsp::ParameterLabel::LabelOffsets([start, end]) => {
                // the LS sends offsets based on the utf-16 string representation
                // but highlighting in helix is done using byte offsets.
                use helix_core::str_utils::char_to_byte_idx;
                let from = char_to_byte_idx(&signature.label, *start as usize);
                let to = char_to_byte_idx(&signature.label, *end as usize);
                Some((from, to))
            }
        }
    };
    contents.set_active_param_range(active_param_range());

    let old_popup = compositor.find_id::<Popup<SignatureHelp>>(SignatureHelp::ID);
    let mut popup = Popup::new(SignatureHelp::ID, contents)
        .position(old_popup.and_then(|p| p.get_position()))
        .position_bias(Open::Above)
        .ignore_escape_key(true);

    // Don't create a popup if it intersects the auto-complete menu.
    let size = compositor.size();
    if compositor
        .find::<ui::EditorView>()
        .unwrap()
        .completion
        .as_mut()
        .map(|completion| completion.area(size, editor))
        .filter(|area| area.intersects(popup.area(size, editor)))
        .is_some()
    {
        return;
    }

    compositor.replace_or_push(SignatureHelp::ID, popup);
}

fn signature_help_post_insert_char_hook(
    tx: &Sender<SignatureHelpEvent>,
    PostInsertChar { cx, .. }: &mut PostInsertChar<'_, '_>,
) -> anyhow::Result<()> {
    if !cx.editor.config().lsp.auto_signature_help {
        return Ok(());
    }
    let (view, doc) = current!(cx.editor);
    // TODO support multiple language servers (not just the first that is found), likely by merging UI somehow
    let Some(language_server) = doc
        .language_servers_with_feature(LanguageServerFeature::SignatureHelp)
        .next()
    else {
        return Ok(());
    };

    let capabilities = language_server.capabilities();

    if let lsp::ServerCapabilities {
        signature_help_provider:
            Some(lsp::SignatureHelpOptions {
                trigger_characters: Some(triggers),
                // TODO: retrigger_characters
                ..
            }),
        ..
    } = capabilities
    {
        let mut text = doc.text().slice(..);
        let cursor = doc.selection(view.id).primary().cursor(text);
        text = text.slice(..cursor);
        if triggers.iter().any(|trigger| text.ends_with(trigger)) {
            send_blocking(tx, SignatureHelpEvent::Trigger)
        }
    }
    Ok(())
}

pub(super) fn register_hooks(handlers: &Handlers) {
    let tx = handlers.signature_hints.clone();
    register_hook!(move |event: &mut OnModeSwitch<'_, '_>| {
        match (event.old_mode, event.new_mode) {
            (Mode::Insert, _) => {
                send_blocking(&tx, SignatureHelpEvent::Cancel);
                event.cx.callback.push(Box::new(|compositor, _| {
                    compositor.remove(SignatureHelp::ID);
                }));
            }
            (_, Mode::Insert) => {
                if event.cx.editor.config().lsp.auto_signature_help {
                    send_blocking(&tx, SignatureHelpEvent::Trigger);
                }
            }
            _ => (),
        }
        Ok(())
    });

    let tx = handlers.signature_hints.clone();
    register_hook!(
        move |event: &mut PostInsertChar<'_, '_>| signature_help_post_insert_char_hook(&tx, event)
    );

    let tx = handlers.signature_hints.clone();
    register_hook!(move |event: &mut DocumentDidChange<'_>| {
        if event.doc.config.load().lsp.auto_signature_help {
            send_blocking(&tx, SignatureHelpEvent::ReTrigger);
        }
        Ok(())
    });

    let tx = handlers.signature_hints.clone();
    register_hook!(move |event: &mut SelectionDidChange<'_>| {
        if event.doc.config.load().lsp.auto_signature_help {
            send_blocking(&tx, SignatureHelpEvent::ReTrigger);
        }
        Ok(())
    });
}