diff --git a/Cargo.lock b/Cargo.lock index 5de6e610390a..89c6388ee25c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -388,6 +388,20 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "helix-dap" +version = "0.5.0" +dependencies = [ + "anyhow", + "fern", + "helix-core", + "log", + "serde", + "serde_json", + "thiserror", + "tokio", +] + [[package]] name = "helix-lsp" version = "0.5.0" @@ -431,6 +445,7 @@ dependencies = [ "grep-regex", "grep-searcher", "helix-core", + "helix-dap", "helix-lsp", "helix-tui", "helix-view", @@ -473,6 +488,7 @@ dependencies = [ "encoding_rs", "futures-util", "helix-core", + "helix-dap", "helix-lsp", "helix-tui", "log", @@ -480,6 +496,7 @@ dependencies = [ "serde", "slotmap", "tokio", + "tokio-stream", "toml", "url", "which", diff --git a/Cargo.toml b/Cargo.toml index 580cccd64a04..6c360ffda380 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "helix-tui", "helix-syntax", "helix-lsp", + "helix-dap", ] # Build helix-syntax in release mode to make the code path faster in development. diff --git a/flake.lock b/flake.lock index 2029d58091d8..db0fface5ba6 100644 --- a/flake.lock +++ b/flake.lock @@ -30,22 +30,6 @@ "type": "github" } }, - "flakeCompat": { - "flake": false, - "locked": { - "lastModified": 1627913399, - "narHash": "sha256-hY8g6H2KFL8ownSiFeMOjwPC8P0ueXpCVEbxgda3pko=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "12c64ca55c1014cdc1b16ed5a804aa8576601ff2", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, "nixCargoIntegration": { "inputs": { "devshell": "devshell", @@ -103,7 +87,6 @@ }, "root": { "inputs": { - "flakeCompat": "flakeCompat", "nixCargoIntegration": "nixCargoIntegration", "nixpkgs": "nixpkgs", "rust-overlay": "rust-overlay" diff --git a/flake.nix b/flake.nix index 296a68d5ae1b..cbf10c975d5a 100644 --- a/flake.nix +++ b/flake.nix @@ -9,10 +9,6 @@ inputs.nixpkgs.follows = "nixpkgs"; inputs.rustOverlay.follows = "rust-overlay"; }; - flakeCompat = { - url = "github:edolstra/flake-compat"; - flake = false; - }; }; outputs = inputs@{ self, nixCargoIntegration, ... 
}: @@ -63,7 +59,7 @@ ''; }; shell = common: prev: { - packages = prev.packages ++ (with common.pkgs; [ lld_12 lldb cargo-tarpaulin ]); + packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin ]); env = prev.env ++ [ { name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; } { name = "RUST_BACKTRACE"; value = "1"; } diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index b6f5081acd63..8ccc0120aa58 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -194,10 +194,7 @@ fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option node, - None => return None, - }; + let mut node = tree.root_node().descendant_for_byte_range(pos, pos)?; while let Some(parent) = node.parent() { if parent.start_byte() == node.start_byte() { @@ -466,6 +463,7 @@ where }), indent_query: OnceCell::new(), textobject_query: OnceCell::new(), + debugger: None, }], }); diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index de7e95c16b13..7d79040610af 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -197,7 +197,7 @@ pub use {regex, tree_sitter}; pub use graphemes::RopeGraphemes; pub use position::{coords_at_pos, pos_at_coords, visual_coords_at_pos, Position}; pub use selection::{Range, Selection}; -pub use smallvec::SmallVec; +pub use smallvec::{smallvec, SmallVec}; pub use syntax::Syntax; pub use diagnostic::Diagnostic; diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 142265a82f6a..f1c399d2c04e 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -81,6 +81,8 @@ pub struct LanguageConfiguration { pub(crate) indent_query: OnceCell>, #[serde(skip)] pub(crate) textobject_query: OnceCell>, + #[serde(skip_serializing_if = "Option::is_none")] + pub debugger: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -92,6 +94,60 @@ pub struct LanguageServerConfiguration { pub args: Vec, } +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct AdvancedCompletion { + pub name: Option, + pub completion: Option, + pub default: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case", untagged)] +pub enum DebugConfigCompletion { + Named(String), + Advanced(AdvancedCompletion), +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(untagged)] +pub enum DebugArgumentValue { + String(String), + Array(Vec), + Boolean(bool), +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct DebugTemplate { + pub name: String, + pub request: String, + pub completion: Vec, + pub args: HashMap, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct DebugAdapterConfig { + pub name: String, + pub transport: String, + #[serde(default)] + pub command: String, + #[serde(default)] + pub args: Vec, + pub port_arg: Option, + pub templates: Vec, + #[serde(default)] + pub quirks: DebuggerQuirks, +} + +// Different workarounds for adapters' differences +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] +pub struct DebuggerQuirks { + #[serde(default)] + pub absolute_paths: bool, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct IndentationConfiguration { diff --git a/helix-dap/Cargo.toml b/helix-dap/Cargo.toml new file mode 100644 index 000000000000..42dd29a895b6 --- /dev/null +++ b/helix-dap/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = 
"helix-dap" +version = "0.5.0" +authors = ["Blaž Hrastnik "] +edition = "2018" +license = "MPL-2.0" +description = "DAP client implementation for Helix project" +categories = ["editor"] +repository = "https://github.com/helix-editor/helix" +homepage = "https://helix-editor.com" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +helix-core = { version = "0.5", path = "../helix-core" } +anyhow = "1.0" +log = "0.4" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +thiserror = "1.0" +tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] } + +[dev-dependencies] +fern = "0.6" diff --git a/helix-dap/src/client.rs b/helix-dap/src/client.rs new file mode 100644 index 000000000000..651bf4d6f5cd --- /dev/null +++ b/helix-dap/src/client.rs @@ -0,0 +1,472 @@ +use crate::{ + transport::{Payload, Request, Response, Transport}, + types::*, + Error, Result, ThreadId, +}; +use helix_core::syntax::DebuggerQuirks; + +use serde_json::Value; + +use anyhow::anyhow; +pub use log::{error, info}; +use std::{ + collections::HashMap, + future::Future, + net::{IpAddr, Ipv4Addr, SocketAddr}, + path::PathBuf, + process::Stdio, + sync::atomic::{AtomicU64, Ordering}, +}; +use tokio::{ + io::{AsyncBufRead, AsyncWrite, BufReader, BufWriter}, + net::TcpStream, + process::{Child, Command}, + sync::mpsc::{channel, unbounded_channel, UnboundedReceiver, UnboundedSender}, + time, +}; + +#[derive(Debug)] +pub struct Client { + id: usize, + _process: Option, + server_tx: UnboundedSender, + request_counter: AtomicU64, + pub caps: Option, + // thread_id -> frames + pub stack_frames: HashMap>, + pub thread_states: HashMap, + pub thread_id: Option, + /// Currently active frame for the current thread. 
+ pub active_frame: Option, + pub quirks: DebuggerQuirks, +} + +impl Client { + // Spawn a process and communicate with it by either TCP or stdio + pub async fn process( + transport: &str, + command: &str, + args: Vec<&str>, + port_arg: Option<&str>, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + if command.is_empty() { + return Result::Err(Error::Other(anyhow!("Command not provided"))); + } + if transport == "tcp" && port_arg.is_some() { + Self::tcp_process(command, args, port_arg.unwrap(), id).await + } else if transport == "stdio" { + Self::stdio(command, args, id) + } else { + Result::Err(Error::Other(anyhow!("Incorrect transport {}", transport))) + } + } + + pub fn streams( + rx: Box, + tx: Box, + err: Option>, + id: usize, + process: Option, + ) -> Result<(Self, UnboundedReceiver)> { + let (server_rx, server_tx) = Transport::start(rx, tx, err, id); + let (client_rx, client_tx) = unbounded_channel(); + + let client = Self { + id, + _process: process, + server_tx, + request_counter: AtomicU64::new(0), + caps: None, + // + stack_frames: HashMap::new(), + thread_states: HashMap::new(), + thread_id: None, + active_frame: None, + quirks: DebuggerQuirks::default(), + }; + + tokio::spawn(Self::recv(server_rx, client_rx)); + + Ok((client, client_tx)) + } + + pub async fn tcp( + addr: std::net::SocketAddr, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + let stream = TcpStream::connect(addr).await?; + let (rx, tx) = stream.into_split(); + Self::streams(Box::new(BufReader::new(rx)), Box::new(tx), None, id, None) + } + + pub fn stdio( + cmd: &str, + args: Vec<&str>, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + let process = Command::new(cmd) + .args(args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + // make sure the process is reaped on drop + .kill_on_drop(true) + .spawn(); + + let mut process = process?; + + // TODO: do we need bufreader/writer here? or do we use async wrappers on unblock? + let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin")); + let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout")); + let errors = process.stderr.take().map(BufReader::new); + + Self::streams( + Box::new(BufReader::new(reader)), + Box::new(writer), + // errors.map(|errors| Box::new(BufReader::new(errors))), + match errors { + Some(errors) => Some(Box::new(BufReader::new(errors))), + None => None, + }, + id, + Some(process), + ) + } + + async fn get_port() -> Option { + Some( + tokio::net::TcpListener::bind(SocketAddr::new( + IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), + 0, + )) + .await + .ok()? + .local_addr() + .ok()? 
+ .port(), + ) + } + + pub async fn tcp_process( + cmd: &str, + args: Vec<&str>, + port_format: &str, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + let port = Self::get_port().await.unwrap(); + + let process = Command::new(cmd) + .args(args) + .args(port_format.replace("{}", &port.to_string()).split(' ')) + // silence messages + .stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + // Do not kill debug adapter when leaving, it should exit automatically + .spawn()?; + + // Wait for adapter to become ready for connection + time::sleep(time::Duration::from_millis(500)).await; + + let stream = TcpStream::connect(SocketAddr::new( + IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), + port, + )) + .await?; + + let (rx, tx) = stream.into_split(); + Self::streams( + Box::new(BufReader::new(rx)), + Box::new(tx), + None, + id, + Some(process), + ) + } + + async fn recv(mut server_rx: UnboundedReceiver, client_tx: UnboundedSender) { + while let Some(msg) = server_rx.recv().await { + match msg { + Payload::Event(ev) => { + client_tx.send(Payload::Event(ev)).expect("Failed to send"); + } + Payload::Response(_) => unreachable!(), + Payload::Request(req) => { + client_tx + .send(Payload::Request(req)) + .expect("Failed to send"); + } + } + } + } + + pub fn id(&self) -> usize { + self.id + } + + fn next_request_id(&self) -> u64 { + self.request_counter.fetch_add(1, Ordering::Relaxed) + } + + /// Execute a RPC request on the debugger. + pub fn call( + &self, + arguments: R::Arguments, + ) -> impl Future> + where + R::Arguments: serde::Serialize, + { + let server_tx = self.server_tx.clone(); + let id = self.next_request_id(); + + async move { + use std::time::Duration; + use tokio::time::timeout; + + let arguments = Some(serde_json::to_value(arguments)?); + + let (callback_tx, mut callback_rx) = channel(1); + + let req = Request { + back_ch: Some(callback_tx), + seq: id, + command: R::COMMAND.to_string(), + arguments, + }; + + server_tx + .send(Payload::Request(req)) + .map_err(|e| Error::Other(e.into()))?; + + // TODO: specifiable timeout, delay other calls until initialize success + timeout(Duration::from_secs(20), callback_rx.recv()) + .await + .map_err(|_| Error::Timeout)? // return Timeout + .ok_or(Error::StreamClosed)? 
+ .map(|response| response.body.unwrap_or_default()) + // TODO: check response.success + } + } + + pub async fn request(&self, params: R::Arguments) -> Result + where + R::Arguments: serde::Serialize, + R::Result: core::fmt::Debug, // TODO: temporary + { + // a future that resolves into the response + let json = self.call::(params).await?; + let response = serde_json::from_value(json)?; + Ok(response) + } + + pub fn reply( + &self, + request_seq: u64, + command: &str, + result: core::result::Result, + ) -> impl Future> { + let server_tx = self.server_tx.clone(); + let command = command.to_string(); + + async move { + let response = match result { + Ok(result) => Response { + request_seq, + command, + success: true, + message: None, + body: Some(result), + }, + Err(error) => Response { + request_seq, + command, + success: false, + message: Some(error.to_string()), + body: None, + }, + }; + + server_tx + .send(Payload::Response(response)) + .map_err(|e| Error::Other(e.into()))?; + + Ok(()) + } + } + + pub fn capabilities(&self) -> &DebuggerCapabilities { + self.caps.as_ref().expect("debugger not yet initialized!") + } + + pub async fn initialize(&mut self, adapter_id: String) -> Result<()> { + let args = requests::InitializeArguments { + client_id: Some("hx".to_owned()), + client_name: Some("helix".to_owned()), + adapter_id, + locale: Some("en-us".to_owned()), + lines_start_at_one: Some(true), + columns_start_at_one: Some(true), + path_format: Some("path".to_owned()), + supports_variable_type: Some(true), + supports_variable_paging: Some(false), + supports_run_in_terminal_request: Some(true), + supports_memory_references: Some(false), + supports_progress_reporting: Some(false), + supports_invalidated_event: Some(false), + }; + + let response = self.request::(args).await?; + self.caps = Some(response); + + Ok(()) + } + + pub async fn disconnect(&self) -> Result<()> { + self.request::(()).await + } + + pub fn launch(&self, args: serde_json::Value) -> impl Future> { + self.call::(args) + } + + pub fn attach(&self, args: serde_json::Value) -> impl Future> { + self.call::(args) + } + + pub async fn set_breakpoints( + &self, + file: PathBuf, + breakpoints: Vec, + ) -> Result>> { + let args = requests::SetBreakpointsArguments { + source: Source { + path: Some(file), + name: None, + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }, + breakpoints: Some(breakpoints), + source_modified: Some(false), + }; + + let response = self.request::(args).await?; + + Ok(response.breakpoints) + } + + pub async fn configuration_done(&self) -> Result<()> { + self.request::(()).await + } + + pub async fn continue_thread(&self, thread_id: ThreadId) -> Result> { + let args = requests::ContinueArguments { thread_id }; + + let response = self.request::(args).await?; + Ok(response.all_threads_continued) + } + + pub async fn stack_trace( + &self, + thread_id: ThreadId, + ) -> Result<(Vec, Option)> { + let args = requests::StackTraceArguments { + thread_id, + start_frame: None, + levels: None, + format: None, + }; + + let response = self.request::(args).await?; + Ok((response.stack_frames, response.total_frames)) + } + + pub fn threads(&self) -> impl Future> { + self.call::(()) + } + + pub async fn scopes(&self, frame_id: usize) -> Result> { + let args = requests::ScopesArguments { frame_id }; + + let response = self.request::(args).await?; + Ok(response.scopes) + } + + pub async fn variables(&self, variables_reference: usize) -> Result> { + let 
args = requests::VariablesArguments { + variables_reference, + filter: None, + start: None, + count: None, + format: None, + }; + + let response = self.request::(args).await?; + Ok(response.variables) + } + + pub async fn step_in(&self, thread_id: ThreadId) -> Result<()> { + let args = requests::StepInArguments { + thread_id, + target_id: None, + granularity: None, + }; + + self.request::(args).await + } + + pub async fn step_out(&self, thread_id: ThreadId) -> Result<()> { + let args = requests::StepOutArguments { + thread_id, + granularity: None, + }; + + self.request::(args).await + } + + pub async fn next(&self, thread_id: ThreadId) -> Result<()> { + let args = requests::NextArguments { + thread_id, + granularity: None, + }; + + self.request::(args).await + } + + pub async fn pause(&self, thread_id: ThreadId) -> Result<()> { + let args = requests::PauseArguments { thread_id }; + + self.request::(args).await + } + + pub async fn eval( + &self, + expression: String, + frame_id: Option, + ) -> Result { + let args = requests::EvaluateArguments { + expression, + frame_id, + context: None, + format: None, + }; + + self.request::(args).await + } + + pub async fn set_exception_breakpoints( + &self, + filters: Vec, + ) -> Result>> { + let args = requests::SetExceptionBreakpointsArguments { filters }; + + let response = self + .request::(args) + .await; + + Ok(response.ok().map(|r| r.breakpoints).unwrap_or_default()) + } +} diff --git a/helix-dap/src/lib.rs b/helix-dap/src/lib.rs new file mode 100644 index 000000000000..f60b102c0ccd --- /dev/null +++ b/helix-dap/src/lib.rs @@ -0,0 +1,24 @@ +mod client; +mod transport; +mod types; + +pub use client::Client; +pub use events::Event; +pub use transport::{Payload, Response, Transport}; +pub use types::*; + +use thiserror::Error; +#[derive(Error, Debug)] +pub enum Error { + #[error("failed to parse: {0}")] + Parse(#[from] serde_json::Error), + #[error("IO Error: {0}")] + IO(#[from] std::io::Error), + #[error("request timed out")] + Timeout, + #[error("server closed the stream")] + StreamClosed, + #[error(transparent)] + Other(#[from] anyhow::Error), +} +pub type Result = core::result::Result; diff --git a/helix-dap/src/transport.rs b/helix-dap/src/transport.rs new file mode 100644 index 000000000000..40474e99d899 --- /dev/null +++ b/helix-dap/src/transport.rs @@ -0,0 +1,279 @@ +use crate::{Error, Event, Result}; +use anyhow::Context; +use log::{error, info, warn}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::{ + io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWrite, AsyncWriteExt}, + sync::{ + mpsc::{unbounded_channel, Sender, UnboundedReceiver, UnboundedSender}, + Mutex, + }, +}; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Request { + #[serde(skip)] + pub back_ch: Option>>, + pub seq: u64, + pub command: String, + pub arguments: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +pub struct Response { + // seq is omitted as unused and is not sent by some implementations + pub request_seq: u64, + pub success: bool, + pub command: String, + pub message: Option, + pub body: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "type", rename_all = "camelCase")] +pub enum Payload { + // type = "event" + Event(Event), + // type = "response" + Response(Response), + // type = "request" + Request(Request), +} + +#[derive(Debug)] +pub struct Transport { + id: usize, + pending_requests: Mutex>>>, +} + +impl 
Transport { + pub fn start( + server_stdout: Box, + server_stdin: Box, + server_stderr: Option>, + id: usize, + ) -> (UnboundedReceiver, UnboundedSender) { + let (client_tx, rx) = unbounded_channel(); + let (tx, client_rx) = unbounded_channel(); + + let transport = Self { + id, + pending_requests: Mutex::new(HashMap::default()), + }; + + let transport = Arc::new(transport); + + tokio::spawn(Self::recv(transport.clone(), server_stdout, client_tx)); + tokio::spawn(Self::send(transport, server_stdin, client_rx)); + if let Some(stderr) = server_stderr { + tokio::spawn(Self::err(stderr)); + } + + (rx, tx) + } + + async fn recv_server_message( + reader: &mut Box, + buffer: &mut String, + ) -> Result { + let mut content_length = None; + loop { + buffer.truncate(0); + if reader.read_line(buffer).await? == 0 { + return Err(Error::StreamClosed); + }; + + if buffer == "\r\n" { + // look for an empty CRLF line + break; + } + + let header = buffer.trim(); + let parts = header.split_once(": "); + + match parts { + Some(("Content-Length", value)) => { + content_length = Some(value.parse().context("invalid content length")?); + } + Some((_, _)) => {} + None => { + // Workaround: Some non-conformant language servers will output logging and other garbage + // into the same stream as JSON-RPC messages. This can also happen from shell scripts that spawn + // the server. Skip such lines and log a warning. + + // warn!("Failed to parse header: {:?}", header); + } + } + } + + let content_length = content_length.context("missing content length")?; + + //TODO: reuse vector + let mut content = vec![0; content_length]; + reader.read_exact(&mut content).await?; + let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?; + + info!("<- DAP {}", msg); + + // try parsing as output (server response) or call (server request) + let output: serde_json::Result = serde_json::from_str(msg); + + Ok(output?) + } + + async fn recv_server_error( + err: &mut (impl AsyncBufRead + Unpin + Send), + buffer: &mut String, + ) -> Result<()> { + buffer.truncate(0); + if err.read_line(buffer).await? 
== 0 { + return Err(Error::StreamClosed); + }; + error!("err <- {}", buffer); + + Ok(()) + } + + async fn send_payload_to_server( + &self, + server_stdin: &mut Box, + mut payload: Payload, + ) -> Result<()> { + if let Payload::Request(request) = &mut payload { + if let Some(back) = request.back_ch.take() { + self.pending_requests.lock().await.insert(request.seq, back); + } + } + let json = serde_json::to_string(&payload)?; + self.send_string_to_server(server_stdin, json).await + } + + async fn send_string_to_server( + &self, + server_stdin: &mut Box, + request: String, + ) -> Result<()> { + info!("-> DAP {}", request); + + // send the headers + server_stdin + .write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes()) + .await?; + + // send the body + server_stdin.write_all(request.as_bytes()).await?; + + server_stdin.flush().await?; + + Ok(()) + } + + fn process_response(res: Response) -> Result { + if res.success { + info!("<- DAP success in response to {}", res.request_seq); + + Ok(res) + } else { + error!( + "<- DAP error {:?} ({:?}) for command #{} {}", + res.message, res.body, res.request_seq, res.command + ); + + Err(Error::Other(anyhow::format_err!("{:?}", res.body))) + } + } + + async fn process_server_message( + &self, + client_tx: &UnboundedSender, + msg: Payload, + ) -> Result<()> { + match msg { + Payload::Response(res) => { + let request_seq = res.request_seq; + let tx = self.pending_requests.lock().await.remove(&request_seq); + + match tx { + Some(tx) => match tx.send(Self::process_response(res)).await { + Ok(_) => (), + Err(_) => error!( + "Tried sending response into a closed channel (id={:?}), original request likely timed out", + request_seq + ), + } + None => { + warn!("Response to nonexistent request #{}", res.request_seq); + client_tx.send(Payload::Response(res)).expect("Failed to send"); + } + } + + Ok(()) + } + Payload::Request(Request { + ref command, + ref seq, + .. 
+ }) => { + info!("<- DAP request {} #{}", command, seq); + client_tx.send(msg).expect("Failed to send"); + Ok(()) + } + Payload::Event(ref event) => { + info!("<- DAP event {:?}", event); + client_tx.send(msg).expect("Failed to send"); + Ok(()) + } + } + } + + async fn recv( + transport: Arc, + mut server_stdout: Box, + client_tx: UnboundedSender, + ) { + let mut recv_buffer = String::new(); + loop { + match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await { + Ok(msg) => { + transport + .process_server_message(&client_tx, msg) + .await + .unwrap(); + } + Err(err) => { + error!("err: <- {:?}", err); + break; + } + } + } + } + + async fn send( + transport: Arc, + mut server_stdin: Box, + mut client_rx: UnboundedReceiver, + ) { + while let Some(payload) = client_rx.recv().await { + transport + .send_payload_to_server(&mut server_stdin, payload) + .await + .unwrap() + } + } + + async fn err(mut server_stderr: Box) { + let mut recv_buffer = String::new(); + loop { + match Self::recv_server_error(&mut server_stderr, &mut recv_buffer).await { + Ok(_) => {} + Err(err) => { + error!("err: <- {:?}", err); + break; + } + } + } + } +} diff --git a/helix-dap/src/types.rs b/helix-dap/src/types.rs new file mode 100644 index 000000000000..2c3df9c335bb --- /dev/null +++ b/helix-dap/src/types.rs @@ -0,0 +1,707 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use std::path::PathBuf; + +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, +)] +pub struct ThreadId(isize); + +impl std::fmt::Display for ThreadId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +pub trait Request { + type Arguments: serde::de::DeserializeOwned + serde::Serialize; + type Result: serde::de::DeserializeOwned + serde::Serialize; + const COMMAND: &'static str; +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ColumnDescriptor { + pub attribute_name: String, + pub label: String, + pub format: Option, + #[serde(rename = "type")] + pub ty: Option, + pub width: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ExceptionBreakpointsFilter { + pub filter: String, + pub label: String, + pub description: Option, + pub default: Option, + pub supports_condition: Option, + pub condition_description: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DebuggerCapabilities { + pub supports_configuration_done_request: Option, + pub supports_function_breakpoints: Option, + pub supports_conditional_breakpoints: Option, + pub supports_hit_conditional_breakpoints: Option, + pub supports_evaluate_for_hovers: Option, + pub supports_step_back: Option, + pub supports_set_variable: Option, + pub supports_restart_frame: Option, + pub supports_goto_targets_request: Option, + pub supports_step_in_targets_request: Option, + pub supports_completions_request: Option, + pub supports_modules_request: Option, + pub supports_restart_request: Option, + pub supports_exception_options: Option, + pub supports_value_formatting_options: Option, + pub supports_exception_info_request: Option, + pub support_terminate_debuggee: Option, + pub support_suspend_debuggee: Option, + pub supports_delayed_stack_trace_loading: Option, + pub supports_loaded_sources_request: Option, + pub supports_log_points: Option, + pub 
supports_terminate_threads_request: Option, + pub supports_set_expression: Option, + pub supports_terminate_request: Option, + pub supports_data_breakpoints: Option, + pub supports_read_memory_request: Option, + pub supports_write_memory_request: Option, + pub supports_disassemble_request: Option, + pub supports_cancel_request: Option, + pub supports_breakpoint_locations_request: Option, + pub supports_clipboard_context: Option, + pub supports_stepping_granularity: Option, + pub supports_instruction_breakpoints: Option, + pub supports_exception_filter_options: Option, + pub exception_breakpoint_filters: Option>, + pub completion_trigger_characters: Option>, + pub additional_module_columns: Option>, + pub supported_checksum_algorithms: Option>, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Checksum { + pub algorithm: String, + pub checksum: String, +} + +#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Source { + pub name: Option, + pub path: Option, + pub source_reference: Option, + pub presentation_hint: Option, + pub origin: Option, + pub sources: Option>, + pub adapter_data: Option, + pub checksums: Option>, +} + +#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SourceBreakpoint { + pub line: usize, + pub column: Option, + pub condition: Option, + pub hit_condition: Option, + pub log_message: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Breakpoint { + pub id: Option, + pub verified: bool, + pub message: Option, + pub source: Option, + pub line: Option, + pub column: Option, + pub end_line: Option, + pub end_column: Option, + pub instruction_reference: Option, + pub offset: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StackFrameFormat { + pub parameters: Option, + pub parameter_types: Option, + pub parameter_names: Option, + pub parameter_values: Option, + pub line: Option, + pub module: Option, + pub include_all: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StackFrame { + pub id: usize, + pub name: String, + pub source: Option, + pub line: usize, + pub column: usize, + pub end_line: Option, + pub end_column: Option, + pub can_restart: Option, + pub instruction_pointer_reference: Option, + pub module_id: Option, + pub presentation_hint: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Thread { + pub id: ThreadId, + pub name: String, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Scope { + pub name: String, + pub presentation_hint: Option, + pub variables_reference: usize, + pub named_variables: Option, + pub indexed_variables: Option, + pub expensive: bool, + pub source: Option, + pub line: Option, + pub column: Option, + pub end_line: Option, + pub end_column: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ValueFormat { + pub hex: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VariablePresentationHint { + pub kind: Option, + pub attributes: Option>, + pub visibility: Option, +} + 
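// --- Illustrative sketch, not part of this patch ---------------------------
// The `Request` trait defined at the top of this module is the extension point
// for wiring up additional DAP commands: `Arguments`/`Result` describe the JSON
// bodies and `COMMAND` names the wire command. For example, a "restart" request
// (the adapter advertises support via `supports_restart_request` above) could be
// declared as below; the argument type is an assumption, since restart bodies
// are adapter specific.
#[derive(Debug)]
pub enum Restart {}

impl Request for Restart {
    // restart re-uses the original launch/attach body, so pass raw JSON through
    type Arguments = Value;
    type Result = ();
    const COMMAND: &'static str = "restart";
}
// `Client::request::<Restart>(args).await` would then frame, send and await it.
// ---------------------------------------------------------------------------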
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Variable { + pub name: String, + pub value: String, + #[serde(rename = "type")] + pub ty: Option, + pub presentation_hint: Option, + pub evaluate_name: Option, + pub variables_reference: usize, + pub named_variables: Option, + pub indexed_variables: Option, + pub memory_reference: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Module { + pub id: String, // TODO: || number + pub name: String, + pub path: Option, + pub is_optimized: Option, + pub is_user_code: Option, + pub version: Option, + pub symbol_status: Option, + pub symbol_file_path: Option, + pub date_time_stamp: Option, + pub address_range: Option, +} + +pub mod requests { + use super::*; + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct InitializeArguments { + #[serde(rename = "clientID")] + pub client_id: Option, + pub client_name: Option, + #[serde(rename = "adapterID")] + pub adapter_id: String, + pub locale: Option, + #[serde(rename = "linesStartAt1")] + pub lines_start_at_one: Option, + #[serde(rename = "columnsStartAt1")] + pub columns_start_at_one: Option, + pub path_format: Option, + pub supports_variable_type: Option, + pub supports_variable_paging: Option, + pub supports_run_in_terminal_request: Option, + pub supports_memory_references: Option, + pub supports_progress_reporting: Option, + pub supports_invalidated_event: Option, + } + + #[derive(Debug)] + pub enum Initialize {} + + impl Request for Initialize { + type Arguments = InitializeArguments; + type Result = DebuggerCapabilities; + const COMMAND: &'static str = "initialize"; + } + + #[derive(Debug)] + pub enum Launch {} + + impl Request for Launch { + type Arguments = Value; + type Result = Value; + const COMMAND: &'static str = "launch"; + } + + #[derive(Debug)] + pub enum Attach {} + + impl Request for Attach { + type Arguments = Value; + type Result = Value; + const COMMAND: &'static str = "attach"; + } + + #[derive(Debug)] + pub enum Disconnect {} + + impl Request for Disconnect { + type Arguments = (); + type Result = (); + const COMMAND: &'static str = "disconnect"; + } + + #[derive(Debug)] + pub enum ConfigurationDone {} + + impl Request for ConfigurationDone { + type Arguments = (); + type Result = (); + const COMMAND: &'static str = "configurationDone"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetBreakpointsArguments { + pub source: Source, + pub breakpoints: Option>, + // lines is deprecated + pub source_modified: Option, + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetBreakpointsResponse { + pub breakpoints: Option>, + } + + #[derive(Debug)] + pub enum SetBreakpoints {} + + impl Request for SetBreakpoints { + type Arguments = SetBreakpointsArguments; + type Result = SetBreakpointsResponse; + const COMMAND: &'static str = "setBreakpoints"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ContinueArguments { + pub thread_id: ThreadId, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ContinueResponse { + pub all_threads_continued: Option, + } + + #[derive(Debug)] + pub enum Continue {} + + impl Request 
for Continue { + type Arguments = ContinueArguments; + type Result = ContinueResponse; + const COMMAND: &'static str = "continue"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StackTraceArguments { + pub thread_id: ThreadId, + pub start_frame: Option, + pub levels: Option, + pub format: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StackTraceResponse { + pub total_frames: Option, + pub stack_frames: Vec, + } + + #[derive(Debug)] + pub enum StackTrace {} + + impl Request for StackTrace { + type Arguments = StackTraceArguments; + type Result = StackTraceResponse; + const COMMAND: &'static str = "stackTrace"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ThreadsResponse { + pub threads: Vec, + } + + #[derive(Debug)] + pub enum Threads {} + + impl Request for Threads { + type Arguments = (); + type Result = ThreadsResponse; + const COMMAND: &'static str = "threads"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ScopesArguments { + pub frame_id: usize, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ScopesResponse { + pub scopes: Vec, + } + + #[derive(Debug)] + pub enum Scopes {} + + impl Request for Scopes { + type Arguments = ScopesArguments; + type Result = ScopesResponse; + const COMMAND: &'static str = "scopes"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct VariablesArguments { + pub variables_reference: usize, + pub filter: Option, + pub start: Option, + pub count: Option, + pub format: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct VariablesResponse { + pub variables: Vec, + } + + #[derive(Debug)] + pub enum Variables {} + + impl Request for Variables { + type Arguments = VariablesArguments; + type Result = VariablesResponse; + const COMMAND: &'static str = "variables"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StepInArguments { + pub thread_id: ThreadId, + pub target_id: Option, + pub granularity: Option, + } + + #[derive(Debug)] + pub enum StepIn {} + + impl Request for StepIn { + type Arguments = StepInArguments; + type Result = (); + const COMMAND: &'static str = "stepIn"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StepOutArguments { + pub thread_id: ThreadId, + pub granularity: Option, + } + + #[derive(Debug)] + pub enum StepOut {} + + impl Request for StepOut { + type Arguments = StepOutArguments; + type Result = (); + const COMMAND: &'static str = "stepOut"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct NextArguments { + pub thread_id: ThreadId, + pub granularity: Option, + } + + #[derive(Debug)] + pub enum Next {} + + impl Request for Next { + type Arguments = NextArguments; + type Result = (); + const COMMAND: &'static str = "next"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct PauseArguments { + pub thread_id: ThreadId, + } + + #[derive(Debug)] + pub enum Pause {} + + impl 
Request for Pause { + type Arguments = PauseArguments; + type Result = (); + const COMMAND: &'static str = "pause"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct EvaluateArguments { + pub expression: String, + pub frame_id: Option, + pub context: Option, + pub format: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct EvaluateResponse { + pub result: String, + #[serde(rename = "type")] + pub ty: Option, + pub presentation_hint: Option, + pub variables_reference: usize, + pub named_variables: Option, + pub indexed_variables: Option, + pub memory_reference: Option, + } + + #[derive(Debug)] + pub enum Evaluate {} + + impl Request for Evaluate { + type Arguments = EvaluateArguments; + type Result = EvaluateResponse; + const COMMAND: &'static str = "evaluate"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetExceptionBreakpointsArguments { + pub filters: Vec, + // pub filterOptions: Option>, // needs capability + // pub exceptionOptions: Option>, // needs capability + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetExceptionBreakpointsResponse { + pub breakpoints: Option>, + } + + #[derive(Debug)] + pub enum SetExceptionBreakpoints {} + + impl Request for SetExceptionBreakpoints { + type Arguments = SetExceptionBreakpointsArguments; + type Result = SetExceptionBreakpointsResponse; + const COMMAND: &'static str = "setExceptionBreakpoints"; + } + + // Reverse Requests + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct RunInTerminalResponse { + pub process_id: Option, + pub shell_process_id: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct RunInTerminalArguments { + pub kind: Option, + pub title: Option, + pub cwd: Option, + pub args: Vec, + pub env: Option>>, + } + + #[derive(Debug)] + pub enum RunInTerminal {} + + impl Request for RunInTerminal { + type Arguments = RunInTerminalArguments; + type Result = RunInTerminalResponse; + const COMMAND: &'static str = "runInTerminal"; + } +} + +// Events + +pub mod events { + use super::*; + + #[derive(Debug, Clone, Serialize, Deserialize)] + #[serde(rename_all = "camelCase")] + #[serde(tag = "event", content = "body")] + // seq is omitted as unused and is not sent by some implementations + pub enum Event { + Initialized, + Stopped(Stopped), + Continued(Continued), + Exited(Exited), + Terminated(Option), + Thread(Thread), + Output(Output), + Breakpoint(Breakpoint), + Module(Module), + LoadedSource(LoadedSource), + Process(Process), + Capabilities(Capabilities), + // ProgressStart(), + // ProgressUpdate(), + // ProgressEnd(), + // Invalidated(), + Memory(Memory), + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Stopped { + pub reason: String, + pub description: Option, + pub thread_id: Option, + pub preserve_focus_hint: Option, + pub text: Option, + pub all_threads_stopped: Option, + pub hit_breakpoint_ids: Option>, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Continued { + pub thread_id: ThreadId, + pub all_threads_continued: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, 
Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Exited { + pub exit_code: usize, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Terminated { + pub restart: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Thread { + pub reason: String, + pub thread_id: ThreadId, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Output { + pub output: String, + pub category: Option, + pub group: Option, + pub line: Option, + pub column: Option, + pub variables_reference: Option, + pub source: Option, + pub data: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Breakpoint { + pub reason: String, + pub breakpoint: super::Breakpoint, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Module { + pub reason: String, + pub module: super::Module, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct LoadedSource { + pub reason: String, + pub source: super::Source, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Process { + pub name: String, + pub system_process_id: Option, + pub is_local_process: Option, + pub start_method: Option, // TODO: use enum + pub pointer_size: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Capabilities { + pub capabilities: super::DebuggerCapabilities, + } + + // #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + // #[serde(rename_all = "camelCase")] + // pub struct Invalidated { + // pub areas: Vec, + // pub thread_id: Option, + // pub stack_frame_id: Option, + // } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Memory { + pub memory_reference: String, + pub offset: usize, + pub count: usize, + } +} diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index 5e4619ef8697..83b2978dc57d 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -23,5 +23,5 @@ lsp-types = { version = "0.91", features = ["proposed"] } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" -tokio = { version = "1.14", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } +tokio = { version = "1.14", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio-stream = "0.1.8" diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index a0079febec81..43268291b40c 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -24,6 +24,7 @@ path = "src/main.rs" helix-core = { version = "0.5", path = "../helix-core" } helix-view = { version = "0.5", path = "../helix-view" } helix-lsp = { version = "0.5", path = "../helix-lsp" } +helix-dap = { version = "0.5", path = "../helix-dap" } anyhow = "1" once_cell = "1.8" @@ -33,7 +34,7 @@ num_cpus = "1" tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } crossterm = { version = "0.22", features = ["event-stream"] } signal-hook = "0.3" - +tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", 
"async-await"], default-features = false } # Logging @@ -58,7 +59,6 @@ serde = { version = "1.0", features = ["derive"] } # ripgrep for global search grep-regex = "0.1.9" grep-searcher = "0.1.8" -tokio-stream = "0.1.8" [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 90330751a145..55e4bb0399c6 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -1,11 +1,13 @@ use helix_core::{merge_toml_values, syntax}; +use helix_dap::{self as dap, Payload, Request}; use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap}; -use helix_view::{theme, Editor}; +use helix_view::{editor::Breakpoint, theme, Editor}; -use crate::{args::Args, compositor::Compositor, config::Config, job::Jobs, ui}; +use crate::{ + args::Args, commands::fetch_stack_trace, compositor::Compositor, config::Config, job::Jobs, ui, +}; use log::{error, warn}; - use std::{ io::{stdin, stdout, Write}, sync::Arc, @@ -219,6 +221,9 @@ impl Application { last_render = Instant::now(); } } + Some(payload) = self.editor.debugger_events.next() => { + self.handle_debugger_message(payload).await; + } Some(callback) = self.jobs.futures.next() => { self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback); self.render(); @@ -313,6 +318,186 @@ impl Application { } } + pub async fn handle_debugger_message(&mut self, payload: helix_dap::Payload) { + use crate::commands::dap::{breakpoints_changed, resume_application, select_thread_id}; + use dap::requests::RunInTerminal; + use helix_dap::{events, Event}; + + let debugger = match self.editor.debugger.as_mut() { + Some(debugger) => debugger, + None => return, + }; + match payload { + Payload::Event(ev) => match ev { + Event::Stopped(events::Stopped { + thread_id, + description, + text, + reason, + all_threads_stopped, + .. + }) => { + let all_threads_stopped = all_threads_stopped.unwrap_or_default(); + + if all_threads_stopped { + if let Ok(response) = debugger.request::(()).await { + for thread in response.threads { + fetch_stack_trace(debugger, thread.id).await; + } + select_thread_id( + &mut self.editor, + thread_id.unwrap_or_default(), + false, + ) + .await; + } + } else if let Some(thread_id) = thread_id { + debugger.thread_states.insert(thread_id, reason.clone()); // TODO: dap uses "type" || "reason" here + + // whichever thread stops is made "current" (if no previously selected thread). + select_thread_id(&mut self.editor, thread_id, false).await; + } + + let scope = match thread_id { + Some(id) => format!("Thread {}", id), + None => "Target".to_owned(), + }; + + let mut status = format!("{} stopped because of {}", scope, reason); + if let Some(desc) = description { + status.push_str(&format!(" {}", desc)); + } + if let Some(text) = text { + status.push_str(&format!(" {}", text)); + } + if all_threads_stopped { + status.push_str(" (all threads stopped)"); + } + + self.editor.set_status(status); + } + Event::Continued(events::Continued { thread_id, .. }) => { + debugger + .thread_states + .insert(thread_id, "running".to_owned()); + if debugger.thread_id == Some(thread_id) { + resume_application(debugger) + } + } + Event::Thread(_) => { + // TODO: update thread_states, make threads request + } + Event::Breakpoint(events::Breakpoint { reason, breakpoint }) => { + match &reason[..] 
{ + "new" => { + if let Some(source) = breakpoint.source { + self.editor + .breakpoints + .entry(source.path.unwrap()) // TODO: no unwraps + .or_default() + .push(Breakpoint { + id: breakpoint.id, + verified: breakpoint.verified, + message: breakpoint.message, + line: breakpoint.line.unwrap().saturating_sub(1), // TODO: no unwrap + column: breakpoint.column, + ..Default::default() + }); + } + } + "changed" => { + for breakpoints in self.editor.breakpoints.values_mut() { + if let Some(i) = + breakpoints.iter().position(|b| b.id == breakpoint.id) + { + breakpoints[i].verified = breakpoint.verified; + breakpoints[i].message = breakpoint.message.clone(); + breakpoints[i].line = + breakpoint.line.unwrap().saturating_sub(1); // TODO: no unwrap + breakpoints[i].column = breakpoint.column; + } + } + } + "removed" => { + for breakpoints in self.editor.breakpoints.values_mut() { + if let Some(i) = + breakpoints.iter().position(|b| b.id == breakpoint.id) + { + breakpoints.remove(i); + } + } + } + reason => { + warn!("Unknown breakpoint event: {}", reason); + } + } + } + Event::Output(events::Output { + category, output, .. + }) => { + let prefix = match category { + Some(category) => { + if &category == "telemetry" { + return; + } + format!("Debug ({}):", category) + } + None => "Debug:".to_owned(), + }; + + log::info!("{}", output); + self.editor.set_status(format!("{} {}", prefix, output)); + } + Event::Initialized => { + // send existing breakpoints + for (path, breakpoints) in &mut self.editor.breakpoints { + // TODO: call futures in parallel, await all + let _ = breakpoints_changed(debugger, path.clone(), breakpoints); + } + // TODO: fetch breakpoints (in case we're attaching) + + if debugger.configuration_done().await.is_ok() { + self.editor + .set_status("Debugged application started".to_owned()); + }; // TODO: do we need to handle error? 
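// --- Illustrative sketch, not part of this patch ---------------------------
// Rough shape of the per-file breakpoint sync that `breakpoints_changed` (used
// in the Initialized arm above) performs: editor breakpoints are stored with
// 0-indexed lines, so they are converted back to the DAP's 1-indexed
// `SourceBreakpoint`s and pushed with `Client::set_breakpoints`. The real
// helper lives in commands/dap.rs; the names here are only for illustration.
async fn sync_breakpoints_sketch(
    debugger: &mut dap::Client,
    path: std::path::PathBuf,
    breakpoints: &[Breakpoint],
) -> anyhow::Result<()> {
    let source_breakpoints: Vec<dap::SourceBreakpoint> = breakpoints
        .iter()
        .map(|b| dap::SourceBreakpoint {
            line: b.line + 1, // DAP lines start at 1
            ..Default::default()
        })
        .collect();
    // the adapter replies with its own view of the breakpoints (verified, ids, ...)
    let _verified = debugger.set_breakpoints(path, source_breakpoints).await?;
    Ok(())
}
// ---------------------------------------------------------------------------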
+ } + ev => { + log::warn!("Unhandled event {:?}", ev); + return; // return early to skip render + } + }, + Payload::Response(_) => unreachable!(), + Payload::Request(request) => match request.command.as_str() { + RunInTerminal::COMMAND => { + let arguments: dap::requests::RunInTerminalArguments = + serde_json::from_value(request.arguments.unwrap_or_default()).unwrap(); + // TODO: no unwrap + + // TODO: handle cwd + let process = std::process::Command::new("tmux") + .arg("split-window") + .arg(arguments.args.join(" ")) // TODO: first arg is wrong, it uses current dir + .spawn() + .unwrap(); + + let _ = debugger + .reply( + request.seq, + dap::requests::RunInTerminal::COMMAND, + serde_json::to_value(dap::requests::RunInTerminalResponse { + process_id: Some(process.id()), + shell_process_id: None, + }) + .map_err(|e| e.into()), + ) + .await; + } + _ => log::error!("DAP reverse request not implemented: {:?}", request), + }, + } + self.render(); + } + pub async fn handle_language_server_message( &mut self, call: helix_lsp::Call, diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index b4cc9ae9227b..1871c67e18ae 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -1,3 +1,7 @@ +pub(crate) mod dap; + +pub use dap::*; + use helix_core::{ comment, coords_at_pos, find_first_non_whitespace_char, find_root, graphemes, history::UndoKind, @@ -41,7 +45,7 @@ use crate::{ use crate::job::{self, Job, Jobs}; use futures_util::{FutureExt, StreamExt}; use std::num::NonZeroUsize; -use std::{fmt, future::Future}; +use std::{collections::HashMap, fmt, future::Future}; use std::{ borrow::Cow, @@ -110,13 +114,13 @@ impl<'a> Context<'a> { } } -enum Align { +pub enum Align { Top, Center, Bottom, } -fn align_view(doc: &Document, view: &mut View, align: Align) { +pub fn align_view(doc: &Document, view: &mut View, align: Align) { let pos = doc .selection(view.id) .primary() @@ -354,6 +358,21 @@ impl Command { surround_delete, "Surround delete", select_textobject_around, "Select around object", select_textobject_inner, "Select inside object", + dap_launch, "Launch debug target", + dap_toggle_breakpoint, "Toggle breakpoint", + dap_continue, "Continue program execution", + dap_pause, "Pause program execution", + dap_step_in, "Step in", + dap_step_out, "Step out", + dap_next, "Step to next", + dap_variables, "List variables", + dap_terminate, "End debug session", + dap_edit_condition, "Edit condition of the breakpoint on the current line", + dap_edit_log, "Edit log message of the breakpoint on the current line", + dap_switch_thread, "Switch current thread", + dap_switch_stack_frame, "Switch stack frame", + dap_enable_exceptions, "Enable exception breakpoints", + dap_disable_exceptions, "Disable exception breakpoints", shell_pipe, "Pipe selections through shell command", shell_pipe_to, "Pipe selections into shell command, ignoring command output", shell_insert_output, "Insert output of shell command before each selection", @@ -1453,7 +1472,6 @@ fn search_completions(cx: &mut Context, reg: Option) -> Vec { items.into_iter().cloned().collect() } -// TODO: use one function for search vs extend fn search(cx: &mut Context) { searcher(cx, Direction::Forward) } @@ -1461,7 +1479,7 @@ fn search(cx: &mut Context) { fn rsearch(cx: &mut Context) { searcher(cx, Direction::Backward) } -// TODO: use one function for search vs extend + fn searcher(cx: &mut Context, direction: Direction) { let reg = cx.register.unwrap_or('/'); let scrolloff = cx.editor.config.scrolloff; @@ -1673,11 +1691,11 @@ fn 
global_search(cx: &mut Context) { relative_path.into() } }, - move |editor: &mut Editor, (line_num, path), action| { - match editor.open(path.into(), action) { + move |cx, (line_num, path), action| { + match cx.editor.open(path.into(), action) { Ok(_) => {} Err(e) => { - editor.set_error(format!( + cx.editor.set_error(format!( "Failed to open file '{}': {}", path.display(), e @@ -1687,7 +1705,7 @@ fn global_search(cx: &mut Context) { } let line_num = *line_num; - let (view, doc) = current!(editor); + let (view, doc) = current!(cx.editor); let text = doc.text(); let start = text.line_to_char(line_num); let end = text.line_to_char((line_num + 1).min(text.len_lines())); @@ -1877,7 +1895,6 @@ fn append_mode(cx: &mut Context) { mod cmd { use super::*; - use std::collections::HashMap; use helix_view::editor::Action; use ui::completers::{self, Completer}; @@ -2522,6 +2539,58 @@ mod cmd { Ok(()) } + fn debug_eval( + cx: &mut compositor::Context, + args: &[&str], + _event: PromptEvent, + ) -> anyhow::Result<()> { + if let Some(debugger) = cx.editor.debugger.as_mut() { + let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { + (Some(frame), Some(thread_id)) => (frame, thread_id), + _ => { + bail!("Cannot find current stack frame to access variables") + } + }; + + // TODO: support no frame_id + + let frame_id = debugger.stack_frames[&thread_id][frame].id; + let response = block_on(debugger.eval(args.join(" "), Some(frame_id)))?; + cx.editor.set_status(response.result); + } + Ok(()) + } + + fn debug_start( + cx: &mut compositor::Context, + args: &[&str], + _event: PromptEvent, + ) -> anyhow::Result<()> { + let mut args = args.to_owned(); + let name = match args.len() { + 0 => None, + _ => Some(args.remove(0)), + }; + dap_start_impl(cx, name, None, Some(args)) + } + + fn debug_remote( + cx: &mut compositor::Context, + args: &[&str], + _event: PromptEvent, + ) -> anyhow::Result<()> { + let mut args = args.to_owned(); + let address = match args.len() { + 0 => None, + _ => Some(args.remove(0).parse()?), + }; + let name = match args.len() { + 0 => None, + _ => Some(args.remove(0)), + }; + dap_start_impl(cx, name, address, Some(args)) + } + fn tutor( cx: &mut compositor::Context, _args: &[&str], @@ -2814,6 +2883,27 @@ mod cmd { fun: tree_sitter_scopes, completer: None, }, + TypableCommand { + name: "debug-start", + aliases: &["dbg"], + doc: "Start a debug session from a given template with given parameters.", + fun: debug_start, + completer: None, + }, + TypableCommand { + name: "debug-remote", + aliases: &["dbg-tcp"], + doc: "Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters.", + fun: debug_remote, + completer: None, + }, + TypableCommand { + name: "debug-eval", + aliases: &[], + doc: "Evaluate expression in current debug context.", + fun: debug_eval, + completer: None, + }, TypableCommand { name: "vsplit", aliases: &["vs"], @@ -2994,8 +3084,8 @@ fn buffer_picker(cx: &mut Context) { .map(|(_, doc)| new_meta(doc)) .collect(), BufferMeta::format, - |editor: &mut Editor, meta, _action| { - editor.switch(meta.id, Action::Replace); + |cx, meta, _action| { + cx.editor.switch(meta.id, Action::Replace); }, |editor, meta| { let doc = &editor.documents.get(&meta.id)?; @@ -3062,9 +3152,9 @@ fn symbol_picker(cx: &mut Context) { let mut picker = FilePicker::new( symbols, |symbol| (&symbol.name).into(), - move |editor: &mut Editor, symbol, _action| { - push_jump(editor); - let (view, doc) = current!(editor); + move |cx, symbol, 
_action| { + push_jump(cx.editor); + let (view, doc) = current!(cx.editor); if let Some(range) = lsp_range_to_range(doc.text(), symbol.location.range, offset_encoding) @@ -3123,10 +3213,10 @@ fn workspace_symbol_picker(cx: &mut Context) { format!("{} ({})", &symbol.name, relative_path).into() } }, - move |editor: &mut Editor, symbol, action| { + move |cx, symbol, action| { let path = symbol.location.uri.to_file_path().unwrap(); - editor.open(path, action).expect("editor.open failed"); - let (view, doc) = current!(editor); + cx.editor.open(path, action).expect("editor.open failed"); + let (view, doc) = current!(cx.editor); if let Some(range) = lsp_range_to_range(doc.text(), symbol.location.range, offset_encoding) @@ -3185,15 +3275,15 @@ pub fn code_action(cx: &mut Context) { } lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(), }, - move |editor, code_action, _action| match code_action { + move |cx, code_action, _action| match code_action { lsp::CodeActionOrCommand::Command(command) => { log::debug!("code action command: {:?}", command); - editor.set_error(String::from("Handling code action command is not implemented yet, see https://github.com/helix-editor/helix/issues/183")); + cx.editor.set_error(String::from("Handling code action command is not implemented yet, see https://github.com/helix-editor/helix/issues/183")); } lsp::CodeActionOrCommand::CodeAction(code_action) => { log::debug!("code action: {:?}", code_action); if let Some(ref workspace_edit) = code_action.edit { - apply_workspace_edit(editor, offset_encoding, workspace_edit) + apply_workspace_edit(cx.editor, offset_encoding, workspace_edit) } } }, @@ -3704,9 +3794,7 @@ fn goto_impl( let line = location.range.start.line; format!("{}:{}", file, line).into() }, - move |editor: &mut Editor, location, action| { - jump_to(editor, location, offset_encoding, action) - }, + move |cx, location, action| jump_to(cx.editor, location, offset_encoding, action), |_editor, location| { let path = location.uri.to_file_path().unwrap(); let line = Some(( diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs new file mode 100644 index 000000000000..8935bc9c460b --- /dev/null +++ b/helix-term/src/commands/dap.rs @@ -0,0 +1,837 @@ +use super::{align_view, Align, Context, Editor}; +use crate::{ + compositor::{self, Compositor}, + job::{Callback, Jobs}, + ui::{self, FilePicker, Picker, Popup, Prompt, PromptEvent, Text}, +}; +use helix_core::{ + syntax::{DebugArgumentValue, DebugConfigCompletion}, + Selection, +}; +use helix_dap::{self as dap, Client, ThreadId}; +use helix_lsp::block_on; +use helix_view::editor::Breakpoint; + +use serde_json::{to_value, Value}; +use tokio_stream::wrappers::UnboundedReceiverStream; + +use std::collections::HashMap; +use std::future::Future; +use std::path::PathBuf; + +use anyhow::{anyhow, bail}; + +#[macro_export] +macro_rules! 
debugger { + ($editor:expr) => {{ + match &mut $editor.debugger { + Some(debugger) => debugger, + None => return, + } + }}; +} + +// general utils: +pub fn dap_pos_to_pos(doc: &helix_core::Rope, line: usize, column: usize) -> Option { + // 1-indexing to 0 indexing + let line = doc.try_line_to_char(line - 1).ok()?; + let pos = line + column.saturating_sub(1); + // TODO: this is probably utf-16 offsets + Some(pos) +} + +pub fn resume_application(debugger: &mut Client) { + if let Some(thread_id) = debugger.thread_id { + debugger + .thread_states + .insert(thread_id, "running".to_string()); + debugger.stack_frames.remove(&thread_id); + } + debugger.active_frame = None; + debugger.thread_id = None; +} + +pub async fn select_thread_id(editor: &mut Editor, thread_id: ThreadId, force: bool) { + let debugger = debugger!(editor); + + if !force && debugger.thread_id.is_some() { + return; + } + + debugger.thread_id = Some(thread_id); + fetch_stack_trace(debugger, thread_id).await; + + let frame = debugger.stack_frames[&thread_id].get(0).cloned(); + if let Some(frame) = &frame { + jump_to_stack_frame(editor, frame); + } +} + +pub async fn fetch_stack_trace(debugger: &mut Client, thread_id: ThreadId) { + let (frames, _) = match debugger.stack_trace(thread_id).await { + Ok(frames) => frames, + Err(_) => return, + }; + debugger.stack_frames.insert(thread_id, frames); + debugger.active_frame = Some(0); +} + +pub fn jump_to_stack_frame(editor: &mut Editor, frame: &helix_dap::StackFrame) { + let path = if let Some(helix_dap::Source { + path: Some(ref path), + .. + }) = frame.source + { + path.clone() + } else { + return; + }; + + if let Err(e) = editor.open(path, helix_view::editor::Action::Replace) { + editor.set_error(format!("Unable to jump to stack frame: {}", e)); + return; + } + + let (view, doc) = current!(editor); + + let text_end = doc.text().len_chars().saturating_sub(1); + let start = dap_pos_to_pos(doc.text(), frame.line, frame.column).unwrap_or(0); + let end = frame + .end_line + .and_then(|end_line| dap_pos_to_pos(doc.text(), end_line, frame.end_column.unwrap_or(0))) + .unwrap_or(start); + + let selection = Selection::single(start.min(text_end), end.min(text_end)); + doc.set_selection(view.id, selection); + align_view(doc, view, Align::Center); +} + +fn thread_picker( + cx: &mut Context, + callback_fn: impl Fn(&mut Editor, &dap::Thread) + Send + 'static, +) { + let debugger = debugger!(cx.editor); + + let future = debugger.threads(); + dap_callback( + cx.jobs, + future, + move |editor: &mut Editor, + compositor: &mut Compositor, + response: dap::requests::ThreadsResponse| { + let threads = response.threads; + if threads.len() == 1 { + callback_fn(editor, &threads[0]); + return; + } + let debugger = debugger!(editor); + + let thread_states = debugger.thread_states.clone(); + let picker = FilePicker::new( + threads, + move |thread| { + format!( + "{} ({})", + thread.name, + thread_states + .get(&thread.id) + .map(|state| state.as_str()) + .unwrap_or("unknown") + ) + .into() + }, + move |cx, thread, _action| callback_fn(cx.editor, thread), + move |editor, thread| { + let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?; + let frame = frames.get(0)?; + let path = frame.source.as_ref()?.path.clone()?; + let pos = Some(( + frame.line.saturating_sub(1), + frame.end_line.unwrap_or(frame.line).saturating_sub(1), + )); + Some((path, pos)) + }, + ); + compositor.push(Box::new(picker)); + }, + ); +} + +fn get_breakpoint_at_current_line(editor: &mut Editor) -> Option<(usize, Breakpoint)> { + 
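+    // Look up the breakpoint (if any) registered on the line under the primary cursor
+    // of the current document, returning its index into the per-file breakpoint list.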
+    let (view, doc) = current!(editor);
+    let text = doc.text().slice(..);
+
+    let line = doc.selection(view.id).primary().cursor_line(text);
+    let path = doc.path()?;
+    editor.breakpoints.get(path).and_then(|breakpoints| {
+        let i = breakpoints.iter().position(|b| b.line == line);
+        i.map(|i| (i, breakpoints[i].clone()))
+    })
+}
+
+// -- DAP
+
+// Awaits a DAP request in the background, deserializes its response and schedules
+// `callback` to run on the main thread with access to the editor and compositor.
+fn dap_callback<T, F>(
+    jobs: &mut Jobs,
+    call: impl Future<Output = helix_dap::Result<Value>> + 'static + Send,
+    callback: F,
+) where
+    T: for<'de> serde::Deserialize<'de> + Send + 'static,
+    F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static,
+{
+    let callback = Box::pin(async move {
+        let json = call.await?;
+        let response = serde_json::from_value(json)?;
+        let call: Callback = Box::new(move |editor: &mut Editor, compositor: &mut Compositor| {
+            callback(editor, compositor, response)
+        });
+        Ok(call)
+    });
+    jobs.callback(callback);
+}
+
+pub fn dap_start_impl(
+    cx: &mut compositor::Context,
+    name: Option<&str>,
+    socket: Option<std::net::SocketAddr>,
+    params: Option<Vec<&str>>,
+) -> Result<(), anyhow::Error> {
+    let doc = doc!(cx.editor);
+
+    let config = doc
+        .language_config()
+        .and_then(|config| config.debugger.as_ref())
+        .ok_or(anyhow!("No debug adapter available for language"))?;
+
+    let result = match socket {
+        Some(socket) => block_on(Client::tcp(socket, 0)),
+        None => block_on(Client::process(
+            &config.transport,
+            &config.command,
+            config.args.iter().map(|arg| arg.as_str()).collect(),
+            config.port_arg.as_deref(),
+            0,
+        )),
+    };
+
+    let (mut debugger, events) = match result {
+        Ok(r) => r,
+        Err(e) => bail!("Failed to start debug session: {}", e),
+    };
+
+    let request = debugger.initialize(config.name.clone());
+    if let Err(e) = block_on(request) {
+        bail!("Failed to initialize debug adapter: {}", e);
+    }
+
+    debugger.quirks = config.quirks.clone();
+
+    // TODO: avoid refetching all of this... pass a config in
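+    // Pick the launch template to use: by name if one was given, otherwise the
+    // first template defined for this language.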
+    let template = match name {
+        Some(name) => config.templates.iter().find(|t| t.name == name),
+        None => config.templates.get(0),
+    }
+    .ok_or(anyhow!("No debug config with given name"))?;
+
+    let mut args: HashMap<&str, Value> = HashMap::new();
+
+    if let Some(params) = params {
+        for (k, t) in &template.args {
+            let mut value = t.clone();
+            for (i, x) in params.iter().enumerate() {
+                let mut param = x.to_string();
+                if let Some(DebugConfigCompletion::Advanced(cfg)) = template.completion.get(i) {
+                    if matches!(cfg.completion.as_deref(), Some("filename" | "directory")) {
+                        param = std::fs::canonicalize(x)
+                            .ok()
+                            .and_then(|pb| pb.into_os_string().into_string().ok())
+                            .unwrap_or_else(|| x.to_string());
+                    }
+                }
+                // For param #0 replace {0} in args
+                let pattern = format!("{{{}}}", i);
+                value = match value {
+                    // TODO: just use toml::Value -> json::Value
+                    DebugArgumentValue::String(v) => {
+                        DebugArgumentValue::String(v.replace(&pattern, &param))
+                    }
+                    DebugArgumentValue::Array(arr) => DebugArgumentValue::Array(
+                        arr.iter().map(|v| v.replace(&pattern, &param)).collect(),
+                    ),
+                    DebugArgumentValue::Boolean(_) => value,
+                };
+            }
+
+            match value {
+                DebugArgumentValue::String(string) => {
+                    if let Ok(integer) = string.parse::<usize>() {
+                        args.insert(k, to_value(integer).unwrap());
+                    } else {
+                        args.insert(k, to_value(string).unwrap());
+                    }
+                }
+                DebugArgumentValue::Array(arr) => {
+                    args.insert(k, to_value(arr).unwrap());
+                }
+                DebugArgumentValue::Boolean(bool) => {
+                    args.insert(k, to_value(bool).unwrap());
+                }
+            }
+        }
+    }
+
+    let args = to_value(args).unwrap();
+
+    let callback = |_editor: &mut Editor, _compositor: &mut Compositor, _response: Value| {
+        // if let Err(e) = result {
+        //     editor.set_error(format!("Failed {} target: {}", template.request, e));
+        // }
+    };
+
+    match &template.request[..]
{ + "launch" => { + let call = debugger.launch(args); + dap_callback(cx.jobs, call, callback); + } + "attach" => { + let call = debugger.attach(args); + dap_callback(cx.jobs, call, callback); + } + request => bail!("Unsupported request '{}'", request), + }; + + // TODO: either await "initialized" or buffer commands until event is received + cx.editor.debugger = Some(debugger); + let stream = UnboundedReceiverStream::new(events); + cx.editor.debugger_events.push(stream); + Ok(()) +} + +pub fn dap_launch(cx: &mut Context) { + if cx.editor.debugger.is_some() { + cx.editor + .set_error("Debugger is already running".to_string()); + return; + } + + let doc = doc!(cx.editor); + + let config = match doc + .language_config() + .and_then(|config| config.debugger.as_ref()) + { + Some(c) => c, + None => { + cx.editor + .set_error("No debug adapter available for language".to_string()); + return; + } + }; + + let templates = config.templates.clone(); + + cx.push_layer(Box::new(Picker::new( + true, + templates, + |template| template.name.as_str().into(), + |cx, template, _action| { + let completions = template.completion.clone(); + let name = template.name.clone(); + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let prompt = debug_parameter_prompt(completions, name, Vec::new()); + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + }, + ))); // TODO: wrap in popup with fixed size +} + +fn debug_parameter_prompt( + completions: Vec, + config_name: String, + mut params: Vec, +) -> Prompt { + let completion = completions.get(params.len()).unwrap(); + let field_type = if let DebugConfigCompletion::Advanced(cfg) = completion { + cfg.completion.as_deref().unwrap_or("") + } else { + "" + }; + let name = match completion { + DebugConfigCompletion::Advanced(cfg) => cfg.name.as_deref().unwrap_or(field_type), + DebugConfigCompletion::Named(name) => name.as_str(), + }; + let default_val = match completion { + DebugConfigCompletion::Advanced(cfg) => cfg.default.as_deref().unwrap_or(""), + _ => "", + } + .to_owned(); + + let completer = match field_type { + "filename" => ui::completers::filename, + "directory" => ui::completers::directory, + _ => |_input: &str| Vec::new(), + }; + Prompt::new( + format!("{}: ", name).into(), + None, + completer, + move |cx, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } + + let mut value = input.to_owned(); + if value.is_empty() { + value = default_val.clone(); + } + params.push(value); + + if params.len() < completions.len() { + let completions = completions.clone(); + let config_name = config_name.clone(); + let params = params.clone(); + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let prompt = debug_parameter_prompt(completions, config_name, params); + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + } else if let Err(e) = dap_start_impl( + cx, + Some(&config_name), + None, + Some(params.iter().map(|x| x.as_str()).collect()), + ) { + cx.editor.set_error(e.to_string()); + } + }, + ) +} + +pub fn dap_toggle_breakpoint(cx: &mut Context) { + let (view, doc) = current!(cx.editor); + let path = match doc.path() { + Some(path) => path.clone(), + None => { + cx.editor + .set_error("Can't set breakpoint: document has no path".to_string()); + return; + } + }; + let text = doc.text().slice(..); + let line 
= doc.selection(view.id).primary().cursor_line(text);
+    dap_toggle_breakpoint_impl(cx, path, line);
+}
+
+pub fn breakpoints_changed(
+    debugger: &mut dap::Client,
+    path: PathBuf,
+    breakpoints: &mut [Breakpoint],
+) -> Result<(), anyhow::Error> {
+    // TODO: handle capabilities correctly again, by filtering breakpoints when emitting
+    // if breakpoint.condition.is_some()
+    //     && !debugger
+    //         .caps
+    //         .as_ref()
+    //         .unwrap()
+    //         .supports_conditional_breakpoints
+    //         .unwrap_or_default()
+    // {
+    //     bail!(
+    //         "Can't edit breakpoint: debugger does not support conditional breakpoints"
+    //     )
+    // }
+    // if breakpoint.log_message.is_some()
+    //     && !debugger
+    //         .caps
+    //         .as_ref()
+    //         .unwrap()
+    //         .supports_log_points
+    //         .unwrap_or_default()
+    // {
+    //     bail!("Can't edit breakpoint: debugger does not support logpoints")
+    // }
+    let source_breakpoints = breakpoints
+        .iter()
+        .map(|breakpoint| helix_dap::SourceBreakpoint {
+            line: breakpoint.line + 1, // convert from 0-indexing to 1-indexing (TODO: could set debugger to 0-indexing on init)
+            ..Default::default()
+        })
+        .collect::<Vec<_>>();
+
+    let request = debugger.set_breakpoints(path, source_breakpoints);
+    match block_on(request) {
+        Ok(Some(dap_breakpoints)) => {
+            for (breakpoint, dap_breakpoint) in breakpoints.iter_mut().zip(dap_breakpoints) {
+                breakpoint.id = dap_breakpoint.id;
+                breakpoint.verified = dap_breakpoint.verified;
+                breakpoint.message = dap_breakpoint.message;
+                // TODO: handle breakpoint.message
+                // TODO: verify source matches
+                breakpoint.line = dap_breakpoint.line.unwrap_or(0).saturating_sub(1); // convert to 0-indexing
+                // TODO: no unwrap
+                breakpoint.column = dap_breakpoint.column;
+                // TODO: verify end_line/col instruction reference, offset
+            }
+        }
+        Err(e) => anyhow::bail!("Failed to set breakpoints: {}", e),
+        _ => {}
+    };
+    Ok(())
+}
+
+pub fn dap_toggle_breakpoint_impl(cx: &mut Context, path: PathBuf, line: usize) {
+    // TODO: need to map breakpoints over edits and update them?
+    // we shouldn't really allow editing while debug is running though
+
+    let breakpoints = cx.editor.breakpoints.entry(path.clone()).or_default();
+    // TODO: always keep breakpoints sorted and use binary search to determine insertion point
+    if let Some(pos) = breakpoints
+        .iter()
+        .position(|breakpoint| breakpoint.line == line)
+    {
+        breakpoints.remove(pos);
+    } else {
+        breakpoints.push(Breakpoint {
+            line,
+            ..Default::default()
+        });
+    }
+
+    let debugger = debugger!(cx.editor);
+
+    if let Err(e) = breakpoints_changed(debugger, path, breakpoints) {
+        cx.editor
+            .set_error(format!("Failed to set breakpoints: {}", e));
+    }
+}
+
+pub fn dap_continue(cx: &mut Context) {
+    let debugger = debugger!(cx.editor);
+
+    if let Some(thread_id) = debugger.thread_id {
+        let request = debugger.continue_thread(thread_id);
+        if let Err(e) = block_on(request) {
+            cx.editor.set_error(format!("Failed to continue: {}", e));
+            return;
+        }
+        resume_application(debugger);
+    } else {
+        cx.editor
+            .set_error("Currently active thread is not stopped. 
Switch the thread.".into()); + } +} + +pub fn dap_pause(cx: &mut Context) { + thread_picker(cx, |editor, thread| { + let debugger = debugger!(editor); + let request = debugger.pause(thread.id); + // NOTE: we don't need to set active thread id here because DAP will emit a "stopped" event + if let Err(e) = block_on(request) { + editor.set_error(format!("Failed to pause: {}", e)); + } + }) +} + +pub fn dap_step_in(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.step_in(thread_id); + if let Err(e) = block_on(request) { + cx.editor.set_error(format!("Failed to continue: {}", e)); + return; + } + resume_application(debugger); + } else { + cx.editor + .set_error("Currently active thread is not stopped. Switch the thread.".into()); + } +} + +pub fn dap_step_out(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.step_out(thread_id); + if let Err(e) = block_on(request) { + cx.editor.set_error(format!("Failed to continue: {}", e)); + return; + } + resume_application(debugger); + } else { + cx.editor + .set_error("Currently active thread is not stopped. Switch the thread.".into()); + } +} + +pub fn dap_next(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.next(thread_id); + if let Err(e) = block_on(request) { + cx.editor.set_error(format!("Failed to continue: {}", e)); + return; + } + resume_application(debugger); + } else { + cx.editor + .set_error("Currently active thread is not stopped. Switch the thread.".into()); + } +} + +pub fn dap_variables(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if debugger.thread_id.is_none() { + cx.editor + .set_status("Cannot access variables while target is running".to_owned()); + return; + } + let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { + (Some(frame), Some(thread_id)) => (frame, thread_id), + _ => { + cx.editor + .set_status("Cannot find current stack frame to access variables".to_owned()); + return; + } + }; + + let frame_id = debugger.stack_frames[&thread_id][frame].id; + let scopes = match block_on(debugger.scopes(frame_id)) { + Ok(s) => s, + Err(e) => { + cx.editor.set_error(format!("Failed to get scopes: {}", e)); + return; + } + }; + let mut variables = Vec::new(); + + // TODO: group by scope + // TODO: ui::Text to use tui::text + styled builder + + // let contents = tui::text::Text::new(); + + let theme = &cx.editor.theme; + let scope_style = theme.get("ui.linenr.selected"); + let type_style = theme.get("ui.text"); + let text_style = theme.get("ui.text.focus"); + + for scope in scopes.iter() { + // use helix_view::graphics::Style; + use tui::text::{Span, Spans}; + let response = block_on(debugger.variables(scope.variables_reference)); + + variables.push(Spans::from(Span::styled( + format!("▸ {}", scope.name), + scope_style, + ))); + + if let Ok(vars) = response { + variables.reserve(vars.len()); + for var in vars { + let mut spans = Vec::with_capacity(5); + + spans.push(Span::styled(var.name.to_owned(), text_style)); + if let Some(ty) = var.ty { + spans.push(Span::raw(": ")); + spans.push(Span::styled(ty.to_owned(), type_style)); + } + spans.push(Span::raw(" = ")); + spans.push(Span::styled(var.value.to_owned(), text_style)); + variables.push(Spans::from(spans)); + } + } + } + + let contents = Text::from(tui::text::Text::from(variables)); + let popup = 
Popup::new(contents); + cx.push_layer(Box::new(popup)); +} + +pub fn dap_terminate(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let request = debugger.disconnect(); + if let Err(e) = block_on(request) { + cx.editor.set_error(format!("Failed to disconnect: {}", e)); + return; + } + cx.editor.debugger = None; +} + +pub fn dap_enable_exceptions(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let filters = match &debugger.capabilities().exception_breakpoint_filters { + Some(filters) => filters.iter().map(|f| f.filter.clone()).collect(), + None => return, + }; + + if let Err(e) = block_on(debugger.set_exception_breakpoints(filters)) { + cx.editor + .set_error(format!("Failed to set up exception breakpoints: {}", e)); + } +} + +pub fn dap_disable_exceptions(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Err(e) = block_on(debugger.set_exception_breakpoints(Vec::new())) { + cx.editor + .set_error(format!("Failed to set up exception breakpoints: {}", e)); + } +} + +// TODO: both edit condition and edit log need to be stable: we might get new breakpoints from the debugger which can change offsets +pub fn dap_edit_condition(cx: &mut Context) { + if let Some((pos, breakpoint)) = get_breakpoint_at_current_line(cx.editor) { + let path = match doc!(cx.editor).path() { + Some(path) => path.clone(), + None => return, + }; + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let mut prompt = Prompt::new( + "condition:".into(), + None, + |_input: &str| Vec::new(), + move |cx, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } + + let breakpoints = &mut cx.editor.breakpoints.get_mut(&path).unwrap(); + breakpoints[pos].condition = match input { + "" => None, + input => Some(input.to_owned()), + }; + + let debugger = debugger!(cx.editor); + + if let Err(e) = breakpoints_changed(debugger, path.clone(), breakpoints) + { + cx.editor + .set_error(format!("Failed to set breakpoints: {}", e)); + } + }, + ); + if let Some(condition) = breakpoint.condition { + prompt.insert_str(&condition) + } + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + } +} + +pub fn dap_edit_log(cx: &mut Context) { + if let Some((pos, breakpoint)) = get_breakpoint_at_current_line(cx.editor) { + let path = match doc!(cx.editor).path() { + Some(path) => path.clone(), + None => return, + }; + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let mut prompt = Prompt::new( + "log-message:".into(), + None, + |_input: &str| Vec::new(), + move |cx, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } + + let breakpoints = &mut cx.editor.breakpoints.get_mut(&path).unwrap(); + breakpoints[pos].log_message = match input { + "" => None, + input => Some(input.to_owned()), + }; + + let debugger = debugger!(cx.editor); + if let Err(e) = breakpoints_changed(debugger, path.clone(), breakpoints) + { + cx.editor + .set_error(format!("Failed to set breakpoints: {}", e)); + } + }, + ); + if let Some(log_message) = breakpoint.log_message { + prompt.insert_str(&log_message); + } + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + } +} + +pub fn dap_switch_thread(cx: &mut Context) { + thread_picker(cx, |editor, thread| { + block_on(select_thread_id(editor, thread.id, true)); + }) +} +pub fn 
dap_switch_stack_frame(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let thread_id = match debugger.thread_id { + Some(thread_id) => thread_id, + None => { + cx.editor + .set_error("No thread is currently active".to_owned()); + return; + } + }; + + let frames = debugger.stack_frames[&thread_id].clone(); + + let picker = FilePicker::new( + frames, + |frame| frame.name.clone().into(), // TODO: include thread_states in the label + move |cx, frame, _action| { + let debugger = debugger!(cx.editor); + // TODO: this should be simpler to find + let pos = debugger.stack_frames[&thread_id] + .iter() + .position(|f| f.id == frame.id); + debugger.active_frame = pos; + + let frame = debugger.stack_frames[&thread_id] + .get(pos.unwrap_or(0)) + .cloned(); + if let Some(frame) = &frame { + jump_to_stack_frame(cx.editor, frame); + } + }, + move |_editor, frame| { + frame + .source + .as_ref() + .and_then(|source| source.path.clone()) + .map(|path| { + ( + path, + Some(( + frame.line.saturating_sub(1), + frame.end_line.unwrap_or(frame.line).saturating_sub(1), + )), + ) + }) + }, + ); + cx.push_layer(Box::new(picker)) +} diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs index 06639dcd929e..b317242da814 100644 --- a/helix-term/src/keymap.rs +++ b/helix-term/src/keymap.rs @@ -649,6 +649,26 @@ impl Default for Keymaps { "S" => workspace_symbol_picker, "a" => code_action, "'" => last_picker, + "d" => { "Debug" sticky=true + "l" => dap_launch, + "b" => dap_toggle_breakpoint, + "c" => dap_continue, + "h" => dap_pause, + "i" => dap_step_in, + "o" => dap_step_out, + "n" => dap_next, + "v" => dap_variables, + "t" => dap_terminate, + "C-c" => dap_edit_condition, + "C-l" => dap_edit_log, + "s" => { "Switch" + "t" => dap_switch_thread, + "f" => dap_switch_stack_frame, + // sl, sb + }, + "e" => dap_enable_exceptions, + "E" => dap_disable_exceptions, + }, "w" => { "Window" "C-w" | "w" => rotate_view, "C-s" | "s" => hsplit, diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index 83be816f9954..ac11d298b806 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -59,22 +59,55 @@ impl EditorView { &mut self.spinners } - #[allow(clippy::too_many_arguments)] pub fn render_view( &self, + editor: &Editor, doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, - theme: &Theme, is_focused: bool, - loader: &syntax::Loader, - config: &helix_view::editor::Config, ) { let inner = view.inner_area(); let area = view.area; + let theme = &editor.theme; + + // DAP: Highlight current stack frame position + let stack_frame = editor.debugger.as_ref().and_then(|debugger| { + if let (Some(frame), Some(thread_id)) = (debugger.active_frame, debugger.thread_id) { + debugger + .stack_frames + .get(&thread_id) + .and_then(|bt| bt.get(frame)) + } else { + None + } + }); + if let Some(frame) = stack_frame { + if doc.path().is_some() + && frame + .source + .as_ref() + .and_then(|source| source.path.as_ref()) + == doc.path() + { + let line = frame.line - 1; // convert to 0-indexing + if line >= view.offset.row && line < view.offset.row + area.height as usize { + surface.set_style( + Rect::new( + area.x, + area.y + (line - view.offset.row) as u16, + area.width, + 1, + ), + theme.get("ui.highlight"), + ); + } + } + } - let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, loader); + let highlights = + Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, &editor.syn_loader); let highlights = syntax::merge(highlights, 
Self::doc_diagnostics_highlights(doc, theme)); let highlights: Box> = if is_focused { Box::new(syntax::merge( @@ -86,7 +119,7 @@ impl EditorView { }; Self::render_text_highlights(doc, view.offset, inner, surface, theme, highlights); - Self::render_gutter(doc, view, view.area, surface, theme, is_focused, config); + Self::render_gutter(editor, doc, view, view.area, surface, theme, is_focused); if is_focused { Self::render_focused_view_elements(view, doc, inner, theme, surface); @@ -117,7 +150,6 @@ impl EditorView { /// Get syntax highlights for a document in a view represented by the first line /// and column (`offset`) and the last line. This is done instead of using a view /// directly to enable rendering syntax highlighted docs anywhere (eg. picker preview) - #[allow(clippy::too_many_arguments)] pub fn doc_syntax_highlights<'doc>( doc: &'doc Document, offset: Position, @@ -398,15 +430,14 @@ impl EditorView { } } - #[allow(clippy::too_many_arguments)] pub fn render_gutter( + editor: &Editor, doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, theme: &Theme, is_focused: bool, - config: &helix_view::editor::Config, ) { let text = doc.text().slice(..); let last_line = view.last_line(doc); @@ -426,7 +457,7 @@ impl EditorView { let mut text = String::with_capacity(8); for (constructor, width) in view.gutters() { - let gutter = constructor(doc, view, theme, config, is_focused, *width); + let gutter = constructor(editor, doc, view, theme, is_focused, *width); text.reserve(*width); // ensure there's enough space for the gutter for (i, line) in (view.offset.row..(last_line + 1)).enumerate() { let selected = cursors.contains(&line); @@ -442,6 +473,7 @@ impl EditorView { } text.clear(); } + offset += *width as u16; } } @@ -501,7 +533,6 @@ impl EditorView { ); } - #[allow(clippy::too_many_arguments)] pub fn render_statusline( &self, doc: &Document, @@ -653,7 +684,6 @@ impl EditorView { cxt: &mut commands::Context, event: KeyEvent, ) -> Option { - self.autoinfo = None; let key_result = self.keymaps.get_mut(&mode).unwrap().get(event); self.autoinfo = key_result.sticky.map(|node| node.infobox()); @@ -802,6 +832,31 @@ impl EditorView { return EventResult::Consumed(None); } + let result = editor.tree.views().find_map(|(view, _focus)| { + view.gutter_coords_at_screen_coords(row, column) + .map(|coords| (coords, view.id)) + }); + + if let Some((coords, view_id)) = result { + editor.tree.focus = view_id; + + let view = editor.tree.get(view_id); + let doc = editor.documents.get_mut(&view.doc).unwrap(); + + let path = match doc.path() { + Some(path) => path.clone(), + None => { + return EventResult::Ignored; + } + }; + + let line = coords.row + view.offset.row; + if line < doc.text().len_lines() { + commands::dap_toggle_breakpoint_impl(cxt, path, line); + return EventResult::Consumed(None); + } + } + EventResult::Ignored } @@ -877,6 +932,38 @@ impl EditorView { EventResult::Consumed(None) } + MouseEvent { + kind: MouseEventKind::Up(MouseButton::Right), + row, + column, + modifiers, + .. 
+ } => { + let result = cxt.editor.tree.views().find_map(|(view, _focus)| { + view.gutter_coords_at_screen_coords(row, column) + .map(|coords| (coords, view.id)) + }); + + if let Some((coords, view_id)) = result { + cxt.editor.tree.focus = view_id; + + let view = cxt.editor.tree.get(view_id); + let doc = cxt.editor.documents.get_mut(&view.doc).unwrap(); + let line = coords.row + view.offset.row; + if let Ok(pos) = doc.text().try_line_to_char(line) { + doc.set_selection(view_id, Selection::point(pos)); + if modifiers == crossterm::event::KeyModifiers::ALT { + commands::Command::dap_edit_log.execute(cxt); + } else { + commands::Command::dap_edit_condition.execute(cxt); + } + + return EventResult::Consumed(None); + } + } + EventResult::Ignored + } + MouseEvent { kind: MouseEventKind::Up(MouseButton::Middle), row, @@ -1043,17 +1130,7 @@ impl Component for EditorView { for (view, is_focused) in cx.editor.tree.views() { let doc = cx.editor.document(view.doc).unwrap(); - let loader = &cx.editor.syn_loader; - self.render_view( - doc, - view, - area, - surface, - &cx.editor.theme, - is_focused, - loader, - &cx.editor.config, - ); + self.render_view(cx.editor, doc, view, area, surface, is_focused); } if cx.editor.config.auto_info { diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index cdf423110704..3c203326cc01 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -21,7 +21,7 @@ pub use text::Text; use helix_core::regex::Regex; use helix_core::regex::RegexBuilder; -use helix_view::{Document, Editor, View}; +use helix_view::{Document, View}; use std::path::PathBuf; @@ -161,8 +161,8 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi .unwrap() .into() }, - move |editor: &mut Editor, path: &PathBuf, action| { - editor + move |cx, path: &PathBuf, action| { + cx.editor .open(path.into(), action) .expect("editor.open failed"); }, diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index 6b1c58321ab7..eaca470e9706 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -86,7 +86,7 @@ impl FilePicker { pub fn new( options: Vec, format_fn: impl Fn(&T) -> Cow + 'static, - callback_fn: impl Fn(&mut Editor, &T, Action) + 'static, + callback_fn: impl Fn(&mut Context, &T, Action) + 'static, preview_fn: impl Fn(&Editor, &T) -> Option + 'static, ) -> Self { Self { @@ -284,7 +284,7 @@ pub struct Picker { pub truncate_start: bool, format_fn: Box Cow>, - callback_fn: Box, + callback_fn: Box, } impl Picker { @@ -292,7 +292,7 @@ impl Picker { render_centered: bool, options: Vec, format_fn: impl Fn(&T) -> Cow + 'static, - callback_fn: impl Fn(&mut Editor, &T, Action) + 'static, + callback_fn: impl Fn(&mut Context, &T, Action) + 'static, ) -> Self { let prompt = Prompt::new( "".into(), @@ -421,19 +421,19 @@ impl Component for Picker { } key!(Enter) => { if let Some(option) = self.selection() { - (self.callback_fn)(&mut cx.editor, option, Action::Replace); + (self.callback_fn)(cx, option, Action::Replace); } return close_fn; } ctrl!('s') => { if let Some(option) = self.selection() { - (self.callback_fn)(&mut cx.editor, option, Action::HorizontalSplit); + (self.callback_fn)(cx, option, Action::HorizontalSplit); } return close_fn; } ctrl!('v') => { if let Some(option) = self.selection() { - (self.callback_fn)(&mut cx.editor, option, Action::VerticalSplit); + (self.callback_fn)(cx, option, Action::VerticalSplit); } return close_fn; } diff --git a/helix-term/src/ui/text.rs b/helix-term/src/ui/text.rs index 
4641fae1fde2..caece049c96f 100644
--- a/helix-term/src/ui/text.rs
+++ b/helix-term/src/ui/text.rs
@@ -4,13 +4,23 @@ use tui::buffer::Buffer as Surface;
 use helix_view::graphics::Rect;
 
 pub struct Text {
-    contents: String,
+    contents: tui::text::Text<'static>,
     size: (u16, u16),
     viewport: (u16, u16),
 }
 
 impl Text {
     pub fn new(contents: String) -> Self {
+        Self {
+            contents: tui::text::Text::from(contents),
+            size: (0, 0),
+            viewport: (0, 0),
+        }
+    }
+}
+
+impl From<tui::text::Text<'static>> for Text {
+    fn from(contents: tui::text::Text<'static>) -> Self {
         Self {
             contents,
             size: (0, 0),
@@ -18,12 +28,12 @@ impl Text {
         }
     }
 }
+
 impl Component for Text {
     fn render(&mut self, area: Rect, surface: &mut Surface, _cx: &mut Context) {
         use tui::widgets::{Paragraph, Widget, Wrap};
 
-        let contents = tui::text::Text::from(self.contents.clone());
-        let par = Paragraph::new(contents).wrap(Wrap { trim: false });
+        let par = Paragraph::new(self.contents.clone()).wrap(Wrap { trim: false });
         // .scroll(x, y) offsets
 
         par.render(area, surface);
@@ -31,9 +41,8 @@ impl Component for Text {
 
     fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {
         if viewport != self.viewport {
-            let contents = tui::text::Text::from(self.contents.clone());
-            let width = std::cmp::min(contents.width() as u16, viewport.0);
-            let height = std::cmp::min(contents.height() as u16, viewport.1);
+            let width = std::cmp::min(self.contents.width() as u16, viewport.0);
+            let height = std::cmp::min(self.contents.height() as u16, viewport.1);
             self.size = (width, height);
             self.viewport = viewport;
         }
diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml
index 34f55eb60e58..ffe6a111c9a1 100644
--- a/helix-view/Cargo.toml
+++ b/helix-view/Cargo.toml
@@ -18,6 +18,7 @@ bitflags = "1.3"
 anyhow = "1"
 helix-core = { version = "0.5", path = "../helix-core" }
 helix-lsp = { version = "0.5", path = "../helix-lsp"}
+helix-dap = { version = "0.5", path = "../helix-dap"}
 crossterm = { version = "0.22", optional = true }
 
 # Conversion traits
@@ -25,6 +26,7 @@ once_cell = "1.8"
 url = "2"
 
 tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
+tokio-stream = "0.1"
 futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
 
 slotmap = "1"
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index d5913a51dd32..c7b3baefbc00 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -8,8 +8,11 @@ use crate::{
 };
 
 use futures_util::future;
+use futures_util::stream::select_all::SelectAll;
+use tokio_stream::wrappers::UnboundedReceiverStream;
+
 use std::{
-    collections::BTreeMap,
+    collections::{BTreeMap, HashMap},
     io::stdin,
     num::NonZeroUsize,
     path::{Path, PathBuf},
@@ -25,6 +28,7 @@ pub use helix_core::diagnostic::Severity;
 pub use helix_core::register::Registers;
 use helix_core::syntax;
 use helix_core::{Position, Selection};
+use helix_dap as dap;
 
 use serde::Deserialize;
 
@@ -152,6 +156,19 @@ impl std::fmt::Debug for Motion {
     }
 }
 
+#[derive(Debug, Clone, Default)]
+pub struct Breakpoint {
+    pub id: Option<usize>,
+    pub verified: bool,
+    pub message: Option<String>,
+
+    pub line: usize,
+    pub column: Option<usize>,
+    pub condition: Option<String>,
+    pub hit_condition: Option<String>,
+    pub log_message: Option<String>,
+}
+
 #[derive(Debug)]
 pub struct Editor {
     pub tree: Tree,
@@ -162,6 +179,11 @@ pub struct Editor {
     pub registers: Registers,
     pub theme: Theme,
     pub language_servers: helix_lsp::Registry,
+
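+    // DAP: state for the active debug session, its event stream, and per-file breakpoints.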
+    pub debugger: Option<dap::Client>,
+    pub debugger_events: SelectAll<UnboundedReceiverStream<dap::Payload>>,
+    pub breakpoints: HashMap<PathBuf, Vec<Breakpoint>>,
+
     pub clipboard_provider: Box<dyn ClipboardProvider>,
 
     pub syn_loader: Arc<syntax::Loader>,
@@ -205,6 +227,9 @@ impl Editor {
             selected_register: None,
             theme: theme_loader.default(),
             language_servers,
+            debugger: None,
+            debugger_events: SelectAll::new(),
+            breakpoints: HashMap::new(),
             syn_loader,
             theme_loader,
             registers: Registers::default(),
diff --git a/helix-view/src/gutter.rs b/helix-view/src/gutter.rs
index 86773c1db217..e156b9e55092 100644
--- a/helix-view/src/gutter.rs
+++ b/helix-view/src/gutter.rs
@@ -1,16 +1,19 @@
 use std::fmt::Write;
 
-use crate::{editor::Config, graphics::Style, Document, Theme, View};
+use crate::{
+    graphics::{Color, Modifier, Style},
+    Document, Editor, Theme, View,
+};
 
 pub type GutterFn<'doc> = Box Option