diff --git a/.zed/tasks.json b/.zed/tasks.json index 463f529..0f630b0 100644 --- a/.zed/tasks.json +++ b/.zed/tasks.json @@ -11,23 +11,14 @@ // "NEXTEST_SUCCESS_OUTPUT": "immediate" }, }, - { - "label": "cargo nextest tests::$ZED_CUSTOM_RUST_TEST_NAME", - "command": "cargo nextest run tests::$ZED_CUSTOM_RUST_TEST_NAME", - "tags": ["rust-test"], - "env": { - // "NEXTEST_FAILURE_OUTPUT": "immediate", - // "NEXTEST_SUCCESS_OUTPUT": "immediate" - }, - }, { "label": "cargo nextest $ZED_STEM (package: $ZED_CUSTOM_RUST_PACKAGE)", "command": "cargo nextest run -p $ZED_CUSTOM_RUST_PACKAGE $ZED_STEM::$ZED_SYMBOL", "tags": ["rust-mod-test"], }, { - "label": "cargo nextest tests", - "command": "cargo nextest run tests", + "label": "cargo nextest tests (crate root)", + "command": "cargo nextest run -E 'test(/^tests/)'", "tags": ["rust-mod-test"], }, { diff --git a/Cargo.lock b/Cargo.lock index dcc0f19..e05cc30 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2061,6 +2061,12 @@ dependencies = [ "windows-link", ] +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + [[package]] name = "paths" version = "0.1.0" @@ -4157,13 +4163,19 @@ name = "zed-settings-sync-lsp" version = "0.0.1-alpha.5" dependencies = [ "anyhow", + "assert_fs", "common", + "ctor", "debug-ignore", "jsonc-parser", + "mockall", + "mockall_double", "notify", "octocrab", + "paste", "paths", "serde", + "test_support", "tokio", "tower-lsp", "tracing", diff --git a/Cargo.toml b/Cargo.toml index 5a793b0..d681748 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ mockall = { version = "0.14.0" } assert_fs = "1.1.3" ctor = { version = "0.6.3", default-features = false, features = ["proc_macro"] } paths = { git = "https://github.com/zed-industries/zed.git" } +mockall_double = "0.3.1" [dependencies] zed_extension_api = { workspace = true } diff --git a/ROADMAP.md b/ROADMAP.md 
index fa1c9cb..53dafa0 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -9,17 +9,25 @@ - [x] Write a test a script to locally build the LSP binary and copy it to the Zed's folder for the extension (see Zed Discord Presence LSP server info for the full path, also check out the Zed sources, namely paths.rs) - [x] Forbid the usage of unwrap and expect for Option, Result - [x] Use serde_json from zed_extension_api, not directly +- [x] To support multiple sync providers, implement a Client trait and use it for all sync operations. Move it to the "common" crate. - [~] **Add unit and integration tests** - [ ] Test all error-returning code paths and ensure that all error conditions are either properly logged and/or reported back to Zed in form or a JSON-RPC error response - [ ] Revamp README before publishing the extension to make it easier to consume and start using the extension immediately +- [ ] Add secrecy crate and use it for all usages of the Github token + +## LSP server + - [ ] Manually save a settings file on its open (before adding to the watch list) to handle the case when the LSP server is restarted after the initialization_options are changes in settings.json file. - [ ] Ensure that restarting or shutting down the LSP server doesn't prevent the last coming updates from getting synced; otherwise, mitigate that +- [ ] Report a sync error in a visible way (crash the server? know how to report an LSP error inside Zed?) +- [ ] Create a true integration test where a server is spawned, it's fed with JSON-RPC messages, and Github API URL is mocked via env var to point to a local mock server as another process - [ ] After implementing naive changes persistence (sync files as FS events come), seek the ways to improve it (e.g.
queuing events) -- [ ] (experimental) Rewrite the LSP server to use structured async concurrency, thread-per-code async runtime, and get rid of Arc's around every data structure - [ ] Add secrecy crate and use it for all usages of the Github token - [ ] To support multiple sync providers, implement a SyncClient trait and use it for all sync operations. Move it to the "common" crate. - [ ] Report a sync error in a visible way (crash the server? know how to report an LSP error inside Zed?) - [ ] Backup installed themes to the gist automatically +- [ ] Use type-state pattern to guarantee that one cannot watch/unwatch a path using non-started WatchedSet or PathStore ### CLI tool @@ -32,9 +40,9 @@ ## CI - [ ] Enable ["cancel-in-progress"](https://share.google/Vk4zJKCbkerc5BAfC) for CI builds -- [ ] Add matrix to compile for Windows on ARM64 -- [ ] Speed up the build if possible (caching, Docker images, etc.) -- [ ] Extract "compile" as a separate local Github action +- [x] Add matrix to compile for Windows on ARM64 +- [x] Speed up the build if possible (caching, Docker images, etc.) +- [ ] ~~Extract "compile" as a separate local Github action~~ No need for that, because "compile" is used only for release. 
- [ ] Optimize binaries size ## Documentation diff --git a/lsp/Cargo.toml b/lsp/Cargo.toml index 490f6dc..9e0d479 100644 --- a/lsp/Cargo.toml +++ b/lsp/Cargo.toml @@ -13,7 +13,7 @@ jsonc-parser = { workspace = true } octocrab = { workspace = true } zed_extension_api = { workspace = true } serde = { workspace = true } -tokio = { workspace = true, features = ["io-std"] } +tokio = { workspace = true, features = ["io-std", "fs"] } tracing = { workspace = true } paths = { workspace = true } common = { path = "../common" } @@ -21,3 +21,12 @@ debug-ignore = "1.0.5" notify = "8.2.0" tower-lsp = "0.20.0" # TODO: upgrade tracing-subscriber = { version = "0.3.20", features = ["env-filter", "json", "chrono"] } +mockall_double = { workspace = true } + +[dev-dependencies] +common = { path = "../common", features = ["test-support"] } +test_support = { path = "../test_support" } +mockall = { workspace = true } +assert_fs = { workspace = true } +ctor = { workspace = true } +paste = "1.0.15" diff --git a/lsp/src/app_state.rs b/lsp/src/app_state.rs index 9c36dd9..3045d5e 100644 --- a/lsp/src/app_state.rs +++ b/lsp/src/app_state.rs @@ -3,18 +3,18 @@ use std::sync::Arc; use anyhow::Result; use common::sync::GithubClient; -use crate::watching::Store; +use crate::watching::PathStore; #[derive(Debug)] pub struct AppState { - pub watcher_store: Arc, + pub watched_paths: PathStore, } impl AppState { pub fn new(gist_id: String, github_token: String) -> Result { let sync_client = Arc::new(GithubClient::new(gist_id, github_token)?); - let watcher_store = Arc::new(Store::new(sync_client)?); + let watched_paths = PathStore::new(sync_client)?; - Ok(Self { watcher_store }) + Ok(Self { watched_paths }) } } diff --git a/lsp/src/backend.rs b/lsp/src/backend.rs index c4b6c54..595f869 100644 --- a/lsp/src/backend.rs +++ b/lsp/src/backend.rs @@ -1,5 +1,5 @@ use std::path::PathBuf; -use std::sync::{Arc, OnceLock}; +use std::sync::{Mutex, OnceLock}; use anyhow::Result; use common::config::Config; @@ 
-23,45 +23,47 @@ const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); #[derive(Debug)] pub struct Backend { - // Arc is needed for cross-thread ownership (Tokio), OnceLock - for delayed initialization; - // both of them - because of how the LanguageServer trait is defined by tower-lsp: - // its methods don't mutate self. - app_state: Arc>, + // OnceLock is needed for cross-thread sync (Tokio) and for delayed initialization. + // Mutex is needed for interior mutability over a shared reference + // because LanguageServer trait methods accept &self (not &mut self). + app_state: OnceLock>, } impl Backend { pub fn new(_client: Client) -> Self { Self { - app_state: Arc::new(OnceLock::new()), + app_state: OnceLock::new(), } } - async fn watch_path(&self, path: PathBuf) -> Result<()> { + fn watch_path(&self, path: PathBuf) -> Result<()> { let info_msg = format!("Watching path: {}", path.display()); #[allow(clippy::expect_used)] self.app_state .get() - .expect("App state must be already initialized") - .watcher_store - .watch(path) - .await?; + .expect("App state must already be initialized") + .lock() + .expect("Watched paths store mutex is poisoned") + .watched_paths + .watch(path)?; info!("{}", info_msg); Ok(()) } - async fn unwatch_path(&self, path: PathBuf) -> Result<()> { + fn unwatch_path(&self, path: &PathBuf) -> Result<()> { let info_msg = format!("Unwatching path: {}", path.display()); #[allow(clippy::expect_used)] self.app_state .get() .expect("App state must be already initialized") - .watcher_store - .unwatch(path) - .await?; + .lock() + .expect("Watched paths store mutex is poisoned") + .watched_paths + .unwatch(path)?; info!("{}", info_msg); @@ -91,16 +93,17 @@ impl LanguageServer for Backend { #[allow(clippy::expect_used)] self.app_state - .set(app_state) + .set(Mutex::new(app_state)) .expect("AppState should not yet be initialized"); #[allow(clippy::expect_used)] self.app_state .get() .expect("App state should have been already initialized") - 
.watcher_store - .start_watcher() - .await; + .lock() + .expect("Watched paths store mutex is poisoned") + .watched_paths + .start_watcher(); Ok(InitializeResult { server_info: Some(ServerInfo { @@ -141,8 +144,9 @@ impl LanguageServer for Backend { match ZedConfigFilePath::from_file_uri(¶ms.text_document.uri) { Ok(path) => { let path_to_watch = path.to_watched_path_buf(); - // TODO: expose sync_client in app state and sync file explicitly after opening (quick'n'dirty way to fight losing last changes on LSP restart on settings update) - if let Err(err) = self.watch_path(path_to_watch).await { + // TODO: expose sync_client in app state and sync file explicitly after opening + // (quick'n'dirty way to fight losing last changes on LSP restart on settings update) + if let Err(err) = self.watch_path(path_to_watch) { error!("Failed to start watching path: {}", err); } } @@ -165,7 +169,7 @@ impl LanguageServer for Backend { match ZedConfigFilePath::from_file_uri(¶ms.text_document.uri) { Ok(path) => { let path_to_watch = path.to_watched_path_buf(); - if let Err(err) = self.unwatch_path(path_to_watch).await { + if let Err(err) = self.unwatch_path(&path_to_watch) { error!("Failed to stop watching path: {}", err); } } diff --git a/lsp/src/main.rs b/lsp/src/main.rs index 2b2f859..9bd5804 100644 --- a/lsp/src/main.rs +++ b/lsp/src/main.rs @@ -8,6 +8,9 @@ mod backend; mod logger; mod watching; +#[cfg(test)] +test_support::nextest_only!(); + #[tokio::main] async fn main() { logger::init_logger(); diff --git a/lsp/src/watching.rs b/lsp/src/watching.rs index facd183..3291f61 100644 --- a/lsp/src/watching.rs +++ b/lsp/src/watching.rs @@ -1,7 +1,9 @@ +mod path_store; mod path_watcher; -mod store; +mod watched_set; mod zed_config; +pub use path_store::*; pub use path_watcher::*; -pub use store::*; +pub use watched_set::*; pub use zed_config::*; diff --git a/lsp/src/watching/path_store.rs b/lsp/src/watching/path_store.rs new file mode 100644 index 0000000..ce8ff42 --- /dev/null +++ 
b/lsp/src/watching/path_store.rs @@ -0,0 +1,253 @@ +use std::{path::PathBuf, pin::Pin, sync::Arc}; + +use anyhow::Result; +use anyhow::{Context, anyhow}; +use common::sync::{Client, LocalFileData}; +use mockall_double::double; +use notify::{Event, EventKind, event::ModifyKind}; +use tokio::fs; +use tracing::{debug, error}; + +#[double] +use crate::watching::WatchedSet; + +#[derive(Debug)] +pub struct PathStore { + watched_set: WatchedSet, +} + +impl PathStore { + pub fn new(client: Arc) -> Result { + let event_handler = Box::new(move |event| { + let client_clone = Arc::clone(&client); + Box::pin(async move { + match process_event(&event).await { + Ok(data) => { + let Some(data) = data else { + return; + }; + + if let Err(err) = client_clone.sync_file(data).await { + error!("{}", err); + } + } + Err(err) => error!("Could not process file event: {err}"), + } + }) as Pin + Send>> + }); + + Ok(Self { + watched_set: WatchedSet::new(event_handler)?, + }) + } + + pub fn start_watcher(&mut self) { + self.watched_set.start_watcher(); + } + + // no need to stop watcher or clear the store because it will be stopped (when dropped) on the store drop + + pub fn watch(&mut self, file_path: PathBuf) -> anyhow::Result<()> { + self.watched_set.watch(file_path) + } + + pub fn unwatch(&mut self, file_path: &PathBuf) -> anyhow::Result<()> { + self.watched_set.unwatch(file_path) + } +} + +async fn process_event(event: &Event) -> Result> { + debug!("Processing file watcher event: {event:?}"); + + let EventKind::Modify(ModifyKind::Data(_)) = event.kind else { + debug!("Got not a file data modify event, skipping: {event:?}"); + return Ok(None); + }; + + let path = event.paths.first().cloned().ok_or(anyhow!( + "event did not provide the path of the modified file" + ))?; + + let body = fs::read_to_string(&path) + .await + .with_context(|| format!("Could not read the modified file: {}", path.display()))?; + + Ok(Some(LocalFileData::new(path, body)?)) +} + +#[cfg(test)] +mod tests { + 
#![allow(clippy::unwrap_used)] + + use assert_fs::{TempDir, prelude::*}; + use common::sync::MockGithubClient; + use mockall::{Sequence, predicate}; + use paste::paste; + + use super::*; + use crate::watching::MockWatchedSet; + + #[tokio::test] + async fn test_successful_creation() { + let ctx = MockWatchedSet::new_context(); + ctx.expect().returning(|_| Ok(MockWatchedSet::default())); + + assert!(PathStore::new(Arc::new(MockGithubClient::default())).is_ok()); + } + + #[tokio::test] + async fn test_unsuccessful_creation_when_watched_set_creation_failed() { + let ctx = MockWatchedSet::new_context(); + ctx.expect() + .returning(|_| Err(anyhow!("Failed to create watched set"))); + + assert!(PathStore::new(Arc::new(MockGithubClient::default())).is_err()); + } + + macro_rules! setup_watched_set_mock { + ($method:ident, $path:expr) => { + paste! { + let ctx = MockWatchedSet::new_context(); + ctx.expect().returning(move |_| { + let mut seq = Sequence::new(); + let mut mock_watched_set = MockWatchedSet::default(); + mock_watched_set + .expect_start_watcher() + .in_sequence(&mut seq) + .returning(|| ()); + mock_watched_set + .[]() + .with(predicate::eq($path)) + .in_sequence(&mut seq) + .returning(|_| Ok(())); + + Ok(mock_watched_set) + }); + } + }; + ($method:ident, $path:expr, $err_msg:expr) => { + paste! 
{ + let ctx = MockWatchedSet::new_context(); + ctx.expect().returning(move |_| { + let mut seq = Sequence::new(); + let mut mock_watched_set = MockWatchedSet::default(); + mock_watched_set + .expect_start_watcher() + .in_sequence(&mut seq) + .returning(|| ()); + mock_watched_set + .[]() + .with(predicate::eq($path)) + .in_sequence(&mut seq) + .returning(|_| Err(anyhow!($err_msg))); + + Ok(mock_watched_set) + }); + } + }; + } + + #[tokio::test] + async fn test_successful_watch_path() -> Result<()> { + let dir = TempDir::new()?; + dir.child("foobar").touch()?; + let path = dir.path().to_path_buf(); + let path_clone = path.clone(); + + setup_watched_set_mock!(watch, path.clone()); + + let mut store = PathStore::new(Arc::new(MockGithubClient::default()))?; + store.start_watcher(); + store.watch(path_clone)?; + + Ok(()) + } + + #[tokio::test] + async fn test_unsuccessful_watch_path() -> Result<()> { + let dir = TempDir::new()?; + dir.child("foobar").touch()?; + let path = dir.path().to_path_buf(); + let path_clone = path.clone(); + + setup_watched_set_mock!(watch, path.clone(), "Path already being watched"); + + let mut store = PathStore::new(Arc::new(MockGithubClient::default()))?; + store.start_watcher(); + + assert_eq!( + store.watch(path_clone).unwrap_err().to_string(), + "Path already being watched" + ); + + Ok(()) + } + + #[tokio::test] + async fn test_successful_unwatch_path() -> Result<()> { + let dir = TempDir::new()?; + dir.child("foobar").touch()?; + let path = dir.path().to_path_buf(); + let path_clone = path.clone(); + + setup_watched_set_mock!(unwatch, path.clone()); + + let mut store = PathStore::new(Arc::new(MockGithubClient::default()))?; + store.start_watcher(); + store.unwatch(&path_clone)?; + + Ok(()) + } + + #[tokio::test] + async fn test_unsuccessful_unwatch_path() -> Result<()> { + let dir = TempDir::new()?; + dir.child("foobar").touch()?; + let path = dir.path().to_path_buf(); + let path_clone = path.clone(); + + setup_watched_set_mock!(unwatch, 
path.clone(), "Path was not watched"); + + let mut store = PathStore::new(Arc::new(MockGithubClient::default()))?; + store.start_watcher(); + + assert_eq!( + store.unwatch(&path_clone).unwrap_err().to_string(), + "Path was not watched" + ); + + Ok(()) + } + + /* + + Integration tests, here or just for WatchedSet (TODO) + + - events handling + - test create file does not trigger event handler + - create a store with the MockGithubClient passed + - start watcher + - add a new path to watch (assert_fs::TempDir), maybe with an already existing file + - create a new file in that dir + - ensure event was not triggered (MockGithubClient) + - test delete file does not trigger event handler + - create a store with the MockGithubClient passed + - start watcher + - add a new path to watch (assert_fs::TempDir), with an already existing file + - delete the file + - ensure event was not triggered (MockGithubClient) + - test modify file data triggers event handler + - create a store with the MockGithubClient passed + - start watcher + - add a new path to watch (assert_fs::TempDir), with an already existing file + - modify the file data + - ensure event was triggered (MockGithubClient) + - test modify file data outside of watched paths does not trigger event handler + - create a store with the MockGithubClient passed + - start watcher + - add a new path to watch (assert_fs::TempDir) + - create another assert_fs::TempDir with an existing file + - modify that file data + - ensure event was not triggered (MockGithubClient) + */ +} diff --git a/lsp/src/watching/store.rs b/lsp/src/watching/store.rs deleted file mode 100644 index 16ab0c2..0000000 --- a/lsp/src/watching/store.rs +++ /dev/null @@ -1,117 +0,0 @@ -use std::{collections::HashSet, fs, path::PathBuf, pin::Pin, sync::Arc}; - -use anyhow::Result; -use anyhow::{Context, anyhow, bail}; -use common::sync::{Client, LocalFileData}; -use notify::{Event, EventKind, event::ModifyKind}; -use tokio::sync::Mutex; -use tracing::{debug, 
error}; - -use crate::watching::{EventHandler, PathWatcher}; - -#[derive(Debug)] -struct WatchedSet { - paths: HashSet, - watcher: PathWatcher, -} - -impl WatchedSet { - fn new(event_handler: EventHandler) -> Result { - let watcher = PathWatcher::new(event_handler)?; - - Ok(Self { - paths: HashSet::new(), - watcher, - }) - } -} - -#[derive(Debug)] -pub struct Store { - // behind mutex to control the simultaneous change of paths set and path watcher - watched_set: Mutex, -} - -impl Store { - pub fn new(client: Arc) -> Result { - let event_handler = Box::new(move |event| { - let client_clone = Arc::clone(&client); - Box::pin(async move { - match process_event(&event) { - Ok(data) => { - let Some(data) = data else { - return; - }; - - if let Err(err) = client_clone.sync_file(data).await { - error!("{}", err); - } - } - Err(err) => error!("Could not process file event: {err}"), - } - }) as Pin + Send>> - }); - - Ok(Self { - watched_set: Mutex::new(WatchedSet::new(event_handler)?), - }) - } - - pub async fn watch(&self, file_path: PathBuf) -> anyhow::Result<()> { - { - let mut watched_set = self.watched_set.lock().await; - - if watched_set.paths.contains(&file_path) { - bail!("Path is already being watched: {}", file_path.display()); - } - - watched_set.watcher.watch(&file_path)?; - watched_set.paths.insert(file_path); - } - - Ok(()) - } - - pub async fn unwatch(&self, file_path: PathBuf) -> anyhow::Result<()> { - { - let mut watched_set = self.watched_set.lock().await; - - if !watched_set.paths.contains(&file_path) { - bail!( - "Path is not being watched, failed to unwatch: {}", - file_path.display() - ); - } - - watched_set.watcher.unwatch(&file_path)?; - watched_set.paths.remove(&file_path); - } - - Ok(()) - } - - pub async fn start_watcher(&self) { - let mut watched_set = self.watched_set.lock().await; - - watched_set.watcher.start(); - } - - // no need to stop watcher or clear the store because it will be stopped (when dropped) on the store drop -} - -fn 
process_event(event: &Event) -> Result> { - debug!("Processing file watcher event: {event:?}"); - - let EventKind::Modify(ModifyKind::Data(_)) = event.kind else { - debug!("Got not a file data modify event, skipping: {event:?}"); - return Ok(None); - }; - - let path = event.paths.first().cloned().ok_or(anyhow!( - "event did not provide the path of the modified file" - ))?; - - let body = fs::read_to_string(&path).with_context(|| "Could not read the modified file")?; - - Ok(Some(LocalFileData::new(path, body)?)) -} diff --git a/lsp/src/watching/watched_set.rs b/lsp/src/watching/watched_set.rs new file mode 100644 index 0000000..07346e4 --- /dev/null +++ b/lsp/src/watching/watched_set.rs @@ -0,0 +1,66 @@ +use std::{collections::HashSet, path::PathBuf, sync::Mutex}; + +use anyhow::{Result, bail}; + +use crate::watching::{EventHandler, PathWatcher}; + +#[derive(Debug)] +pub struct WatchedSet { + paths: HashSet, + watcher: PathWatcher, + mx: Mutex<()>, +} + +#[cfg_attr(test, mockall::automock)] +impl WatchedSet { + pub fn new(event_handler: EventHandler) -> Result { + let watcher = PathWatcher::new(event_handler)?; + + Ok(Self { + paths: HashSet::new(), + watcher, + mx: Mutex::new(()), + }) + } + + pub fn start_watcher(&mut self) { + #[allow(clippy::expect_used)] + let _lock = self.mx.lock().expect("Watched set mutex is poisoned"); + + self.watcher.start(); + } + + pub fn watch(&mut self, path: PathBuf) -> Result<()> { + #[allow(clippy::expect_used)] + let _lock = self.mx.lock().expect("Watched set mutex is poisoned"); + + if self.paths.contains(&path) { + bail!("Path is already being watched: {}", path.display()); + } + + self.watcher.watch(&path)?; + self.paths.insert(path); + + Ok(()) + } + + pub fn unwatch(&mut self, path: &PathBuf) -> Result<()> { + #[allow(clippy::expect_used)] + let _lock = self.mx.lock().expect("Watched set mutex is poisoned"); + + if !self.paths.contains(path) { + bail!( + "Path is not being watched, failed to unwatch: {}", + path.display() + 
); + } + + self.watcher.unwatch(path)?; + self.paths.remove(path); + + Ok(()) + } +} + +#[cfg(test)] +mod tests {}