From 03a1c8d2b8c1e1a623702b70796d1c5b1d265aff Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Sat, 26 Oct 2024 14:59:46 +0200 Subject: [PATCH 01/87] markdown preview: Fix infinite loop in parser when parsing list items (#19785) Release Notes: - Fixed an issue with the markdown parser when opening a markdown preview file that contained HTML tags inside a list item --- crates/markdown_preview/src/markdown_parser.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 10e910036b..0b3c361fd2 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -626,6 +626,8 @@ impl<'a> MarkdownParser<'a> { // Otherwise we need to insert the block after all the nested items // that have been parsed so far items.extend(block); + } else { + self.cursor += 1; } } } From 2e32f1c8a19525004cb9b3b7ac9cc53c9aa4fdd0 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 26 Oct 2024 21:57:22 +0300 Subject: [PATCH 02/87] Restore horizontal scrollbar checks (#19767) Closes https://github.com/zed-industries/zed/issues/19637 Follow-up of https://github.com/zed-industries/zed/pull/18927 , restores the condition that removed the horizontal scrollbar when panel's items are not long enough. 
Release Notes: - Fixed horizontal scrollbar not being hidden ([#19637](https://github.com/zed-industries/zed/issues/19637)) --- crates/project_panel/src/project_panel.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 50c9d2d126..355e8780cc 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2821,6 +2821,17 @@ impl ProjectPanel { return None; } + let scroll_handle = self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|size| size.contents.width > size.item.width)? + .contents + .width + .0 as f64; + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + Some( div() .occlude() From c12a9f26733e0791ba99b015c3695712ae40322a Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sat, 26 Oct 2024 21:57:55 -0400 Subject: [PATCH 03/87] Add fold_at_level test (#19800) --- crates/editor/src/editor_tests.rs | 106 ++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 99b5cb6637..d56b22b454 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -1080,6 +1080,112 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_fold_at_level(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple( + &" + class Foo: + # Hello! + + def a(): + print(1) + + def b(): + print(2) + + + class Bar: + # World! + + def a(): + print(1) + + def b(): + print(2) + + + " + .unindent(), + cx, + ); + build_editor(buffer.clone(), cx) + }); + + _ = view.update(cx, |view, cx| { + view.fold_at_level(&FoldAtLevel { level: 2 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo: + # Hello! 
+ + def a():⋯ + + def b():⋯ + + + class Bar: + # World! + + def a():⋯ + + def b():⋯ + + + " + .unindent(), + ); + + view.fold_at_level(&FoldAtLevel { level: 1 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo:⋯ + + + class Bar:⋯ + + + " + .unindent(), + ); + + view.unfold_all(&UnfoldAll, cx); + view.fold_at_level(&FoldAtLevel { level: 0 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo: + # Hello! + + def a(): + print(1) + + def b(): + print(2) + + + class Bar: + # World! + + def a(): + print(1) + + def b(): + print(2) + + + " + .unindent(), + ); + + assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text()); + }); +} + #[gpui::test] fn test_move_cursor(cx: &mut TestAppContext) { init_test(cx, |_| {}); From db61711753665a7a9763e033d167cb926e2f7835 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 27 Oct 2024 13:04:52 +0000 Subject: [PATCH 04/87] ci: Don't run GitHub Actions workflows on forks (#19789) - Closes: https://github.com/zed-industries/zed/issues/19351 Release Notes: - N/A --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3843a3343b..ba475f88ab 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -91,6 +91,7 @@ jobs: macos_tests: timeout-minutes: 60 name: (macOS) Run Clippy and tests + if: github.repository_owner == 'zed-industries' runs-on: - self-hosted - test @@ -126,6 +127,7 @@ jobs: linux_tests: timeout-minutes: 60 name: (Linux) Run Clippy and tests + if: github.repository_owner == 'zed-industries' runs-on: - buildjet-16vcpu-ubuntu-2204 steps: @@ -158,6 +160,7 @@ jobs: build_remote_server: timeout-minutes: 60 name: (Linux) Build Remote Server + if: github.repository_owner == 'zed-industries' runs-on: - buildjet-16vcpu-ubuntu-2204 steps: @@ -185,6 +188,7 @@ jobs: windows_tests: timeout-minutes: 60 name: (Windows) Run Clippy and tests + if: github.repository_owner == 'zed-industries' runs-on: 
hosted-windows-1 steps: - name: Checkout repo From b13940720a9091793ebe60bfbe5c12c6114ce2af Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Sun, 27 Oct 2024 14:34:59 +0100 Subject: [PATCH 05/87] markdown preview: Ignore inline HTML tags in text (#19804) Follow up to #19785 This PR ensures that we explicitly ignore inline HTML tags so that we can still extract the text between the tags and show them to the user Release Notes: - N/A --- .../markdown_preview/src/markdown_parser.rs | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 0b3c361fd2..d514b89e52 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -234,6 +234,10 @@ impl<'a> MarkdownParser<'a> { text.push('\n'); } + // We want to ignore any inline HTML tags in the text but keep + // the text between them + Event::InlineHtml(_) => {} + Event::Text(t) => { text.push_str(t.as_ref()); @@ -849,6 +853,16 @@ mod tests { ); } + #[gpui::test] + async fn test_text_with_inline_html() { + let parsed = parse("This is a paragraph with an inline HTML tag.").await; + + assert_eq!( + parsed.children, + vec![p("This is a paragraph with an inline HTML tag.", 0..63),], + ); + } + #[gpui::test] async fn test_raw_links_detection() { let parsed = parse("Checkout this https://zed.dev link").await; @@ -1092,6 +1106,26 @@ Some other content ); } + #[gpui::test] + async fn test_list_item_with_inline_html() { + let parsed = parse( + "\ +* This is a list item with an inline HTML tag. 
+", + ) + .await; + + assert_eq!( + parsed.children, + vec![list_item( + 0..67, + 1, + Unordered, + vec![p("This is a list item with an inline HTML tag.", 4..44),], + ),], + ); + } + #[gpui::test] async fn test_nested_list_with_paragraph_inside() { let parsed = parse( From 5506669b0654add16dca8447332f4804a4cea0d8 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Mon, 28 Oct 2024 01:15:23 +0800 Subject: [PATCH 06/87] windows: Fix more windows platform test (#19802) Release Notes: - N/A --------- Co-authored-by: Kirill Bulatov --- crates/editor/src/test/editor_test_context.rs | 18 ++++++++++++--- crates/fs/src/fs.rs | 22 ++++++++++++------- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 7234d97c5b..de5065d265 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -17,6 +17,7 @@ use project::{FakeFs, Project}; use std::{ any::TypeId, ops::{Deref, DerefMut, Range}, + path::Path, sync::{ atomic::{AtomicUsize, Ordering}, Arc, @@ -42,17 +43,18 @@ impl EditorTestContext { pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext { let fs = FakeFs::new(cx.executor()); // fs.insert_file("/file", "".to_owned()).await; + let root = Self::root_path(); fs.insert_tree( - "/root", + root, serde_json::json!({ "file": "", }), ) .await; - let project = Project::test(fs, ["/root".as_ref()], cx).await; + let project = Project::test(fs, [root], cx).await; let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/root/file", cx) + project.open_local_buffer(root.join("file"), cx) }) .await .unwrap(); @@ -71,6 +73,16 @@ impl EditorTestContext { } } + #[cfg(target_os = "windows")] + fn root_path() -> &'static Path { + Path::new("C:\\root") + } + + #[cfg(not(target_os = "windows"))] + fn root_path() -> &'static Path { + Path::new("/root") + } + pub async fn for_editor(editor: 
WindowHandle, cx: &mut gpui::TestAppContext) -> Self { let editor_view = editor.root_view(cx).unwrap(); Self { diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 5ee2947448..8483e5c02a 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -875,9 +875,11 @@ impl FakeFsState { canonical_path.clear(); match prefix { Some(prefix_component) => { - canonical_path.push(prefix_component.as_os_str()); + canonical_path = PathBuf::from(prefix_component.as_os_str()); + // Prefixes like `C:\\` are represented without their trailing slash, so we have to re-add it. + canonical_path.push(std::path::MAIN_SEPARATOR_STR); } - None => canonical_path.push("/"), + None => canonical_path = PathBuf::from(std::path::MAIN_SEPARATOR_STR), } } Component::CurDir => {} @@ -900,7 +902,7 @@ impl FakeFsState { } } entry_stack.push(entry.clone()); - canonical_path.push(name); + canonical_path = canonical_path.join(name); } else { return None; } @@ -962,6 +964,10 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> = #[cfg(any(test, feature = "test-support"))] impl FakeFs { + /// We need to use something large enough for Windows and Unix to consider this a new file. 
+ /// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior + const SYSTEMTIME_INTERVAL: u64 = 100; + pub fn new(executor: gpui::BackgroundExecutor) -> Arc { Arc::new(Self { executor, @@ -995,7 +1001,7 @@ impl FakeFs { let new_mtime = state.next_mtime; let new_inode = state.next_inode; state.next_inode += 1; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state .write_path(path, move |entry| { match entry { @@ -1048,7 +1054,7 @@ impl FakeFs { let inode = state.next_inode; let mtime = state.next_mtime; state.next_inode += 1; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, @@ -1399,7 +1405,7 @@ impl Fs for FakeFs { let inode = state.next_inode; let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state.next_inode += 1; state.write_path(&cur_path, |entry| { entry.or_insert_with(|| { @@ -1425,7 +1431,7 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let inode = state.next_inode; let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state.next_inode += 1; let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, @@ -1560,7 +1566,7 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let mtime = state.next_mtime; let inode = util::post_inc(&mut state.next_inode); - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let source_entry = state.read_path(&source)?; let content = source_entry.lock().file_content(&source)?.clone(); let mut kind = Some(PathEventKind::Created); From c69da2df7071c4c327f0d18af9201f9dd876aea1 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: 
Sun, 27 Oct 2024 15:50:54 -0700 Subject: [PATCH 07/87] Add support for git branches on remote projects (#19755) Release Notes: - Fixed a bug where the branch switcher could not be used remotely. --- Cargo.lock | 1 + crates/collab/src/rpc.rs | 2 + crates/collab/src/tests/integration_tests.rs | 92 ++++++ .../remote_editing_collaboration_tests.rs | 132 +++++++- crates/fs/src/fs.rs | 56 +++- crates/git/src/repository.rs | 78 ++++- crates/gpui/src/app.rs | 13 + crates/gpui/src/app/entity_map.rs | 9 + crates/gpui/src/app/test_context.rs | 6 + crates/gpui/src/global.rs | 1 + crates/gpui/src/gpui.rs | 1 + crates/project/src/project.rs | 28 +- crates/project/src/worktree_store.rs | 189 +++++++++++ crates/proto/proto/zed.proto | 28 +- crates/proto/src/proto.rs | 9 +- crates/recent_projects/src/ssh_connections.rs | 2 +- crates/remote_server/src/headless_project.rs | 2 +- .../remote_server/src/remote_editing_tests.rs | 312 +++++++++++++++--- crates/rpc/src/proto_client.rs | 21 +- crates/settings/src/settings_store.rs | 2 + crates/title_bar/src/title_bar.rs | 2 +- crates/util/src/arc_cow.rs | 6 + crates/vcs_menu/Cargo.toml | 1 + crates/vcs_menu/src/lib.rs | 121 ++++--- crates/worktree/src/worktree.rs | 6 + 25 files changed, 993 insertions(+), 127 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 75d058db38..7c81f692ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12843,6 +12843,7 @@ dependencies = [ "git", "gpui", "picker", + "project", "ui", "util", "workspace", diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 90277242f1..d091f04326 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -308,6 +308,8 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_mutating_project_request::) 
.add_request_handler(forward_mutating_project_request::) .add_request_handler( forward_mutating_project_request::, diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 80cc2500f5..c905c440cf 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -6575,3 +6575,95 @@ async fn test_context_collaboration_with_reconnect( assert!(context.buffer().read(cx).read_only()); }); } + +#[gpui::test] +async fn test_remote_git_branches( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/project", serde_json::json!({ ".git":{} })) + .await; + let branches = ["main", "dev", "feature-1"]; + client_a + .fs() + .insert_branches(Path::new("/project/.git"), &branches); + + let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let root_path = ProjectPath::root_path(worktree_id); + // Client A sees that a guest has joined. 
+ executor.run_until_parked(); + + let branches_b = cx_b + .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx))) + .await + .unwrap(); + + let new_branch = branches[2]; + + let branches_b = branches_b + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&branches_b, &branches); + + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let host_branch = cx_a.update(|cx| { + project_a.update(cx, |project, cx| { + project.worktree_store().update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(host_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let host_branch = cx_a.update(|cx| { + project_a.update(cx, |project, cx| { + project.worktree_store().update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(host_branch.as_ref(), "totally-new-branch"); +} diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 0e13c88d94..9fe546ffcd 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -1,7 +1,7 @@ use crate::tests::TestServer; use call::ActiveCall; use fs::{FakeFs, Fs as _}; -use gpui::{Context as _, TestAppContext}; +use gpui::{BackgroundExecutor, Context as _, TestAppContext}; use http_client::BlockedHttpClient; use language::{language_settings::language_settings, LanguageRegistry}; use 
node_runtime::NodeRuntime; @@ -174,3 +174,133 @@ async fn test_sharing_an_ssh_remote_project( ); }); } + +#[gpui::test] +async fn test_ssh_collaboration_git_branches( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + // Set up project on remote FS + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree("/project", serde_json::json!({ ".git":{} })) + .await; + + let branches = ["main", "dev", "feature-1"]; + remote_fs.insert_branches(Path::new("/project/.git"), &branches); + + // User A connects to the remote project via SSH. + server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let node = NodeRuntime::unavailable(); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let headless_project = server_cx.new_model(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: node, + languages, + }, + cx, + ) + }); + + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; + let (project_a, worktree_id) = client_a + .build_ssh_project("/project", client_ssh, cx_a) + .await; + + // While the SSH worktree is being scanned, user A shares the remote project. + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // User B joins the project. 
+ let project_b = client_b.join_remote_project(project_id, cx_b).await; + + // Give client A sometime to see that B has joined, and that the headless server + // has some git repositories + executor.run_until_parked(); + + let root_path = ProjectPath::root_path(worktree_id); + + let branches_b = cx_b + .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx))) + .await + .unwrap(); + + let new_branch = branches[2]; + + let branches_b = branches_b + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&branches_b, &branches); + + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), "totally-new-branch"); +} diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 8483e5c02a..4a84c27dfd 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -813,6 +813,7 @@ struct FakeFsState { root: Arc>, next_inode: u64, next_mtime: SystemTime, + git_event_tx: smol::channel::Sender, event_txs: Vec>>, events_paused: bool, 
buffered_events: Vec, @@ -969,8 +970,10 @@ impl FakeFs { const SYSTEMTIME_INTERVAL: u64 = 100; pub fn new(executor: gpui::BackgroundExecutor) -> Arc { - Arc::new(Self { - executor, + let (tx, mut rx) = smol::channel::bounded::(10); + + let this = Arc::new(Self { + executor: executor.clone(), state: Mutex::new(FakeFsState { root: Arc::new(Mutex::new(FakeFsEntry::Dir { inode: 0, @@ -979,6 +982,7 @@ impl FakeFs { entries: Default::default(), git_repo_state: None, })), + git_event_tx: tx, next_mtime: SystemTime::UNIX_EPOCH, next_inode: 1, event_txs: Default::default(), @@ -987,7 +991,22 @@ impl FakeFs { read_dir_call_count: 0, metadata_call_count: 0, }), - }) + }); + + executor.spawn({ + let this = this.clone(); + async move { + while let Some(git_event) = rx.next().await { + if let Some(mut state) = this.state.try_lock() { + state.emit_event([(git_event, None)]); + } else { + panic!("Failed to lock file system state, this execution would have caused a test hang"); + } + } + } + }).detach(); + + this } pub fn set_next_mtime(&self, next_mtime: SystemTime) { @@ -1181,7 +1200,12 @@ impl FakeFs { let mut entry = entry.lock(); if let FakeFsEntry::Dir { git_repo_state, .. 
} = &mut *entry { - let repo_state = git_repo_state.get_or_insert_with(Default::default); + let repo_state = git_repo_state.get_or_insert_with(|| { + Arc::new(Mutex::new(FakeGitRepositoryState::new( + dot_git.to_path_buf(), + state.git_event_tx.clone(), + ))) + }); let mut repo_state = repo_state.lock(); f(&mut repo_state); @@ -1196,7 +1220,22 @@ impl FakeFs { pub fn set_branch_name(&self, dot_git: &Path, branch: Option>) { self.with_git_state(dot_git, true, |state| { - state.branch_name = branch.map(Into::into) + let branch = branch.map(Into::into); + state.branches.extend(branch.clone()); + state.current_branch_name = branch.map(Into::into) + }) + } + + pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) { + self.with_git_state(dot_git, true, |state| { + if let Some(first) = branches.first() { + if state.current_branch_name.is_none() { + state.current_branch_name = Some(first.to_string()) + } + } + state + .branches + .extend(branches.iter().map(ToString::to_string)); }) } @@ -1836,7 +1875,12 @@ impl Fs for FakeFs { let mut entry = entry.lock(); if let FakeFsEntry::Dir { git_repo_state, .. 
} = &mut *entry { let state = git_repo_state - .get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default()))) + .get_or_insert_with(|| { + Arc::new(Mutex::new(FakeGitRepositoryState::new( + abs_dot_git.to_path_buf(), + state.git_event_tx.clone(), + ))) + }) .clone(); Some(git::repository::FakeGitRepository::open(state)) } else { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 1b3686f021..fe65816cc5 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,8 +1,9 @@ use crate::GitHostingProviderRegistry; use crate::{blame::Blame, status::GitStatus}; use anyhow::{Context, Result}; -use collections::HashMap; +use collections::{HashMap, HashSet}; use git2::BranchType; +use gpui::SharedString; use parking_lot::Mutex; use rope::Rope; use serde::{Deserialize, Serialize}; @@ -17,7 +18,7 @@ use util::ResultExt; #[derive(Clone, Debug, Hash, PartialEq)] pub struct Branch { pub is_head: bool, - pub name: Box, + pub name: SharedString, /// Timestamp of most recent commit, normalized to Unix Epoch format. 
pub unix_timestamp: Option, } @@ -41,6 +42,7 @@ pub trait GitRepository: Send + Sync { fn branches(&self) -> Result>; fn change_branch(&self, _: &str) -> Result<()>; fn create_branch(&self, _: &str) -> Result<()>; + fn branch_exits(&self, _: &str) -> Result; fn blame(&self, path: &Path, content: Rope) -> Result; } @@ -132,6 +134,18 @@ impl GitRepository for RealGitRepository { GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes) } + fn branch_exits(&self, name: &str) -> Result { + let repo = self.repository.lock(); + let branch = repo.find_branch(name, BranchType::Local); + match branch { + Ok(_) => Ok(true), + Err(e) => match e.code() { + git2::ErrorCode::NotFound => Ok(false), + _ => Err(anyhow::anyhow!(e)), + }, + } + } + fn branches(&self) -> Result> { let repo = self.repository.lock(); let local_branches = repo.branches(Some(BranchType::Local))?; @@ -139,7 +153,11 @@ impl GitRepository for RealGitRepository { .filter_map(|branch| { branch.ok().and_then(|(branch, _)| { let is_head = branch.is_head(); - let name = branch.name().ok().flatten().map(Box::from)?; + let name = branch + .name() + .ok() + .flatten() + .map(|name| name.to_string().into())?; let timestamp = branch.get().peel_to_commit().ok()?.time(); let unix_timestamp = timestamp.seconds(); let timezone_offset = timestamp.offset_minutes(); @@ -201,17 +219,20 @@ impl GitRepository for RealGitRepository { } } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct FakeGitRepository { state: Arc>, } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct FakeGitRepositoryState { + pub path: PathBuf, + pub event_emitter: smol::channel::Sender, pub index_contents: HashMap, pub blames: HashMap, pub worktree_statuses: HashMap, - pub branch_name: Option, + pub current_branch_name: Option, + pub branches: HashSet, } impl FakeGitRepository { @@ -220,6 +241,20 @@ impl FakeGitRepository { } } +impl FakeGitRepositoryState { + pub fn new(path: PathBuf, event_emitter: 
smol::channel::Sender) -> Self { + FakeGitRepositoryState { + path, + event_emitter, + index_contents: Default::default(), + blames: Default::default(), + worktree_statuses: Default::default(), + current_branch_name: Default::default(), + branches: Default::default(), + } + } +} + impl GitRepository for FakeGitRepository { fn reload_index(&self) {} @@ -234,7 +269,7 @@ impl GitRepository for FakeGitRepository { fn branch_name(&self) -> Option { let state = self.state.lock(); - state.branch_name.clone() + state.current_branch_name.clone() } fn head_sha(&self) -> Option { @@ -264,18 +299,41 @@ impl GitRepository for FakeGitRepository { } fn branches(&self) -> Result> { - Ok(vec![]) + let state = self.state.lock(); + let current_branch = &state.current_branch_name; + Ok(state + .branches + .iter() + .map(|branch_name| Branch { + is_head: Some(branch_name) == current_branch.as_ref(), + name: branch_name.into(), + unix_timestamp: None, + }) + .collect()) + } + + fn branch_exits(&self, name: &str) -> Result { + let state = self.state.lock(); + Ok(state.branches.contains(name)) } fn change_branch(&self, name: &str) -> Result<()> { let mut state = self.state.lock(); - state.branch_name = Some(name.to_owned()); + state.current_branch_name = Some(name.to_owned()); + state + .event_emitter + .try_send(state.path.clone()) + .expect("Dropped repo change event"); Ok(()) } fn create_branch(&self, name: &str) -> Result<()> { let mut state = self.state.lock(); - state.branch_name = Some(name.to_owned()); + state.branches.insert(name.to_owned()); + state + .event_emitter + .try_send(state.path.clone()) + .expect("Dropped repo change event"); Ok(()) } diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index f81a2092d5..096f495a88 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -256,6 +256,9 @@ pub struct AppContext { pub(crate) layout_id_buffer: Vec, // We recycle this memory across layout requests. 
pub(crate) propagate_event: bool, pub(crate) prompt_builder: Option, + + #[cfg(any(test, feature = "test-support", debug_assertions))] + pub(crate) name: Option<&'static str>, } impl AppContext { @@ -309,6 +312,9 @@ impl AppContext { layout_id_buffer: Default::default(), propagate_event: true, prompt_builder: Some(PromptBuilder::Default), + + #[cfg(any(test, feature = "test-support", debug_assertions))] + name: None, }), }); @@ -988,6 +994,7 @@ impl AppContext { } /// Move the global of the given type to the stack. + #[track_caller] pub(crate) fn lease_global(&mut self) -> GlobalLease { GlobalLease::new( self.globals_by_type @@ -1319,6 +1326,12 @@ impl AppContext { (task, is_first) } + + /// Get the name for this App. + #[cfg(any(test, feature = "test-support", debug_assertions))] + pub fn get_name(&self) -> &'static str { + self.name.as_ref().unwrap() + } } impl Context for AppContext { diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index 4d5452acc0..07aa466295 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -536,6 +536,15 @@ impl AnyWeakModel { } } +impl std::fmt::Debug for AnyWeakModel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct(type_name::()) + .field("entity_id", &self.entity_id) + .field("entity_type", &self.entity_type) + .finish() + } +} + impl From> for AnyWeakModel { fn from(model: WeakModel) -> Self { model.any_model diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index f46cdc8e34..34449c91ec 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -478,6 +478,12 @@ impl TestAppContext { .await .unwrap(); } + + /// Set a name for this App. 
+ #[cfg(any(test, feature = "test-support"))] + pub fn set_name(&mut self, name: &'static str) { + self.update(|cx| cx.name = Some(name)) + } } impl Model { diff --git a/crates/gpui/src/global.rs b/crates/gpui/src/global.rs index 05f1598364..96f5d5fed5 100644 --- a/crates/gpui/src/global.rs +++ b/crates/gpui/src/global.rs @@ -57,6 +57,7 @@ pub trait UpdateGlobal { } impl UpdateGlobal for T { + #[track_caller] fn update_global(cx: &mut C, update: F) -> R where C: BorrowAppContext, diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 7ba3ce055e..2952f4af8a 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -306,6 +306,7 @@ where self.borrow_mut().set_global(global) } + #[track_caller] fn update_global(&mut self, f: impl FnOnce(&mut G, &mut Self) -> R) -> R where G: Global, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 167d5c1d49..b2fc8c5304 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -288,6 +288,13 @@ impl ProjectPath { path: self.path.to_string_lossy().to_string(), } } + + pub fn root_path(worktree_id: WorktreeId) -> Self { + Self { + worktree_id, + path: Path::new("").into(), + } + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -701,7 +708,7 @@ impl Project { let ssh_proto = ssh.read(cx).proto_client(); let worktree_store = - cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), 0)); + cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), SSH_PROJECT_ID)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -3370,6 +3377,25 @@ impl Project { worktree.get_local_repo(&root_entry)?.repo().clone().into() } + pub fn branches( + &self, + project_path: ProjectPath, + cx: &AppContext, + ) -> Task>> { + self.worktree_store().read(cx).branches(project_path, cx) + } + + pub fn update_or_create_branch( + &self, + repository: ProjectPath, + new_branch: String, + cx: &AppContext, + ) -> Task> { + self.worktree_store() + .read(cx) 
+ .update_or_create_branch(repository, new_branch, cx) + } + pub fn blame_buffer( &self, buffer: &Model, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index df190d03f3..dc67eedbc1 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -73,6 +73,8 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_copy_project_entry); client.add_model_request_handler(Self::handle_delete_project_entry); client.add_model_request_handler(Self::handle_expand_project_entry); + client.add_model_request_handler(Self::handle_git_branches); + client.add_model_request_handler(Self::handle_update_branch); } pub fn local(retain_worktrees: bool, fs: Arc) -> Self { @@ -127,6 +129,13 @@ impl WorktreeStore { .find(|worktree| worktree.read(cx).id() == id) } + pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option> { + self.worktree_for_id(repository.worktree_id, cx)? + .read(cx) + .git_entry(repository.path)? + .branch() + } + pub fn worktree_for_entry( &self, entry_id: ProjectEntryId, @@ -836,6 +845,131 @@ impl WorktreeStore { Ok(()) } + pub fn branches( + &self, + project_path: ProjectPath, + cx: &AppContext, + ) -> Task>> { + let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree found for ProjectPath"))); + }; + + match worktree.read(cx) { + Worktree::Local(local_worktree) => { + let branches = util::maybe!({ + let worktree_error = |error| { + format!( + "{} for worktree {}", + error, + local_worktree.abs_path().to_string_lossy() + ) + }; + + let entry = local_worktree + .git_entry(project_path.path) + .with_context(|| worktree_error("No git entry found"))?; + + let repo = local_worktree + .get_local_repo(&entry) + .with_context(|| worktree_error("No repository found"))? 
+ .repo() + .clone(); + + repo.branches() + }); + + Task::ready(branches) + } + Worktree::Remote(remote_worktree) => { + let request = remote_worktree.client().request(proto::GitBranches { + project_id: remote_worktree.project_id(), + repository: Some(proto::ProjectPath { + worktree_id: project_path.worktree_id.to_proto(), + path: project_path.path.to_string_lossy().to_string(), // Root path + }), + }); + + cx.background_executor().spawn(async move { + let response = request.await?; + + let branches = response + .branches + .into_iter() + .map(|proto_branch| git::repository::Branch { + is_head: proto_branch.is_head, + name: proto_branch.name.into(), + unix_timestamp: proto_branch + .unix_timestamp + .map(|timestamp| timestamp as i64), + }) + .collect(); + + Ok(branches) + }) + } + } + } + + pub fn update_or_create_branch( + &self, + repository: ProjectPath, + new_branch: String, + cx: &AppContext, + ) -> Task> { + let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree found for ProjectPath"))); + }; + + match worktree.read(cx) { + Worktree::Local(local_worktree) => { + let result = util::maybe!({ + let worktree_error = |error| { + format!( + "{} for worktree {}", + error, + local_worktree.abs_path().to_string_lossy() + ) + }; + + let entry = local_worktree + .git_entry(repository.path) + .with_context(|| worktree_error("No git entry found"))?; + + let repo = local_worktree + .get_local_repo(&entry) + .with_context(|| worktree_error("No repository found"))? + .repo() + .clone(); + + if !repo.branch_exits(&new_branch)? 
{ + repo.create_branch(&new_branch)?; + } + + repo.change_branch(&new_branch)?; + + Ok(()) + }); + + Task::ready(result) + } + Worktree::Remote(remote_worktree) => { + let request = remote_worktree.client().request(proto::UpdateGitBranch { + project_id: remote_worktree.project_id(), + repository: Some(proto::ProjectPath { + worktree_id: repository.worktree_id.to_proto(), + path: repository.path.to_string_lossy().to_string(), // Root path + }), + branch_name: new_branch, + }); + + cx.background_executor().spawn(async move { + request.await?; + Ok(()) + }) + } + } + } + async fn filter_paths( fs: &Arc, mut input: Receiver, @@ -917,6 +1051,61 @@ impl WorktreeStore { .ok_or_else(|| anyhow!("invalid request"))?; Worktree::handle_expand_entry(worktree, envelope.payload, cx).await } + + pub async fn handle_git_branches( + this: Model, + branches: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let project_path = branches + .payload + .repository + .clone() + .context("Invalid GitBranches call")?; + let project_path = ProjectPath { + worktree_id: WorktreeId::from_proto(project_path.worktree_id), + path: Path::new(&project_path.path).into(), + }; + + let branches = this + .read_with(&cx, |this, cx| this.branches(project_path, cx))? 
+ .await?; + + Ok(proto::GitBranchesResponse { + branches: branches + .into_iter() + .map(|branch| proto::Branch { + is_head: branch.is_head, + name: branch.name.to_string(), + unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64), + }) + .collect(), + }) + } + + pub async fn handle_update_branch( + this: Model, + update_branch: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let project_path = update_branch + .payload + .repository + .clone() + .context("Invalid GitBranches call")?; + let project_path = ProjectPath { + worktree_id: WorktreeId::from_proto(project_path.worktree_id), + path: Path::new(&project_path.path).into(), + }; + let new_branch = update_branch.payload.branch_name; + + this.read_with(&cx, |this, cx| { + this.update_or_create_branch(project_path, new_branch, cx) + })? + .await?; + + Ok(proto::Ack {}) + } } #[derive(Clone, Debug)] diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 5635eb8800..c61a14cdbf 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -281,7 +281,12 @@ message Envelope { FlushBufferedMessages flush_buffered_messages = 267; LanguageServerPromptRequest language_server_prompt_request = 268; - LanguageServerPromptResponse language_server_prompt_response = 269; // current max + LanguageServerPromptResponse language_server_prompt_response = 269; + + GitBranches git_branches = 270; + GitBranchesResponse git_branches_response = 271; + + UpdateGitBranch update_git_branch = 272; // current max } @@ -2432,3 +2437,24 @@ message LanguageServerPromptRequest { message LanguageServerPromptResponse { optional uint64 action_response = 1; } + +message Branch { + bool is_head = 1; + string name = 2; + optional uint64 unix_timestamp = 3; +} + +message GitBranches { + uint64 project_id = 1; + ProjectPath repository = 2; +} + +message GitBranchesResponse { + repeated Branch branches = 1; +} + +message UpdateGitBranch { + uint64 project_id = 1; + string branch_name = 2; 
+ ProjectPath repository = 3; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 7a31e7cc7a..3807e04bd5 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -357,6 +357,9 @@ messages!( (FlushBufferedMessages, Foreground), (LanguageServerPromptRequest, Foreground), (LanguageServerPromptResponse, Foreground), + (GitBranches, Background), + (GitBranchesResponse, Background), + (UpdateGitBranch, Background) ); request_messages!( @@ -473,6 +476,8 @@ request_messages!( (GetPermalinkToLine, GetPermalinkToLineResponse), (FlushBufferedMessages, Ack), (LanguageServerPromptRequest, LanguageServerPromptResponse), + (GitBranches, GitBranchesResponse), + (UpdateGitBranch, Ack) ); entity_messages!( @@ -550,7 +555,9 @@ entity_messages!( HideToast, OpenServerSettings, GetPermalinkToLine, - LanguageServerPromptRequest + LanguageServerPromptRequest, + GitBranches, + UpdateGitBranch ); entity_messages!( diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 55204e14b9..7dc2853650 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -631,7 +631,7 @@ impl SshClientDelegate { self.update_status( Some(&format!( - "Building remote server binary from source for {}", + "Building remote server binary from source for {} with Docker", &triple )), cx, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 4385dac1fe..81be01b6a6 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Result}; use fs::Fs; -use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, PromptLevel}; +use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, PromptLevel}; use http_client::HttpClient; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; use 
node_runtime::NodeRuntime; diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index f7420ef5b0..82e3824eb0 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -26,7 +26,29 @@ use std::{ #[gpui::test] async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + "project2": { + "README.md": "# project 2", + }, + }), + ) + .await; + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], + ); + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -128,7 +150,22 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test #[gpui::test] async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, _) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; project .update(cx, |project, cx| { @@ -193,7 +230,22 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes #[gpui::test] async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + 
fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; cx.update_global(|settings_store: &mut SettingsStore, cx| { settings_store.set_user_settings( @@ -304,7 +356,22 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo #[gpui::test] async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; fs.insert_tree( "/code/project1/.zed", @@ -463,7 +530,22 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext #[gpui::test] async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -523,7 +605,22 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont #[gpui::test] async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, 
+ "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -566,7 +663,22 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut #[gpui::test(iterations = 10)] async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -597,7 +709,25 @@ async fn test_adding_then_removing_then_adding_worktrees( cx: &mut TestAppContext, server_cx: &mut TestAppContext, ) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + "project2": { + "README.md": "# project 2", + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (_worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -636,9 +766,25 @@ async fn test_adding_then_removing_then_adding_worktrees( #[gpui::test] async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + 
"project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let buffer = project.update(cx, |project, cx| project.open_server_settings(cx)); cx.executor().run_until_parked(); + let buffer = buffer.await.unwrap(); cx.update(|cx| { @@ -651,7 +797,22 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test #[gpui::test(iterations = 20)] async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { @@ -690,19 +851,8 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) ); } -fn init_logger() { - if std::env::var("RUST_LOG").is_ok() { - env_logger::try_init().ok(); - } -} - -async fn init_test( - cx: &mut TestAppContext, - server_cx: &mut TestAppContext, -) -> (Model, Model, Arc) { - init_logger(); - - let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); +#[gpui::test] +async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", @@ -710,32 +860,109 @@ async fn init_test( "project1": { ".git": {}, "README.md": "# project 1", - "src": { - "lib.rs": "fn one() -> usize { 1 }" - } - }, - "project2": { - "README.md": "# project 2", }, }), ) .await; - fs.set_index_for_repo( - Path::new("/code/project1/.git"), - &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], - ); - server_cx.update(HeadlessProject::init); + let 
(project, headless_project) = init_test(&fs, cx, server_cx).await; + let branches = ["main", "dev", "feature-1"]; + fs.insert_branches(Path::new("/code/project1/.git"), &branches); + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + let root_path = ProjectPath::root_path(worktree_id); + // Give the worktree a bit of time to index the file system + cx.run_until_parked(); + + let remote_branches = project + .update(cx, |project, cx| project.branches(root_path.clone(), cx)) + .await + .unwrap(); + + let new_branch = branches[2]; + + let remote_branches = remote_branches + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&remote_branches, &branches); + + cx.update(|cx| { + project.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx.update(|cx| { + project.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), "totally-new-branch"); +} + +pub async fn init_test( + server_fs: &Arc, + cx: &mut TestAppContext, + 
server_cx: &mut TestAppContext, +) -> (Model, Model) { + let server_fs = server_fs.clone(); + init_logger(); + + let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); let http_client = Arc::new(BlockedHttpClient); let node_runtime = NodeRuntime::unavailable(); let languages = Arc::new(LanguageRegistry::new(cx.executor())); + server_cx.update(HeadlessProject::init); let headless = server_cx.new_model(|cx| { client::init_settings(cx); HeadlessProject::new( crate::HeadlessAppState { session: ssh_server_client, - fs: fs.clone(), + fs: server_fs.clone(), http_client, node_runtime, languages, @@ -752,13 +979,21 @@ async fn init_test( |_, cx| cx.on_release(|_, _| drop(headless)) }) .detach(); - (project, headless, fs) + (project, headless) +} + +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } } fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model { cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); + if !cx.has_global::() { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + } }); let client = cx.update(|cx| { @@ -773,6 +1008,7 @@ fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model< let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); let languages = Arc::new(LanguageRegistry::test(cx.executor())); let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { Project::init(&client, cx); language::init(cx); diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 56b13688ba..9288416d57 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -123,7 +123,6 @@ impl ProtoMessageHandlerSet { let extract_entity_id = *this.entity_id_extractors.get(&payload_type_id)?; let entity_type_id = *this.entity_types_by_message_type.get(&payload_type_id)?; let entity_id = (extract_entity_id)(message.as_ref()); - match this .entities_by_type_and_remote_id 
.get_mut(&(entity_type_id, entity_id))? @@ -145,6 +144,26 @@ pub enum EntityMessageSubscriber { Pending(Vec>), } +impl std::fmt::Debug for EntityMessageSubscriber { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + EntityMessageSubscriber::Entity { handle } => f + .debug_struct("EntityMessageSubscriber::Entity") + .field("handle", handle) + .finish(), + EntityMessageSubscriber::Pending(vec) => f + .debug_struct("EntityMessageSubscriber::Pending") + .field( + "envelopes", + &vec.iter() + .map(|envelope| envelope.payload_type_name()) + .collect::>(), + ) + .finish(), + } + } +} + impl From> for AnyProtoClient where T: ProtoClient + 'static, diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 0130adf99c..620055a971 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -61,6 +61,7 @@ pub trait Settings: 'static + Send + Sync { anyhow::anyhow!("missing default") } + #[track_caller] fn register(cx: &mut AppContext) where Self: Sized, @@ -271,6 +272,7 @@ impl SettingsStore { pub fn register_setting(&mut self, cx: &mut AppContext) { let setting_type_id = TypeId::of::(); let entry = self.setting_values.entry(setting_type_id); + if matches!(entry, hash_map::Entry::Occupied(_)) { return; } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 74c5b2812a..f58eaa89a0 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -447,7 +447,7 @@ impl TitleBar { }) .on_click(move |_, cx| { let _ = workspace.update(cx, |this, cx| { - BranchList::open(this, &Default::default(), cx) + BranchList::open(this, &Default::default(), cx); }); }), ) diff --git a/crates/util/src/arc_cow.rs b/crates/util/src/arc_cow.rs index 02ad1fa1f0..06a2fa9cd0 100644 --- a/crates/util/src/arc_cow.rs +++ b/crates/util/src/arc_cow.rs @@ -75,6 +75,12 @@ impl From for ArcCow<'_, str> { } } +impl From<&String> for 
ArcCow<'_, str> { + fn from(value: &String) -> Self { + Self::Owned(value.clone().into()) + } +} + impl<'a> From> for ArcCow<'a, str> { fn from(value: Cow<'a, str>) -> Self { match value { diff --git a/crates/vcs_menu/Cargo.toml b/crates/vcs_menu/Cargo.toml index 75dcad83df..11de371868 100644 --- a/crates/vcs_menu/Cargo.toml +++ b/crates/vcs_menu/Cargo.toml @@ -14,6 +14,7 @@ fuzzy.workspace = true git.workspace = true gpui.workspace = true picker.workspace = true +project.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/vcs_menu/src/lib.rs b/crates/vcs_menu/src/lib.rs index 720a427ae9..3ee289df0e 100644 --- a/crates/vcs_menu/src/lib.rs +++ b/crates/vcs_menu/src/lib.rs @@ -2,24 +2,23 @@ use anyhow::{Context, Result}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::repository::Branch; use gpui::{ - actions, rems, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, - InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription, - Task, View, ViewContext, VisualContext, WindowContext, + actions, rems, AnyElement, AppContext, AsyncAppContext, DismissEvent, EventEmitter, + FocusHandle, FocusableView, InteractiveElement, IntoElement, ParentElement, Render, + SharedString, Styled, Subscription, Task, View, ViewContext, VisualContext, WindowContext, }; use picker::{Picker, PickerDelegate}; +use project::ProjectPath; use std::{ops::Not, sync::Arc}; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::ResultExt; -use workspace::notifications::NotificationId; -use workspace::{ModalView, Toast, Workspace}; +use workspace::notifications::DetachAndPromptErr; +use workspace::{ModalView, Workspace}; actions!(branches, [OpenRecent]); pub fn init(cx: &mut AppContext) { cx.observe_new_views(|workspace: &mut Workspace, _| { - workspace.register_action(|workspace, action, cx| { - BranchList::open(workspace, action, cx).log_err(); - }); + 
workspace.register_action(BranchList::open); }) .detach(); } @@ -31,6 +30,21 @@ pub struct BranchList { } impl BranchList { + pub fn open(_: &mut Workspace, _: &OpenRecent, cx: &mut ViewContext) { + let this = cx.view().clone(); + cx.spawn(|_, mut cx| async move { + // Modal branch picker has a longer trailoff than a popover one. + let delegate = BranchListDelegate::new(this.clone(), 70, &cx).await?; + + this.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx)) + })?; + + Ok(()) + }) + .detach_and_prompt_err("Failed to read branches", cx, |_, _| None) + } + fn new(delegate: BranchListDelegate, rem_width: f32, cx: &mut ViewContext) -> Self { let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent)); @@ -40,17 +54,6 @@ impl BranchList { _subscription, } } - pub fn open( - workspace: &mut Workspace, - _: &OpenRecent, - cx: &mut ViewContext, - ) -> Result<()> { - // Modal branch picker has a longer trailoff than a popover one. 
- let delegate = BranchListDelegate::new(workspace, cx.view().clone(), 70, cx)?; - workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx)); - - Ok(()) - } } impl ModalView for BranchList {} impl EventEmitter for BranchList {} @@ -100,36 +103,32 @@ pub struct BranchListDelegate { } impl BranchListDelegate { - fn new( - workspace: &Workspace, - handle: View, + async fn new( + workspace: View, branch_name_trailoff_after: usize, - cx: &AppContext, + cx: &AsyncAppContext, ) -> Result { - let project = workspace.project().read(cx); - let repo = project - .get_first_worktree_root_repo(cx) - .context("failed to get root repository for first worktree")?; + let all_branches_request = cx.update(|cx| { + let project = workspace.read(cx).project().read(cx); + let first_worktree = project + .visible_worktrees(cx) + .next() + .context("No worktrees found")?; + let project_path = ProjectPath::root_path(first_worktree.read(cx).id()); + anyhow::Ok(project.branches(project_path, cx)) + })??; + + let all_branches = all_branches_request.await?; - let all_branches = repo.branches()?; Ok(Self { matches: vec![], - workspace: handle, + workspace, all_branches, selected_index: 0, last_query: Default::default(), branch_name_trailoff_after, }) } - - fn display_error_toast(&self, message: String, cx: &mut WindowContext<'_>) { - self.workspace.update(cx, |model, ctx| { - struct GitCheckoutFailure; - let id = NotificationId::unique::(); - - model.show_toast(Toast::new(id, message), ctx) - }); - } } impl PickerDelegate for BranchListDelegate { @@ -235,40 +234,32 @@ impl PickerDelegate for BranchListDelegate { cx.spawn({ let branch = branch.clone(); |picker, mut cx| async move { - picker - .update(&mut cx, |this, cx| { - let project = this.delegate.workspace.read(cx).project().read(cx); - let repo = project - .get_first_worktree_root_repo(cx) - .context("failed to get root repository for first worktree")?; + let branch_change_task = picker.update(&mut cx, |this, cx| { + let project = 
this.delegate.workspace.read(cx).project().read(cx); - let branch_to_checkout = match branch { - BranchEntry::Branch(branch) => branch.string, - BranchEntry::NewBranch { name: branch_name } => { - let status = repo.create_branch(&branch_name); - if status.is_err() { - this.delegate.display_error_toast(format!("Failed to create branch '{branch_name}', check for conflicts or unstashed files"), cx); - status?; - } + let branch_to_checkout = match branch { + BranchEntry::Branch(branch) => branch.string, + BranchEntry::NewBranch { name: branch_name } => branch_name, + }; + let worktree = project + .worktrees(cx) + .next() + .context("worktree disappeared")?; + let repository = ProjectPath::root_path(worktree.read(cx).id()); - branch_name - } - }; + anyhow::Ok(project.update_or_create_branch(repository, branch_to_checkout, cx)) + })??; - let status = repo.change_branch(&branch_to_checkout); - if status.is_err() { - this.delegate.display_error_toast(format!("Failed to checkout branch '{branch_to_checkout}', check for conflicts or unstashed files"), cx); - status?; - } + branch_change_task.await?; - cx.emit(DismissEvent); + picker.update(&mut cx, |_, cx| { + cx.emit(DismissEvent); - Ok::<(), anyhow::Error>(()) - }) - .log_err(); + Ok::<(), anyhow::Error>(()) + }) } }) - .detach(); + .detach_and_prompt_err("Failed to change branch", cx, |_, _| None); } fn dismissed(&mut self, cx: &mut ViewContext>) { diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 722a7b3f0a..ba65eae87c 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2385,6 +2385,12 @@ impl Snapshot { .map(|entry| entry.to_owned()) } + pub fn git_entry(&self, work_directory_path: Arc) -> Option { + self.repository_entries + .get(&RepositoryWorkDirectory(work_directory_path)) + .map(|entry| entry.to_owned()) + } + pub fn git_entries(&self) -> impl Iterator { self.repository_entries.values() } From 2d16d2d0363ad69653ab0c1a3736de74a07f476e Mon Sep 17 
00:00:00 2001 From: Kirill Bulatov Date: Mon, 28 Oct 2024 02:31:02 +0200 Subject: [PATCH 08/87] Fixed outline panel panicking on filtering (#19811) Closes https://github.com/zed-industries/zed/issues/19732 Release Notes: - Fixed outline panel panicking on filtering ([#19732](https://github.com/zed-industries/zed/issues/19732)) --- crates/outline_panel/src/outline_panel.rs | 313 +++++++++------------- 1 file changed, 132 insertions(+), 181 deletions(-) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 25dd5cba8d..72b97c8f69 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -2825,7 +2825,6 @@ impl OutlinePanel { cx.spawn(|outline_panel, mut cx| async move { let mut entries = Vec::new(); let mut match_candidates = Vec::new(); - let mut added_contexts = HashSet::default(); let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; @@ -2947,7 +2946,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, new_folded_dirs, folded_depth, @@ -2986,7 +2984,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3012,7 +3009,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3049,7 +3045,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::Fs(entry.clone()), depth, @@ -3063,7 +3058,6 @@ impl OutlinePanel { outline_panel.add_search_entries( &mut entries, &mut match_candidates, - &mut added_contexts, entry.clone(), depth, query.clone(), @@ -3097,7 +3091,6 @@ impl 
OutlinePanel { query.as_deref(), &mut entries, &mut match_candidates, - &mut added_contexts, cx, ); } @@ -3113,7 +3106,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::Fs(entry.clone()), 0, @@ -3132,7 +3124,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3144,22 +3135,10 @@ impl OutlinePanel { return Vec::new(); }; - outline_panel - .update(&mut cx, |outline_panel, _| { - if matches!(outline_panel.mode, ItemsDisplayMode::Search(_)) { - cleanup_fs_entries_without_search_children( - &outline_panel.collapsed_entries, - &mut entries, - &mut match_candidates, - &mut added_contexts, - ); - } - }) - .ok(); - let Some(query) = query else { return entries; }; + let mut matched_ids = match_strings( &match_candidates, &query, @@ -3195,7 +3174,6 @@ impl OutlinePanel { &self, entries: &mut Vec, match_candidates: &mut Vec, - added_contexts: &mut HashSet, track_matches: bool, entry: PanelEntry, depth: usize, @@ -3221,47 +3199,39 @@ impl OutlinePanel { if let Some(file_name) = self.relative_path(fs_entry, cx).as_deref().map(file_name) { - if added_contexts.insert(file_name.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: file_name.to_string(), - char_bag: file_name.chars().collect(), - }); - } + match_candidates.push(StringMatchCandidate { + id, + string: file_name.to_string(), + char_bag: file_name.chars().collect(), + }); } } PanelEntry::FoldedDirs(worktree_id, entries) => { let dir_names = self.dir_names_string(entries, *worktree_id, cx); { - if added_contexts.insert(dir_names.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: dir_names.clone(), - char_bag: dir_names.chars().collect(), - }); - } + match_candidates.push(StringMatchCandidate { + id, + string: dir_names.clone(), + char_bag: 
dir_names.chars().collect(), + }); } } PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Outline(_, _, outline) => { - if added_contexts.insert(outline.text.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: outline.text.clone(), - char_bag: outline.text.chars().collect(), - }); - } + match_candidates.push(StringMatchCandidate { + id, + string: outline.text.clone(), + char_bag: outline.text.chars().collect(), + }); } OutlineEntry::Excerpt(..) => {} }, PanelEntry::Search(new_search_entry) => { - if added_contexts.insert(new_search_entry.render_data.context_text.clone()) { - match_candidates.push(StringMatchCandidate { - id, - char_bag: new_search_entry.render_data.context_text.chars().collect(), - string: new_search_entry.render_data.context_text.clone(), - }); - } + match_candidates.push(StringMatchCandidate { + id, + char_bag: new_search_entry.render_data.context_text.chars().collect(), + string: new_search_entry.render_data.context_text.clone(), + }); } } } @@ -3408,7 +3378,6 @@ impl OutlinePanel { query: Option<&str>, entries: &mut Vec, match_candidates: &mut Vec, - added_contexts: &mut HashSet, cx: &mut ViewContext, ) { if let Some(excerpts) = self.excerpts.get(&buffer_id) { @@ -3420,7 +3389,6 @@ impl OutlinePanel { self.push_entry( entries, match_candidates, - added_contexts, track_matches, PanelEntry::Outline(OutlineEntry::Excerpt( buffer_id, @@ -3448,7 +3416,6 @@ impl OutlinePanel { self.push_entry( entries, match_candidates, - added_contexts, track_matches, PanelEntry::Outline(OutlineEntry::Outline( buffer_id, @@ -3468,7 +3435,6 @@ impl OutlinePanel { &mut self, entries: &mut Vec, match_candidates: &mut Vec, - added_contexts: &mut HashSet, parent_entry: FsEntry, parent_depth: usize, filter_query: Option, @@ -3556,7 +3522,6 @@ impl OutlinePanel { self.push_entry( entries, match_candidates, - added_contexts, filter_query.is_some(), PanelEntry::Search(new_search_entry), depth, @@ -3618,131 +3583,6 @@ impl 
OutlinePanel { } } -fn cleanup_fs_entries_without_search_children( - collapsed_entries: &HashSet, - entries: &mut Vec, - string_match_candidates: &mut Vec, - added_contexts: &mut HashSet, -) { - let mut match_ids_to_remove = BTreeSet::new(); - let mut previous_entry = None::<&PanelEntry>; - for (id, entry) in entries.iter().enumerate().rev() { - let has_search_items = match (previous_entry, &entry.entry) { - (Some(PanelEntry::Outline(_)), _) => unreachable!(), - (_, PanelEntry::Outline(_)) => false, - (_, PanelEntry::Search(_)) => true, - (None, PanelEntry::FoldedDirs(_, _) | PanelEntry::Fs(_)) => false, - ( - Some(PanelEntry::Search(_)), - PanelEntry::FoldedDirs(_, _) | PanelEntry::Fs(FsEntry::Directory(..)), - ) => false, - (Some(PanelEntry::FoldedDirs(..)), PanelEntry::FoldedDirs(..)) => true, - ( - Some(PanelEntry::Search(_)), - PanelEntry::Fs(FsEntry::File(..) | FsEntry::ExternalFile(..)), - ) => true, - ( - Some(PanelEntry::Fs(previous_fs)), - PanelEntry::FoldedDirs(folded_worktree, folded_dirs), - ) => { - let expected_parent = folded_dirs.last().map(|dir_entry| dir_entry.path.as_ref()); - match previous_fs { - FsEntry::ExternalFile(..) => false, - FsEntry::File(file_worktree, file_entry, ..) => { - file_worktree == folded_worktree - && file_entry.path.parent() == expected_parent - } - FsEntry::Directory(directory_wortree, directory_entry) => { - directory_wortree == folded_worktree - && directory_entry.path.parent() == expected_parent - } - } - } - ( - Some(PanelEntry::FoldedDirs(folded_worktree, folded_dirs)), - PanelEntry::Fs(fs_entry), - ) => match fs_entry { - FsEntry::File(..) | FsEntry::ExternalFile(..) 
=> false, - FsEntry::Directory(directory_wortree, maybe_parent_directory) => { - directory_wortree == folded_worktree - && Some(maybe_parent_directory.path.as_ref()) - == folded_dirs - .first() - .and_then(|dir_entry| dir_entry.path.parent()) - } - }, - (Some(PanelEntry::Fs(previous_entry)), PanelEntry::Fs(maybe_parent_entry)) => { - match (previous_entry, maybe_parent_entry) { - (FsEntry::ExternalFile(..), _) | (_, FsEntry::ExternalFile(..)) => false, - (FsEntry::Directory(..) | FsEntry::File(..), FsEntry::File(..)) => false, - ( - FsEntry::Directory(previous_worktree, previous_directory), - FsEntry::Directory(new_worktree, maybe_parent_directory), - ) => { - previous_worktree == new_worktree - && previous_directory.path.parent() - == Some(maybe_parent_directory.path.as_ref()) - } - ( - FsEntry::File(previous_worktree, previous_file, ..), - FsEntry::Directory(new_worktree, maybe_parent_directory), - ) => { - previous_worktree == new_worktree - && previous_file.path.parent() - == Some(maybe_parent_directory.path.as_ref()) - } - } - } - }; - - if has_search_items { - previous_entry = Some(&entry.entry); - } else { - let collapsed_entries_to_check = match &entry.entry { - PanelEntry::FoldedDirs(worktree_id, entries) => entries - .iter() - .map(|entry| CollapsedEntry::Dir(*worktree_id, entry.id)) - .collect(), - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => { - vec![CollapsedEntry::Dir(*worktree_id, entry.id)] - } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - vec![CollapsedEntry::ExternalFile(*buffer_id)] - } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - vec![CollapsedEntry::File(*worktree_id, *buffer_id)] - } - PanelEntry::Search(_) | PanelEntry::Outline(_) => Vec::new(), - }; - if !collapsed_entries_to_check.is_empty() - && collapsed_entries_to_check - .iter() - .any(|collapsed_entry| collapsed_entries.contains(collapsed_entry)) - { - previous_entry = Some(&entry.entry); - continue; - } - 
match_ids_to_remove.insert(id); - previous_entry = None; - } - } - - if match_ids_to_remove.is_empty() { - return; - } - - string_match_candidates.retain(|candidate| { - let retain = !match_ids_to_remove.contains(&candidate.id); - if !retain { - added_contexts.remove(&candidate.string); - } - retain - }); - match_ids_to_remove.into_iter().rev().for_each(|id| { - entries.remove(id); - }); -} - fn workspace_active_editor( workspace: &Workspace, cx: &AppContext, @@ -4374,6 +4214,117 @@ mod tests { }); } + #[gpui::test] + async fn test_item_filtering(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + populate_with_test_ra_project(&fs, "/rust-analyzer").await; + let project = Project::test(fs.clone(), ["/rust-analyzer".as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(Arc::new(rust_lang())) + }); + let workspace = add_outline_panel(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let outline_panel = outline_panel(&workspace, cx); + outline_panel.update(cx, |outline_panel, cx| outline_panel.set_active(true, cx)); + + workspace + .update(cx, |workspace, cx| { + ProjectSearchView::deploy_search(workspace, &workspace::DeploySearch::default(), cx) + }) + .unwrap(); + let search_view = workspace + .update(cx, |workspace, cx| { + workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()) + .expect("Project search view expected to appear after new search event trigger") + }) + .unwrap(); + + let query = "param_names_for_lifetime_elision_hints"; + perform_project_search(&search_view, query, cx); + search_view.update(cx, |search_view, cx| { + search_view + .results_editor() + .update(cx, |results_editor, cx| { + assert_eq!( + results_editor.display_text(cx).match_indices(query).count(), + 9 + ); + }); + }); + let all_matches = r#"/ + crates/ + ide/src/ + inlay_hints/ + fn_lifetime_fn.rs + search: match 
config.param_names_for_lifetime_elision_hints { + search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + search: Some(it) if config.param_names_for_lifetime_elision_hints => { + search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + inlay_hints.rs + search: pub param_names_for_lifetime_elision_hints: bool, + search: param_names_for_lifetime_elision_hints: self + static_index.rs + search: param_names_for_lifetime_elision_hints: false, + rust-analyzer/src/ + cli/ + analysis_stats.rs + search: param_names_for_lifetime_elision_hints: true, + config.rs + search: param_names_for_lifetime_elision_hints: self"#; + + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None,), + all_matches, + ); + }); + + let filter_text = "a"; + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.filter_editor.update(cx, |filter_editor, cx| { + filter_editor.set_text(filter_text, cx); + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None), + all_matches + .lines() + .filter(|item| item.contains(filter_text)) + .collect::>() + .join("\n"), + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.filter_editor.update(cx, |filter_editor, cx| { + filter_editor.set_text("", cx); + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None,), + all_matches, + ); + }); + } + #[gpui::test] async fn test_frontend_repo_structure(cx: &mut TestAppContext) { init_test(cx); From 
ffe36c9beb40caf128535a536619b12e6153aa6a Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 27 Oct 2024 19:44:21 -0700 Subject: [PATCH 09/87] Remove hosted projects (#19754) Release Notes: - N/A --- crates/channel/src/channel_store.rs | 76 +---------------- crates/collab/src/db.rs | 1 - crates/collab/src/db/queries.rs | 1 - crates/collab/src/db/queries/channels.rs | 5 -- .../collab/src/db/queries/hosted_projects.rs | 85 ------------------- crates/collab/src/db/queries/projects.rs | 34 -------- crates/collab/src/db/tables.rs | 1 - crates/collab/src/db/tables/hosted_project.rs | 27 ------ crates/collab/src/db/tables/project.rs | 15 +--- crates/collab/src/rpc.rs | 30 ------- crates/collab_ui/src/collab_panel.rs | 60 +------------ crates/project/src/project.rs | 53 +----------- crates/proto/proto/zed.proto | 23 +---- crates/proto/src/proto.rs | 2 - crates/workspace/src/workspace.rs | 54 +----------- 15 files changed, 8 insertions(+), 459 deletions(-) delete mode 100644 crates/collab/src/db/queries/hosted_projects.rs delete mode 100644 crates/collab/src/db/tables/hosted_project.rs diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index fc5b12cfae..d627d8fe15 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -3,7 +3,7 @@ mod channel_index; use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage}; use anyhow::{anyhow, Result}; use channel_index::ChannelIndex; -use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore}; +use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore}; use collections::{hash_map, HashMap, HashSet}; use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt}; use gpui::{ @@ -33,30 +33,11 @@ struct NotesVersion { version: clock::Global, } -#[derive(Debug, Clone)] -pub struct HostedProject { - project_id: ProjectId, - channel_id: ChannelId, - name: 
SharedString, - _visibility: proto::ChannelVisibility, -} -impl From for HostedProject { - fn from(project: proto::HostedProject) -> Self { - Self { - project_id: ProjectId(project.project_id), - channel_id: ChannelId(project.channel_id), - _visibility: project.visibility(), - name: project.name.into(), - } - } -} pub struct ChannelStore { pub channel_index: ChannelIndex, channel_invitations: Vec>, channel_participants: HashMap>>, channel_states: HashMap, - hosted_projects: HashMap, - outgoing_invites: HashSet<(ChannelId, UserId)>, update_channels_tx: mpsc::UnboundedSender, opened_buffers: HashMap>, @@ -85,7 +66,6 @@ pub struct ChannelState { observed_notes_version: NotesVersion, observed_chat_message: Option, role: Option, - projects: HashSet, } impl Channel { @@ -216,7 +196,6 @@ impl ChannelStore { channel_invitations: Vec::default(), channel_index: ChannelIndex::default(), channel_participants: Default::default(), - hosted_projects: Default::default(), outgoing_invites: Default::default(), opened_buffers: Default::default(), opened_chats: Default::default(), @@ -316,19 +295,6 @@ impl ChannelStore { self.channel_index.by_id().get(&channel_id) } - pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> { - let mut projects: Vec<(SharedString, ProjectId)> = self - .channel_states - .get(&channel_id) - .map(|state| state.projects.clone()) - .unwrap_or_default() - .into_iter() - .flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id))) - .collect(); - projects.sort(); - projects - } - pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool { if let Some(buffer) = self.opened_buffers.get(&channel_id) { if let OpenedModelHandle::Open(buffer) = buffer { @@ -1102,9 +1068,7 @@ impl ChannelStore { let channels_changed = !payload.channels.is_empty() || !payload.delete_channels.is_empty() || !payload.latest_channel_message_ids.is_empty() - || !payload.latest_channel_buffer_versions.is_empty() - || 
!payload.hosted_projects.is_empty() - || !payload.deleted_hosted_projects.is_empty(); + || !payload.latest_channel_buffer_versions.is_empty(); if channels_changed { if !payload.delete_channels.is_empty() { @@ -1161,34 +1125,6 @@ impl ChannelStore { .or_default() .update_latest_message_id(latest_channel_message.message_id); } - - for hosted_project in payload.hosted_projects { - let hosted_project: HostedProject = hosted_project.into(); - if let Some(old_project) = self - .hosted_projects - .insert(hosted_project.project_id, hosted_project.clone()) - { - self.channel_states - .entry(old_project.channel_id) - .or_default() - .remove_hosted_project(old_project.project_id); - } - self.channel_states - .entry(hosted_project.channel_id) - .or_default() - .add_hosted_project(hosted_project.project_id); - } - - for hosted_project_id in payload.deleted_hosted_projects { - let hosted_project_id = ProjectId(hosted_project_id); - - if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) { - self.channel_states - .entry(old_project.channel_id) - .or_default() - .remove_hosted_project(old_project.project_id); - } - } } cx.notify(); @@ -1295,12 +1231,4 @@ impl ChannelState { }; } } - - fn add_hosted_project(&mut self, project_id: ProjectId) { - self.projects.insert(project_id); - } - - fn remove_hosted_project(&mut self, project_id: ProjectId) { - self.projects.remove(&project_id); - } } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 9c02e0c801..ef85f91fe1 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -617,7 +617,6 @@ pub struct ChannelsForUser { pub channels: Vec, pub channel_memberships: Vec, pub channel_participants: HashMap>, - pub hosted_projects: Vec, pub invited_channels: Vec, pub observed_buffer_versions: Vec, diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 79523444ab..bfcd111e3f 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -10,7 
+10,6 @@ pub mod contacts; pub mod contributors; pub mod embeddings; pub mod extensions; -pub mod hosted_projects; pub mod messages; pub mod notifications; pub mod processed_stripe_events; diff --git a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs index f9da0187fe..10120ea814 100644 --- a/crates/collab/src/db/queries/channels.rs +++ b/crates/collab/src/db/queries/channels.rs @@ -615,15 +615,10 @@ impl Database { .observed_channel_messages(&channel_ids, user_id, tx) .await?; - let hosted_projects = self - .get_hosted_projects(&channel_ids, &roles_by_channel_id, tx) - .await?; - Ok(ChannelsForUser { channel_memberships, channels, invited_channels, - hosted_projects, channel_participants, latest_buffer_versions, latest_channel_messages, diff --git a/crates/collab/src/db/queries/hosted_projects.rs b/crates/collab/src/db/queries/hosted_projects.rs deleted file mode 100644 index eb38eaa9cc..0000000000 --- a/crates/collab/src/db/queries/hosted_projects.rs +++ /dev/null @@ -1,85 +0,0 @@ -use rpc::{proto, ErrorCode}; - -use super::*; - -impl Database { - pub async fn get_hosted_projects( - &self, - channel_ids: &[ChannelId], - roles: &HashMap, - tx: &DatabaseTransaction, - ) -> Result> { - let projects = hosted_project::Entity::find() - .find_also_related(project::Entity) - .filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0))) - .all(tx) - .await? 
- .into_iter() - .flat_map(|(hosted_project, project)| { - if hosted_project.deleted_at.is_some() { - return None; - } - match hosted_project.visibility { - ChannelVisibility::Public => {} - ChannelVisibility::Members => { - let is_visible = roles - .get(&hosted_project.channel_id) - .map(|role| role.can_see_all_descendants()) - .unwrap_or(false); - if !is_visible { - return None; - } - } - }; - Some(proto::HostedProject { - project_id: project?.id.to_proto(), - channel_id: hosted_project.channel_id.to_proto(), - name: hosted_project.name.clone(), - visibility: hosted_project.visibility.into(), - }) - }) - .collect(); - - Ok(projects) - } - - pub async fn get_hosted_project( - &self, - hosted_project_id: HostedProjectId, - user_id: UserId, - tx: &DatabaseTransaction, - ) -> Result<(hosted_project::Model, ChannelRole)> { - let project = hosted_project::Entity::find_by_id(hosted_project_id) - .one(tx) - .await? - .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?; - let channel = channel::Entity::find_by_id(project.channel_id) - .one(tx) - .await? - .ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?; - - let role = match project.visibility { - ChannelVisibility::Public => { - self.check_user_is_channel_participant(&channel, user_id, tx) - .await? - } - ChannelVisibility::Members => { - self.check_user_is_channel_member(&channel, user_id, tx) - .await? - } - }; - - Ok((project, role)) - } - - pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result { - self.transaction(|tx| async move { - Ok(project::Entity::find_by_id(project_id) - .one(&*tx) - .await? - .map(|project| project.hosted_project_id.is_some()) - .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?) 
- }) - .await - } -} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 27bec21ca1..9ea42dd9bf 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -68,7 +68,6 @@ impl Database { connection.owner_id as i32, ))), id: ActiveValue::NotSet, - hosted_project_id: ActiveValue::Set(None), } .insert(&*tx) .await?; @@ -536,39 +535,6 @@ impl Database { .await } - /// Adds the given connection to the specified hosted project - pub async fn join_hosted_project( - &self, - id: ProjectId, - user_id: UserId, - connection: ConnectionId, - ) -> Result<(Project, ReplicaId)> { - self.transaction(|tx| async move { - let (project, hosted_project) = project::Entity::find_by_id(id) - .find_also_related(hosted_project::Entity) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("hosted project is no longer shared"))?; - - let Some(hosted_project) = hosted_project else { - return Err(anyhow!("project is not hosted"))?; - }; - - let channel = channel::Entity::find_by_id(hosted_project.channel_id) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("no such channel"))?; - - let role = self - .check_user_is_channel_participant(&channel, user_id, &tx) - .await?; - - self.join_project_internal(project, user_id, connection, role, &tx) - .await - }) - .await - } - pub async fn get_project(&self, id: ProjectId) -> Result { self.transaction(|tx| async move { Ok(project::Entity::find_by_id(id) diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index 23dced800b..8a4ec29998 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -18,7 +18,6 @@ pub mod extension; pub mod extension_version; pub mod feature_flag; pub mod follower; -pub mod hosted_project; pub mod language_server; pub mod notification; pub mod notification_kind; diff --git a/crates/collab/src/db/tables/hosted_project.rs b/crates/collab/src/db/tables/hosted_project.rs deleted file mode 100644 index dd7cb1b5b1..0000000000 --- a/crates/collab/src/db/tables/hosted_project.rs +++ /dev/null @@ -1,27 +0,0 @@ -use crate::db::{ChannelId, ChannelVisibility, HostedProjectId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "hosted_projects")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: HostedProjectId, - pub channel_id: ChannelId, - pub name: String, - pub visibility: ChannelVisibility, - pub deleted_at: Option, -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_one = "super::project::Entity")] - Project, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Project.def() - } -} diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index a357634aff..10e3da50e1 100644 --- a/crates/collab/src/db/tables/project.rs +++ b/crates/collab/src/db/tables/project.rs @@ -1,4 +1,4 @@ -use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId}; +use 
crate::db::{ProjectId, Result, RoomId, ServerId, UserId}; use anyhow::anyhow; use rpc::ConnectionId; use sea_orm::entity::prelude::*; @@ -12,7 +12,6 @@ pub struct Model { pub host_user_id: Option, pub host_connection_id: Option, pub host_connection_server_id: Option, - pub hosted_project_id: Option, } impl Model { @@ -50,12 +49,6 @@ pub enum Relation { Collaborators, #[sea_orm(has_many = "super::language_server::Entity")] LanguageServers, - #[sea_orm( - belongs_to = "super::hosted_project::Entity", - from = "Column::HostedProjectId", - to = "super::hosted_project::Column::Id" - )] - HostedProject, } impl Related for Entity { @@ -88,10 +81,4 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::HostedProject.def() - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index d091f04326..0b90bfa0c9 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -287,7 +287,6 @@ impl Server { .add_request_handler(share_project) .add_message_handler(unshare_project) .add_request_handler(join_project) - .add_request_handler(join_hosted_project) .add_message_handler(leave_project) .add_request_handler(update_project) .add_request_handler(update_worktree) @@ -1795,11 +1794,6 @@ impl JoinProjectInternalResponse for Response { Response::::send(self, result) } } -impl JoinProjectInternalResponse for Response { - fn send(self, result: proto::JoinProjectResponse) -> Result<()> { - Response::::send(self, result) - } -} fn join_project_internal( response: impl JoinProjectInternalResponse, @@ -1923,11 +1917,6 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result let sender_id = session.connection_id; let project_id = ProjectId::from_proto(request.project_id); let db = session.db().await; - if db.is_hosted_project(project_id).await? 
{ - let project = db.leave_hosted_project(project_id, sender_id).await?; - project_left(&project, &session); - return Ok(()); - } let (room, project) = &*db.leave_project(project_id, sender_id).await?; tracing::info!( @@ -1943,24 +1932,6 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result Ok(()) } -async fn join_hosted_project( - request: proto::JoinHostedProject, - response: Response, - session: Session, -) -> Result<()> { - let (mut project, replica_id) = session - .db() - .await - .join_hosted_project( - ProjectId(request.project_id as i32), - session.user_id(), - session.connection_id, - ) - .await?; - - join_project_internal(response, session, &mut project, &replica_id) -} - /// Updates other participants with changes to the project async fn update_project( request: proto::UpdateProject, @@ -4202,7 +4173,6 @@ fn build_channels_update(channels: ChannelsForUser) -> proto::UpdateChannels { update.channel_invitations.push(channel.to_proto()); } - update.hosted_projects = channels.hosted_projects; update } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 59f83e0654..f188aaf921 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal; use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings}; use call::ActiveCall; use channel::{Channel, ChannelEvent, ChannelStore}; -use client::{ChannelId, Client, Contact, ProjectId, User, UserStore}; +use client::{ChannelId, Client, Contact, User, UserStore}; use contact_finder::ContactFinder; use db::kvp::KEY_VALUE_STORE; use editor::{Editor, EditorElement, EditorStyle}; @@ -182,10 +182,6 @@ enum ListEntry { ChannelEditor { depth: usize, }, - HostedProject { - id: ProjectId, - name: SharedString, - }, Contact { contact: Arc, calling: bool, @@ -566,7 +562,6 @@ impl CollabPanel { } } - let hosted_projects = 
channel_store.projects_for_id(channel.id); let has_children = channel_store .channel_at_index(mat.candidate_id + 1) .map_or(false, |next_channel| { @@ -600,10 +595,6 @@ impl CollabPanel { }); } } - - for (name, id) in hosted_projects { - self.entries.push(ListEntry::HostedProject { id, name }); - } } } @@ -1029,40 +1020,6 @@ impl CollabPanel { .tooltip(move |cx| Tooltip::text("Open Chat", cx)) } - fn render_channel_project( - &self, - id: ProjectId, - name: &SharedString, - is_selected: bool, - cx: &mut ViewContext, - ) -> impl IntoElement { - ListItem::new(ElementId::NamedInteger( - "channel-project".into(), - id.0 as usize, - )) - .indent_level(2) - .indent_step_size(px(20.)) - .selected(is_selected) - .on_click(cx.listener(move |this, _, cx| { - if let Some(workspace) = this.workspace.upgrade() { - let app_state = workspace.read(cx).app_state().clone(); - workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err( - "Failed to open project", - cx, - |_, _| None, - ) - } - })) - .start_slot( - h_flex() - .relative() - .gap_1() - .child(IconButton::new(0, IconName::FileTree)), - ) - .child(Label::new(name.clone())) - .tooltip(move |cx| Tooltip::text("Open Project", cx)) - } - fn has_subchannels(&self, ix: usize) -> bool { self.entries.get(ix).map_or(false, |entry| { if let ListEntry::Channel { has_children, .. } = entry { @@ -1538,12 +1495,6 @@ impl CollabPanel { ListEntry::ChannelChat { channel_id } => { self.join_channel_chat(*channel_id, cx) } - ListEntry::HostedProject { - id: _id, - name: _name, - } => { - // todo() - } ListEntry::OutgoingRequest(_) => {} ListEntry::ChannelEditor { .. 
} => {} } @@ -2157,10 +2108,6 @@ impl CollabPanel { ListEntry::ChannelChat { channel_id } => self .render_channel_chat(*channel_id, is_selected, cx) .into_any_element(), - - ListEntry::HostedProject { id, name } => self - .render_channel_project(*id, name, is_selected, cx) - .into_any_element(), } } @@ -2898,11 +2845,6 @@ impl PartialEq for ListEntry { return channel_1.id == channel_2.id; } } - ListEntry::HostedProject { id, .. } => { - if let ListEntry::HostedProject { id: other_id, .. } = other { - return id == other_id; - } - } ListEntry::ChannelNotes { channel_id } => { if let ListEntry::ChannelNotes { channel_id: other_id, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b2fc8c5304..49f4b7c6f3 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -24,9 +24,7 @@ mod yarn; use anyhow::{anyhow, Context as _, Result}; use buffer_store::{BufferStore, BufferStoreEvent}; -use client::{ - proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore, -}; +use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore}; use clock::ReplicaId; use collections::{BTreeSet, HashMap, HashSet}; use debounced_delay::DebouncedDelay; @@ -154,7 +152,6 @@ pub struct Project { remotely_created_models: Arc>, terminals: Terminals, node: Option, - hosted_project_id: Option, search_history: SearchHistory, search_included_history: SearchHistory, search_excluded_history: SearchHistory, @@ -678,7 +675,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - hosted_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), @@ -796,7 +792,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - hosted_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), @@ -993,7 +988,6 @@ impl Project { local_handles: Vec::new(), }, 
node: None, - hosted_project_id: None, search_history: Self::new_search_history(), search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), @@ -1045,47 +1039,6 @@ impl Project { Ok(this) } - pub async fn hosted( - remote_id: ProjectId, - user_store: Model, - client: Arc, - languages: Arc, - fs: Arc, - cx: AsyncAppContext, - ) -> Result> { - client.authenticate_and_connect(true, &cx).await?; - - let subscriptions = [ - EntitySubscription::Project(client.subscribe_to_entity::(remote_id.0)?), - EntitySubscription::BufferStore( - client.subscribe_to_entity::(remote_id.0)?, - ), - EntitySubscription::WorktreeStore( - client.subscribe_to_entity::(remote_id.0)?, - ), - EntitySubscription::LspStore(client.subscribe_to_entity::(remote_id.0)?), - EntitySubscription::SettingsObserver( - client.subscribe_to_entity::(remote_id.0)?, - ), - ]; - let response = client - .request_envelope(proto::JoinHostedProject { - project_id: remote_id.0, - }) - .await?; - Self::from_join_project_response( - response, - subscriptions, - client, - true, - user_store, - languages, - fs, - cx, - ) - .await - } - fn new_search_history() -> SearchHistory { SearchHistory::new( Some(MAX_PROJECT_SEARCH_HISTORY_SIZE), @@ -1290,10 +1243,6 @@ impl Project { } } - pub fn hosted_project_id(&self) -> Option { - self.hosted_project_id - } - pub fn supports_terminal(&self, _cx: &AppContext) -> bool { if self.is_local() { return true; diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index c61a14cdbf..53aaa6ef6d 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -196,8 +196,6 @@ message Envelope { GetImplementation get_implementation = 162; GetImplementationResponse get_implementation_response = 163; - JoinHostedProject join_hosted_project = 164; - CountLanguageModelTokens count_language_model_tokens = 230; CountLanguageModelTokensResponse count_language_model_tokens_response = 231; GetCachedEmbeddings 
get_cached_embeddings = 189; @@ -292,6 +290,7 @@ message Envelope { reserved 87 to 88; reserved 158 to 161; + reserved 164; reserved 166 to 169; reserved 177 to 185; reserved 188; @@ -523,11 +522,6 @@ message JoinProject { uint64 project_id = 1; } -message JoinHostedProject { - uint64 project_id = 1; -} - - message ListRemoteDirectory { uint64 dev_server_id = 1; string path = 2; @@ -1294,13 +1288,7 @@ message UpdateChannels { repeated ChannelMessageId latest_channel_message_ids = 8; repeated ChannelBufferVersion latest_channel_buffer_versions = 9; - repeated HostedProject hosted_projects = 10; - repeated uint64 deleted_hosted_projects = 11; - - reserved 12; - reserved 13; - reserved 14; - reserved 15; + reserved 10 to 15; } message UpdateUserChannels { @@ -1329,13 +1317,6 @@ message ChannelParticipants { repeated uint64 participant_user_ids = 2; } -message HostedProject { - uint64 project_id = 1; - uint64 channel_id = 2; - string name = 3; - ChannelVisibility visibility = 4; -} - message JoinChannel { uint64 channel_id = 1; } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 3807e04bd5..a7140cc7ed 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -228,7 +228,6 @@ messages!( (JoinChannelChat, Foreground), (JoinChannelChatResponse, Foreground), (JoinProject, Foreground), - (JoinHostedProject, Foreground), (JoinProjectResponse, Foreground), (JoinRoom, Foreground), (JoinRoomResponse, Foreground), @@ -411,7 +410,6 @@ request_messages!( (JoinChannel, JoinRoomResponse), (JoinChannelBuffer, JoinChannelBufferResponse), (JoinChannelChat, JoinChannelChatResponse), - (JoinHostedProject, JoinProjectResponse), (JoinProject, JoinProjectResponse), (JoinRoom, JoinRoomResponse), (LeaveChannelBuffer, Ack), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a81174020b..b92417b293 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -16,7 +16,7 @@ use 
anyhow::{anyhow, Context as _, Result}; use call::{call_settings::CallSettings, ActiveCall}; use client::{ proto::{self, ErrorCode, PanelId, PeerId}, - ChannelId, Client, ErrorExt, ProjectId, Status, TypedEnvelope, UserStore, + ChannelId, Client, ErrorExt, Status, TypedEnvelope, UserStore, }; use collections::{hash_map, HashMap, HashSet}; use derive_more::{Deref, DerefMut}; @@ -5469,58 +5469,6 @@ pub fn create_and_open_local_file( }) } -pub fn join_hosted_project( - hosted_project_id: ProjectId, - app_state: Arc, - cx: &mut AppContext, -) -> Task> { - cx.spawn(|mut cx| async move { - let existing_window = cx.update(|cx| { - cx.windows().into_iter().find_map(|window| { - let workspace = window.downcast::()?; - workspace - .read(cx) - .is_ok_and(|workspace| { - workspace.project().read(cx).hosted_project_id() == Some(hosted_project_id) - }) - .then_some(workspace) - }) - })?; - - let workspace = if let Some(existing_window) = existing_window { - existing_window - } else { - let project = Project::hosted( - hosted_project_id, - app_state.user_store.clone(), - app_state.client.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx.clone(), - ) - .await?; - - let window_bounds_override = window_bounds_env_override(); - cx.update(|cx| { - let mut options = (app_state.build_window_options)(None, cx); - options.window_bounds = window_bounds_override.map(WindowBounds::Windowed); - cx.open_window(options, |cx| { - cx.new_view(|cx| { - Workspace::new(Default::default(), project, app_state.clone(), cx) - }) - }) - })?? 
- }; - - workspace.update(&mut cx, |_, cx| { - cx.activate(true); - cx.activate_window(); - })?; - - Ok(()) - }) -} - pub fn open_ssh_project( window: WindowHandle, connection_options: SshConnectionOptions, From e86b096b92a01361b96267c58825aca92fd02477 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 28 Oct 2024 09:45:19 +0100 Subject: [PATCH 10/87] docs: Add `indent_guides` setting to project panel docs (#19819) Follow up to #18260 Release Notes: - N/A --- docs/src/configuring-zed.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 78c7c62c94..784cb631ca 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2042,6 +2042,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "folder_icons": true, "git_status": true, "indent_size": 20, + "indent_guides": true, "auto_reveal_entries": true, "auto_fold_dirs": true, "scrollbar": { @@ -2163,6 +2164,12 @@ Run the `theme selector: toggle` action in the command palette to see a current - Setting: `indent_size` - Default: `20` +### Indent Guides + +- Description: Whether to show indent guides in the project panel. +- Setting: `indent_guides` +- Default: `true` + ### Scrollbar - Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. 
From 888fec9299b4e36c37f6f6144345def1b4e43425 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 28 Oct 2024 09:54:18 +0100 Subject: [PATCH 11/87] outline panel: Add indent guides (#19719) See #12673 | File | Search | |--------|--------| | image | image | Release Notes: - Added indent guides to the outline panel --- Cargo.lock | 2 + assets/settings/default.json | 2 + crates/gpui/src/elements/uniform_list.rs | 2 + crates/outline_panel/Cargo.toml | 2 + crates/outline_panel/src/outline_panel.rs | 122 +++++++++++- .../src/outline_panel_settings.rs | 5 + crates/ui/src/components/indent_guides.rs | 186 +++++++++++------- docs/src/configuring-zed.md | 1 + 8 files changed, 236 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c81f692ee..91b76f33e8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7728,8 +7728,10 @@ dependencies = [ "serde", "serde_json", "settings", + "smallvec", "smol", "theme", + "ui", "util", "workspace", "worktree", diff --git a/assets/settings/default.json b/assets/settings/default.json index 32f46ce714..cd4e3db15c 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -388,6 +388,8 @@ "git_status": true, // Amount of indentation for nested items. "indent_size": 20, + // Whether to show indent guides in the outline panel. + "indent_guides": true, // Whether to reveal it in the outline panel automatically, // when a corresponding outline entry becomes active. // Gitignored entries are never auto revealed. 
diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 9ce85aab23..2379ee9f81 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -340,6 +340,7 @@ impl Element for UniformList { visible_range.clone(), bounds, item_height, + self.item_count, cx, ); let available_space = size( @@ -396,6 +397,7 @@ pub trait UniformListDecoration { visible_range: Range, bounds: Bounds, item_height: Pixels, + item_count: usize, cx: &mut WindowContext, ) -> AnyElement; } diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index 824ea70735..be7653db68 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -30,8 +30,10 @@ search.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +smallvec.workspace = true smol.workspace = true theme.workspace = true +ui.workspace = true util.workspace = true worktree.workspace = true workspace.workspace = true diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 72b97c8f69..6def76bb38 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -24,12 +24,12 @@ use editor::{ use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, - AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, - EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, - KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, - SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, - VisualContext, WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, + 
AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, + Div, ElementId, EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, + IntoElement, KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, + Render, SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, + ViewContext, VisualContext, WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; @@ -42,6 +42,7 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; use smol::channel; use theme::{SyntaxTheme, ThemeSettings}; +use ui::{IndentGuideColors, IndentGuideLayout}; use util::{debug_panic, RangeExt, ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, @@ -254,14 +255,14 @@ impl SearchState { #[derive(Debug)] enum SelectedEntry { Invalidated(Option), - Valid(PanelEntry), + Valid(PanelEntry, usize), None, } impl SelectedEntry { fn invalidate(&mut self) { match std::mem::replace(self, SelectedEntry::None) { - Self::Valid(entry) => *self = Self::Invalidated(Some(entry)), + Self::Valid(entry, _) => *self = Self::Invalidated(Some(entry)), Self::None => *self = Self::Invalidated(None), other => *self = other, } @@ -3568,7 +3569,7 @@ impl OutlinePanel { fn selected_entry(&self) -> Option<&PanelEntry> { match &self.selected_entry { SelectedEntry::Invalidated(entry) => entry.as_ref(), - SelectedEntry::Valid(entry) => Some(entry), + SelectedEntry::Valid(entry, _) => Some(entry), SelectedEntry::None => None, } } @@ -3577,7 +3578,16 @@ impl OutlinePanel { if focus { self.focus_handle.focus(cx); } - self.selected_entry = SelectedEntry::Valid(entry); + let ix = self + .cached_entries + .iter() + .enumerate() + .find(|(_, cached_entry)| &cached_entry.entry == &entry) + .map(|(i, _)| i) + .unwrap_or_default(); + + self.selected_entry = SelectedEntry::Valid(entry, ix); + self.autoscroll(cx); 
cx.notify(); } @@ -3736,6 +3746,9 @@ impl Render for OutlinePanel { let project = self.project.read(cx); let query = self.query(cx); let pinned = self.pinned; + let settings = OutlinePanelSettings::get_global(cx); + let indent_size = settings.indent_size; + let show_indent_guides = settings.indent_guides; let outline_panel = v_flex() .id("outline-panel") @@ -3901,6 +3914,61 @@ impl Render for OutlinePanel { }) .size_full() .track_scroll(self.scroll_handle.clone()) + .when(show_indent_guides, |list| { + list.with_decoration( + ui::indent_guides( + cx.view().clone(), + px(indent_size), + IndentGuideColors::panel(cx), + |outline_panel, range, _| { + let entries = outline_panel.cached_entries.get(range); + if let Some(entries) = entries { + entries.into_iter().map(|item| item.depth).collect() + } else { + smallvec::SmallVec::new() + } + }, + ) + .with_render_fn( + cx.view().clone(), + move |outline_panel, params, _| { + const LEFT_OFFSET: f32 = 14.; + + let indent_size = params.indent_size; + let item_height = params.item_height; + let active_indent_guide_ix = find_active_indent_guide_ix( + outline_panel, + ¶ms.indent_guides, + ); + + params + .indent_guides + .into_iter() + .enumerate() + .map(|(ix, layout)| { + let bounds = Bounds::new( + point( + px(layout.offset.x as f32) * indent_size + + px(LEFT_OFFSET), + px(layout.offset.y as f32) * item_height, + ), + size( + px(1.), + px(layout.length as f32) * item_height, + ), + ); + ui::RenderedIndentGuide { + bounds, + layout, + is_active: active_indent_guide_ix == Some(ix), + hitbox: None, + } + }) + .collect() + }, + ), + ) + }) }) } .children(self.context_menu.as_ref().map(|(menu, position, _)| { @@ -3945,6 +4013,40 @@ impl Render for OutlinePanel { } } +fn find_active_indent_guide_ix( + outline_panel: &OutlinePanel, + candidates: &[IndentGuideLayout], +) -> Option { + let SelectedEntry::Valid(_, target_ix) = &outline_panel.selected_entry else { + return None; + }; + let target_depth = outline_panel + .cached_entries + 
.get(*target_ix) + .map(|cached_entry| cached_entry.depth)?; + + let (target_ix, target_depth) = if let Some(target_depth) = outline_panel + .cached_entries + .get(target_ix + 1) + .filter(|cached_entry| cached_entry.depth > target_depth) + .map(|entry| entry.depth) + { + (target_ix + 1, target_depth.saturating_sub(1)) + } else { + (*target_ix, target_depth.saturating_sub(1)) + }; + + candidates + .iter() + .enumerate() + .find(|(_, guide)| { + guide.offset.y <= target_ix + && target_ix < guide.offset.y + guide.length + && guide.offset.x == target_depth + }) + .map(|(ix, _)| ix) +} + fn subscribe_for_editor_events( editor: &View, cx: &mut ViewContext, diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index e19fc3c008..e165978fc7 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -19,6 +19,7 @@ pub struct OutlinePanelSettings { pub folder_icons: bool, pub git_status: bool, pub indent_size: f32, + pub indent_guides: bool, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, } @@ -53,6 +54,10 @@ pub struct OutlinePanelSettingsContent { /// /// Default: 20 pub indent_size: Option, + /// Whether to show indent guides in the outline panel. + /// + /// Default: true + pub indent_guides: Option, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. 
diff --git a/crates/ui/src/components/indent_guides.rs b/crates/ui/src/components/indent_guides.rs index e45404429c..caab92053c 100644 --- a/crates/ui/src/components/indent_guides.rs +++ b/crates/ui/src/components/indent_guides.rs @@ -140,13 +140,18 @@ mod uniform_list { visible_range: Range, bounds: Bounds, item_height: Pixels, + item_count: usize, cx: &mut WindowContext, ) -> AnyElement { let mut visible_range = visible_range.clone(); - visible_range.end += 1; + let includes_trailing_indent = visible_range.end < item_count; + // Check if we have entries after the visible range, + // if so extend the visible range so we can fetch a trailing indent, + // which is needed to compute indent guides correctly. + if includes_trailing_indent { + visible_range.end += 1; + } let visible_entries = &(self.compute_indents_fn)(visible_range.clone(), cx); - // Check if we have an additional indent that is outside of the visible range - let includes_trailing_indent = visible_entries.len() == visible_range.len(); let indent_guides = compute_indent_guides( &visible_entries, visible_range.start, @@ -198,8 +203,12 @@ mod uniform_list { on_hovered_indent_guide_click: Option>, } - struct IndentGuidesElementPrepaintState { - hitboxes: SmallVec<[Hitbox; 12]>, + enum IndentGuidesElementPrepaintState { + Static, + Interactive { + hitboxes: Rc>, + on_hovered_indent_guide_click: Rc, + }, } impl Element for IndentGuidesElement { @@ -225,11 +234,21 @@ mod uniform_list { _request_layout: &mut Self::RequestLayoutState, cx: &mut WindowContext, ) -> Self::PrepaintState { - let mut hitboxes = SmallVec::new(); - for guide in self.indent_guides.as_ref().iter() { - hitboxes.push(cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)); + if let Some(on_hovered_indent_guide_click) = self.on_hovered_indent_guide_click.clone() + { + let hitboxes = self + .indent_guides + .as_ref() + .iter() + .map(|guide| cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)) + .collect(); + 
Self::PrepaintState::Interactive { + hitboxes: Rc::new(hitboxes), + on_hovered_indent_guide_click, + } + } else { + Self::PrepaintState::Static } - Self::PrepaintState { hitboxes } } fn paint( @@ -240,81 +259,96 @@ mod uniform_list { prepaint: &mut Self::PrepaintState, cx: &mut WindowContext, ) { - let callback = self.on_hovered_indent_guide_click.clone(); - if let Some(callback) = callback { - cx.on_mouse_event({ - let hitboxes = prepaint.hitboxes.clone(); - let indent_guides = self.indent_guides.clone(); - move |event: &MouseDownEvent, phase, cx| { - if phase == DispatchPhase::Bubble && event.button == MouseButton::Left { - let mut active_hitbox_ix = None; - for (i, hitbox) in hitboxes.iter().enumerate() { + match prepaint { + IndentGuidesElementPrepaintState::Static => { + for indent_guide in self.indent_guides.as_ref() { + let fill_color = if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); + } + } + IndentGuidesElementPrepaintState::Interactive { + hitboxes, + on_hovered_indent_guide_click, + } => { + cx.on_mouse_event({ + let hitboxes = hitboxes.clone(); + let indent_guides = self.indent_guides.clone(); + let on_hovered_indent_guide_click = on_hovered_indent_guide_click.clone(); + move |event: &MouseDownEvent, phase, cx| { + if phase == DispatchPhase::Bubble && event.button == MouseButton::Left { + let mut active_hitbox_ix = None; + for (i, hitbox) in hitboxes.iter().enumerate() { + if hitbox.is_hovered(cx) { + active_hitbox_ix = Some(i); + break; + } + } + + let Some(active_hitbox_ix) = active_hitbox_ix else { + return; + }; + + let active_indent_guide = &indent_guides[active_hitbox_ix].layout; + on_hovered_indent_guide_click(active_indent_guide, cx); + + cx.stop_propagation(); + cx.prevent_default(); + } + } + }); + let mut hovered_hitbox_id = None; + for (i, hitbox) in hitboxes.iter().enumerate() { + cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); + 
let indent_guide = &self.indent_guides[i]; + let fill_color = if hitbox.is_hovered(cx) { + hovered_hitbox_id = Some(hitbox.id); + self.colors.hover + } else if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); + } + + cx.on_mouse_event({ + let prev_hovered_hitbox_id = hovered_hitbox_id; + let hitboxes = hitboxes.clone(); + move |_: &MouseMoveEvent, phase, cx| { + let mut hovered_hitbox_id = None; + for hitbox in hitboxes.as_ref() { if hitbox.is_hovered(cx) { - active_hitbox_ix = Some(i); + hovered_hitbox_id = Some(hitbox.id); break; } } - - let Some(active_hitbox_ix) = active_hitbox_ix else { - return; - }; - - let active_indent_guide = &indent_guides[active_hitbox_ix].layout; - callback(active_indent_guide, cx); - - cx.stop_propagation(); - cx.prevent_default(); - } - } - }); - } - - let mut hovered_hitbox_id = None; - for (i, hitbox) in prepaint.hitboxes.iter().enumerate() { - cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); - let indent_guide = &self.indent_guides[i]; - let fill_color = if hitbox.is_hovered(cx) { - hovered_hitbox_id = Some(hitbox.id); - self.colors.hover - } else if indent_guide.is_active { - self.colors.active - } else { - self.colors.default - }; - - cx.paint_quad(fill(indent_guide.bounds, fill_color)); - } - - cx.on_mouse_event({ - let prev_hovered_hitbox_id = hovered_hitbox_id; - let hitboxes = prepaint.hitboxes.clone(); - move |_: &MouseMoveEvent, phase, cx| { - let mut hovered_hitbox_id = None; - for hitbox in &hitboxes { - if hitbox.is_hovered(cx) { - hovered_hitbox_id = Some(hitbox.id); - break; - } - } - if phase == DispatchPhase::Capture { - // If the hovered hitbox has changed, we need to re-paint the indent guides. 
- match (prev_hovered_hitbox_id, hovered_hitbox_id) { - (Some(prev_id), Some(id)) => { - if prev_id != id { - cx.refresh(); + if phase == DispatchPhase::Capture { + // If the hovered hitbox has changed, we need to re-paint the indent guides. + match (prev_hovered_hitbox_id, hovered_hitbox_id) { + (Some(prev_id), Some(id)) => { + if prev_id != id { + cx.refresh(); + } + } + (None, Some(_)) => { + cx.refresh(); + } + (Some(_), None) => { + cx.refresh(); + } + (None, None) => {} } } - (None, Some(_)) => { - cx.refresh(); - } - (Some(_), None) => { - cx.refresh(); - } - (None, None) => {} } - } + }); } - }); + } } } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 784cb631ca..f149fa5cf1 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2237,6 +2237,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "folder_icons": true, "git_status": true, "indent_size": 20, + "indent_guides": true, "auto_reveal_entries": true, "auto_fold_dirs": true, } From 2ab0b3b81976367961048e75d93a029cebc866d8 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 11:02:46 +0100 Subject: [PATCH 12/87] remote server: Fix language servers not starting (#19821) PR #19653 change the code in this diff, which lead to the remote_server binary trying to load language grammars, which in turn failed, and stopped languages from being loaded correctly. That then lead to language servers not starting up. This change reintroduces what #19653 removed, so that we don't load the grammar on the remote_server, by ignoring the grammar name from the config. The tests still all work. 
Release Notes: - N/A Co-authored-by: Bennet --- crates/languages/src/lib.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 7e8c09c8ad..03c4735d6d 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -288,6 +288,15 @@ fn load_config(name: &str) -> LanguageConfig { .with_context(|| format!("failed to load config.toml for language {name:?}")) .unwrap(); + #[cfg(not(feature = "load-grammars"))] + { + config = LanguageConfig { + name: config.name, + matcher: config.matcher, + ..Default::default() + } + } + config } From 177dfdf9002fcfc32e8356102879373e4460a1f5 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 28 Oct 2024 14:53:40 +0200 Subject: [PATCH 13/87] Declare RUSTFLAGS env var for all CI jobs (#19826) Follow-up of https://github.com/zed-industries/zed/pull/19149 Makes RUSTFLAGS propagation uniform, to ensure all `cargo ...` jobs get the same RUSTFLAGS env set. Release Notes: - N/A --- .github/workflows/ci.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ba475f88ab..dc38baeae9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,6 +25,7 @@ env: CARGO_TERM_COLOR: always CARGO_INCREMENTAL: 0 RUST_BACKTRACE: 1 + RUSTFLAGS: "-D warnings" jobs: migration_checks: @@ -116,13 +117,13 @@ jobs: uses: ./.github/actions/run_tests - name: Build collab - run: RUSTFLAGS="-D warnings" cargo build -p collab + run: cargo build -p collab - name: Build other binaries and features run: | - RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features + cargo build --workspace --bins --all-features cargo check -p gpui --features "macos-blade" - RUSTFLAGS="-D warnings" cargo build -p remote_server + cargo build -p remote_server linux_tests: timeout-minutes: 60 @@ -155,7 +156,7 @@ jobs: uses: ./.github/actions/run_tests - name: Build Zed - run: RUSTFLAGS="-D 
warnings" cargo build -p zed + run: cargo build -p zed build_remote_server: timeout-minutes: 60 @@ -182,7 +183,7 @@ jobs: run: ./script/remote-server && ./script/install-mold 2.34.0 - name: Build Remote Server - run: RUSTFLAGS="-D warnings" cargo build -p remote_server + run: cargo build -p remote_server # todo(windows): Actually run the tests windows_tests: @@ -207,7 +208,7 @@ jobs: run: cargo xtask clippy - name: Build Zed - run: $env:RUSTFLAGS="-D warnings"; cargo build + run: cargo build bundle-mac: timeout-minutes: 60 From 03bd95405b3398691c64d16b02783b6dce214c18 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 14:14:51 +0100 Subject: [PATCH 14/87] docs: Add diagram to remote development docs (#19827) Release Notes: - N/A --- docs/src/remote-development.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 708d0e0b39..9dc1777f39 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -8,6 +8,10 @@ Remote Development allows you to code at the speed of thought, even when your co Remote development requires two computers, your local machine that runs the Zed UI and the remote server which runs a Zed headless server. The two communicate over SSH, so you will need to be able to SSH from your local machine into the remote server to use this feature. +![Architectural overview of Zed Remote Development](https://zed.dev/img/remote-development/diagram.png) + +On your local machine, Zed runs its UI, talks to language models, uses Tree-sitter to parse and syntax-highlight code, and store unsaved changes and recent projects. The source code, language servers, tasks, and the terminal all run on the remote server. + > **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no-longer use that mode. 
## Setup @@ -15,7 +19,7 @@ Remote development requires two computers, your local machine that runs the Zed 1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). You need at least Zed v0.159. 1. Open the remote projects dialogue with cmd-shift-p remote or cmd-control-o. 1. Click "Connect New Server" and enter the command you use to SSH into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass. -1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, it will download the latest version of the Zed server and upload it to the remote over SSH. +1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, Zed will download the server on the remote host and start it. 1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server. > **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos. From cdddb4d3603691406ac846f97bc66699c3dc093a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 28 Oct 2024 15:34:03 +0100 Subject: [PATCH 15/87] Add language toolchains (#19576) This PR adds support for selecting toolchains for a given language (e.g. Rust toolchains or Python virtual environments) with support for SSH projects provided out of the box. For Python we piggy-back off of [PET](https://github.com/microsoft/python-environment-tools), a library maintained by Microsoft. 
Closes #16421 Closes #7646 Release Notes: - Added toolchain selector to the status bar (with initial support for Python virtual environments) --- .github/workflows/ci.yml | 3 + Cargo.lock | 493 ++++++++++++++++-- Cargo.toml | 7 + assets/settings/default.json | 1 + crates/extension/src/extension_lsp_adapter.rs | 4 +- crates/extension/src/extension_store.rs | 23 +- crates/language/src/language.rs | 16 +- crates/language/src/language_registry.rs | 63 ++- crates/language/src/toolchain.rs | 65 +++ crates/languages/Cargo.toml | 5 + crates/languages/src/json.rs | 5 +- crates/languages/src/lib.rs | 55 +- crates/languages/src/python.rs | 115 +++- crates/languages/src/tailwind.rs | 3 +- crates/languages/src/typescript.rs | 4 +- crates/languages/src/vtsls.rs | 3 +- crates/languages/src/yaml.rs | 4 +- crates/project/src/lsp_store.rs | 135 +++-- crates/project/src/project.rs | 65 ++- crates/project/src/toolchain_store.rs | 416 +++++++++++++++ crates/proto/proto/zed.proto | 49 +- crates/proto/src/proto.rs | 17 +- crates/remote_server/src/headless_project.rs | 7 +- crates/toolchain_selector/Cargo.toml | 24 + crates/toolchain_selector/LICENSE-GPL | 1 + .../src/active_toolchain.rs | 173 ++++++ .../src/toolchain_selector.rs | 343 ++++++++++++ crates/workspace/src/persistence.rs | 96 +++- crates/workspace/src/workspace.rs | 16 + crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + crates/zed/src/zed.rs | 3 + script/licenses/zed-licenses.toml | 138 +++++ 33 files changed, 2221 insertions(+), 133 deletions(-) create mode 100644 crates/language/src/toolchain.rs create mode 100644 crates/project/src/toolchain_store.rs create mode 100644 crates/toolchain_selector/Cargo.toml create mode 120000 crates/toolchain_selector/LICENSE-GPL create mode 100644 crates/toolchain_selector/src/active_toolchain.rs create mode 100644 crates/toolchain_selector/src/toolchain_selector.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dc38baeae9..84ed0dd5d4 100644 --- 
a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -192,6 +192,9 @@ jobs: if: github.repository_owner == 'zed-industries' runs-on: hosted-windows-1 steps: + # more info here:- https://github.com/rust-lang/cargo/issues/13020 + - name: Enable longer pathnames for git + run: git config --system core.longpaths true - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: diff --git a/Cargo.lock b/Cargo.lock index 91b76f33e8..bd9ad91bf7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -291,6 +291,12 @@ dependencies = [ "syn 2.0.76", ] +[[package]] +name = "arraydeque" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" + [[package]] name = "arrayref" version = "0.3.8" @@ -385,7 +391,7 @@ dependencies = [ "ctor", "db", "editor", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "futures 0.3.30", @@ -2551,7 +2557,7 @@ dependencies = [ "dashmap 6.0.1", "derive_more", "editor", - "env_logger", + "env_logger 0.11.5", "envy", "file_finder", "fs", @@ -2706,7 +2712,7 @@ dependencies = [ "command_palette_hooks", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "fuzzy", "go_to_line", "gpui", @@ -3483,7 +3489,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -3671,7 +3677,7 @@ dependencies = [ "ctor", "db", "emojis", - "env_logger", + "env_logger 0.11.5", "file_icons", "futures 0.3.30", "fuzzy", @@ -3877,6 +3883,19 @@ dependencies = [ "regex", ] +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + [[package]] name = "env_logger" version = "0.11.5" @@ -3985,7 +4004,7 @@ dependencies = [ 
"client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "git", @@ -4080,7 +4099,7 @@ dependencies = [ "client", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "gpui", @@ -4122,7 +4141,7 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", - "env_logger", + "env_logger 0.11.5", "extension", "fs", "language", @@ -4281,7 +4300,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "file_icons", "futures 0.3.30", "fuzzy", @@ -5036,7 +5055,7 @@ dependencies = [ "ctor", "derive_more", "embed-resource", - "env_logger", + "env_logger 0.11.5", "etagere", "filedescriptor", "flume", @@ -5226,6 +5245,15 @@ dependencies = [ "serde", ] +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "hashlink" version = "0.9.1" @@ -6184,7 +6212,7 @@ dependencies = [ "collections", "ctor", "ec4rs", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "fuzzy", "git", @@ -6241,7 +6269,7 @@ dependencies = [ "copilot", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "feature_flags", "futures 0.3.30", "google_ai", @@ -6298,7 +6326,7 @@ dependencies = [ "collections", "copilot", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -6332,6 +6360,11 @@ dependencies = [ "lsp", "node_runtime", "paths", + "pet", + "pet-conda", + "pet-core", + "pet-poetry", + "pet-reporter", "project", "regex", "rope", @@ -6628,7 +6661,7 @@ dependencies = [ "async-pipe", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "log", @@ -6711,7 +6744,7 @@ version = "0.1.0" dependencies = [ "anyhow", "assets", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -6824,7 +6857,7 @@ dependencies 
= [ "clap", "clap_complete", "elasticlunr-rs", - "env_logger", + "env_logger 0.11.5", "futures-util", "handlebars 5.1.2", "ignore", @@ -7006,6 +7039,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "msvc_spectre_libs" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8661ace213a0a130c7c5b9542df5023aedf092a02008ccf477b39ff108990305" +dependencies = [ + "cc", +] + [[package]] name = "multi_buffer" version = "0.1.0" @@ -7014,7 +7056,7 @@ dependencies = [ "clock", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "itertools 0.13.0", @@ -7974,6 +8016,366 @@ dependencies = [ "sha2", ] +[[package]] +name = "pet" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "clap", + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-env-var-path", + "pet-fs", + "pet-global-virtualenvs", + "pet-homebrew", + "pet-jsonrpc", + "pet-linux-global-python", + "pet-mac-commandlinetools", + "pet-mac-python-org", + "pet-mac-xcode", + "pet-pipenv", + "pet-poetry", + "pet-pyenv", + "pet-python-utils", + "pet-reporter", + "pet-telemetry", + "pet-venv", + "pet-virtualenv", + "pet-virtualenvwrapper", + "pet-windows-registry", + "pet-windows-store", + "serde", + "serde_json", +] + +[[package]] +name = "pet-conda" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "regex", + "serde", + "serde_json", + "yaml-rust2", +] + +[[package]] +name = "pet-core" +version = "0.1.0" +source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "clap", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-fs", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-env-var-path" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", +] + +[[package]] +name = "pet-fs" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", +] + +[[package]] +name = "pet-global-virtualenvs" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-virtualenv", +] + +[[package]] +name = "pet-homebrew" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-jsonrpc" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "log", + "msvc_spectre_libs", + "pet-core", + "serde", + 
"serde_json", +] + +[[package]] +name = "pet-linux-global-python" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-commandlinetools" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-python-org" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-xcode" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-pipenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-poetry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "base64 0.22.1", + 
"lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "pet-virtualenv", + "regex", + "serde", + "serde_json", + "sha2", + "toml 0.8.19", +] + +[[package]] +name = "pet-pyenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-python-utils" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "regex", + "serde", + "serde_json", + "sha2", +] + +[[package]] +name = "pet-reporter" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-jsonrpc", + "serde", + "serde_json", +] + +[[package]] +name = "pet-telemetry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "regex", +] + +[[package]] +name = "pet-venv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + 
"pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-virtualenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", +] + +[[package]] +name = "pet-virtualenvwrapper" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-windows-registry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "pet-windows-store", + "regex", + "winreg 0.52.0", +] + +[[package]] +name = "pet-windows-store" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", + "winreg 0.52.0", +] + [[package]] name = "petgraph" version = "0.6.5" @@ -8062,7 +8464,7 @@ dependencies = [ "anyhow", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "gpui", "menu", "serde", @@ -8408,7 +8810,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "fuzzy", @@ -9123,7 +9525,7 @@ dependencies = [ "clap", "client", "clock", - "env_logger", + "env_logger 0.11.5", "fork", "fs", "futures 0.3.30", @@ 
-9174,7 +9576,7 @@ dependencies = [ "collections", "command_palette_hooks", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "http_client", @@ -9454,7 +9856,7 @@ dependencies = [ "arrayvec", "criterion", "ctor", - "env_logger", + "env_logger 0.11.5", "gpui", "log", "rand 0.8.5", @@ -9485,7 +9887,7 @@ dependencies = [ "base64 0.22.1", "chrono", "collections", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "parking_lot", @@ -10074,7 +10476,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "futures 0.3.30", @@ -10767,7 +11169,7 @@ dependencies = [ "futures-io", "futures-util", "hashbrown 0.14.5", - "hashlink", + "hashlink 0.9.1", "hex", "indexmap 2.4.0", "log", @@ -11091,7 +11493,7 @@ version = "0.1.0" dependencies = [ "arrayvec", "ctor", - "env_logger", + "env_logger 0.11.5", "log", "rand 0.8.5", "rayon", @@ -11105,7 +11507,7 @@ dependencies = [ "client", "collections", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "http_client", @@ -11404,7 +11806,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "gpui", "language", "menu", @@ -11611,7 +12013,7 @@ dependencies = [ "clock", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "gpui", "http_client", "log", @@ -12100,6 +12502,21 @@ dependencies = [ "winnow 0.6.18", ] +[[package]] +name = "toolchain_selector" +version = "0.1.0" +dependencies = [ + "editor", + "fuzzy", + "gpui", + "language", + "picker", + "project", + "ui", + "util", + "workspace", +] + [[package]] name = "topological-sort" version = "0.2.2" @@ -14269,7 +14686,7 @@ dependencies = [ "collections", "db", "derive_more", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "git", @@ -14306,7 +14723,7 @@ dependencies = [ "anyhow", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "fuzzy", @@ -14476,6 +14893,17 @@ 
dependencies = [ "clap", ] +[[package]] +name = "yaml-rust2" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8902160c4e6f2fb145dbe9d6760a75e3c9522d8bf796ed7047c85919ac7115f8" +dependencies = [ + "arraydeque", + "encoding_rs", + "hashlink 0.8.4", +] + [[package]] name = "yansi" version = "1.0.1" @@ -14589,7 +15017,7 @@ dependencies = [ "db", "diagnostics", "editor", - "env_logger", + "env_logger 0.11.5", "extension", "extensions_ui", "feature_flags", @@ -14656,6 +15084,7 @@ dependencies = [ "theme", "theme_selector", "time", + "toolchain_selector", "tree-sitter-md", "tree-sitter-rust", "ui", diff --git a/Cargo.toml b/Cargo.toml index 64a2546020..0697cc0c0b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -117,6 +117,7 @@ members = [ "crates/theme_selector", "crates/time_format", "crates/title_bar", + "crates/toolchain_selector", "crates/ui", "crates/ui_input", "crates/ui_macros", @@ -290,6 +291,7 @@ theme_importer = { path = "crates/theme_importer" } theme_selector = { path = "crates/theme_selector" } time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } +toolchain_selector = { path = "crates/toolchain_selector" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } @@ -376,6 +378,11 @@ ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" pathdiff = "0.2" +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = 
"ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = "1.3.0" profiling = "1" diff --git a/assets/settings/default.json b/assets/settings/default.json index cd4e3db15c..879f6bb7fa 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -779,6 +779,7 @@ "tasks": { "variables": {} }, + "toolchain": { "name": "default", "path": "default" }, // An object whose keys are language names, and whose values // are arrays of filenames or extensions of files that should // use those languages. diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index 25179acec6..1557ef2153 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -8,7 +8,8 @@ use collections::HashMap; use futures::{Future, FutureExt}; use gpui::AsyncAppContext; use language::{ - CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate, + CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter, + LspAdapterDelegate, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use serde::Serialize; @@ -194,6 +195,7 @@ impl LspAdapter for ExtensionLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, _cx: &mut AsyncAppContext, ) -> Result { let delegate = delegate.clone(); diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 535d68326f..0a9299a8be 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -37,7 +37,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use indexed_docs::{IndexedDocsRegistry, ProviderId}; use language::{ LanguageConfig, 
LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry, - QUERY_FILENAME_PREFIXES, + LoadedLanguage, QUERY_FILENAME_PREFIXES, }; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -1102,14 +1102,21 @@ impl ExtensionStore { let config = std::fs::read_to_string(language_path.join("config.toml"))?; let config: LanguageConfig = ::toml::from_str(&config)?; let queries = load_plugin_queries(&language_path); - let tasks = std::fs::read_to_string(language_path.join("tasks.json")) - .ok() - .and_then(|contents| { - let definitions = serde_json_lenient::from_str(&contents).log_err()?; - Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) - }); + let context_provider = + std::fs::read_to_string(language_path.join("tasks.json")) + .ok() + .and_then(|contents| { + let definitions = + serde_json_lenient::from_str(&contents).log_err()?; + Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) + }); - Ok((config, queries, tasks)) + Ok(LoadedLanguage { + config, + queries, + context_provider, + toolchain_provider: None, + }) }, ); } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index c1c9cfebbe..e52794f81f 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -15,6 +15,7 @@ mod outline; pub mod proto; mod syntax_map; mod task_context; +mod toolchain; #[cfg(test)] pub mod buffer_tests; @@ -28,7 +29,7 @@ use futures::Future; use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; -pub use language_registry::LanguageName; +pub use language_registry::{LanguageName, LoadedLanguage}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use parking_lot::Mutex; use regex::Regex; @@ -61,6 +62,7 @@ use syntax_map::{QueryCursorHandle, SyntaxSnapshot}; use task::RunnableTag; pub use task_context::{ContextProvider, RunnableRange}; use theme::SyntaxTheme; +pub use 
toolchain::{LanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister}; use tree_sitter::{self, wasmtime, Query, QueryCursor, WasmStore}; use util::serde::default_true; @@ -502,6 +504,7 @@ pub trait LspAdapter: 'static + Send + Sync { async fn workspace_configuration( self: Arc, _: &Arc, + _: Arc, _cx: &mut AsyncAppContext, ) -> Result { Ok(serde_json::json!({})) @@ -855,6 +858,7 @@ pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Option>, pub(crate) context_provider: Option>, + pub(crate) toolchain: Option>, } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] @@ -983,6 +987,7 @@ impl Language { }) }), context_provider: None, + toolchain: None, } } @@ -991,6 +996,11 @@ impl Language { self } + pub fn with_toolchain_lister(mut self, provider: Option>) -> Self { + self.toolchain = provider; + self + } + pub fn with_queries(mut self, queries: LanguageQueries) -> Result { if let Some(query) = queries.highlights { self = self @@ -1361,6 +1371,10 @@ impl Language { self.context_provider.clone() } + pub fn toolchain_lister(&self) -> Option> { + self.toolchain.clone() + } + pub fn highlight_text<'a>( self: &'a Arc, text: &'a Rope, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 880ae3b611..caea801ce5 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -4,7 +4,7 @@ use crate::{ }, task_context::ContextProvider, with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, - LanguageServerName, LspAdapter, PLAIN_TEXT, + LanguageServerName, LspAdapter, ToolchainLister, PLAIN_TEXT, }; use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; @@ -75,6 +75,13 @@ impl<'a> From<&'a str> for LanguageName { } } +impl From for String { + fn from(value: LanguageName) -> Self { + let value: &str = &value.0; + Self::from(value) + } +} + pub struct LanguageRegistry { 
state: RwLock, language_server_download_dir: Option>, @@ -123,16 +130,7 @@ pub struct AvailableLanguage { name: LanguageName, grammar: Option>, matcher: LanguageMatcher, - load: Arc< - dyn Fn() -> Result<( - LanguageConfig, - LanguageQueries, - Option>, - )> - + 'static - + Send - + Sync, - >, + load: Arc Result + 'static + Send + Sync>, loaded: bool, } @@ -200,6 +198,13 @@ struct LspBinaryStatusSender { txs: Arc>>>, } +pub struct LoadedLanguage { + pub config: LanguageConfig, + pub queries: LanguageQueries, + pub context_provider: Option>, + pub toolchain_provider: Option>, +} + impl LanguageRegistry { pub fn new(executor: BackgroundExecutor) -> Self { let this = Self { @@ -283,7 +288,14 @@ impl LanguageRegistry { config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), Default::default(), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: Default::default(), + toolchain_provider: None, + context_provider: None, + }) + }, ) } @@ -424,14 +436,7 @@ impl LanguageRegistry { name: LanguageName, grammar_name: Option>, matcher: LanguageMatcher, - load: impl Fn() -> Result<( - LanguageConfig, - LanguageQueries, - Option>, - )> - + 'static - + Send - + Sync, + load: impl Fn() -> Result + 'static + Send + Sync, ) { let load = Arc::new(load); let state = &mut *self.state.write(); @@ -726,16 +731,18 @@ impl LanguageRegistry { self.executor .spawn(async move { let language = async { - let (config, queries, provider) = (language_load)()?; - - if let Some(grammar) = config.grammar.clone() { + let loaded_language = (language_load)()?; + if let Some(grammar) = loaded_language.config.grammar.clone() { let grammar = Some(this.get_or_load_grammar(grammar).await?); - Language::new_with_id(id, config, grammar) - .with_context_provider(provider) - .with_queries(queries) + + Language::new_with_id(id, loaded_language.config, grammar) + .with_context_provider(loaded_language.context_provider) + 
.with_toolchain_lister(loaded_language.toolchain_provider) + .with_queries(loaded_language.queries) } else { - Ok(Language::new_with_id(id, config, None) - .with_context_provider(provider)) + Ok(Language::new_with_id(id, loaded_language.config, None) + .with_context_provider(loaded_language.context_provider) + .with_toolchain_lister(loaded_language.toolchain_provider)) } } .await; diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs new file mode 100644 index 0000000000..efb27008d0 --- /dev/null +++ b/crates/language/src/toolchain.rs @@ -0,0 +1,65 @@ +//! Provides support for language toolchains. +//! +//! A language can have associated toolchains, +//! which is a set of tools used to interact with the projects written in said language. +//! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. + +use std::{path::PathBuf, sync::Arc}; + +use async_trait::async_trait; +use gpui::{AsyncAppContext, SharedString}; +use settings::WorktreeId; + +use crate::LanguageName; + +/// Represents a single toolchain. 
+#[derive(Clone, Debug, PartialEq)] +pub struct Toolchain { + /// User-facing label + pub name: SharedString, + pub path: SharedString, + pub language_name: LanguageName, +} + +#[async_trait(?Send)] +pub trait ToolchainLister: Send + Sync { + async fn list(&self, _: PathBuf) -> ToolchainList; +} + +#[async_trait(?Send)] +pub trait LanguageToolchainStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option; +} + +type DefaultIndex = usize; +#[derive(Default, Clone)] +pub struct ToolchainList { + pub toolchains: Vec, + pub default: Option, + pub groups: Box<[(usize, SharedString)]>, +} + +impl ToolchainList { + pub fn toolchains(&self) -> &[Toolchain] { + &self.toolchains + } + pub fn default_toolchain(&self) -> Option { + self.default.and_then(|ix| self.toolchains.get(ix)).cloned() + } + pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> { + if index >= self.toolchains.len() { + return None; + } + let first_equal_or_greater = self + .groups + .partition_point(|(group_lower_bound, _)| group_lower_bound <= &index); + self.groups + .get(first_equal_or_greater.checked_sub(1)?) 
+ .cloned() + } +} diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index d6746575f3..29c52ba301 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -47,6 +47,11 @@ log.workspace = true lsp.workspace = true node_runtime.workspace = true paths.workspace = true +pet.workspace = true +pet-core.workspace = true +pet-conda.workspace = true +pet-poetry.workspace = true +pet-reporter.workspace = true project.workspace = true regex.workspace = true rope.workspace = true diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 95c4070b13..28ee884307 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,7 +7,9 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{ + LanguageRegistry, LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate, +}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -198,6 +200,7 @@ impl LspAdapter for JsonLspAdapter { async fn workspace_configuration( self: Arc, _: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 03c4735d6d..2fd8ffa633 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -3,7 +3,7 @@ use gpui::{AppContext, UpdateGlobal}; use json::json_task_context; pub use language::*; use node_runtime::NodeRuntime; -use python::PythonContextProvider; +use python::{PythonContextProvider, PythonToolchainProvider}; use rust_embed::RustEmbed; use settings::SettingsStore; use smol::stream::StreamExt; @@ -61,7 +61,14 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), 
config.matcher.clone(), - move || Ok((config.clone(), load_queries($name), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: None, + toolchain_provider: None, + }) + }, ); }; ($name:literal, $adapters:expr) => { @@ -75,7 +82,14 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), load_queries($name), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: None, + toolchain_provider: None, + }) + }, ); }; ($name:literal, $adapters:expr, $context_provider:expr) => { @@ -90,11 +104,33 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.grammar.clone(), config.matcher.clone(), move || { - Ok(( - config.clone(), - load_queries($name), - Some(Arc::new($context_provider)), - )) + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: Some(Arc::new($context_provider)), + toolchain_provider: None, + }) + }, + ); + }; + ($name:literal, $adapters:expr, $context_provider:expr, $toolchain_provider:expr) => { + let config = load_config($name); + // typeck helper + let adapters: Vec> = $adapters; + for adapter in adapters { + languages.register_lsp_adapter(config.name.clone(), adapter); + } + languages.register_language( + config.name.clone(), + config.grammar.clone(), + config.matcher.clone(), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: Some(Arc::new($context_provider)), + toolchain_provider: Some($toolchain_provider), + }) }, ); }; @@ -141,7 +177,8 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu vec![Arc::new(python::PythonLspAdapter::new( node_runtime.clone(), ))], - PythonContextProvider + PythonContextProvider, + Arc::new(PythonToolchainProvider::default()) as Arc ); language!( "rust", 
diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 4b5fe3d277..e73e3c8682 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -3,9 +3,16 @@ use async_trait::async_trait; use collections::HashMap; use gpui::AppContext; use gpui::AsyncAppContext; +use language::LanguageName; +use language::LanguageToolchainStore; +use language::Toolchain; +use language::ToolchainList; +use language::ToolchainLister; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; +use pet_core::python_environment::PythonEnvironmentKind; +use pet_core::Configuration; use project::lsp_store::language_server_settings; use serde_json::Value; @@ -200,12 +207,35 @@ impl LspAdapter for PythonLspAdapter { async fn workspace_configuration( self: Arc, adapter: &Arc, + toolchains: Arc, cx: &mut AsyncAppContext, ) -> Result { - cx.update(|cx| { - language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) - .and_then(|s| s.settings.clone()) - .unwrap_or_default() + let toolchain = toolchains + .active_toolchain(adapter.worktree_id(), LanguageName::new("Python"), cx) + .await; + cx.update(move |cx| { + let mut user_settings = + language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) + .and_then(|s| s.settings.clone()) + .unwrap_or_default(); + + // If python.pythonPath is not set in user config, do so using our toolchain picker. 
+ if let Some(toolchain) = toolchain { + if user_settings.is_null() { + user_settings = Value::Object(serde_json::Map::default()); + } + let object = user_settings.as_object_mut().unwrap(); + if let Some(python) = object + .entry("python") + .or_insert(Value::Object(serde_json::Map::default())) + .as_object_mut() + { + python + .entry("pythonPath") + .or_insert(Value::String(toolchain.path.into())); + } + } + user_settings }) } } @@ -320,6 +350,83 @@ fn python_module_name_from_relative_path(relative_path: &str) -> String { .to_string() } +#[derive(Default)] +pub(crate) struct PythonToolchainProvider {} + +static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[ + // Prioritize non-Conda environments. + PythonEnvironmentKind::Poetry, + PythonEnvironmentKind::Pipenv, + PythonEnvironmentKind::VirtualEnvWrapper, + PythonEnvironmentKind::Venv, + PythonEnvironmentKind::VirtualEnv, + PythonEnvironmentKind::Conda, + PythonEnvironmentKind::Pyenv, + PythonEnvironmentKind::GlobalPaths, + PythonEnvironmentKind::Homebrew, +]; + +fn env_priority(kind: Option) -> usize { + if let Some(kind) = kind { + ENV_PRIORITY_LIST + .iter() + .position(|blessed_env| blessed_env == &kind) + .unwrap_or(ENV_PRIORITY_LIST.len()) + } else { + // Unknown toolchains are less useful than non-blessed ones. 
+ ENV_PRIORITY_LIST.len() + 1 + } +} + +#[async_trait(?Send)] +impl ToolchainLister for PythonToolchainProvider { + async fn list(&self, worktree_root: PathBuf) -> ToolchainList { + let environment = pet_core::os_environment::EnvironmentApi::new(); + let locators = pet::locators::create_locators( + Arc::new(pet_conda::Conda::from(&environment)), + Arc::new(pet_poetry::Poetry::from(&environment)), + &environment, + ); + let mut config = Configuration::default(); + config.workspace_directories = Some(vec![worktree_root]); + let reporter = pet_reporter::collect::create_reporter(); + pet::find::find_and_report_envs(&reporter, config, &locators, &environment, None); + + let mut toolchains = reporter + .environments + .lock() + .ok() + .map_or(Vec::new(), |mut guard| std::mem::take(&mut guard)); + toolchains.sort_by(|lhs, rhs| { + env_priority(lhs.kind) + .cmp(&env_priority(rhs.kind)) + .then_with(|| lhs.executable.cmp(&rhs.executable)) + }); + let mut toolchains: Vec<_> = toolchains + .into_iter() + .filter_map(|toolchain| { + let name = if let Some(version) = &toolchain.version { + format!("Python {version} ({:?})", toolchain.kind?) + } else { + format!("{:?}", toolchain.kind?) 
+ } + .into(); + Some(Toolchain { + name, + path: toolchain.executable?.to_str()?.to_owned().into(), + language_name: LanguageName::new("Python"), + }) + }) + .collect(); + toolchains.dedup(); + ToolchainList { + toolchains, + default: None, + groups: Default::default(), + } + } +} + #[cfg(test)] mod tests { use gpui::{BorrowAppContext, Context, ModelContext, TestAppContext}; diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 4ed5c742a9..6d4416c7d9 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -3,7 +3,7 @@ use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; use gpui::AsyncAppContext; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -111,6 +111,7 @@ impl LspAdapter for TailwindLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index cfd7e04bc6..345a5f0694 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -5,7 +5,7 @@ use async_trait::async_trait; use collections::HashMap; use gpui::AsyncAppContext; use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion}; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -230,6 +230,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: 
&mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { @@ -325,6 +326,7 @@ impl LspAdapter for EsLintLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let workspace_root = delegate.worktree_root_path(); diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index ff8637dc28..ae65488a38 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -2,7 +2,7 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use collections::HashMap; use gpui::AsyncAppContext; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -183,6 +183,7 @@ impl LspAdapter for VtslsLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let tsdk_path = Self::tsdk_path(delegate).await; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 9f1c468b87..d8f927b770 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -3,7 +3,8 @@ use async_trait::async_trait; use futures::StreamExt; use gpui::AsyncAppContext; use language::{ - language_settings::AllLanguageSettings, LanguageServerName, LspAdapter, LspAdapterDelegate, + language_settings::AllLanguageSettings, LanguageServerName, LanguageToolchainStore, LspAdapter, + LspAdapterDelegate, }; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; @@ -92,6 +93,7 @@ impl LspAdapter for YamlLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let location = SettingsLocation { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 8152ddb3c0..40e87b55e5 
100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7,10 +7,11 @@ use crate::{ prettier_store::{self, PrettierStore, PrettierStoreEvent}, project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, + toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent}, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, CodeAction, Completion, CoreCompletion, Hover, InlayHint, Item as _, ProjectPath, - ProjectTransaction, ResolveState, Symbol, + ProjectTransaction, ResolveState, Symbol, ToolchainStore, }; use anyhow::{anyhow, Context as _, Result}; use async_trait::async_trait; @@ -36,9 +37,9 @@ use language::{ proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageName, - LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LocalFile, LspAdapter, - LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LanguageToolchainStore, + LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, @@ -707,12 +708,13 @@ pub struct LspStore { nonce: u128, buffer_store: Model, worktree_store: Model, + toolchain_store: Option>, buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots pub languages: Arc, language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, pub language_server_statuses: BTreeMap, active_entry: Option, - _maintain_workspace_config: Task>, + _maintain_workspace_config: (Task>, watch::Sender<()>), _maintain_buffer_languages: Task<()>, next_diagnostic_group_id: usize, 
diagnostic_summaries: @@ -871,6 +873,7 @@ impl LspStore { buffer_store: Model, worktree_store: Model, prettier_store: Model, + toolchain_store: Model, environment: Model, languages: Arc, http_client: Arc, @@ -884,9 +887,15 @@ impl LspStore { .detach(); cx.subscribe(&prettier_store, Self::on_prettier_store_event) .detach(); + cx.subscribe(&toolchain_store, Self::on_toolchain_store_event) + .detach(); cx.observe_global::(Self::on_settings_changed) .detach(); + let _maintain_workspace_config = { + let (sender, receiver) = watch::channel(); + (Self::maintain_workspace_config(receiver, cx), sender) + }; Self { mode: LspStoreMode::Local(LocalLspStore { supplementary_language_servers: Default::default(), @@ -909,6 +918,7 @@ impl LspStore { downstream_client: None, buffer_store, worktree_store, + toolchain_store: Some(toolchain_store), languages: languages.clone(), language_server_ids: Default::default(), language_server_statuses: Default::default(), @@ -919,7 +929,7 @@ impl LspStore { diagnostics: Default::default(), active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } } @@ -942,9 +952,10 @@ impl LspStore { }) } - pub fn new_remote( + pub(super) fn new_remote( buffer_store: Model, worktree_store: Model, + toolchain_store: Option>, languages: Arc, upstream_client: AnyProtoClient, project_id: u64, @@ -954,7 +965,10 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - + let _maintain_workspace_config = { + let (sender, receiver) = watch::channel(); + (Self::maintain_workspace_config(receiver, cx), sender) + }; Self { mode: LspStoreMode::Remote(RemoteLspStore { upstream_client: Some(upstream_client), @@ -972,7 +986,8 @@ impl LspStore { diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, - _maintain_workspace_config: 
Self::maintain_workspace_config(cx), + toolchain_store, + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } } @@ -1063,6 +1078,22 @@ impl LspStore { } } + fn on_toolchain_store_event( + &mut self, + _: Model, + event: &ToolchainStoreEvent, + _: &mut ModelContext, + ) { + match event { + ToolchainStoreEvent::ToolchainActivated { .. } => { + self.request_workspace_config_refresh() + } + } + } + + fn request_workspace_config_refresh(&mut self) { + *self._maintain_workspace_config.1.borrow_mut() = (); + } // todo! pub fn prettier_store(&self) -> Option> { self.as_local().map(|local| local.prettier_store.clone()) @@ -3029,17 +3060,13 @@ impl LspStore { None } - fn maintain_workspace_config(cx: &mut ModelContext) -> Task> { - let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel(); - let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx); - - let settings_observation = cx.observe_global::(move |_, _| { - *settings_changed_tx.borrow_mut() = (); - }); - - cx.spawn(move |this, mut cx| async move { - while let Some(()) = settings_changed_rx.next().await { - let servers = this.update(&mut cx, |this, cx| { + pub(crate) async fn refresh_workspace_configurations( + this: &WeakModel, + mut cx: AsyncAppContext, + ) { + maybe!(async move { + let servers = this + .update(&mut cx, |this, cx| { this.language_server_ids .iter() .filter_map(|((worktree_id, _), server_id)| { @@ -3061,17 +3088,52 @@ impl LspStore { } }) .collect::>() - })?; + }) + .ok()?; - for (adapter, server, delegate) in servers { - let settings = adapter.workspace_configuration(&delegate, &mut cx).await?; + let toolchain_store = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; + for (adapter, server, delegate) in servers { + let settings = adapter + .workspace_configuration(&delegate, toolchain_store.clone(), &mut cx) + .await + .ok()?; - server - .notify::( - lsp::DidChangeConfigurationParams { 
settings }, - ) - .ok(); - } + server + .notify::( + lsp::DidChangeConfigurationParams { settings }, + ) + .ok(); + } + Some(()) + }) + .await; + } + + fn toolchain_store(&self, cx: &AppContext) -> Arc { + if let Some(toolchain_store) = self.toolchain_store.as_ref() { + toolchain_store.read(cx).as_language_toolchain_store() + } else { + Arc::new(EmptyToolchainStore) + } + } + fn maintain_workspace_config( + external_refresh_requests: watch::Receiver<()>, + cx: &mut ModelContext, + ) -> Task> { + let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel(); + let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx); + + let settings_observation = cx.observe_global::(move |_, _| { + *settings_changed_tx.borrow_mut() = (); + }); + + let mut joint_future = + futures::stream::select(settings_changed_rx, external_refresh_requests); + cx.spawn(move |this, cx| async move { + while let Some(()) = joint_future.next().await { + Self::refresh_workspace_configurations(&this, cx.clone()).await; } drop(settings_observation); @@ -5517,6 +5579,9 @@ impl LspStore { let delegate = delegate.clone(); let adapter = adapter.clone(); let this = this.clone(); + let toolchains = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; let mut cx = cx.clone(); async move { let language_server = pending_server.await?; @@ -5524,7 +5589,7 @@ impl LspStore { let workspace_config = adapter .adapter .clone() - .workspace_configuration(&delegate, &mut cx) + .workspace_configuration(&delegate, toolchains.clone(), &mut cx) .await?; let mut initialization_options = adapter @@ -5864,17 +5929,21 @@ impl LspStore { } }) .detach(); - language_server .on_request::({ let adapter = adapter.adapter.clone(); let delegate = delegate.clone(); + let this = this.clone(); move |params, mut cx| { let adapter = adapter.clone(); let delegate = delegate.clone(); + let this = this.clone(); async move { - let workspace_config = - adapter.workspace_configuration(&delegate, &mut 
cx).await?; + let toolchains = + this.update(&mut cx, |this, cx| this.toolchain_store(cx))?; + let workspace_config = adapter + .workspace_configuration(&delegate, toolchains, &mut cx) + .await?; Ok(params .items .into_iter() diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 49f4b7c6f3..7a57e048c8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -11,6 +11,7 @@ pub mod search; mod task_inventory; pub mod task_store; pub mod terminals; +pub mod toolchain_store; pub mod worktree_store; #[cfg(test)] @@ -44,8 +45,8 @@ use itertools::Itertools; use language::{ language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, DiagnosticEntry, Documentation, File as _, Language, - LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageName, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, + Toolchain, ToolchainList, Transaction, Unclipped, }; use lsp::{ CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer, LanguageServerId, @@ -101,7 +102,7 @@ pub use lsp_store::{ LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, SERVER_PROGRESS_THROTTLE_TIMEOUT, }; - +pub use toolchain_store::ToolchainStore; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; const MAX_SEARCH_RESULT_FILES: usize = 5_000; const MAX_SEARCH_RESULT_RANGES: usize = 10_000; @@ -158,6 +159,7 @@ pub struct Project { snippets: Model, environment: Model, settings_observer: Model, + toolchain_store: Option>, } #[derive(Default)] @@ -579,6 +581,7 @@ impl Project { LspStore::init(&client); SettingsObserver::init(&client); TaskStore::init(Some(&client)); + ToolchainStore::init(&client); } pub fn local( @@ -635,12 +638,15 @@ impl Project { }); cx.subscribe(&settings_observer, Self::on_settings_observer_event) .detach(); - + let toolchain_store = cx.new_model(|cx| { + 
ToolchainStore::local(languages.clone(), worktree_store.clone(), cx) + }); let lsp_store = cx.new_model(|cx| { LspStore::new_local( buffer_store.clone(), worktree_store.clone(), prettier_store.clone(), + toolchain_store.clone(), environment.clone(), languages.clone(), client.http_client(), @@ -681,6 +687,8 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), + + toolchain_store: Some(toolchain_store), } }) } @@ -737,10 +745,14 @@ impl Project { .detach(); let environment = ProjectEnvironment::new(&worktree_store, None, cx); + let toolchain_store = Some(cx.new_model(|cx| { + ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx) + })); let lsp_store = cx.new_model(|cx| { LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), + toolchain_store.clone(), languages.clone(), ssh_proto.clone(), SSH_PROJECT_ID, @@ -798,6 +810,8 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), + + toolchain_store, }; let ssh = ssh.read(cx); @@ -818,6 +832,7 @@ impl Project { LspStore::init(&ssh_proto); SettingsObserver::init(&ssh_proto); TaskStore::init(Some(&ssh_proto)); + ToolchainStore::init(&ssh_proto); this }) @@ -905,6 +920,7 @@ impl Project { let mut lsp_store = LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), + None, languages.clone(), client.clone().into(), remote_id, @@ -993,6 +1009,7 @@ impl Project { search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), + toolchain_store: None, }; this.set_role(role, cx); for worktree in worktrees { @@ -2346,6 +2363,46 @@ impl Project { .map_err(|e| anyhow!(e)) } + pub fn available_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + if let 
Some(toolchain_store) = self.toolchain_store.as_ref() { + toolchain_store + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } else { + Task::ready(None) + } + } + pub fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut AppContext, + ) -> Task> { + let Some(toolchain_store) = self.toolchain_store.clone() else { + return Task::ready(None); + }; + toolchain_store.update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }) + } + pub fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let Some(toolchain_store) = self.toolchain_store.clone() else { + return Task::ready(None); + }; + toolchain_store + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } pub fn language_server_statuses<'a>( &'a self, cx: &'a AppContext, diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs new file mode 100644 index 0000000000..a3f27d731b --- /dev/null +++ b/crates/project/src/toolchain_store.rs @@ -0,0 +1,416 @@ +use std::sync::Arc; + +use anyhow::{bail, Result}; + +use async_trait::async_trait; +use collections::BTreeMap; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task, + WeakModel, +}; +use language::{LanguageName, LanguageRegistry, LanguageToolchainStore, Toolchain, ToolchainList}; +use rpc::{proto, AnyProtoClient, TypedEnvelope}; +use settings::WorktreeId; +use util::ResultExt as _; + +use crate::worktree_store::WorktreeStore; + +pub struct ToolchainStore(ToolchainStoreInner); +enum ToolchainStoreInner { + Local(Model, #[allow(dead_code)] Subscription), + Remote(Model), +} + +impl EventEmitter for ToolchainStore {} +impl ToolchainStore { + pub fn init(client: &AnyProtoClient) { + client.add_model_request_handler(Self::handle_activate_toolchain); + client.add_model_request_handler(Self::handle_list_toolchains); + 
client.add_model_request_handler(Self::handle_active_toolchain); + } + + pub fn local( + languages: Arc, + worktree_store: Model, + cx: &mut ModelContext, + ) -> Self { + let model = cx.new_model(|_| LocalToolchainStore { + languages, + worktree_store, + active_toolchains: Default::default(), + }); + let subscription = cx.subscribe(&model, |_, _, e: &ToolchainStoreEvent, cx| { + cx.emit(e.clone()) + }); + Self(ToolchainStoreInner::Local(model, subscription)) + } + pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut AppContext) -> Self { + Self(ToolchainStoreInner::Remote( + cx.new_model(|_| RemoteToolchainStore { client, project_id }), + )) + } + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => local.update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }), + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .activate_toolchain(worktree_id, toolchain, cx) + } + } + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => { + local + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } + } + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => { + local + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } + } + } + async fn handle_activate_toolchain( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> 
Result { + this.update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let Some(toolchain) = envelope.payload.toolchain else { + bail!("Missing `toolchain` in payload"); + }; + let toolchain = Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + language_name, + }; + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + Ok(this.activate_toolchain(worktree_id, toolchain, cx)) + })?? + .await; + Ok(proto::Ack {}) + } + async fn handle_active_toolchain( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let toolchain = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.active_toolchain(worktree_id, language_name, cx) + })? + .await; + + Ok(proto::ActiveToolchainResponse { + toolchain: toolchain.map(|toolchain| proto::Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + }), + }) + } + + async fn handle_list_toolchains( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let toolchains = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.list_toolchains(worktree_id, language_name, cx) + })? 
+ .await; + let has_values = toolchains.is_some(); + let groups = if let Some(toolchains) = &toolchains { + toolchains + .groups + .iter() + .filter_map(|group| { + Some(proto::ToolchainGroup { + start_index: u64::try_from(group.0).ok()?, + name: String::from(group.1.as_ref()), + }) + }) + .collect() + } else { + vec![] + }; + let toolchains = if let Some(toolchains) = toolchains { + toolchains + .toolchains + .into_iter() + .map(|toolchain| proto::Toolchain { + name: toolchain.name.to_string(), + path: toolchain.path.to_string(), + }) + .collect::>() + } else { + vec![] + }; + + Ok(proto::ListToolchainsResponse { + has_values, + toolchains, + groups, + }) + } + pub(crate) fn as_language_toolchain_store(&self) -> Arc { + match &self.0 { + ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())), + ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())), + } + } +} + +struct LocalToolchainStore { + languages: Arc, + worktree_store: Model, + active_toolchains: BTreeMap<(WorktreeId, LanguageName), Toolchain>, +} + +#[async_trait(?Send)] +impl language::LanguageToolchainStore for LocalStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option { + self.0 + .update(cx, |this, cx| { + this.active_toolchain(worktree_id, language_name, cx) + }) + .ok()? + .await + } +} + +#[async_trait(?Send)] +impl language::LanguageToolchainStore for RemoteStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option { + self.0 + .update(cx, |this, cx| { + this.active_toolchain(worktree_id, language_name, cx) + }) + .ok()? 
+ .await + } +} + +pub(crate) struct EmptyToolchainStore; +#[async_trait(?Send)] +impl language::LanguageToolchainStore for EmptyToolchainStore { + async fn active_toolchain( + self: Arc, + _: WorktreeId, + _: LanguageName, + _: &mut AsyncAppContext, + ) -> Option { + None + } +} +struct LocalStore(WeakModel); +struct RemoteStore(WeakModel); + +#[derive(Clone)] +pub(crate) enum ToolchainStoreEvent { + ToolchainActivated, +} + +impl EventEmitter for LocalToolchainStore {} + +impl LocalToolchainStore { + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(move |this, mut cx| async move { + this.update(&mut cx, |this, cx| { + this.active_toolchains.insert( + (worktree_id, toolchain.language_name.clone()), + toolchain.clone(), + ); + cx.emit(ToolchainStoreEvent::ToolchainActivated); + }) + .ok(); + Some(()) + }) + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let registry = self.languages.clone(); + let Some(root) = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + else { + return Task::ready(None); + }; + cx.spawn(|_| async move { + let language = registry.language_for_name(&language_name.0).await.ok()?; + let toolchains = language.toolchain_lister()?.list(root.to_path_buf()).await; + Some(toolchains) + }) + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + _: &AppContext, + ) -> Task> { + Task::ready( + self.active_toolchains + .get(&(worktree_id, language_name)) + .cloned(), + ) + } +} +struct RemoteToolchainStore { + client: AnyProtoClient, + project_id: u64, +} + +impl RemoteToolchainStore { + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + 
let client = self.client.clone(); + cx.spawn(move |_| async move { + let _ = client + .request(proto::ActivateToolchain { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: toolchain.language_name.into(), + toolchain: Some(proto::Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + }), + }) + .await + .log_err()?; + Some(()) + }) + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let response = client + .request(proto::ListToolchains { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: language_name.clone().into(), + }) + .await + .log_err()?; + if !response.has_values { + return None; + } + let toolchains = response + .toolchains + .into_iter() + .map(|toolchain| Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + }) + .collect(); + let groups = response + .groups + .into_iter() + .filter_map(|group| { + Some((usize::try_from(group.start_index).ok()?, group.name.into())) + }) + .collect(); + Some(ToolchainList { + toolchains, + default: None, + groups, + }) + }) + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let response = client + .request(proto::ActiveToolchain { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: language_name.clone().into(), + }) + .await + .log_err()?; + + response.toolchain.map(|toolchain| Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + }) + }) + } +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 
53aaa6ef6d..95a54c3d5c 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -280,11 +280,15 @@ message Envelope { LanguageServerPromptRequest language_server_prompt_request = 268; LanguageServerPromptResponse language_server_prompt_response = 269; - GitBranches git_branches = 270; GitBranchesResponse git_branches_response = 271; - UpdateGitBranch update_git_branch = 272; // current max + UpdateGitBranch update_git_branch = 272; + ListToolchains list_toolchains = 273; + ListToolchainsResponse list_toolchains_response = 274; + ActivateToolchain activate_toolchain = 275; + ActiveToolchain active_toolchain = 276; + ActiveToolchainResponse active_toolchain_response = 277; // current max } @@ -2393,7 +2397,6 @@ message GetPermalinkToLine { message GetPermalinkToLineResponse { string permalink = 1; } - message FlushBufferedMessages {} message FlushBufferedMessagesResponse {} @@ -2419,6 +2422,45 @@ message LanguageServerPromptResponse { optional uint64 action_response = 1; } +message ListToolchains { + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; +} + +message Toolchain { + string name = 1; + string path = 2; +} + +message ToolchainGroup { + uint64 start_index = 1; + string name = 2; +} + +message ListToolchainsResponse { + repeated Toolchain toolchains = 1; + bool has_values = 2; + repeated ToolchainGroup groups = 3; +} + +message ActivateToolchain { + uint64 project_id = 1; + uint64 worktree_id = 2; + Toolchain toolchain = 3; + string language_name = 4; +} + +message ActiveToolchain { + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; +} + +message ActiveToolchainResponse { + optional Toolchain toolchain = 1; +} + message Branch { bool is_head = 1; string name = 2; @@ -2438,4 +2480,5 @@ message UpdateGitBranch { uint64 project_id = 1; string branch_name = 2; ProjectPath repository = 3; + } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index a7140cc7ed..7fcebf0513 
100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -358,7 +358,12 @@ messages!( (LanguageServerPromptResponse, Foreground), (GitBranches, Background), (GitBranchesResponse, Background), - (UpdateGitBranch, Background) + (UpdateGitBranch, Background), + (ListToolchains, Foreground), + (ListToolchainsResponse, Foreground), + (ActivateToolchain, Foreground), + (ActiveToolchain, Foreground), + (ActiveToolchainResponse, Foreground) ); request_messages!( @@ -475,7 +480,10 @@ request_messages!( (FlushBufferedMessages, Ack), (LanguageServerPromptRequest, LanguageServerPromptResponse), (GitBranches, GitBranchesResponse), - (UpdateGitBranch, Ack) + (UpdateGitBranch, Ack), + (ListToolchains, ListToolchainsResponse), + (ActivateToolchain, Ack), + (ActiveToolchain, ActiveToolchainResponse) ); entity_messages!( @@ -555,7 +563,10 @@ entity_messages!( GetPermalinkToLine, LanguageServerPromptRequest, GitBranches, - UpdateGitBranch + UpdateGitBranch, + ListToolchains, + ActivateToolchain, + ActiveToolchain ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 81be01b6a6..ce34af247f 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -10,7 +10,7 @@ use project::{ search::SearchQuery, task_store::TaskStore, worktree_store::WorktreeStore, - LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, + LspStore, LspStoreEvent, PrettierStore, ProjectPath, ToolchainStore, WorktreeId, }; use remote::ssh_session::ChannelClient; use rpc::{ @@ -108,11 +108,14 @@ impl HeadlessProject { observer.shared(SSH_PROJECT_ID, session.clone().into(), cx); observer }); + let toolchain_store = + cx.new_model(|cx| ToolchainStore::local(languages.clone(), worktree_store.clone(), cx)); let lsp_store = cx.new_model(|cx| { let mut lsp_store = LspStore::new_local( buffer_store.clone(), worktree_store.clone(), prettier_store.clone(), + 
toolchain_store.clone(), environment, languages.clone(), http_client, @@ -143,6 +146,7 @@ impl HeadlessProject { session.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle()); session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store); session.subscribe_to_entity(SSH_PROJECT_ID, &task_store); + session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store); session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer); client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory); @@ -166,6 +170,7 @@ impl HeadlessProject { SettingsObserver::init(&client); LspStore::init(&client); TaskStore::init(Some(&client)); + ToolchainStore::init(&client); HeadlessProject { session: client, diff --git a/crates/toolchain_selector/Cargo.toml b/crates/toolchain_selector/Cargo.toml new file mode 100644 index 0000000000..ed80bd0dc9 --- /dev/null +++ b/crates/toolchain_selector/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "toolchain_selector" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[dependencies] +editor.workspace = true +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +picker.workspace = true +project.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[lints] +workspace = true + +[lib] +path = "src/toolchain_selector.rs" +doctest = false diff --git a/crates/toolchain_selector/LICENSE-GPL b/crates/toolchain_selector/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/toolchain_selector/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs new file mode 100644 index 0000000000..74a6bd7107 --- /dev/null +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -0,0 +1,173 @@ +use editor::Editor; +use gpui::{ + div, AsyncWindowContext, EventEmitter, IntoElement, ParentElement, Render, 
Subscription, Task, + View, ViewContext, WeakModel, WeakView, +}; +use language::{Buffer, BufferEvent, LanguageName, Toolchain}; +use project::WorktreeId; +use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; +use workspace::{item::ItemHandle, StatusItemView, Workspace}; + +use crate::ToolchainSelector; + +pub struct ActiveToolchain { + active_toolchain: Option, + workspace: WeakView, + active_buffer: Option<(WorktreeId, WeakModel, Subscription)>, + _observe_language_changes: Subscription, + _update_toolchain_task: Task>, +} + +struct LanguageChanged; + +impl EventEmitter for ActiveToolchain {} + +impl ActiveToolchain { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let view = cx.view().clone(); + Self { + active_toolchain: None, + active_buffer: None, + workspace: workspace.weak_handle(), + _observe_language_changes: cx.subscribe(&view, |this, _, _: &LanguageChanged, cx| { + this._update_toolchain_task = Self::spawn_tracker_task(cx); + }), + _update_toolchain_task: Self::spawn_tracker_task(cx), + } + } + fn spawn_tracker_task(cx: &mut ViewContext) -> Task> { + cx.spawn(|this, mut cx| async move { + let active_file = this + .update(&mut cx, |this, _| { + this.active_buffer + .as_ref() + .map(|(_, buffer, _)| buffer.clone()) + }) + .ok() + .flatten()?; + let workspace = this + .update(&mut cx, |this, _| this.workspace.clone()) + .ok()?; + + let language_name = active_file + .update(&mut cx, |this, _| Some(this.language()?.name())) + .ok() + .flatten()?; + + let worktree_id = active_file + .update(&mut cx, |this, cx| Some(this.file()?.worktree_id(cx))) + .ok() + .flatten()?; + let toolchain = + Self::active_toolchain(workspace, worktree_id, language_name, cx.clone()).await?; + let _ = this.update(&mut cx, |this, cx| { + this.active_toolchain = Some(toolchain); + + cx.notify(); + }); + Some(()) + }) + } + + fn update_lister(&mut self, editor: View, cx: &mut ViewContext) { + let editor = editor.read(cx); + if let Some((_, 
buffer, _)) = editor.active_excerpt(cx) { + if let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { + let subscription = cx.subscribe(&buffer, |_, _, event: &BufferEvent, cx| { + if let BufferEvent::LanguageChanged = event { + cx.emit(LanguageChanged) + } + }); + self.active_buffer = Some((worktree_id, buffer.downgrade(), subscription)); + cx.emit(LanguageChanged); + } + } + + cx.notify(); + } + + fn active_toolchain( + workspace: WeakView, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: AsyncWindowContext, + ) -> Task> { + cx.spawn(move |mut cx| async move { + let workspace_id = workspace + .update(&mut cx, |this, _| this.database_id()) + .ok() + .flatten()?; + let selected_toolchain = workspace + .update(&mut cx, |this, cx| { + this.project() + .read(cx) + .active_toolchain(worktree_id, language_name.clone(), cx) + }) + .ok()? + .await; + if let Some(toolchain) = selected_toolchain { + Some(toolchain) + } else { + let project = workspace + .update(&mut cx, |this, _| this.project().clone()) + .ok()?; + let toolchains = cx + .update(|cx| { + project + .read(cx) + .available_toolchains(worktree_id, language_name, cx) + }) + .ok()? + .await?; + if let Some(toolchain) = toolchains.toolchains.first() { + // Since we don't have a selected toolchain, pick one for user here. + workspace::WORKSPACE_DB + .set_toolchain(workspace_id, worktree_id, toolchain.clone()) + .await + .ok()?; + project + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain.clone(), cx) + }) + .ok()? 
+ .await; + } + + toolchains.toolchains.first().cloned() + } + }) + } +} + +impl Render for ActiveToolchain { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div().when_some(self.active_toolchain.as_ref(), |el, active_toolchain| { + el.child( + Button::new("change-toolchain", active_toolchain.name.clone()) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, cx| { + if let Some(workspace) = this.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + ToolchainSelector::toggle(workspace, cx) + }); + } + })) + .tooltip(|cx| Tooltip::text("Select Toolchain", cx)), + ) + }) + } +} + +impl StatusItemView for ActiveToolchain { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + cx: &mut ViewContext, + ) { + if let Some(editor) = active_pane_item.and_then(|item| item.act_as::(cx)) { + self.active_toolchain.take(); + self.update_lister(editor, cx); + } + cx.notify(); + } +} diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs new file mode 100644 index 0000000000..8a3368f816 --- /dev/null +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -0,0 +1,343 @@ +mod active_toolchain; + +pub use active_toolchain::ActiveToolchain; +use editor::Editor; +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{ + actions, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model, + ParentElement, Render, Styled, Task, View, ViewContext, VisualContext, WeakView, +}; +use language::{LanguageName, Toolchain, ToolchainList}; +use picker::{Picker, PickerDelegate}; +use project::{Project, WorktreeId}; +use std::{path::Path, sync::Arc}; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; +use util::ResultExt; +use workspace::{ModalView, Workspace}; + +actions!(toolchain, [Select]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(ToolchainSelector::register).detach(); +} + +pub 
struct ToolchainSelector { + picker: View>, +} + +impl ToolchainSelector { + fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(move |workspace, _: &Select, cx| { + Self::toggle(workspace, cx); + }); + } + + fn toggle(workspace: &mut Workspace, cx: &mut ViewContext) -> Option<()> { + let (_, buffer, _) = workspace + .active_item(cx)? + .act_as::(cx)? + .read(cx) + .active_excerpt(cx)?; + let project = workspace.project().clone(); + + let language_name = buffer.read(cx).language()?.name(); + let worktree_id = buffer.read(cx).file()?.worktree_id(cx); + let worktree_root_path = project + .read(cx) + .worktree_for_id(worktree_id, cx)? + .read(cx) + .abs_path(); + let workspace_id = workspace.database_id()?; + let weak = workspace.weak_handle(); + cx.spawn(move |workspace, mut cx| async move { + let active_toolchain = workspace::WORKSPACE_DB + .toolchain(workspace_id, worktree_id, language_name.clone()) + .await + .ok() + .flatten(); + workspace + .update(&mut cx, |this, cx| { + this.toggle_modal(cx, move |cx| { + ToolchainSelector::new( + weak, + project, + active_toolchain, + worktree_id, + worktree_root_path, + language_name, + cx, + ) + }); + }) + .ok(); + }) + .detach(); + + Some(()) + } + + fn new( + workspace: WeakView, + project: Model, + active_toolchain: Option, + worktree_id: WorktreeId, + worktree_root: Arc, + language_name: LanguageName, + cx: &mut ViewContext, + ) -> Self { + let view = cx.view().downgrade(); + let picker = cx.new_view(|cx| { + let delegate = ToolchainSelectorDelegate::new( + active_toolchain, + view, + workspace, + worktree_id, + worktree_root, + project, + language_name, + cx, + ); + Picker::uniform_list(delegate, cx) + }); + Self { picker } + } +} + +impl Render for ToolchainSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +impl FocusableView for ToolchainSelector { + fn focus_handle(&self, cx: &AppContext) -> 
FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl EventEmitter for ToolchainSelector {} +impl ModalView for ToolchainSelector {} + +pub struct ToolchainSelectorDelegate { + toolchain_selector: WeakView, + candidates: ToolchainList, + matches: Vec, + selected_index: usize, + workspace: WeakView, + worktree_id: WorktreeId, + worktree_abs_path_root: Arc, + _fetch_candidates_task: Task>, +} + +impl ToolchainSelectorDelegate { + #[allow(clippy::too_many_arguments)] + fn new( + active_toolchain: Option, + language_selector: WeakView, + workspace: WeakView, + worktree_id: WorktreeId, + worktree_abs_path_root: Arc, + project: Model, + language_name: LanguageName, + cx: &mut ViewContext>, + ) -> Self { + let _fetch_candidates_task = cx.spawn({ + let project = project.clone(); + move |this, mut cx| async move { + let available_toolchains = project + .update(&mut cx, |this, cx| { + this.available_toolchains(worktree_id, language_name, cx) + }) + .ok()? + .await?; + + let _ = this.update(&mut cx, move |this, cx| { + this.delegate.candidates = available_toolchains; + if let Some(active_toolchain) = active_toolchain { + if let Some(position) = this + .delegate + .candidates + .toolchains + .iter() + .position(|toolchain| *toolchain == active_toolchain) + { + this.delegate.set_selected_index(position, cx); + } + } + this.update_matches(this.query(cx), cx); + }); + + Some(()) + } + }); + + Self { + toolchain_selector: language_selector, + candidates: Default::default(), + matches: vec![], + selected_index: 0, + workspace, + worktree_id, + worktree_abs_path_root, + _fetch_candidates_task, + } + } + fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString { + Path::new(&path.as_ref()) + .strip_prefix(&worktree_root) + .ok() + .map(|suffix| Path::new(".").join(suffix)) + .and_then(|path| path.to_str().map(String::from).map(SharedString::from)) + .unwrap_or(path) + } +} + +impl PickerDelegate for ToolchainSelectorDelegate { + type ListItem = ListItem; + + 
fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Select a toolchain...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if let Some(string_match) = self.matches.get(self.selected_index) { + let toolchain = self.candidates.toolchains[string_match.candidate_id].clone(); + if let Some(workspace_id) = self + .workspace + .update(cx, |this, _| this.database_id()) + .ok() + .flatten() + { + let workspace = self.workspace.clone(); + let worktree_id = self.worktree_id; + cx.spawn(|_, mut cx| async move { + workspace::WORKSPACE_DB + .set_toolchain(workspace_id, worktree_id, toolchain.clone()) + .await + .log_err(); + workspace + .update(&mut cx, |this, cx| { + this.project().update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }) + }) + .ok()? + .await; + Some(()) + }) + .detach(); + } + } + self.dismissed(cx); + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.toolchain_selector + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let background = cx.background_executor().clone(); + let candidates = self.candidates.clone(); + let worktree_root_path = self.worktree_abs_path_root.clone(); + cx.spawn(|this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .toolchains + .into_iter() + .enumerate() + .map(|(index, candidate)| { + let path = Self::relativize_path(candidate.path, &worktree_root_path); + let string = format!("{}{}", candidate.name, path); + StringMatch { + candidate_id: index, + string, + positions: Vec::new(), + score: 0.0, + } + }) + .collect() + } else { + let candidates = candidates + .toolchains + .into_iter() + 
.enumerate() + .map(|(candidate_id, toolchain)| { + let path = Self::relativize_path(toolchain.path, &worktree_root_path); + let string = format!("{}{}", toolchain.name, path); + StringMatchCandidate::new(candidate_id, string) + }) + .collect::>(); + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background, + ) + .await + }; + + this.update(&mut cx, |this, cx| { + let delegate = &mut this.delegate; + delegate.matches = matches; + delegate.selected_index = delegate + .selected_index + .min(delegate.matches.len().saturating_sub(1)); + cx.notify(); + }) + .log_err(); + }) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _: &mut ViewContext>, + ) -> Option { + let mat = &self.matches[ix]; + let toolchain = &self.candidates.toolchains[mat.candidate_id]; + + let label = toolchain.name.clone(); + let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root); + let (name_highlights, mut path_highlights) = mat + .positions + .iter() + .cloned() + .partition::, _>(|index| *index < label.len()); + path_highlights.iter_mut().for_each(|index| { + *index -= label.len(); + }); + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child(HighlightedLabel::new(label, name_highlights)) + .child( + HighlightedLabel::new(path, path_highlights) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + } +} diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 7c4fb93ba1..925d56a921 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -7,6 +7,8 @@ use client::DevServerProjectId; use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId}; +use language::{LanguageName, Toolchain}; +use project::WorktreeId; use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, 
StaticColumnCount}, @@ -204,7 +206,8 @@ define_connection! { // preview: bool // Indicates if this item is a preview item // ) pub static ref DB: WorkspaceDb<()> = - &[sql!( + &[ + sql!( CREATE TABLE workspaces( workspace_id INTEGER PRIMARY KEY, workspace_location BLOB UNIQUE, @@ -367,6 +370,16 @@ define_connection! { sql!( ALTER TABLE ssh_projects RENAME COLUMN path TO paths; ), + sql!( + CREATE TABLE toolchains ( + workspace_id INTEGER, + worktree_id INTEGER, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + PRIMARY KEY (workspace_id, worktree_id, language_name) + ); + ), ]; } @@ -528,6 +541,7 @@ impl WorkspaceDb { match workspace.location { SerializedWorkspaceLocation::Local(local_paths, local_paths_order) => { conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE local_paths = ? AND workspace_id != ? ))?((&local_paths, workspace.id)) .context("clearing out old locations")?; @@ -576,6 +590,7 @@ impl WorkspaceDb { } SerializedWorkspaceLocation::Ssh(ssh_project) => { conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? ))?((ssh_project.id.0, workspace.id)) .context("clearing out old locations")?; @@ -737,6 +752,7 @@ impl WorkspaceDb { query! { pub async fn delete_workspace_by_id(id: WorkspaceId) -> Result<()> { + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE workspace_id IS ? } @@ -751,6 +767,7 @@ impl WorkspaceDb { DELETE FROM dev_server_projects WHERE id = ? ))?(id.0)?; conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE dev_server_project_id IS ? 
))?(id.0) @@ -1053,6 +1070,83 @@ impl WorkspaceDb { WHERE workspace_id = ?1 } } + + pub async fn toolchain( + &self, + workspace_id: WorkspaceId, + worktree_id: WorktreeId, + language_name: LanguageName, + ) -> Result> { + self.write(move |this| { + let mut select = this + .select_bound(sql!( + SELECT name, path FROM toolchains WHERE workspace_id = ? AND language_name = ? AND worktree_id = ? + )) + .context("Preparing insertion")?; + + let toolchain: Vec<(String, String)> = + select((workspace_id, language_name.0.to_owned(), worktree_id.to_usize()))?; + + Ok(toolchain.into_iter().next().map(|(name, path)| Toolchain { + name: name.into(), + path: path.into(), + language_name, + })) + }) + .await + } + + pub(crate) async fn toolchains( + &self, + workspace_id: WorkspaceId, + ) -> Result> { + self.write(move |this| { + let mut select = this + .select_bound(sql!( + SELECT name, path, worktree_id, language_name FROM toolchains WHERE workspace_id = ? + )) + .context("Preparing insertion")?; + + let toolchain: Vec<(String, String, u64, String)> = + select(workspace_id)?; + + Ok(toolchain.into_iter().map(|(name, path, worktree_id, language_name)| (Toolchain { + name: name.into(), + path: path.into(), + language_name: LanguageName::new(&language_name), + }, WorktreeId::from_proto(worktree_id))).collect()) + }) + .await + } + pub async fn set_toolchain( + &self, + workspace_id: WorkspaceId, + worktree_id: WorktreeId, + toolchain: Toolchain, + ) -> Result<()> { + self.write(move |conn| { + let mut insert = conn + .exec_bound(sql!( + INSERT INTO toolchains(workspace_id, worktree_id, language_name, name, path) VALUES (?, ?, ?, ?, ?) 
+ ON CONFLICT DO + UPDATE SET + name = ?4, + path = ?5 + + )) + .context("Preparing insertion")?; + + insert(( + workspace_id, + worktree_id.to_usize(), + toolchain.language_name.0.as_ref(), + toolchain.name.as_ref(), + toolchain.path.as_ref(), + ))?; + + Ok(()) + }).await + } } #[cfg(test)] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b92417b293..de2c985f34 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1153,6 +1153,14 @@ impl Workspace { DB.next_id().await.unwrap_or_else(|_| Default::default()) }; + let toolchains = DB.toolchains(workspace_id).await?; + for (toolchain, worktree_id) in toolchains { + project_handle + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + })? + .await; + } let window = if let Some(window) = requesting_window { cx.update_window(window.into(), |_, cx| { cx.replace_root_view(|cx| { @@ -5522,6 +5530,14 @@ pub fn open_ssh_project( ) })?; + let toolchains = DB.toolchains(workspace_id).await?; + for (toolchain, worktree_id) in toolchains { + project + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + })? 
+ .await; + } let mut project_paths_to_open = vec![]; let mut project_path_errors = vec![]; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 58728d504b..e2a3f2be36 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -104,6 +104,7 @@ terminal_view.workspace = true theme.workspace = true theme_selector.workspace = true time.workspace = true +toolchain_selector.workspace = true ui.workspace = true reqwest_client.workspace = true url.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3cb717d24f..89ff72b5a9 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -441,6 +441,7 @@ fn main() { terminal_view::init(cx); journal::init(app_state.clone(), cx); language_selector::init(cx); + toolchain_selector::init(cx); theme_selector::init(cx); language_tools::init(cx); call::init(app_state.client.clone(), app_state.user_store.clone(), cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 8965a1755a..7b630489cf 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -208,6 +208,8 @@ pub fn initialize_workspace( activity_indicator::ActivityIndicator::new(workspace, app_state.languages.clone(), cx); let active_buffer_language = cx.new_view(|_| language_selector::ActiveBufferLanguage::new(workspace)); + let active_toolchain_language = + cx.new_view(|cx| toolchain_selector::ActiveToolchain::new(workspace, cx)); let vim_mode_indicator = cx.new_view(vim::ModeIndicator::new); let cursor_position = cx.new_view(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); @@ -216,6 +218,7 @@ pub fn initialize_workspace( status_bar.add_left_item(activity_indicator, cx); status_bar.add_right_item(inline_completion_button, cx); status_bar.add_right_item(active_buffer_language, cx); + status_bar.add_right_item(active_toolchain_language, cx); status_bar.add_right_item(vim_mode_indicator, cx); status_bar.add_right_item(cursor_position, cx); }); diff --git 
a/script/licenses/zed-licenses.toml b/script/licenses/zed-licenses.toml index 3459fee3e5..15c98c6702 100644 --- a/script/licenses/zed-licenses.toml +++ b/script/licenses/zed-licenses.toml @@ -36,3 +36,141 @@ license = "BSD-3-Clause" [[fuchsia-cprng.clarify.files]] path = 'LICENSE' checksum = '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b' + +[pet.clarify] +license = "MIT" +[[pet.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-conda.clarify] +license = "MIT" +[[pet-conda.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-core.clarify] +license = "MIT" +[[pet-core.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-env-var-path.clarify] +license = "MIT" +[[pet-env-var-path.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-fs.clarify] +license = "MIT" +[[pet-fs.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-global-virtualenvs.clarify] +license = "MIT" +[[pet-global-virtualenvs.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-homebrew.clarify] +license = "MIT" +[[pet-homebrew.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-jsonrpc.clarify] +license = "MIT" +[[pet-jsonrpc.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-linux-global-python.clarify] +license = "MIT" +[[pet-linux-global-python.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-mac-commandlinetools.clarify] +license = "MIT" +[[pet-mac-commandlinetools.clarify.git]] +path = 
'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-mac-python-org.clarify] +license = "MIT" +[[pet-mac-python-org.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-mac-xcode.clarify] +license = "MIT" +[[pet-mac-xcode.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-pipenv.clarify] +license = "MIT" +[[pet-pipenv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-poetry.clarify] +license = "MIT" +[[pet-poetry.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-pyenv.clarify] +license = "MIT" +[[pet-pyenv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-python-utils.clarify] +license = "MIT" +[[pet-python-utils.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-reporter.clarify] +license = "MIT" +[[pet-reporter.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-telemetry.clarify] +license = "MIT" +[[pet-telemetry.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-venv.clarify] +license = "MIT" +[[pet-venv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-virtualenv.clarify] +license = "MIT" +[[pet-virtualenv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-virtualenvwrapper.clarify] +license = "MIT" +[[pet-virtualenvwrapper.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + 
+[pet-windows-registry.clarify] +license = "MIT" +[[pet-windows-registry.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-windows-store.clarify] +license = "MIT" +[[pet-windows-store.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' From 8a96ea25c465697ec74ce3447bcd2ce9cb25b4f0 Mon Sep 17 00:00:00 2001 From: David Soria Parra <167242713+dsp-ant@users.noreply.github.com> Date: Mon, 28 Oct 2024 14:37:58 +0000 Subject: [PATCH 16/87] context_servers: Support tools (#19548) This PR depends on #19547 This PR adds support for tools from context servers. Context servers are free to expose tools that Zed can pass to models. When called by the model, Zed forwards the request to context servers. This allows for some interesting techniques. Context servers can easily expose tools such as querying local databases, reading or writing local files, reading resources over authenticated APIs (e.g. kubernetes, asana, etc). This is currently experimental. Things to discuss * I want to still add a confirm dialog asking people if a server is allows to use the tool. Should do this or just use the tool and assume trustworthyness of context servers? * Can we add tool use behind a local setting flag? 
Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/assistant/src/assistant.rs | 85 ++++++++++++++----- crates/assistant/src/tools.rs | 1 + .../src/tools/context_server_tool.rs | 82 ++++++++++++++++++ crates/context_servers/src/protocol.rs | 33 +++++++ crates/context_servers/src/registry.rs | 32 +++++-- crates/context_servers/src/types.rs | 18 ++++ .../language_model/src/provider/anthropic.rs | 12 ++- 7 files changed, 235 insertions(+), 28 deletions(-) create mode 100644 crates/assistant/src/tools/context_server_tool.rs diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index e1e574744f..a48f6d6c29 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -298,25 +298,64 @@ fn register_context_server_handlers(cx: &mut AppContext) { return; }; - if let Some(prompts) = protocol.list_prompts().await.log_err() { - for prompt in prompts - .into_iter() - .filter(context_server_command::acceptable_prompt) - { - log::info!( - "registering context server command: {:?}", - prompt.name - ); - context_server_registry.register_command( - server.id.clone(), - prompt.name.as_str(), - ); - slash_command_registry.register_command( - context_server_command::ContextServerSlashCommand::new( - &server, prompt, - ), - true, - ); + if protocol.capable(context_servers::protocol::ServerCapability::Prompts) { + if let Some(prompts) = protocol.list_prompts().await.log_err() { + for prompt in prompts + .into_iter() + .filter(context_server_command::acceptable_prompt) + { + log::info!( + "registering context server command: {:?}", + prompt.name + ); + context_server_registry.register_command( + server.id.clone(), + prompt.name.as_str(), + ); + slash_command_registry.register_command( + context_server_command::ContextServerSlashCommand::new( + &server, prompt, + ), + true, + ); + } + } + } + }) + .detach(); + } + }, + ); + + cx.update_model( + &manager, + |manager: &mut 
context_servers::manager::ContextServerManager, cx| { + let tool_registry = ToolRegistry::global(cx); + let context_server_registry = ContextServerRegistry::global(cx); + if let Some(server) = manager.get_server(server_id) { + cx.spawn(|_, _| async move { + let Some(protocol) = server.client.read().clone() else { + return; + }; + + if protocol.capable(context_servers::protocol::ServerCapability::Tools) { + if let Some(tools) = protocol.list_tools().await.log_err() { + for tool in tools.tools { + log::info!( + "registering context server tool: {:?}", + tool.name + ); + context_server_registry.register_tool( + server.id.clone(), + tool.name.as_str(), + ); + tool_registry.register_tool( + tools::context_server_tool::ContextServerTool::new( + server.id.clone(), + tool + ), + ); + } } } }) @@ -334,6 +373,14 @@ fn register_context_server_handlers(cx: &mut AppContext) { context_server_registry.unregister_command(&server_id, &command_name); } } + + if let Some(tools) = context_server_registry.get_tools(server_id) { + let tool_registry = ToolRegistry::global(cx); + for tool_name in tools { + tool_registry.unregister_tool_by_name(&tool_name); + context_server_registry.unregister_tool(&server_id, &tool_name); + } + } } }, ) diff --git a/crates/assistant/src/tools.rs b/crates/assistant/src/tools.rs index abde04e760..83a396c020 100644 --- a/crates/assistant/src/tools.rs +++ b/crates/assistant/src/tools.rs @@ -1 +1,2 @@ +pub mod context_server_tool; pub mod now_tool; diff --git a/crates/assistant/src/tools/context_server_tool.rs b/crates/assistant/src/tools/context_server_tool.rs new file mode 100644 index 0000000000..93edb32b75 --- /dev/null +++ b/crates/assistant/src/tools/context_server_tool.rs @@ -0,0 +1,82 @@ +use anyhow::{anyhow, bail}; +use assistant_tool::Tool; +use context_servers::manager::ContextServerManager; +use context_servers::types; +use gpui::Task; + +pub struct ContextServerTool { + server_id: String, + tool: types::Tool, +} + +impl ContextServerTool { + pub 
fn new(server_id: impl Into, tool: types::Tool) -> Self { + Self { + server_id: server_id.into(), + tool, + } + } +} + +impl Tool for ContextServerTool { + fn name(&self) -> String { + self.tool.name.clone() + } + + fn description(&self) -> String { + self.tool.description.clone().unwrap_or_default() + } + + fn input_schema(&self) -> serde_json::Value { + match &self.tool.input_schema { + serde_json::Value::Null => { + serde_json::json!({ "type": "object", "properties": [] }) + } + serde_json::Value::Object(map) if map.is_empty() => { + serde_json::json!({ "type": "object", "properties": [] }) + } + _ => self.tool.input_schema.clone(), + } + } + + fn run( + self: std::sync::Arc, + input: serde_json::Value, + _workspace: gpui::WeakView, + cx: &mut ui::WindowContext, + ) -> gpui::Task> { + let manager = ContextServerManager::global(cx); + let manager = manager.read(cx); + if let Some(server) = manager.get_server(&self.server_id) { + cx.foreground_executor().spawn({ + let tool_name = self.tool.name.clone(); + async move { + let Some(protocol) = server.client.read().clone() else { + bail!("Context server not initialized"); + }; + + let arguments = if let serde_json::Value::Object(map) = input { + Some(map.into_iter().collect()) + } else { + None + }; + + log::trace!( + "Running tool: {} with arguments: {:?}", + tool_name, + arguments + ); + let response = protocol.run_tool(tool_name, arguments).await?; + + let tool_result = match response.tool_result { + serde_json::Value::String(s) => s, + _ => serde_json::to_string(&response.tool_result)?, + }; + Ok(tool_result) + } + }) + } else { + Task::ready(Err(anyhow!("Context server not found"))) + } + } +} diff --git a/crates/context_servers/src/protocol.rs b/crates/context_servers/src/protocol.rs index 80a7a7f991..996fc34f46 100644 --- a/crates/context_servers/src/protocol.rs +++ b/crates/context_servers/src/protocol.rs @@ -180,6 +180,39 @@ impl InitializedContextServerProtocol { Ok(completion) } + + /// List MCP tools. 
+ pub async fn list_tools(&self) -> Result { + self.check_capability(ServerCapability::Tools)?; + + let response = self + .inner + .request::(types::RequestType::ListTools.as_str(), ()) + .await?; + + Ok(response) + } + + /// Executes a tool with the given arguments + pub async fn run_tool>( + &self, + tool: P, + arguments: Option>, + ) -> Result { + self.check_capability(ServerCapability::Tools)?; + + let params = types::CallToolParams { + name: tool.as_ref().to_string(), + arguments, + }; + + let response: types::CallToolResponse = self + .inner + .request(types::RequestType::CallTool.as_str(), params) + .await?; + + Ok(response) + } } impl InitializedContextServerProtocol { diff --git a/crates/context_servers/src/registry.rs b/crates/context_servers/src/registry.rs index 625f308c15..5490187034 100644 --- a/crates/context_servers/src/registry.rs +++ b/crates/context_servers/src/registry.rs @@ -9,7 +9,8 @@ struct GlobalContextServerRegistry(Arc); impl Global for GlobalContextServerRegistry {} pub struct ContextServerRegistry { - registry: RwLock>>>, + command_registry: RwLock>>>, + tool_registry: RwLock>>>, } impl ContextServerRegistry { @@ -20,13 +21,14 @@ impl ContextServerRegistry { pub fn register(cx: &mut AppContext) { cx.set_global(GlobalContextServerRegistry(Arc::new( ContextServerRegistry { - registry: RwLock::new(HashMap::default()), + command_registry: RwLock::new(HashMap::default()), + tool_registry: RwLock::new(HashMap::default()), }, ))) } pub fn register_command(&self, server_id: String, command_name: &str) { - let mut registry = self.registry.write(); + let mut registry = self.command_registry.write(); registry .entry(server_id) .or_default() @@ -34,14 +36,34 @@ impl ContextServerRegistry { } pub fn unregister_command(&self, server_id: &str, command_name: &str) { - let mut registry = self.registry.write(); + let mut registry = self.command_registry.write(); if let Some(commands) = registry.get_mut(server_id) { commands.retain(|name| name.as_ref() != 
command_name); } } pub fn get_commands(&self, server_id: &str) -> Option>> { - let registry = self.registry.read(); + let registry = self.command_registry.read(); + registry.get(server_id).cloned() + } + + pub fn register_tool(&self, server_id: String, tool_name: &str) { + let mut registry = self.tool_registry.write(); + registry + .entry(server_id) + .or_default() + .push(tool_name.into()); + } + + pub fn unregister_tool(&self, server_id: &str, tool_name: &str) { + let mut registry = self.tool_registry.write(); + if let Some(tools) = registry.get_mut(server_id) { + tools.retain(|name| name.as_ref() != tool_name); + } + } + + pub fn get_tools(&self, server_id: &str) -> Option>> { + let registry = self.tool_registry.read(); registry.get(server_id).cloned() } } diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index 2bca0a021a..b6d8a958bb 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -16,6 +16,8 @@ pub enum RequestType { PromptsList, CompletionComplete, Ping, + ListTools, + ListResourceTemplates, } impl RequestType { @@ -32,6 +34,8 @@ impl RequestType { RequestType::PromptsList => "prompts/list", RequestType::CompletionComplete => "completion/complete", RequestType::Ping => "ping", + RequestType::ListTools => "tools/list", + RequestType::ListResourceTemplates => "resources/templates/list", } } } @@ -402,3 +406,17 @@ pub struct Completion { pub values: Vec, pub total: CompletionTotal, } + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CallToolResponse { + pub tool_result: serde_json::Value, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ListToolsResponse { + pub tools: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub next_cursor: Option, +} diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index fe88c73b90..b7e65650b5 100644 --- 
a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -505,10 +505,14 @@ pub fn map_to_language_model_completion_events( LanguageModelToolUse { id: tool_use.id, name: tool_use.name, - input: serde_json::Value::from_str( - &tool_use.input_json, - ) - .map_err(|err| anyhow!(err))?, + input: if tool_use.input_json.is_empty() { + serde_json::Value::Null + } else { + serde_json::Value::from_str( + &tool_use.input_json, + ) + .map_err(|err| anyhow!(err))? + }, }, )) })), From 6686f66949f10c189f27bca1fa3cfc1eddc6bdf0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 15:40:50 +0100 Subject: [PATCH 17/87] ollama: Ensure only single task fetches models (#19830) Before this change, we'd see a ton of requests from the Ollama provider trying to fetch models: ``` [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: https://api.zed.dev/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new 
connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ ``` Turns out we'd send a request on *every* change to settings. Now, with this change, we only send a single request. Release Notes: - N/A Co-authored-by: Bennet --- crates/language_model/src/provider/ollama.rs | 34 +++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs index a29ff3cf6a..c95bed181a 100644 --- a/crates/language_model/src/provider/ollama.rs +++ b/crates/language_model/src/provider/ollama.rs @@ -54,6 +54,7 @@ pub struct OllamaLanguageModelProvider { pub struct State { http_client: Arc, available_models: Vec, + fetch_model_task: Option>>, _subscription: Subscription, } @@ -89,6 +90,11 @@ impl State { }) } + fn restart_fetch_models_task(&mut self, cx: &mut ModelContext) { + let task = self.fetch_models(cx); + self.fetch_model_task.replace(task); + } + fn authenticate(&mut self, cx: &mut ModelContext) -> Task> { if self.is_authenticated() { Task::ready(Ok(())) @@ -102,17 +108,29 @@ impl OllamaLanguageModelProvider { pub fn new(http_client: Arc, cx: &mut AppContext) -> Self { let this = Self { http_client: http_client.clone(), - state: cx.new_model(|cx| State { - http_client, - available_models: Default::default(), - _subscription: cx.observe_global::(|this: &mut State, cx| { - this.fetch_models(cx).detach(); - cx.notify(); - }), + state: cx.new_model(|cx| { + let subscription = cx.observe_global::({ + let mut 
settings = AllLanguageModelSettings::get_global(cx).ollama.clone(); + move |this: &mut State, cx| { + let new_settings = &AllLanguageModelSettings::get_global(cx).ollama; + if &settings != new_settings { + settings = new_settings.clone(); + this.restart_fetch_models_task(cx); + cx.notify(); + } + } + }); + + State { + http_client, + available_models: Default::default(), + fetch_model_task: None, + _subscription: subscription, + } }), }; this.state - .update(cx, |state, cx| state.fetch_models(cx).detach()); + .update(cx, |state, cx| state.restart_fetch_models_task(cx)); this } } From ff29a34298614ef65fa5e5cdcab0356e05ecd472 Mon Sep 17 00:00:00 2001 From: xdBronch <51252236+xdBronch@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:49:40 -0400 Subject: [PATCH 18/87] zig: Account for doctests in outline (#19776) zig has a feature called [doctests](https://ziglang.org/documentation/master/#Doctests) where instead of providing a string as the name of a test you use an identifier so that the test is "tied" to it and can be used in documentation. this wasnt accounted for so any tests using this were unnamed in the outline Release Notes: - N/A --- extensions/zig/languages/zig/outline.scm | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/extensions/zig/languages/zig/outline.scm b/extensions/zig/languages/zig/outline.scm index d87cff2058..7ae683a876 100644 --- a/extensions/zig/languages/zig/outline.scm +++ b/extensions/zig/languages/zig/outline.scm @@ -19,6 +19,9 @@ ( TestDecl ( "test" @context - (STRINGLITERALSINGLE)? @name + [ + (STRINGLITERALSINGLE) + (IDENTIFIER) + ]? 
@name ) ) @item From e0ea9a9ab55a188e0aa4a1e43b2bad9f4d815e26 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 28 Oct 2024 16:00:38 +0100 Subject: [PATCH 19/87] Remove leftover comments from previous PR (#19820) Co-Authored-by: Thorsten Removes some leftover comments from #19766 Release Notes: - N/A Co-authored-by: Thorsten --- crates/editor/src/editor.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 5bd3accc13..d23889b427 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -223,7 +223,6 @@ pub fn render_parsed_markdown( } }), ); - // hello let mut links = Vec::new(); let mut link_ranges = Vec::new(); @@ -3785,9 +3784,6 @@ impl Editor { pub fn newline_below(&mut self, _: &NewlineBelow, cx: &mut ViewContext) { let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); - // - // - // let mut edits = Vec::new(); let mut rows = Vec::new(); From 67eb652bf1c0c8b7183f89043146d665ce0cab1d Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 16:12:37 +0100 Subject: [PATCH 20/87] remote servers: Always dismiss modal (#19831) We display the errors in another window anyway and if the connection takes a while it looks like a bug that the modal stays open. 
Release Notes: - N/A Co-authored-by: Bennet --- crates/recent_projects/src/remote_servers.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index faf58f312f..a7ffee5e57 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -738,7 +738,8 @@ impl RemoteServerProjects { }; let project = project.clone(); let server = server.clone(); - cx.spawn(|remote_server_projects, mut cx| async move { + cx.emit(DismissEvent); + cx.spawn(|_, mut cx| async move { let result = open_ssh_project( server.into(), project.paths.into_iter().map(PathBuf::from).collect(), @@ -757,10 +758,6 @@ impl RemoteServerProjects { ) .await .ok(); - } else { - remote_server_projects - .update(&mut cx, |_, cx| cx.emit(DismissEvent)) - .ok(); } }) .detach(); From 5e89fba68116077369252539fc199108cabd0a8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Sinan=20A=C4=9Facan?= Date: Mon, 28 Oct 2024 16:20:04 +0100 Subject: [PATCH 21/87] dart: Add support for documentation comments (#19592) Closes #19590 Release Notes: - N/A --- I'm unable to test this because rebuilding Zed with the changes does not seem to use the changes. If maintainers could let me know how to test these changes I'd like to verify that this really fixes #19590. 
--------- Co-authored-by: Marshall Bowers --- extensions/dart/languages/dart/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extensions/dart/languages/dart/config.toml b/extensions/dart/languages/dart/config.toml index d723d4d6d3..15420c74f0 100644 --- a/extensions/dart/languages/dart/config.toml +++ b/extensions/dart/languages/dart/config.toml @@ -1,7 +1,7 @@ name = "Dart" grammar = "dart" path_suffixes = ["dart"] -line_comments = ["// "] +line_comments = ["// ", "/// "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, From cc81f19c68260669c275973054ad2d466d54b5e9 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 16:35:37 +0100 Subject: [PATCH 22/87] remote server: Fix error log about inability to open buffer (#19824) Turns out that we used client-side `fs` to check whether something is a directory or not, which obviously doesn't work with SSH projects. Release Notes: - N/A --------- Co-authored-by: Bennet --- crates/editor/src/hover_links.rs | 45 ++++++++++- crates/file_finder/src/file_finder.rs | 4 +- crates/project/src/project.rs | 64 +++++++++++---- crates/proto/proto/zed.proto | 16 ++-- crates/proto/src/proto.rs | 14 ++-- crates/remote_server/src/headless_project.rs | 16 ++-- .../remote_server/src/remote_editing_tests.rs | 77 ++++++++++++++++++- crates/workspace/src/workspace.rs | 36 +++++---- 8 files changed, 213 insertions(+), 59 deletions(-) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 4a636f673a..31be9e93a9 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -706,10 +706,11 @@ pub(crate) async fn find_file( ) -> Option { project .update(cx, |project, cx| { - project.resolve_existing_file_path(&candidate_file_path, buffer, cx) + project.resolve_path_in_buffer(&candidate_file_path, buffer, cx) }) .ok()? 
.await + .filter(|s| s.is_file()) } if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await { @@ -1612,4 +1613,46 @@ mod tests { assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs"); }); } + + #[gpui::test] + async fn test_hover_directories(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + // Insert a new file + let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone()); + fs.as_fake() + .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) + .await; + + cx.set_state(indoc! {" + You can't open ../diˇr because it's a directory. + "}); + + // File does not exist + let screen_coord = cx.pixel_position(indoc! {" + You can't open ../diˇr because it's a directory. + "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + + // No highlight + cx.update_editor(|editor, cx| { + assert!(editor + .snapshot(cx) + .text_highlight_ranges::() + .unwrap_or_default() + .1 + .is_empty()); + }); + + // Does not open the directory + cx.simulate_click(screen_coord, Modifiers::secondary_key()); + cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 1)); + } } diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 299b129d82..ce0e385057 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -790,9 +790,9 @@ impl FileFinderDelegate { let mut path_matches = Vec::new(); let abs_file_exists = if let Ok(task) = project.update(&mut cx, |this, cx| { - this.abs_file_path_exists(query.path_query(), cx) + this.resolve_abs_file_path(query.path_query(), cx) }) { - task.await + task.await.is_some() } else { false }; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7a57e048c8..04ae203b4d 100644 --- 
a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3094,7 +3094,7 @@ impl Project { } /// Returns the resolved version of `path`, that was found in `buffer`, if it exists. - pub fn resolve_existing_file_path( + pub fn resolve_path_in_buffer( &self, path: &str, buffer: &Model, @@ -3102,47 +3102,56 @@ impl Project { ) -> Task> { let path_buf = PathBuf::from(path); if path_buf.is_absolute() || path.starts_with("~") { - self.resolve_abs_file_path(path, cx) + self.resolve_abs_path(path, cx) } else { self.resolve_path_in_worktrees(path_buf, buffer, cx) } } - pub fn abs_file_path_exists(&self, path: &str, cx: &mut ModelContext) -> Task { - let resolve_task = self.resolve_abs_file_path(path, cx); + pub fn resolve_abs_file_path( + &self, + path: &str, + cx: &mut ModelContext, + ) -> Task> { + let resolve_task = self.resolve_abs_path(path, cx); cx.background_executor().spawn(async move { let resolved_path = resolve_task.await; - resolved_path.is_some() + resolved_path.filter(|path| path.is_file()) }) } - fn resolve_abs_file_path( + pub fn resolve_abs_path( &self, path: &str, cx: &mut ModelContext, ) -> Task> { if self.is_local() { let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); - let fs = self.fs.clone(); cx.background_executor().spawn(async move { let path = expanded.as_path(); - let exists = fs.is_file(path).await; + let metadata = fs.metadata(path).await.ok().flatten(); - exists.then(|| ResolvedPath::AbsPath(expanded)) + metadata.map(|metadata| ResolvedPath::AbsPath { + path: expanded, + is_dir: metadata.is_dir, + }) }) } else if let Some(ssh_client) = self.ssh_client.as_ref() { let request = ssh_client .read(cx) .proto_client() - .request(proto::CheckFileExists { + .request(proto::GetPathMetadata { project_id: SSH_PROJECT_ID, path: path.to_string(), }); cx.background_executor().spawn(async move { let response = request.await.log_err()?; if response.exists { - Some(ResolvedPath::AbsPath(PathBuf::from(response.path))) + 
Some(ResolvedPath::AbsPath { + path: PathBuf::from(response.path), + is_dir: response.is_dir, + }) } else { None } @@ -3181,10 +3190,14 @@ impl Project { resolved.strip_prefix(root_entry_path).unwrap_or(&resolved); worktree.entry_for_path(stripped).map(|entry| { - ResolvedPath::ProjectPath(ProjectPath { + let project_path = ProjectPath { worktree_id: worktree.id(), path: entry.path.clone(), - }) + }; + ResolvedPath::ProjectPath { + project_path, + is_dir: entry.is_dir(), + } }) }) .ok()?; @@ -4149,24 +4162,41 @@ fn resolve_path(base: &Path, path: &Path) -> PathBuf { /// or an AbsPath and that *exists*. #[derive(Debug, Clone)] pub enum ResolvedPath { - ProjectPath(ProjectPath), - AbsPath(PathBuf), + ProjectPath { + project_path: ProjectPath, + is_dir: bool, + }, + AbsPath { + path: PathBuf, + is_dir: bool, + }, } impl ResolvedPath { pub fn abs_path(&self) -> Option<&Path> { match self { - Self::AbsPath(path) => Some(path.as_path()), + Self::AbsPath { path, .. } => Some(path.as_path()), _ => None, } } pub fn project_path(&self) -> Option<&ProjectPath> { match self { - Self::ProjectPath(path) => Some(&path), + Self::ProjectPath { project_path, .. } => Some(&project_path), _ => None, } } + + pub fn is_file(&self) -> bool { + !self.is_dir() + } + + pub fn is_dir(&self) -> bool { + match self { + Self::ProjectPath { is_dir, .. } => *is_dir, + Self::AbsPath { is_dir, .. 
} => *is_dir, + } + } } impl Item for Buffer { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 95a54c3d5c..e9e42dac18 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -259,9 +259,6 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CheckFileExists check_file_exists = 255; - CheckFileExistsResponse check_file_exists_response = 256; - ShutdownRemoteServer shutdown_remote_server = 257; RemoveWorktree remove_worktree = 258; @@ -284,13 +281,16 @@ message Envelope { GitBranchesResponse git_branches_response = 271; UpdateGitBranch update_git_branch = 272; + ListToolchains list_toolchains = 273; ListToolchainsResponse list_toolchains_response = 274; ActivateToolchain activate_toolchain = 275; ActiveToolchain active_toolchain = 276; - ActiveToolchainResponse active_toolchain_response = 277; // current max - } + ActiveToolchainResponse active_toolchain_response = 277; + GetPathMetadata get_path_metadata = 278; + GetPathMetadataResponse get_path_metadata_response = 279; // current max + } reserved 87 to 88; reserved 158 to 161; @@ -305,6 +305,7 @@ message Envelope { reserved 221; reserved 224 to 229; reserved 247 to 254; + reserved 255 to 256; } // Messages @@ -2357,14 +2358,15 @@ message UpdateUserSettings { } } -message CheckFileExists { +message GetPathMetadata { uint64 project_id = 1; string path = 2; } -message CheckFileExistsResponse { +message GetPathMetadataResponse { bool exists = 1; string path = 2; + bool is_dir = 3; } message ShutdownRemoteServer {} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 7fcebf0513..4bae2d9931 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -343,8 +343,6 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CheckFileExists, Background), - (CheckFileExistsResponse, Background), (ShutdownRemoteServer, 
Foreground), (RemoveWorktree, Foreground), (LanguageServerLog, Foreground), @@ -363,7 +361,9 @@ messages!( (ListToolchainsResponse, Foreground), (ActivateToolchain, Foreground), (ActiveToolchain, Foreground), - (ActiveToolchainResponse, Foreground) + (ActiveToolchainResponse, Foreground), + (GetPathMetadata, Background), + (GetPathMetadataResponse, Background) ); request_messages!( @@ -472,7 +472,6 @@ request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CheckFileExists, CheckFileExistsResponse), (ShutdownRemoteServer, Ack), (RemoveWorktree, Ack), (OpenServerSettings, OpenBufferResponse), @@ -483,7 +482,8 @@ request_messages!( (UpdateGitBranch, Ack), (ListToolchains, ListToolchainsResponse), (ActivateToolchain, Ack), - (ActiveToolchain, ActiveToolchainResponse) + (ActiveToolchain, ActiveToolchainResponse), + (GetPathMetadata, GetPathMetadataResponse) ); entity_messages!( @@ -555,7 +555,6 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CheckFileExists, LanguageServerLog, Toast, HideToast, @@ -566,7 +565,8 @@ entity_messages!( UpdateGitBranch, ListToolchains, ActivateToolchain, - ActiveToolchain + ActiveToolchain, + GetPathMetadata ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ce34af247f..155b141af6 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -150,7 +150,7 @@ impl HeadlessProject { session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer); client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory); - client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists); + client.add_request_handler(cx.weak_model(), Self::handle_get_path_metadata); client.add_request_handler(cx.weak_model(), 
Self::handle_shutdown_remote_server); client.add_request_handler(cx.weak_model(), Self::handle_ping); @@ -525,18 +525,20 @@ impl HeadlessProject { Ok(proto::ListRemoteDirectoryResponse { entries }) } - pub async fn handle_check_file_exists( + pub async fn handle_get_path_metadata( this: Model, - envelope: TypedEnvelope, + envelope: TypedEnvelope, cx: AsyncAppContext, - ) -> Result { + ) -> Result { let fs = cx.read_model(&this, |this, _| this.fs.clone())?; let expanded = shellexpand::tilde(&envelope.payload.path).to_string(); - let exists = fs.is_file(&PathBuf::from(expanded.clone())).await; + let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?; + let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false); - Ok(proto::CheckFileExistsResponse { - exists, + Ok(proto::GetPathMetadataResponse { + exists: metadata.is_some(), + is_dir, path: expanded, }) } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 82e3824eb0..c7d3a3c97f 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -604,7 +604,10 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont } #[gpui::test] -async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { +async fn test_remote_resolve_path_in_buffer( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", @@ -639,10 +642,11 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut let path = project .update(cx, |project, cx| { - project.resolve_existing_file_path("/code/project1/README.md", &buffer, cx) + project.resolve_path_in_buffer("/code/project1/README.md", &buffer, cx) }) .await .unwrap(); + assert!(path.is_file()); assert_eq!( path.abs_path().unwrap().to_string_lossy(), "/code/project1/README.md" @@ -650,15 +654,80 
@@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut let path = project .update(cx, |project, cx| { - project.resolve_existing_file_path("../README.md", &buffer, cx) + project.resolve_path_in_buffer("../README.md", &buffer, cx) }) .await .unwrap(); - + assert!(path.is_file()); assert_eq!( path.project_path().unwrap().clone(), ProjectPath::from((worktree_id, "README.md")) ); + + let path = project + .update(cx, |project, cx| { + project.resolve_path_in_buffer("../src", &buffer, cx) + }) + .await + .unwrap(); + assert_eq!( + path.project_path().unwrap().clone(), + ProjectPath::from((worktree_id, "src")) + ); + assert!(path.is_dir()); +} + +#[gpui::test] +async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/README.md", cx) + }) + .await + .unwrap(); + + assert!(path.is_file()); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/README.md" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/src", cx) + }) + .await + .unwrap(); + + assert!(path.is_dir()); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/src" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/DOESNOTEXIST", cx) + }) + .await; + assert!(path.is_none()); } #[gpui::test(iterations = 10)] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index de2c985f34..f0786aa479 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1218,7 +1218,7 
@@ impl Workspace { notify_if_database_failed(window, &mut cx); let opened_items = window .update(&mut cx, |_workspace, cx| { - open_items(serialized_workspace, project_paths, app_state, cx) + open_items(serialized_workspace, project_paths, cx) })? .await .unwrap_or_default(); @@ -2058,8 +2058,10 @@ impl Workspace { cx: &mut ViewContext, ) -> Task>> { match path { - ResolvedPath::ProjectPath(project_path) => self.open_path(project_path, None, true, cx), - ResolvedPath::AbsPath(path) => self.open_abs_path(path, false, cx), + ResolvedPath::ProjectPath { project_path, .. } => { + self.open_path(project_path, None, true, cx) + } + ResolvedPath::AbsPath { path, .. } => self.open_abs_path(path, false, cx), } } @@ -4563,7 +4565,6 @@ fn window_bounds_env_override() -> Option> { fn open_items( serialized_workspace: Option, mut project_paths_to_open: Vec<(PathBuf, Option)>, - app_state: Arc, cx: &mut ViewContext, ) -> impl 'static + Future>>>>> { let restored_items = serialized_workspace.map(|serialized_workspace| { @@ -4619,14 +4620,20 @@ fn open_items( .enumerate() .map(|(ix, (abs_path, project_path))| { let workspace = workspace.clone(); - cx.spawn(|mut cx| { - let fs = app_state.fs.clone(); - async move { - let file_project_path = project_path?; - if fs.is_dir(&abs_path).await { - None - } else { - Some(( + cx.spawn(|mut cx| async move { + let file_project_path = project_path?; + let abs_path_task = workspace.update(&mut cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.resolve_abs_path(abs_path.to_string_lossy().as_ref(), cx) + }) + }); + + // We only want to open file paths here. If one of the items + // here is a directory, it was already opened further above + // with a `find_or_create_worktree`. + if let Ok(task) = abs_path_task { + if task.await.map_or(true, |p| p.is_file()) { + return Some(( ix, workspace .update(&mut cx, |workspace, cx| { @@ -4634,9 +4641,10 @@ fn open_items( }) .log_err()? 
.await, - )) + )); } } + None }) }); @@ -5580,7 +5588,7 @@ pub fn open_ssh_project( .update(&mut cx, |_, cx| { cx.activate_window(); - open_items(serialized_workspace, project_paths_to_open, app_state, cx) + open_items(serialized_workspace, project_paths_to_open, cx) })? .await?; From 5e9ff3e3131e766a01e161353a2ba1596de261b7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 28 Oct 2024 11:36:44 -0400 Subject: [PATCH 23/87] dart: Bump to v0.1.2 (#19835) This PR bumps the Dart extension to v0.1.2. Changes: - https://github.com/zed-industries/zed/pull/19592 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/dart/Cargo.toml | 2 +- extensions/dart/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bd9ad91bf7..921ec3a4f0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15132,7 +15132,7 @@ dependencies = [ [[package]] name = "zed_dart" -version = "0.1.1" +version = "0.1.2" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/dart/Cargo.toml b/extensions/dart/Cargo.toml index 3d79e104c1..8d50e620cc 100644 --- a/extensions/dart/Cargo.toml +++ b/extensions/dart/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_dart" -version = "0.1.1" +version = "0.1.2" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/dart/extension.toml b/extensions/dart/extension.toml index 5ea8c37c2f..684580e7c0 100644 --- a/extensions/dart/extension.toml +++ b/extensions/dart/extension.toml @@ -1,7 +1,7 @@ id = "dart" name = "Dart" description = "Dart support." 
-version = "0.1.1" +version = "0.1.2" schema_version = 1 authors = ["Abdullah Alsigar ", "Flo ", "ybbond "] repository = "https://github.com/zed-industries/zed" From a451bcc3c47e044fa7adadc0a636b1a8e0e32a87 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 28 Oct 2024 11:45:18 -0400 Subject: [PATCH 24/87] collab: Exempt staff from LLM usage limits (#19836) This PR updates the usage limit check to exempt Zed staff members from usage limits. We previously had some affordances for the rate limits, but hadn't yet updated it for the usage-based billing. Release Notes: - N/A --- crates/collab/src/llm.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index cb3478879e..654327c463 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -449,6 +449,10 @@ async fn check_usage_limit( model_name: &str, claims: &LlmTokenClaims, ) -> Result<()> { + if claims.is_staff { + return Ok(()); + } + let model = state.db.model(provider, model_name)?; let usage = state .db @@ -513,11 +517,6 @@ async fn check_usage_limit( ]; for (used, limit, usage_measure) in checks { - // Temporarily bypass rate-limiting for staff members. - if claims.is_staff { - continue; - } - if used > limit { let resource = match usage_measure { UsageMeasure::RequestsPerMinute => "requests_per_minute", From fab2f22a89443f2845ed625e3961c3cb23c00d01 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 17:07:30 +0100 Subject: [PATCH 25/87] remote project: Fix project reference leak when waiting for prompt reply (#19838) When the language server gave us a prompt and we'd close the window, we wouldn't release the `project` until the next `flush_effects` call that came in when opening a window. With this change, we no longer hold a strong reference to the project in the future. Fixes the leak and makes sure we clean up the SSH connection when closing a window. 
Release Notes: - N/A Co-authored-by: Bennet --- crates/project/src/project.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 04ae203b4d..eb5edabc8e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3603,6 +3603,13 @@ impl Project { anyhow::Ok(()) })??; + // We drop `this` to avoid holding a reference in this future for too + // long. + // If we keep the reference, we might not drop the `Project` early + // enough when closing a window and it will only get released on the + // next `flush_effects()` call. + drop(this); + let answer = rx.next().await; Ok(LanguageServerPromptResponse { From f5d5fab2c8fc2066426b7fc80ccb964bef1ef534 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 28 Oct 2024 13:37:28 -0400 Subject: [PATCH 26/87] Improve `fold_at_level` performance (#19845) Just spotted a tiny error that was causing us to continue looking for nested folds 1 layer deeper than any fold already found at the target level. We shouldn't continue to seek for a deeper fold after the fold at the target level is found. 
Tested on a debug build and used `editor.rs` as the source material: ``` Old Level 1 fold: [crates/editor/src/editor.rs:10777:9] counter = 2806 [crates/editor/src/editor.rs:10778:9] time_elapsed = 320.570792ms Level 2 fold: [crates/editor/src/editor.rs:10777:9] counter = 5615 [crates/editor/src/editor.rs:10778:9] time_elapsed = 497.4305ms Level 3 fold: [crates/editor/src/editor.rs:10777:9] counter = 7528 [crates/editor/src/editor.rs:10778:9] time_elapsed = 619.818334ms New Level 1 fold: [crates/editor/src/editor.rs:10776:9] counter = 543 [crates/editor/src/editor.rs:10777:9] time_elapsed = 139.115625ms Level 2 fold: [crates/editor/src/editor.rs:10776:9] counter = 2806 [crates/editor/src/editor.rs:10777:9] time_elapsed = 312.560416ms Level 3 fold: [crates/editor/src/editor.rs:10776:9] counter = 5615 [crates/editor/src/editor.rs:10777:9] time_elapsed = 498.873292ms ``` Release Notes: - N/A --- crates/editor/src/editor.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d23889b427..df13f74806 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10756,12 +10756,10 @@ impl Editor { let nested_start_row = foldable_range.0.start.row + 1; let nested_end_row = foldable_range.0.end.row; - if current_level == fold_at_level { - fold_ranges.push(foldable_range); - } - - if current_level <= fold_at_level { + if current_level < fold_at_level { stack.push((nested_start_row, nested_end_row, current_level + 1)); + } else if current_level == fold_at_level { + fold_ranges.push(foldable_range); } start_row = nested_end_row + 1; From 826d83edfee474ede86377666c3c13dd7422d453 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 28 Oct 2024 12:28:42 -0700 Subject: [PATCH 27/87] Fix backtrace spam on remote server (#19850) Release Notes: - N/A Co-authored-by: conrad --- crates/remote/src/ssh_session.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git 
a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 422937ed23..857b139736 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1221,9 +1221,11 @@ impl RemoteConnection for SshRemoteConnection { delegate.set_status(Some("Starting proxy"), cx); let mut start_proxy_command = format!( - "RUST_LOG={} RUST_BACKTRACE={} {:?} proxy --identifier {}", + "RUST_LOG={} {} {:?} proxy --identifier {}", std::env::var("RUST_LOG").unwrap_or_default(), - std::env::var("RUST_BACKTRACE").unwrap_or_default(), + std::env::var("RUST_BACKTRACE") + .map(|b| { format!("RUST_BACKTRACE={}", b) }) + .unwrap_or_default(), remote_binary_path, unique_identifier, ); From 80f89059aabade8f83c35a371ba1cc3a713a5cd6 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 28 Oct 2024 12:55:55 -0700 Subject: [PATCH 28/87] Fix mouse clicks on remote-open-folder UI (#19851) Also change Zed's standard style to use `.track_focus(&self.focus_handle(cx))`, instead of `.track_focus(&self.focus_handle)`, to catch these kinds of errors more easily in the future. 
Release Notes: - N/A --------- Co-authored-by: Conrad --- crates/assistant/src/assistant_panel.rs | 2 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/copilot/src/sign_in.rs | 2 +- crates/diagnostics/src/diagnostics.rs | 2 +- crates/gpui/examples/input.rs | 4 ++-- crates/gpui/src/key_dispatch.rs | 2 +- crates/image_viewer/src/image_viewer.rs | 2 +- crates/markdown_preview/src/markdown_preview_view.rs | 2 +- crates/outline_panel/src/outline_panel.rs | 2 +- crates/picker/src/head.rs | 4 ++-- crates/project_panel/src/project_panel.rs | 4 ++-- crates/recent_projects/src/disconnected_overlay.rs | 2 +- crates/recent_projects/src/remote_servers.rs | 2 +- crates/search/src/project_search.rs | 4 ++-- crates/terminal_view/src/terminal_view.rs | 2 +- crates/ui/src/components/context_menu.rs | 2 +- crates/welcome/src/welcome.rs | 2 +- crates/workspace/src/dock.rs | 8 ++++---- crates/workspace/src/item.rs | 4 ++-- crates/workspace/src/pane.rs | 2 +- 20 files changed, 28 insertions(+), 28 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 479d63a76e..b15026c1ea 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -4707,7 +4707,7 @@ impl Render for ConfigurationView { let mut element = v_flex() .id("assistant-configuration-view") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .bg(cx.theme().colors().editor_background) .size_full() .overflow_y_scroll() diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index f188aaf921..14cab63f63 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2726,7 +2726,7 @@ impl Render for CollabPanel { .on_action(cx.listener(CollabPanel::collapse_selected_channel)) .on_action(cx.listener(CollabPanel::expand_selected_channel)) .on_action(cx.listener(CollabPanel::start_move_selected_channel)) - .track_focus(&self.focus_handle) + 
.track_focus(&self.focus_handle(cx)) .size_full() .child(if self.user_store.read(cx).current_user().is_none() { self.render_signed_out(cx) diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index da6b969b72..d63710983b 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -185,7 +185,7 @@ impl Render for CopilotCodeVerification { v_flex() .id("copilot code verification") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .elevation_3(cx) .w_96() .items_center() diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index cb6d07e906..cef634a41c 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -101,7 +101,7 @@ impl Render for ProjectDiagnosticsEditor { }; div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .when(self.path_states.is_empty(), |el| { el.key_context("EmptyPane") }) diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index 7e7de269b1..97c8b666c7 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -485,7 +485,7 @@ impl Render for TextInput { div() .flex() .key_context("TextInput") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .cursor(CursorStyle::IBeam) .on_action(cx.listener(Self::backspace)) .on_action(cx.listener(Self::delete)) @@ -549,7 +549,7 @@ impl Render for InputExample { let num_keystrokes = self.recent_keystrokes.len(); div() .bg(rgb(0xaaaaaa)) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .flex() .flex_col() .size_full() diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index cb40a56367..722bc89a1d 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -16,7 +16,7 @@ /// impl Render for Editor { /// fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { /// div() -/// 
.track_focus(&self.focus_handle) +/// .track_focus(&self.focus_handle(cx)) /// .keymap_context("Editor") /// .on_action(cx.listener(Editor::undo)) /// .on_action(cx.listener(Editor::redo)) diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 607a4a4abe..1f6fb54d16 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -271,7 +271,7 @@ impl Render for ImageView { .left_0(); div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .child(checkered_background) .child( diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 1aa60e2a3b..81145afa3f 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -479,7 +479,7 @@ impl Render for MarkdownPreviewView { v_flex() .id("MarkdownPreview") .key_context("MarkdownPreview") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .bg(cx.theme().colors().editor_background) .p_4() diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 6def76bb38..1259646a1b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -3787,7 +3787,7 @@ impl Render for OutlinePanel { } }), ) - .track_focus(&self.focus_handle); + .track_focus(&self.focus_handle(cx)); if self.cached_entries.is_empty() { let header = if self.updating_fs_entries { diff --git a/crates/picker/src/head.rs b/crates/picker/src/head.rs index 1a103b252f..5ebcaf13a5 100644 --- a/crates/picker/src/head.rs +++ b/crates/picker/src/head.rs @@ -52,8 +52,8 @@ impl EmptyHead { } impl Render for EmptyHead { - fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { - div().track_focus(&self.focus_handle) + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + 
div().track_focus(&self.focus_handle(cx)) } } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 355e8780cc..0df1062526 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -3136,7 +3136,7 @@ impl Render for ProjectPanel { } }), ) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( uniform_list(cx.view().clone(), "entries", item_count, { |this, range, cx| { @@ -3268,7 +3268,7 @@ impl Render for ProjectPanel { .id("empty-project_panel") .size_full() .p_4() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( Button::new("open_project", "Open a project") .full_width() diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index ed81fbb345..f176d56c51 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -149,7 +149,7 @@ impl Render for DisconnectedOverlay { }; div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .elevation_3(cx) .on_action(cx.listener(Self::cancel)) .occlude() diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index a7ffee5e57..003485354e 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1266,7 +1266,7 @@ impl Render for RemoteServerProjects { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { self.selectable_items.reset(); div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .elevation_3(cx) .w(rems(34.)) .key_context("RemoteServerModal") diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 1bc49551a7..b018f29693 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -327,7 +327,7 @@ impl Render 
for ProjectSearchView { div() .flex_1() .size_full() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child(self.results_editor.clone()) } else { let model = self.model.read(cx); @@ -365,7 +365,7 @@ impl Render for ProjectSearchView { .size_full() .justify_center() .bg(cx.theme().colors().editor_background) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( h_flex() .size_full() diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index eed8c8123b..d192680968 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -975,7 +975,7 @@ impl Render for TerminalView { div() .size_full() .relative() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .key_context(self.dispatch_context(cx)) .on_action(cx.listener(TerminalView::send_text)) .on_action(cx.listener(TerminalView::send_keystroke)) diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index 92884b0182..702dd6a092 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -348,7 +348,7 @@ impl Render for ContextMenu { .min_w(px(200.)) .max_h(vh(0.75, cx)) .overflow_y_scroll() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .on_mouse_down_out(cx.listener(|this, _, cx| this.cancel(&menu::Cancel, cx))) .key_context("menu") .on_action(cx.listener(ContextMenu::select_first)) diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index 1be2567c0a..0be48bd82e 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -72,7 +72,7 @@ impl Render for WelcomePage { h_flex() .size_full() .bg(cx.theme().colors().editor_background) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( v_flex() .w_80() diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs 
index 28c462fbfc..2317d02a5a 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -658,7 +658,7 @@ impl Render for Dock { div() .key_context(dispatch_context) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .flex() .bg(cx.theme().colors().panel_background) .border_color(cx.theme().colors().border) @@ -689,7 +689,7 @@ impl Render for Dock { } else { div() .key_context(dispatch_context) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) } } } @@ -826,8 +826,8 @@ pub mod test { } impl Render for TestPanel { - fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - div().id("test").track_focus(&self.focus_handle) + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div().id("test").track_focus(&self.focus_handle(cx)) } } diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 9dc0b957f8..2f1c900ecf 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -1173,8 +1173,8 @@ pub mod test { } impl Render for TestItem { - fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { - gpui::div().track_focus(&self.focus_handle) + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + gpui::div().track_focus(&self.focus_handle(cx)) } } diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 6b99401f05..01a1f0271e 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2574,7 +2574,7 @@ impl Render for Pane { v_flex() .key_context(key_context) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .flex_none() .overflow_hidden() From 052b746fbd66a5c908b7185c703052818d1c84b5 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 28 Oct 2024 21:55:38 +0100 Subject: [PATCH 29/87] language_selector: Fix debug_assert firing off on context menu creation for LSP view (#19854) Closes #ISSUE 
Release Notes: - N/A --- crates/language_tools/src/lsp_log.rs | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index a5f77ec55f..e57d5dbc4a 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -1237,6 +1237,22 @@ impl Render for LspLogToolbarItemView { view.show_rpc_trace_for_server(row.server_id, cx); }), ); + if server_selected && row.selected_entry == LogKind::Rpc { + let selected_ix = menu.select_last(); + // Each language server has: + // 1. A title. + // 2. Server logs. + // 3. Server trace. + // 4. RPC messages. + // 5. Server capabilities + // Thus, if nth server's RPC is selected, the index of selected entry should match this formula + let _expected_index = ix * 5 + 3; + debug_assert_eq!( + Some(_expected_index), + selected_ix, + "Could not scroll to a just added LSP menu item" + ); + } menu = menu.entry( SERVER_CAPABILITIES, None, @@ -1244,14 +1260,6 @@ impl Render for LspLogToolbarItemView { view.show_capabilities_for_server(row.server_id, cx); }), ); - if server_selected && row.selected_entry == LogKind::Rpc { - let selected_ix = menu.select_last(); - debug_assert_eq!( - Some(ix * 4 + 3), - selected_ix, - "Could not scroll to a just added LSP menu item" - ); - } } menu }) From 188a893fd0c6f5b62f578c74612d7b71b60c8a5c Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Tue, 29 Oct 2024 04:56:59 +0800 Subject: [PATCH 30/87] python: Enhance syntax highlighting for type hints (#18185) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Release Notes: - Python: Improved syntax highlighting for type hints. # Before ![image](https://github.com/user-attachments/assets/876a69ab-a572-4d1b-af99-e6f85f249ea6) # After ![image](https://github.com/user-attachments/assets/4fb98a9b-bc5d-4799-b535-057047884383) --- Why manual recursion? 
- Due to tree-sitter grammar not supporting recursion in query (https://github.com/tree-sitter-grammars/tree-sitter-lua/issues/24), currently only manual recursion is possible (refer to https://github.com/projekt0n/github-nvim-theme/pull/250/files).
Unable to highlight when simple structures appear before complex structures, example: ```python def t() -> str | dict[int, dict[int, dict[int, str]]]: pass ``` Because complex structures are parsed as `subscript` rather than `generic_type` by tree-sitter in this case ☹
Related: - https://github.com/zed-industries/zed/issues/14715 - [Union Type (Python Doc)](https://docs.python.org/3/library/stdtypes.html#union-type) - [Type parameter lists (Python Doc)](https://docs.python.org/3/reference/compound_stmts.html#type-parameter-lists) --------- Co-authored-by: Marshall Bowers --- crates/languages/src/python/highlights.scm | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index 5b64642771..e5f1b4d423 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,5 +1,14 @@ (attribute attribute: (identifier) @property) (type (identifier) @type) +(generic_type (identifier) @type) + +; Type alias +(type_alias_statement "type" @keyword) + +; TypeVar with constraints in type parameters +(type + (tuple (identifier) @type) +) ; Function calls From 93b20008e04a73b55f0448c508e9b1ccda713abd Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 28 Oct 2024 17:44:23 -0400 Subject: [PATCH 31/87] Add support for Doxygen doc comments in C++ (#19858) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds support for Doxygen-style doc comments in C++. Screenshot 2024-10-28 at 5 38 34 PM https://www.doxygen.nl/manual/docblocks.html Closes https://github.com/zed-industries/zed/issues/18361. Release Notes: - C++: Added support for Doxygen-style doc comments starting with `/// ` or `//! `. 
--- crates/languages/src/cpp/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/cpp/config.toml b/crates/languages/src/cpp/config.toml index f9e7a26818..e78bc8ea6c 100644 --- a/crates/languages/src/cpp/config.toml +++ b/crates/languages/src/cpp/config.toml @@ -1,7 +1,7 @@ name = "C++" grammar = "cpp" path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "ipp", "inl", "cu", "cuh"] -line_comments = ["// "] +line_comments = ["// ", "/// ", "//! "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, From a3f0bb454735413bd0a11d5da06db06e8e5b3306 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 28 Oct 2024 17:10:08 -0600 Subject: [PATCH 32/87] SSH Remoting: Document manual binary management (#19862) Release Notes: - N/A --- docs/src/remote-development.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 9dc1777f39..e2bf1c5b66 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -113,7 +113,9 @@ Any prompts that SSH needs will be shown in the UI, so you can verify host keys, Once the master connection is established, Zed will check to see if the remote server binary is present in `~/.zed_server` on the remote, and that its version matches the current version of Zed that you're using. -If it is not there or the version mismatches, Zed will try to download the latest version. By default, it will download from `https://zed.dev` directly, but if you set: `{"remote_server": {"download":false}}` in your local settings, it will download the binary to your local machine and then upload it to the remote server. +If it is not there or the version mismatches, Zed will try to download the latest version. 
By default, it will download from `https://zed.dev` directly, but if you set: `{"upload_binary_over_ssh":true}` in your settings for that server, it will download the binary to your local machine and then upload it to the remote server. + +If you'd like to maintain the server binary yourself, you can. You can either download our prebuilt versions from [Github](https://github.com/zed-industries/zed/releases), or [build your own](https://zed.dev/docs/development) with `cargo build -p remote_server --release`. If you do this, you must upload it to `~/.zed_server/zed-remote-server-{RELEASE_CHANNEL}-{OS}-{ARCH}` on the server, for example `~/.zed_server/zed-remote-server-preview-linux-x86_64`. The version must exactly match the version of Zed itself you are using. ## Maintaining the SSH connection From 85ff03cde047d4fd77931252ef4f31e141d3ce49 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 28 Oct 2024 17:21:41 -0700 Subject: [PATCH 33/87] Add more context to the save new file path picker (#19863) Release Notes: - N/A Co-authored-by: Conrad --- crates/file_finder/src/new_path_prompt.rs | 38 ++++++++++++++++++++-- crates/file_finder/src/open_path_prompt.rs | 6 +++- crates/picker/src/picker.rs | 8 +++-- crates/tasks_ui/src/modal.rs | 6 +++- 4 files changed, 52 insertions(+), 6 deletions(-) diff --git a/crates/file_finder/src/new_path_prompt.rs b/crates/file_finder/src/new_path_prompt.rs index e992dd315f..d4492857b4 100644 --- a/crates/file_finder/src/new_path_prompt.rs +++ b/crates/file_finder/src/new_path_prompt.rs @@ -4,7 +4,7 @@ use gpui::{HighlightStyle, Model, StyledText}; use picker::{Picker, PickerDelegate}; use project::{Entry, PathMatchCandidateSet, Project, ProjectPath, WorktreeId}; use std::{ - path::PathBuf, + path::{Path, PathBuf}, sync::{ atomic::{self, AtomicBool}, Arc, @@ -254,6 +254,7 @@ impl PickerDelegate for NewPathDelegate { .trim() .trim_start_matches("./") .trim_start_matches('/'); + let (dir, suffix) = if let Some(index) = query.rfind('/') { let 
suffix = if index + 1 < query.len() { Some(query[index + 1..].to_string()) @@ -317,6 +318,14 @@ impl PickerDelegate for NewPathDelegate { }) } + fn confirm_completion( + &mut self, + _: String, + cx: &mut ViewContext>, + ) -> Option { + self.confirm_update_query(cx) + } + fn confirm_update_query(&mut self, cx: &mut ViewContext>) -> Option { let m = self.matches.get(self.selected_index)?; if m.is_dir(self.project.read(cx), cx) { @@ -422,7 +431,32 @@ impl NewPathDelegate { ) { cx.notify(); if query.is_empty() { - self.matches = vec![]; + self.matches = self + .project + .read(cx) + .worktrees(cx) + .flat_map(|worktree| { + let worktree_id = worktree.read(cx).id(); + worktree + .read(cx) + .child_entries(Path::new("")) + .filter_map(move |entry| { + entry.is_dir().then(|| Match { + path_match: Some(PathMatch { + score: 1.0, + positions: Default::default(), + worktree_id: worktree_id.to_usize(), + path: entry.path.clone(), + path_prefix: "".into(), + is_dir: entry.is_dir(), + distance_to_relative_ancestor: 0, + }), + suffix: None, + }) + }) + }) + .collect(); + return; } diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 0736d4189b..be1e91b482 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -220,7 +220,11 @@ impl PickerDelegate for OpenPathDelegate { }) } - fn confirm_completion(&self, query: String) -> Option { + fn confirm_completion( + &mut self, + query: String, + _: &mut ViewContext>, + ) -> Option { Some( maybe!({ let m = self.matches.get(self.selected_index)?; diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index a9512606d2..5ebbcd3330 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -108,7 +108,11 @@ pub trait PickerDelegate: Sized + 'static { fn should_dismiss(&self) -> bool { true } - fn confirm_completion(&self, _query: String) -> Option { + fn confirm_completion( + &mut self, + _query: 
String, + _: &mut ViewContext>, + ) -> Option { None } @@ -370,7 +374,7 @@ impl Picker { } fn confirm_completion(&mut self, _: &ConfirmCompletion, cx: &mut ViewContext) { - if let Some(new_query) = self.delegate.confirm_completion(self.query(cx)) { + if let Some(new_query) = self.delegate.confirm_completion(self.query(cx), cx) { self.set_query(new_query, cx); } else { cx.propagate() diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index c18a0e6ba6..3de116702a 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -425,7 +425,11 @@ impl PickerDelegate for TasksModalDelegate { ) } - fn confirm_completion(&self, _: String) -> Option { + fn confirm_completion( + &mut self, + _: String, + _: &mut ViewContext>, + ) -> Option { let task_index = self.matches.get(self.selected_index())?.candidate_id; let tasks = self.candidates.as_ref()?; let (_, task) = tasks.get(task_index)?; From 58e5d4ff0245368aa36b3274361d51a2a6dbe25b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 28 Oct 2024 20:27:09 -0600 Subject: [PATCH 34/87] Reland invisibles (#19846) Release Notes: - Show invisibles in the editor Relands #19298 Trying to quantify a performance impact, it doesn't seem to impact much visible in Instruments or in a micro-benchmark of Editor#layout_lines. We're still taking a few hundred micro-seconds (+/- a lot) every time. The ascii file has just ascii, where as the cc file has one control character per line. 
Screenshot 2024-10-28 at 12 14 53 Screenshot 2024-10-28 at 12 15 07 --- crates/editor/src/display_map.rs | 100 ++++++++++++++- crates/editor/src/display_map/invisibles.rs | 129 ++++++++++++++++++++ crates/editor/src/hover_popover.rs | 41 ++++++- crates/gpui/src/text_system/line.rs | 65 ++++++++-- 4 files changed, 319 insertions(+), 16 deletions(-) create mode 100644 crates/editor/src/display_map/invisibles.rs diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 559c2321c6..79a2fbdb11 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -21,6 +21,7 @@ mod block_map; mod crease_map; mod fold_map; mod inlay_map; +pub(crate) mod invisibles; mod tab_map; mod wrap_map; @@ -42,6 +43,7 @@ use gpui::{ pub(crate) use inlay_map::Inlay; use inlay_map::{InlayMap, InlaySnapshot}; pub use inlay_map::{InlayOffset, InlayPoint}; +use invisibles::{is_invisible, replacement}; use language::{ language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point, Subscription as BufferSubscription, @@ -56,6 +58,7 @@ use std::{ any::TypeId, borrow::Cow, fmt::Debug, + iter, num::NonZeroU32, ops::{Add, Range, Sub}, sync::Arc, @@ -63,7 +66,7 @@ use std::{ use sum_tree::{Bias, TreeMap}; use tab_map::{TabMap, TabSnapshot}; use text::LineIndent; -use ui::WindowContext; +use ui::{div, px, IntoElement, ParentElement, Styled, WindowContext}; use wrap_map::{WrapMap, WrapSnapshot}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -461,6 +464,98 @@ pub struct HighlightedChunk<'a> { pub renderer: Option, } +impl<'a> HighlightedChunk<'a> { + fn highlight_invisibles( + self, + editor_style: &'a EditorStyle, + ) -> impl Iterator + 'a { + let mut chars = self.text.chars().peekable(); + let mut text = self.text; + let style = self.style; + let is_tab = self.is_tab; + let renderer = self.renderer; + iter::from_fn(move || { + let mut prefix_len = 0; + while let Some(&ch) = chars.peek() { + if !is_invisible(ch) { + prefix_len += 
ch.len_utf8(); + chars.next(); + continue; + } + if prefix_len > 0 { + let (prefix, suffix) = text.split_at(prefix_len); + text = suffix; + return Some(HighlightedChunk { + text: prefix, + style, + is_tab, + renderer: renderer.clone(), + }); + } + chars.next(); + let (prefix, suffix) = text.split_at(ch.len_utf8()); + text = suffix; + if let Some(replacement) = replacement(ch) { + let background = editor_style.status.hint_background; + let underline = editor_style.status.hint; + return Some(HighlightedChunk { + text: prefix, + style: None, + is_tab: false, + renderer: Some(ChunkRenderer { + render: Arc::new(move |_| { + div() + .child(replacement) + .bg(background) + .text_decoration_1() + .text_decoration_color(underline) + .into_any_element() + }), + constrain_width: false, + }), + }); + } else { + let invisible_highlight = HighlightStyle { + background_color: Some(editor_style.status.hint_background), + underline: Some(UnderlineStyle { + color: Some(editor_style.status.hint), + thickness: px(1.), + wavy: false, + }), + ..Default::default() + }; + let invisible_style = if let Some(mut style) = style { + style.highlight(invisible_highlight); + style + } else { + invisible_highlight + }; + + return Some(HighlightedChunk { + text: prefix, + style: Some(invisible_style), + is_tab: false, + renderer: renderer.clone(), + }); + } + } + + if !text.is_empty() { + let remainder = text; + text = ""; + Some(HighlightedChunk { + text: remainder, + style, + is_tab, + renderer: renderer.clone(), + }) + } else { + None + } + }) + } +} + #[derive(Clone)] pub struct DisplaySnapshot { pub buffer_snapshot: MultiBufferSnapshot, @@ -675,7 +770,7 @@ impl DisplaySnapshot { suggestion: Some(editor_style.suggestions_style), }, ) - .map(|chunk| { + .flat_map(|chunk| { let mut highlight_style = chunk .syntax_highlight_id .and_then(|id| id.style(&editor_style.syntax)); @@ -718,6 +813,7 @@ impl DisplaySnapshot { is_tab: chunk.is_tab, renderer: chunk.renderer, } + 
.highlight_invisibles(editor_style) }) } diff --git a/crates/editor/src/display_map/invisibles.rs b/crates/editor/src/display_map/invisibles.rs new file mode 100644 index 0000000000..794b897603 --- /dev/null +++ b/crates/editor/src/display_map/invisibles.rs @@ -0,0 +1,129 @@ +// Invisibility in a Unicode context is not well defined, so we have to guess. +// +// We highlight all ASCII control codes, and unicode whitespace because they are likely +// confused with an ASCII space in a programming context (U+0020). +// +// We also highlight the handful of blank non-space characters: +// U+2800 BRAILLE PATTERN BLANK - Category: So +// U+115F HANGUL CHOSEONG FILLER - Category: Lo +// U+1160 HANGUL CHOSEONG FILLER - Category: Lo +// U+3164 HANGUL FILLER - Category: Lo +// U+FFA0 HALFWIDTH HANGUL FILLER - Category: Lo +// U+FFFC OBJECT REPLACEMENT CHARACTER - Category: So +// +// For the rest of Unicode, invisibility happens for two reasons: +// * A Format character (like a byte order mark or right-to-left override) +// * An invisible Nonspacing Mark character (like U+034F, or variation selectors) +// +// We don't consider unassigned codepoints invisible as the font renderer already shows +// a replacement character in that case (and there are a *lot* of them) +// +// Control characters are mostly fine to highlight; except: +// * U+E0020..=U+E007F are used in emoji flags. We don't highlight them right now, but we could if we tightened our heuristics. +// * U+200D is used to join characters. We highlight this but don't replace it. As our font system ignores mid-glyph highlights this mostly works to highlight unexpected uses. +// +// Nonspacing marks are handled like U+200D. This means that mid-glyph we ignore them, but +// probably causes issues with end-of-glyph usage. 
+// +// ref: https://invisible-characters.com +// ref: https://www.compart.com/en/unicode/category/Cf +// ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1 +// ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt +// https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt +pub fn is_invisible(c: char) -> bool { + if c <= '\u{1f}' { + c != '\t' && c != '\n' && c != '\r' + } else if c >= '\u{7f}' { + c <= '\u{9f}' + || (c.is_whitespace() && c != IDEOGRAPHIC_SPACE) + || contains(c, &FORMAT) + || contains(c, &OTHER) + } else { + false + } +} +// ASCII control characters have fancy unicode glyphs, everything else +// is replaced by a space - unless it is used in combining characters in +// which case we need to leave it in the string. +pub(crate) fn replacement(c: char) -> Option<&'static str> { + if c <= '\x1f' { + Some(C0_SYMBOLS[c as usize]) + } else if c == '\x7f' { + Some(DEL) + } else if contains(c, &PRESERVE) { + None + } else { + Some("\u{2007}") // fixed width space + } +} +// IDEOGRAPHIC SPACE is common alongside Chinese and other wide character sets. +// We don't highlight this for now (as it already shows up wide in the editor), +// but could if we tracked state in the classifier. 
+const IDEOGRAPHIC_SPACE: char = '\u{3000}'; + +const C0_SYMBOLS: &'static [&'static str] = &[ + "␀", "␁", "␂", "␃", "␄", "␅", "␆", "␇", "␈", "␉", "␊", "␋", "␌", "␍", "␎", "␏", "␐", "␑", "␒", + "␓", "␔", "␕", "␖", "␗", "␘", "␙", "␚", "␛", "␜", "␝", "␞", "␟", +]; +const DEL: &'static str = "␡"; + +// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0 +pub const FORMAT: &'static [(char, char)] = &[ + ('\u{ad}', '\u{ad}'), + ('\u{600}', '\u{605}'), + ('\u{61c}', '\u{61c}'), + ('\u{6dd}', '\u{6dd}'), + ('\u{70f}', '\u{70f}'), + ('\u{890}', '\u{891}'), + ('\u{8e2}', '\u{8e2}'), + ('\u{180e}', '\u{180e}'), + ('\u{200b}', '\u{200f}'), + ('\u{202a}', '\u{202e}'), + ('\u{2060}', '\u{2064}'), + ('\u{2066}', '\u{206f}'), + ('\u{feff}', '\u{feff}'), + ('\u{fff9}', '\u{fffb}'), + ('\u{110bd}', '\u{110bd}'), + ('\u{110cd}', '\u{110cd}'), + ('\u{13430}', '\u{1343f}'), + ('\u{1bca0}', '\u{1bca3}'), + ('\u{1d173}', '\u{1d17a}'), + ('\u{e0001}', '\u{e0001}'), + ('\u{e0020}', '\u{e007f}'), +]; + +// hand-made base on https://invisible-characters.com (Excluding Cf) +pub const OTHER: &'static [(char, char)] = &[ + ('\u{034f}', '\u{034f}'), + ('\u{115F}', '\u{1160}'), + ('\u{17b4}', '\u{17b5}'), + ('\u{180b}', '\u{180d}'), + ('\u{2800}', '\u{2800}'), + ('\u{3164}', '\u{3164}'), + ('\u{fe00}', '\u{fe0d}'), + ('\u{ffa0}', '\u{ffa0}'), + ('\u{fffc}', '\u{fffc}'), + ('\u{e0100}', '\u{e01ef}'), +]; + +// a subset of FORMAT/OTHER that may appear within glyphs +const PRESERVE: &'static [(char, char)] = &[ + ('\u{034f}', '\u{034f}'), + ('\u{200d}', '\u{200d}'), + ('\u{17b4}', '\u{17b5}'), + ('\u{180b}', '\u{180d}'), + ('\u{e0061}', '\u{e007a}'), + ('\u{e007f}', '\u{e007f}'), +]; + +fn contains(c: char, list: &[(char, char)]) -> bool { + for (start, end) in list { + if c < *start { + return false; + } + if c <= *end { + return true; + } + } + false +} diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 
9200dd7b8c..fb198c837c 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1,5 +1,5 @@ use crate::{ - display_map::{InlayOffset, ToDisplayPoint}, + display_map::{invisibles::is_invisible, InlayOffset, ToDisplayPoint}, hover_links::{InlayHighlight, RangeInEditor}, scroll::ScrollAmount, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot, @@ -11,7 +11,7 @@ use gpui::{ StyleRefinement, Styled, Task, TextStyleRefinement, View, ViewContext, }; use itertools::Itertools; -use language::{DiagnosticEntry, Language, LanguageRegistry}; +use language::{Diagnostic, DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{Markdown, MarkdownStyle}; use multi_buffer::ToOffset; @@ -259,7 +259,7 @@ fn show_hover( } // If there's a diagnostic, assign it on the hover state and notify - let local_diagnostic = snapshot + let mut local_diagnostic = snapshot .buffer_snapshot .diagnostics_in_range::<_, usize>(anchor..anchor, false) // Find the entry with the most specific range @@ -280,6 +280,41 @@ fn show_hover( range: entry.range.to_anchors(&snapshot.buffer_snapshot), }) }); + if let Some(invisible) = snapshot + .buffer_snapshot + .chars_at(anchor) + .next() + .filter(|&c| is_invisible(c)) + { + let after = snapshot.buffer_snapshot.anchor_after( + anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(), + ); + local_diagnostic = Some(DiagnosticEntry { + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: format!("Unicode character U+{:02X}", invisible as u32), + ..Default::default() + }, + range: anchor..after, + }) + } else if let Some(invisible) = snapshot + .buffer_snapshot + .reversed_chars_at(anchor) + .next() + .filter(|&c| is_invisible(c)) + { + let before = snapshot.buffer_snapshot.anchor_before( + anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(), + ); + local_diagnostic = Some(DiagnosticEntry { + diagnostic: Diagnostic 
{ + severity: DiagnosticSeverity::HINT, + message: format!("Unicode character U+{:02X}", invisible as u32), + ..Default::default() + }, + range: before..anchor, + }) + } let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic { let text = match local_diagnostic.diagnostic.source { diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index 240654e57e..b8b698a042 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -1,6 +1,7 @@ use crate::{ - black, fill, point, px, size, Bounds, Hsla, LineLayout, Pixels, Point, Result, SharedString, - StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, WrappedLineLayout, + black, fill, point, px, size, Bounds, Half, Hsla, LineLayout, Pixels, Point, Result, + SharedString, StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, + WrappedLineLayout, }; use derive_more::{Deref, DerefMut}; use smallvec::SmallVec; @@ -129,8 +130,9 @@ fn paint_line( let text_system = cx.text_system().clone(); let mut glyph_origin = origin; let mut prev_glyph_position = Point::default(); + let mut max_glyph_size = size(px(0.), px(0.)); for (run_ix, run) in layout.runs.iter().enumerate() { - let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size; + max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size; for (glyph_ix, glyph) in run.glyphs.iter().enumerate() { glyph_origin.x += glyph.position.x - prev_glyph_position.x; @@ -139,6 +141,9 @@ fn paint_line( wraps.next(); if let Some((background_origin, background_color)) = current_background.as_mut() { + if glyph_origin.x == background_origin.x { + background_origin.x -= max_glyph_size.width.half() + } cx.paint_quad(fill( Bounds { origin: *background_origin, @@ -150,6 +155,9 @@ fn paint_line( background_origin.y += line_height; } if let Some((underline_origin, underline_style)) = current_underline.as_mut() { + if glyph_origin.x == underline_origin.x 
{ + underline_origin.x -= max_glyph_size.width.half(); + }; cx.paint_underline( *underline_origin, glyph_origin.x - underline_origin.x, @@ -161,6 +169,9 @@ fn paint_line( if let Some((strikethrough_origin, strikethrough_style)) = current_strikethrough.as_mut() { + if glyph_origin.x == strikethrough_origin.x { + strikethrough_origin.x -= max_glyph_size.width.half(); + }; cx.paint_strikethrough( *strikethrough_origin, glyph_origin.x - strikethrough_origin.x, @@ -179,7 +190,18 @@ fn paint_line( let mut finished_underline: Option<(Point, UnderlineStyle)> = None; let mut finished_strikethrough: Option<(Point, StrikethroughStyle)> = None; if glyph.index >= run_end { - if let Some(style_run) = decoration_runs.next() { + let mut style_run = decoration_runs.next(); + + // ignore style runs that apply to a partial glyph + while let Some(run) = style_run { + if glyph.index < run_end + (run.len as usize) { + break; + } + run_end += run.len as usize; + style_run = decoration_runs.next(); + } + + if let Some(style_run) = style_run { if let Some((_, background_color)) = &mut current_background { if style_run.background_color.as_ref() != Some(background_color) { finished_background = current_background.take(); @@ -239,17 +261,24 @@ fn paint_line( } } - if let Some((background_origin, background_color)) = finished_background { + if let Some((mut background_origin, background_color)) = finished_background { + let mut width = glyph_origin.x - background_origin.x; + if background_origin.x == glyph_origin.x { + background_origin.x -= max_glyph_size.width.half(); + }; cx.paint_quad(fill( Bounds { origin: background_origin, - size: size(glyph_origin.x - background_origin.x, line_height), + size: size(width, line_height), }, background_color, )); } - if let Some((underline_origin, underline_style)) = finished_underline { + if let Some((mut underline_origin, underline_style)) = finished_underline { + if underline_origin.x == glyph_origin.x { + underline_origin.x -= 
max_glyph_size.width.half(); + }; cx.paint_underline( underline_origin, glyph_origin.x - underline_origin.x, @@ -257,7 +286,12 @@ fn paint_line( ); } - if let Some((strikethrough_origin, strikethrough_style)) = finished_strikethrough { + if let Some((mut strikethrough_origin, strikethrough_style)) = + finished_strikethrough + { + if strikethrough_origin.x == glyph_origin.x { + strikethrough_origin.x -= max_glyph_size.width.half(); + }; cx.paint_strikethrough( strikethrough_origin, glyph_origin.x - strikethrough_origin.x, @@ -299,7 +333,10 @@ fn paint_line( last_line_end_x -= glyph.position.x; } - if let Some((background_origin, background_color)) = current_background.take() { + if let Some((mut background_origin, background_color)) = current_background.take() { + if last_line_end_x == background_origin.x { + background_origin.x -= max_glyph_size.width.half() + }; cx.paint_quad(fill( Bounds { origin: background_origin, @@ -309,7 +346,10 @@ fn paint_line( )); } - if let Some((underline_start, underline_style)) = current_underline.take() { + if let Some((mut underline_start, underline_style)) = current_underline.take() { + if last_line_end_x == underline_start.x { + underline_start.x -= max_glyph_size.width.half() + }; cx.paint_underline( underline_start, last_line_end_x - underline_start.x, @@ -317,7 +357,10 @@ fn paint_line( ); } - if let Some((strikethrough_start, strikethrough_style)) = current_strikethrough.take() { + if let Some((mut strikethrough_start, strikethrough_style)) = current_strikethrough.take() { + if last_line_end_x == strikethrough_start.x { + strikethrough_start.x -= max_glyph_size.width.half() + }; cx.paint_strikethrough( strikethrough_start, last_line_end_x - strikethrough_start.x, From 1b84fee708265fb1468bf8c656df55f2187071fa Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Mon, 28 Oct 2024 22:32:59 -0400 Subject: [PATCH 35/87] restore `editor::UnfoldRecursive` binding (#19865) --- assets/keymaps/default-macos.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index ade3ece1ed..cb9a86bd0b 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -349,6 +349,7 @@ "alt-cmd-]": "editor::UnfoldLines", "cmd-k cmd-l": "editor::ToggleFold", "cmd-k cmd-[": "editor::FoldRecursive", + "cmd-k cmd-]": "editor::UnfoldRecursive", "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }], "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }], "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }], From 719a7f7890296a712eb5c15844bad7f530ccfbe1 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 28 Oct 2024 21:05:24 -0600 Subject: [PATCH 36/87] Fix block cursor on graphemes (#19867) Release Notes: - Fixed block cursor rendering only first char of multii-char graphemes. 
--- Cargo.lock | 1 + crates/editor/Cargo.toml | 1 + crates/editor/src/display_map.rs | 37 +++++++++++++++++++------------- crates/editor/src/element.rs | 18 ++++++---------- 4 files changed, 30 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 921ec3a4f0..1b3c4de81d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3717,6 +3717,7 @@ dependencies = [ "tree-sitter-rust", "tree-sitter-typescript", "ui", + "unicode-segmentation", "unindent", "url", "util", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index cfd9284f80..bff1935f86 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -76,6 +76,7 @@ theme.workspace = true tree-sitter-html = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } +unicode-segmentation.workspace = true unindent = { workspace = true, optional = true } ui.workspace = true url.workspace = true diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 79a2fbdb11..c176213682 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -66,7 +66,8 @@ use std::{ use sum_tree::{Bias, TreeMap}; use tab_map::{TabMap, TabSnapshot}; use text::LineIndent; -use ui::{div, px, IntoElement, ParentElement, Styled, WindowContext}; +use ui::{div, px, IntoElement, ParentElement, SharedString, Styled, WindowContext}; +use unicode_segmentation::UnicodeSegmentation; use wrap_map::{WrapMap, WrapSnapshot}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -880,12 +881,10 @@ impl DisplaySnapshot { layout_line.closest_index_for_x(x) as u32 } - pub fn display_chars_at( - &self, - mut point: DisplayPoint, - ) -> impl Iterator + '_ { + pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option { point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left)); - self.text_chunks(point.row()) + let chars = self + .text_chunks(point.row()) 
.flat_map(str::chars) .skip_while({ let mut column = 0; @@ -895,16 +894,24 @@ impl DisplaySnapshot { !at_point } }) - .map(move |ch| { - let result = (ch, point); - if ch == '\n' { - *point.row_mut() += 1; - *point.column_mut() = 0; - } else { - *point.column_mut() += ch.len_utf8() as u32; + .take_while({ + let mut prev = false; + move |char| { + let now = char.is_ascii(); + let end = char.is_ascii() && (char.is_ascii_whitespace() || prev); + prev = now; + !end } - result - }) + }); + chars.collect::().graphemes(true).next().map(|s| { + if let Some(invisible) = s.chars().next().filter(|&c| is_invisible(c)) { + replacement(invisible).unwrap_or(s).to_owned().into() + } else if s == "\n" { + " ".into() + } else { + s.to_owned().into() + } + }) } pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator + '_ { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 2c3bed7eb7..3ece171b05 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -68,6 +68,7 @@ use sum_tree::Bias; use theme::{ActiveTheme, Appearance, PlayerColor}; use ui::prelude::*; use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip}; +use unicode_segmentation::UnicodeSegmentation; use util::RangeExt; use util::ResultExt; use workspace::{item::Item, Workspace}; @@ -1027,24 +1028,17 @@ impl EditorElement { } let block_text = if let CursorShape::Block = selection.cursor_shape { snapshot - .display_chars_at(cursor_position) - .next() + .grapheme_at(cursor_position) .or_else(|| { if cursor_column == 0 { - snapshot - .placeholder_text() - .and_then(|s| s.chars().next()) - .map(|c| (c, cursor_position)) + snapshot.placeholder_text().and_then(|s| { + s.graphemes(true).next().map(|s| s.to_string().into()) + }) } else { None } }) - .and_then(|(character, _)| { - let text = if character == '\n' { - SharedString::from(" ") - } else { - SharedString::from(character.to_string()) - }; + .and_then(|text| { let len = text.len(); let font = 
cursor_row_layout From b5c41eeb98ab2618dd6583c7f28bca6a2f2ff57b Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 29 Oct 2024 09:52:36 +0100 Subject: [PATCH 37/87] Future-proof indent guides settings for panels (#19878) This PR ensures that we do not have to break the indent guides settings for the project/outline panel. In the future we might want to have a more granular way to control when to show indent guides, or control other indent guide properties, like its width. Release Notes: - N/A --- assets/settings/default.json | 28 +++++++-- crates/outline_panel/src/outline_panel.rs | 4 +- .../src/outline_panel_settings.rs | 26 ++++++-- crates/project_panel/src/project_panel.rs | 7 ++- .../src/project_panel_settings.rs | 26 ++++++-- docs/src/configuring-zed.md | 60 ++++++++++++++----- 6 files changed, 117 insertions(+), 34 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 879f6bb7fa..748a4b12d1 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -346,8 +346,6 @@ "git_status": true, // Amount of indentation for nested items. "indent_size": 20, - // Whether to show indent guides in the project panel. - "indent_guides": true, // Whether to reveal it in the project panel automatically, // when a corresponding project entry becomes active. // Gitignored entries are never auto revealed. @@ -371,6 +369,17 @@ /// 5. Never show the scrollbar: /// "never" "show": null + }, + // Settings related to indent guides in the project panel. + "indent_guides": { + // When to show indent guides in the project panel. + // This setting can take two values: + // + // 1. Always show indent guides: + // "always" + // 2. Never show indent guides: + // "never" + "show": "always" } }, "outline_panel": { @@ -388,15 +397,24 @@ "git_status": true, // Amount of indentation for nested items. "indent_size": 20, - // Whether to show indent guides in the outline panel. 
- "indent_guides": true, // Whether to reveal it in the outline panel automatically, // when a corresponding outline entry becomes active. // Gitignored entries are never auto revealed. "auto_reveal_entries": true, /// Whether to fold directories automatically /// when a directory has only one directory inside. - "auto_fold_dirs": true + "auto_fold_dirs": true, + // Settings related to indent guides in the outline panel. + "indent_guides": { + // When to show indent guides in the outline panel. + // This setting can take two values: + // + // 1. Always show indent guides: + // "always" + // 2. Never show indent guides: + // "never" + "show": "always" + } }, "collaboration_panel": { // Whether to show the collaboration panel button in the status bar. diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 1259646a1b..83eb7347ce 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -35,7 +35,7 @@ use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrev}; -use outline_panel_settings::{OutlinePanelDockPosition, OutlinePanelSettings}; +use outline_panel_settings::{OutlinePanelDockPosition, OutlinePanelSettings, ShowIndentGuides}; use project::{File, Fs, Item, Project}; use search::{BufferSearchBar, ProjectSearchView}; use serde::{Deserialize, Serialize}; @@ -3748,7 +3748,7 @@ impl Render for OutlinePanel { let pinned = self.pinned; let settings = OutlinePanelSettings::get_global(cx); let indent_size = settings.indent_size; - let show_indent_guides = settings.indent_guides; + let show_indent_guides = settings.indent_guides.show == ShowIndentGuides::Always; let outline_panel = v_flex() .id("outline-panel") diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index e165978fc7..d658a55793 100644 --- 
a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -10,6 +10,13 @@ pub enum OutlinePanelDockPosition { Right, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ShowIndentGuides { + Always, + Never, +} + #[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct OutlinePanelSettings { pub button: bool, @@ -19,11 +26,22 @@ pub struct OutlinePanelSettings { pub folder_icons: bool, pub git_status: bool, pub indent_size: f32, - pub indent_guides: bool, + pub indent_guides: IndentGuidesSettings, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct IndentGuidesSettings { + pub show: ShowIndentGuides, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct IndentGuidesSettingsContent { + /// When to show the scrollbar in the outline panel. + pub show: Option, +} + #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] pub struct OutlinePanelSettingsContent { /// Whether to show the outline panel button in the status bar. @@ -54,10 +72,6 @@ pub struct OutlinePanelSettingsContent { /// /// Default: 20 pub indent_size: Option, - /// Whether to show indent guides in the outline panel. - /// - /// Default: true - pub indent_guides: Option, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. @@ -69,6 +83,8 @@ pub struct OutlinePanelSettingsContent { /// /// Default: true pub auto_fold_dirs: Option, + /// Settings related to indent guides in the outline panel. 
+ pub indent_guides: Option, } impl Settings for OutlinePanelSettings { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 0df1062526..6532e3e977 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -30,7 +30,7 @@ use project::{ relativize_path, Entry, EntryKind, Fs, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId, }; -use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings}; +use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings, ShowIndentGuides}; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::{ @@ -3043,7 +3043,8 @@ impl Render for ProjectPanel { let has_worktree = !self.visible_entries.is_empty(); let project = self.project.read(cx); let indent_size = ProjectPanelSettings::get_global(cx).indent_size; - let indent_guides = ProjectPanelSettings::get_global(cx).indent_guides; + let show_indent_guides = + ProjectPanelSettings::get_global(cx).indent_guides.show == ShowIndentGuides::Always; let is_local = project.is_local(); if has_worktree { @@ -3147,7 +3148,7 @@ impl Render for ProjectPanel { items } }) - .when(indent_guides, |list| { + .when(show_indent_guides, |list| { list.with_decoration( ui::indent_guides( cx.view().clone(), diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 16980c00d1..c841794585 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -11,6 +11,13 @@ pub enum ProjectPanelDockPosition { Right, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ShowIndentGuides { + Always, + Never, +} + #[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct ProjectPanelSettings { pub button: bool, @@ -20,12 +27,23 @@ pub struct ProjectPanelSettings { pub 
folder_icons: bool, pub git_status: bool, pub indent_size: f32, - pub indent_guides: bool, + pub indent_guides: IndentGuidesSettings, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, pub scrollbar: ScrollbarSettings, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct IndentGuidesSettings { + pub show: ShowIndentGuides, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct IndentGuidesSettingsContent { + /// When to show the scrollbar in the project panel. + pub show: Option, +} + #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. @@ -72,10 +90,6 @@ pub struct ProjectPanelSettingsContent { /// /// Default: 20 pub indent_size: Option, - /// Whether to show indent guides in the project panel. - /// - /// Default: true - pub indent_guides: Option, /// Whether to reveal it in the project panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. @@ -89,6 +103,8 @@ pub struct ProjectPanelSettingsContent { pub auto_fold_dirs: Option, /// Scrollbar-related settings pub scrollbar: Option, + /// Settings related to indent guides in the project panel. 
+ pub indent_guides: Option, } impl Settings for ProjectPanelSettings { diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index f149fa5cf1..e1c4f698a5 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2047,6 +2047,9 @@ Run the `theme selector: toggle` action in the command palette to see a current "auto_fold_dirs": true, "scrollbar": { "show": null + }, + "indent_guides": { + "show": "always" } } } @@ -2164,27 +2167,54 @@ Run the `theme selector: toggle` action in the command palette to see a current - Setting: `indent_size` - Default: `20` -### Indent Guides +### Indent Guides: Show -- Description: Whether to show indent guides in the project panel. +- Description: Whether to show indent guides in the project panel. Possible values: "always", "never". - Setting: `indent_guides` -- Default: `true` - -### Scrollbar - -- Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. -- Setting: `scrollbar` -- Default: ```json -"scrollbar": { - "show": null +"indent_guides": { + "show": "always" } ``` **Options** -1. Show scrollbar in project panel +1. Show indent guides in the project panel + +```json +{ + "indent_guides": { + "show": "always" + } +} +``` + +2. Hide indent guides in the project panel + +```json +{ + "indent_guides": { + "show": "never" + } +} +``` + +### Scrollbar: Show + +- Description: Whether to show a scrollbar in the project panel. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. +- Setting: `scrollbar` +- Default: + +```json +"scrollbar": { + "show": null +} +``` + +**Options** + +1. Show scrollbar in the project panel ```json { @@ -2194,7 +2224,7 @@ Run the `theme selector: toggle` action in the command palette to see a current } ``` -2. Hide scrollbar in project panel +2. 
Hide scrollbar in the project panel ```json { @@ -2237,9 +2267,11 @@ Run the `theme selector: toggle` action in the command palette to see a current "folder_icons": true, "git_status": true, "indent_size": 20, - "indent_guides": true, "auto_reveal_entries": true, "auto_fold_dirs": true, + "indent_guides": { + "show": "always" + } } ``` From bdb54decdc5ad010503ca8f3915a78f4b3b1619f Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 29 Oct 2024 09:52:54 +0100 Subject: [PATCH 38/87] ssh remoting: Show the host's GitHub name in the titlebar when sharing an SSH project (#19844) The name (GitHub name) of the host was not displayed when sharing an ssh project. Previously we assumed that the a collaborator is a host if the `replica_id` of the collaborator was `0`, but for ssh project the `replica_id` is actually `1`. Screenshot 2024-10-28 at 18 16 30 Co-Authored-by: Thorsten Release Notes: - N/A --------- Co-authored-by: Thorsten --- crates/client/src/user.rs | 2 ++ crates/collab/src/db.rs | 1 + crates/collab/src/db/queries/buffers.rs | 4 ++++ crates/collab/src/db/tests/buffer_tests.rs | 2 ++ crates/collab/src/rpc.rs | 1 + crates/project/src/project.rs | 4 ++-- crates/proto/proto/zed.proto | 1 + crates/workspace/src/workspace.rs | 2 +- 8 files changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index f6ee279dc8..fab5687c41 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -48,6 +48,7 @@ pub struct Collaborator { pub peer_id: proto::PeerId, pub replica_id: ReplicaId, pub user_id: UserId, + pub is_host: bool, } impl PartialOrd for User { @@ -824,6 +825,7 @@ impl Collaborator { peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?, replica_id: message.replica_id as ReplicaId, user_id: message.user_id as UserId, + is_host: message.is_host, }) } } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index ef85f91fe1..81db7158e8 100644 --- a/crates/collab/src/db.rs 
+++ b/crates/collab/src/db.rs @@ -740,6 +740,7 @@ impl ProjectCollaborator { peer_id: Some(self.connection_id.into()), replica_id: self.replica_id.0 as u32, user_id: self.user_id.to_proto(), + is_host: self.is_host, } } } diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 06ad2b4594..dee4d820e8 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -116,6 +116,7 @@ impl Database { peer_id: Some(collaborator.connection().into()), user_id: collaborator.user_id.to_proto(), replica_id: collaborator.replica_id.0 as u32, + is_host: false, }) .collect(), }) @@ -222,6 +223,7 @@ impl Database { peer_id: Some(collaborator.connection().into()), user_id: collaborator.user_id.to_proto(), replica_id: collaborator.replica_id.0 as u32, + is_host: false, }) .collect(), }, @@ -257,6 +259,7 @@ impl Database { peer_id: Some(db_collaborator.connection().into()), replica_id: db_collaborator.replica_id.0 as u32, user_id: db_collaborator.user_id.to_proto(), + is_host: false, }) } else { collaborator_ids_to_remove.push(db_collaborator.id); @@ -385,6 +388,7 @@ impl Database { peer_id: Some(connection.into()), replica_id: row.replica_id.0 as u32, user_id: row.user_id.to_proto(), + is_host: false, }); } diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index adc571580a..9575ed505b 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -121,11 +121,13 @@ async fn test_channel_buffers(db: &Arc) { user_id: a_id.to_proto(), peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }), replica_id: 0, + is_host: false, }, rpc::proto::Collaborator { user_id: b_id.to_proto(), peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }), replica_id: 1, + is_host: false, } ] ); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 0b90bfa0c9..f83bebbbb1 100644 --- a/crates/collab/src/rpc.rs +++ 
b/crates/collab/src/rpc.rs @@ -1827,6 +1827,7 @@ fn join_project_internal( peer_id: Some(session.connection_id.into()), replica_id: replica_id.0 as u32, user_id: guest_user_id.to_proto(), + is_host: false, }), }; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index eb5edabc8e..7fd77fb0ad 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1333,7 +1333,7 @@ impl Project { } pub fn host(&self) -> Option<&Collaborator> { - self.collaborators.values().find(|c| c.replica_id == 0) + self.collaborators.values().find(|c| c.is_host) } pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool, cx: &mut AppContext) { @@ -3495,7 +3495,7 @@ impl Project { .collaborators .remove(&old_peer_id) .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?; - let is_host = collaborator.replica_id == 0; + let is_host = collaborator.is_host; this.collaborators.insert(new_peer_id, collaborator); log::info!("peer {} became {}", old_peer_id, new_peer_id,); diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index e9e42dac18..d78795eed9 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1721,6 +1721,7 @@ message Collaborator { PeerId peer_id = 1; uint32 replica_id = 2; uint64 user_id = 3; + bool is_host = 4; } message User { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index f0786aa479..490851a56e 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5715,7 +5715,7 @@ pub fn join_in_room_project( .read(cx) .collaborators() .values() - .find(|collaborator| collaborator.replica_id == 0)?; + .find(|collaborator| collaborator.is_host)?; Some(collaborator.peer_id) }); From 7a6b6435c48024948b01d8f25d17c249f77ad522 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 29 Oct 2024 11:00:44 +0100 Subject: [PATCH 39/87] languages: Enable grammar loading when compiling with test 
feature (#19881) This ensures that `cargo tests -p languages` will not fail with a confusing error message. Follow up to #19821 We opted to check the `test` feature flag instead of defining a runtime flag, because we only want to include the `tree-sitter-*` dependencies in some cases, which is not possible with a runtime flag. Co-Authored-by: Thorsten Release Notes: - N/A Co-authored-by: Thorsten --- crates/languages/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 2fd8ffa633..455b05b64c 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -325,7 +325,7 @@ fn load_config(name: &str) -> LanguageConfig { .with_context(|| format!("failed to load config.toml for language {name:?}")) .unwrap(); - #[cfg(not(feature = "load-grammars"))] + #[cfg(not(any(feature = "load-grammars", test)))] { config = LanguageConfig { name: config.name, From f7b2b41df98b01cc783532c23163b7f5ab84159f Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 29 Oct 2024 11:32:55 +0100 Subject: [PATCH 40/87] ssh remoting: Check nightly version correctly by comparing commit SHA (#19884) This ensures that we detect if a new nightly version of the remote server is available. Previously we would always mark a version as matching if they had the same semantic version. However, for nightly versions we also need to check if they have the same commit SHA. 
Co-Authored-by: Thorsten Release Notes: - N/A --------- Co-authored-by: Thorsten --- crates/auto_update/src/auto_update.rs | 10 +-- crates/recent_projects/src/ssh_connections.rs | 25 +++++-- crates/remote/src/ssh_session.rs | 75 +++++++++++++------ crates/remote_server/build.rs | 21 ++++++ crates/remote_server/src/main.rs | 7 +- script/bundle-mac | 10 ++- 6 files changed, 107 insertions(+), 41 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 61154cb504..fbbd23907a 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -84,9 +84,9 @@ pub struct AutoUpdater { } #[derive(Deserialize)] -struct JsonRelease { - version: String, - url: String, +pub struct JsonRelease { + pub version: String, + pub url: String, } struct MacOsUnmounter { @@ -482,7 +482,7 @@ impl AutoUpdater { release_channel: ReleaseChannel, version: Option, cx: &mut AsyncAppContext, - ) -> Result<(String, String)> { + ) -> Result<(JsonRelease, String)> { let this = cx.update(|cx| { cx.default_global::() .0 @@ -504,7 +504,7 @@ impl AutoUpdater { let update_request_body = build_remote_server_update_request_body(cx)?; let body = serde_json::to_string(&update_request_body)?; - Ok((release.url, body)) + Ok((release, body)) } async fn get_release( diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 7dc2853650..a2964952eb 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -14,7 +14,7 @@ use gpui::{AppContext, Model}; use language::CursorShape; use markdown::{Markdown, MarkdownStyle}; use release_channel::{AppVersion, ReleaseChannel}; -use remote::ssh_session::ServerBinary; +use remote::ssh_session::{ServerBinary, ServerVersion}; use remote::{SshConnectionOptions, SshPlatform, SshRemoteClient}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -446,7 +446,7 @@ impl 
remote::SshClientDelegate for SshClientDelegate { platform: SshPlatform, upload_binary_over_ssh: bool, cx: &mut AsyncAppContext, - ) -> oneshot::Receiver> { + ) -> oneshot::Receiver> { let (tx, rx) = oneshot::channel(); let this = self.clone(); cx.spawn(|mut cx| async move { @@ -491,7 +491,7 @@ impl SshClientDelegate { platform: SshPlatform, upload_binary_via_ssh: bool, cx: &mut AsyncAppContext, - ) -> Result<(ServerBinary, SemanticVersion)> { + ) -> Result<(ServerBinary, ServerVersion)> { let (version, release_channel) = cx.update(|cx| { let version = AppVersion::global(cx); let channel = ReleaseChannel::global(cx); @@ -505,7 +505,10 @@ impl SshClientDelegate { let result = self.build_local(cx, platform, version).await?; // Fall through to a remote binary if we're not able to compile a local binary if let Some((path, version)) = result { - return Ok((ServerBinary::LocalBinary(path), version)); + return Ok(( + ServerBinary::LocalBinary(path), + ServerVersion::Semantic(version), + )); } } @@ -540,9 +543,12 @@ impl SshClientDelegate { ) })?; - Ok((ServerBinary::LocalBinary(binary_path), version)) + Ok(( + ServerBinary::LocalBinary(binary_path), + ServerVersion::Semantic(version), + )) } else { - let (request_url, request_body) = AutoUpdater::get_remote_server_release_url( + let (release, request_body) = AutoUpdater::get_remote_server_release_url( platform.os, platform.arch, release_channel, @@ -560,9 +566,14 @@ impl SshClientDelegate { ) })?; + let version = release + .version + .parse::() + .map(ServerVersion::Semantic) + .unwrap_or_else(|_| ServerVersion::Commit(release.version)); Ok(( ServerBinary::ReleaseUrl { - url: request_url, + url: release.url, body: request_body, }, version, diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 857b139736..16b7662871 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -227,6 +227,20 @@ pub enum ServerBinary { ReleaseUrl { url: String, body: String }, } 
+pub enum ServerVersion { + Semantic(SemanticVersion), + Commit(String), +} + +impl std::fmt::Display for ServerVersion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Semantic(version) => write!(f, "{}", version), + Self::Commit(commit) => write!(f, "{}", commit), + } + } +} + pub trait SshClientDelegate: Send + Sync { fn ask_password( &self, @@ -243,7 +257,7 @@ pub trait SshClientDelegate: Send + Sync { platform: SshPlatform, upload_binary_over_ssh: bool, cx: &mut AsyncAppContext, - ) -> oneshot::Receiver>; + ) -> oneshot::Receiver>; fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext); } @@ -1714,34 +1728,47 @@ impl SshRemoteConnection { } let upload_binary_over_ssh = self.socket.connection_options.upload_binary_over_ssh; - let (binary, version) = delegate + let (binary, new_server_version) = delegate .get_server_binary(platform, upload_binary_over_ssh, cx) .await??; - let mut remote_version = None; if cfg!(not(debug_assertions)) { - if let Ok(installed_version) = + let installed_version = if let Ok(version_output) = run_cmd(self.socket.ssh_command(dst_path).arg("version")).await { - if let Ok(version) = installed_version.trim().parse::() { - remote_version = Some(version); + if let Ok(version) = version_output.trim().parse::() { + Some(ServerVersion::Semantic(version)) } else { - log::warn!("failed to parse version of remote server: {installed_version:?}",); + Some(ServerVersion::Commit(version_output.trim().to_string())) } - } + } else { + None + }; - if let Some(remote_version) = remote_version { - if remote_version == version { - log::info!("remote development server present and matching client version"); - return Ok(()); - } else if remote_version > version { - let error = anyhow!("The version of the remote server ({}) is newer than the Zed version ({}). 
Please update Zed.", remote_version, version); - return Err(error); - } else { - log::info!( - "remote development server has older version: {}. updating...", - remote_version - ); + if let Some(installed_version) = installed_version { + use ServerVersion::*; + match (installed_version, new_server_version) { + (Semantic(installed), Semantic(new)) if installed == new => { + log::info!("remote development server present and matching client version"); + return Ok(()); + } + (Semantic(installed), Semantic(new)) if installed > new => { + let error = anyhow!("The version of the remote server ({}) is newer than the Zed version ({}). Please update Zed.", installed, new); + return Err(error); + } + (Commit(installed), Commit(new)) if installed == new => { + log::info!( + "remote development server present and matching client version {}", + installed + ); + return Ok(()); + } + (installed, _) => { + log::info!( + "remote development server has version: {}. updating...", + installed + ); + } } } } @@ -2224,12 +2251,12 @@ mod fake { }, select_biased, FutureExt, SinkExt, StreamExt, }; - use gpui::{AsyncAppContext, SemanticVersion, Task}; + use gpui::{AsyncAppContext, Task}; use rpc::proto::Envelope; use super::{ - ChannelClient, RemoteConnection, ServerBinary, SshClientDelegate, SshConnectionOptions, - SshPlatform, + ChannelClient, RemoteConnection, ServerBinary, ServerVersion, SshClientDelegate, + SshConnectionOptions, SshPlatform, }; pub(super) struct FakeRemoteConnection { @@ -2349,7 +2376,7 @@ mod fake { _: SshPlatform, _: bool, _: &mut AsyncAppContext, - ) -> oneshot::Receiver> { + ) -> oneshot::Receiver> { unreachable!() } diff --git a/crates/remote_server/build.rs b/crates/remote_server/build.rs index 11a8969a44..fae1889773 100644 --- a/crates/remote_server/build.rs +++ b/crates/remote_server/build.rs @@ -1,3 +1,5 @@ +use std::process::Command; + const ZED_MANIFEST: &str = include_str!("../zed/Cargo.toml"); fn main() { @@ -7,4 +9,23 @@ fn main() { 
"cargo:rustc-env=ZED_PKG_VERSION={}", zed_cargo_toml.package.unwrap().version.unwrap() ); + + // If we're building this for nightly, we want to set the ZED_COMMIT_SHA + if let Some(release_channel) = std::env::var("ZED_RELEASE_CHANNEL").ok() { + if release_channel.as_str() == "nightly" { + // Populate git sha environment variable if git is available + println!("cargo:rerun-if-changed=../../.git/logs/HEAD"); + if let Some(output) = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + .filter(|output| output.status.success()) + { + let git_sha = String::from_utf8_lossy(&output.stdout); + let git_sha = git_sha.trim(); + + println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); + } + } + } } diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 72ac438e60..132bd36b7b 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -72,7 +72,12 @@ fn main() { } }, Some(Commands::Version) => { - println!("{}", env!("ZED_PKG_VERSION")); + if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { + println!("{}", build_sha); + } else { + println!("{}", env!("ZED_PKG_VERSION")); + } + std::process::exit(0); } None => { diff --git a/script/bundle-mac b/script/bundle-mac index 230722ecfa..7a25881535 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -63,6 +63,12 @@ if [[ $# -gt 0 ]]; then fi fi +# Get release channel +pushd crates/zed +channel=$( Date: Tue, 29 Oct 2024 04:35:31 -0600 Subject: [PATCH 41/87] Include commit summary in inline Git blame (#19759) Closes #19758 Release Notes: - Added feature to show commit summary as part of the inline Git blame --------- Co-authored-by: Thorsten Ball --- crates/collab/src/tests/editor_tests.rs | 1 + crates/editor/src/element.rs | 11 ++++++++++- crates/project/src/project_settings.rs | 18 ++++++++++++++++++ 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 
2a3c643f6d..beb1ef61ef 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -1978,6 +1978,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA enabled: false, delay_ms: None, min_column: None, + show_commit_summary: false, }); cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3ece171b05..489fe4c5ed 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -4153,7 +4153,16 @@ fn render_inline_blame_entry( let relative_timestamp = blame_entry_relative_timestamp(&blame_entry); let author = blame_entry.author.as_deref().unwrap_or_default(); - let text = format!("{}, {}", author, relative_timestamp); + let summary_enabled = ProjectSettings::get_global(cx) + .git + .show_inline_commit_summary(); + + let text = match blame_entry.summary.as_ref() { + Some(summary) if summary_enabled => { + format!("{}, {} - {}", author, relative_timestamp, summary) + } + _ => format!("{}, {}", author, relative_timestamp), + }; let details = blame.read(cx).details_for_entry(&blame_entry); diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 54dd969c39..42d8ae6dac 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -111,6 +111,16 @@ impl GitSettings { _ => None, } } + + pub fn show_inline_commit_summary(&self) -> bool { + match self.inline_blame { + Some(InlineBlameSettings { + show_commit_summary, + .. + }) => show_commit_summary, + _ => false, + } + } } #[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)] @@ -141,11 +151,19 @@ pub struct InlineBlameSettings { /// /// Default: 0 pub min_column: Option, + /// Whether to show commit summary as part of the inline blame. 
+ /// + /// Default: false + #[serde(default = "false_value")] + pub show_commit_summary: bool, } const fn true_value() -> bool { true } +const fn false_value() -> bool { + false +} #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] pub struct BinarySettings { From 6a0bcca9ec48ba6966983a10ac7538b94232ee70 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner  Date: Tue, 29 Oct 2024 12:13:21 +0100 Subject: [PATCH 42/87] ssh remoting: Hide share button while connecting to project (#19885) Co-Authored-by: Thorsten  Release Notes: - N/A Co-authored-by: Thorsten  --- crates/recent_projects/src/recent_projects.rs | 2 +- crates/recent_projects/src/ssh_connections.rs | 4 ++++ crates/title_bar/src/collab.rs | 9 ++++++++- crates/workspace/src/workspace.rs | 2 +- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index e5d28d16ca..22068900c9 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -1,7 +1,7 @@ pub mod disconnected_overlay; mod remote_servers; mod ssh_connections; -pub use ssh_connections::open_ssh_project; +pub use ssh_connections::{is_connecting_over_ssh, open_ssh_project}; use disconnected_overlay::DisconnectedOverlay; use fuzzy::{StringMatch, StringMatchCandidate}; diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index a2964952eb..84618a2f49 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -689,6 +689,10 @@ impl SshClientDelegate { } } +pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &AppContext) -> bool { + workspace.active_modal::(cx).is_some() +} + pub fn connect_over_ssh( unique_identifier: String, connection_options: SshConnectionOptions, diff --git a/crates/title_bar/src/collab.rs index 
edbc147926..805c0e7202 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -282,6 +282,13 @@ impl TitleBar { return Vec::new(); }; + let is_connecting_to_project = self + .workspace + .update(cx, |workspace, cx| { + recent_projects::is_connecting_over_ssh(workspace, cx) + }) + .unwrap_or(false); + let room = room.read(cx); let project = self.project.read(cx); let is_local = project.is_local() || project.is_via_ssh(); @@ -298,7 +305,7 @@ impl TitleBar { let mut children = Vec::new(); - if is_local && can_share_projects { + if is_local && can_share_projects && !is_connecting_to_project { children.push( Button::new( "toggle_sharing", diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 490851a56e..24c681083b 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4465,7 +4465,7 @@ impl Workspace { self.modal_layer.read(cx).has_active_modal() } - pub fn active_modal(&mut self, cx: &AppContext) -> Option> { + pub fn active_modal(&self, cx: &AppContext) -> Option> { self.modal_layer.read(cx).active_modal() } From 21b58643fadb5d06d5896ab1b41be25b95d86875 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 29 Oct 2024 12:27:30 +0100 Subject: [PATCH 43/87] vsc menu: Fix issue when switching branch while non-visible worktree is open (#19888) Fixes a regression introduced in #19755 Screenshot 2024-10-29 at 12 13 04 Co-Authored-by: Thorsten Release Notes: - Fixed an issue where the branch switcher would show an error, when opening a file outside of the project Co-authored-by: Thorsten --- crates/vcs_menu/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/vcs_menu/src/lib.rs b/crates/vcs_menu/src/lib.rs index 3ee289df0e..8f73153dd8 100644 --- a/crates/vcs_menu/src/lib.rs +++ b/crates/vcs_menu/src/lib.rs @@ -242,7 +242,7 @@ impl PickerDelegate for BranchListDelegate { BranchEntry::NewBranch { name: branch_name } => branch_name, }; let 
worktree = project - .worktrees(cx) + .visible_worktrees(cx) .next() .context("worktree disappeared")?; let repository = ProjectPath::root_path(worktree.read(cx).id()); From f919fa92de1d73c492282084b96249b492732f83 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 29 Oct 2024 13:38:30 +0100 Subject: [PATCH 44/87] remote servers: Fix title from alpha to beta (#19889) Discussed this in Slack yesterday. We use `beta` because that's what we use in the docs as well. Release Notes: - N/A --- crates/recent_projects/src/remote_servers.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 003485354e..1b83120eb3 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1204,7 +1204,7 @@ impl RemoteServerProjects { Modal::new("remote-projects", Some(self.scroll_handle.clone())) .header( ModalHeader::new() - .child(Headline::new("Remote Projects (alpha)").size(HeadlineSize::XSmall)), + .child(Headline::new("Remote Projects (beta)").size(HeadlineSize::XSmall)), ) .section( Section::new().padded(false).child( From 249c8a4d96df285500330c1c6858220af3e07e03 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 29 Oct 2024 09:44:58 -0400 Subject: [PATCH 45/87] Remove community content from docs and point to zed.dev (#19895) The community content now lives on zed.dev, discoverable via the navbar `resources` menu. 
See: - https://github.com/zed-industries/zed.dev/pull/783 Release Notes: - N/A --- CODE_OF_CONDUCT.md | 2 +- CONTRIBUTING.md | 2 +- .../zed/resources/flatpak/zed.metainfo.xml.in | 2 +- docs/src/SUMMARY.md | 7 - docs/src/code-of-conduct.md | 127 ------------------ docs/src/contribute-to-zed.md | 15 --- docs/src/conversations.md | 21 --- docs/src/development.md | 2 +- docs/src/feedback-and-support.md | 39 ------ 9 files changed, 4 insertions(+), 213 deletions(-) delete mode 100644 docs/src/code-of-conduct.md delete mode 100644 docs/src/contribute-to-zed.md delete mode 100644 docs/src/conversations.md delete mode 100644 docs/src/feedback-and-support.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 57e3cc7c59..8d064b64f5 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,3 +1,3 @@ # Code of Conduct -The Code of Conduct for this repository can be found online at [zed.dev/docs/code-of-conduct](https://zed.dev/docs/code-of-conduct). +The Code of Conduct for this repository can be found online at [zed.dev/code-of-conduct](https://zed.dev/code-of-conduct). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f7657b9ccd..4a0a632413 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,7 +2,7 @@ Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor! -All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged. +All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged. 
## Contribution ideas diff --git a/crates/zed/resources/flatpak/zed.metainfo.xml.in b/crates/zed/resources/flatpak/zed.metainfo.xml.in index 15a5055961..6c0d5309ac 100644 --- a/crates/zed/resources/flatpak/zed.metainfo.xml.in +++ b/crates/zed/resources/flatpak/zed.metainfo.xml.in @@ -38,7 +38,7 @@ https://github.com/zed-industries/zed/issues https://zed.dev/faq https://zed.dev/docs/getting-started - https://zed.dev/docs/feedback-and-support + https://zed.dev/feedback-and-support https://github.com/zed-industries/zed https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 967b91d59d..8383e990d9 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -129,10 +129,3 @@ - [Local Collaboration](./development/local-collaboration.md) - [Release Process](./development/releases.md) - [Debugging Crashes](./development/debugging-crashes.md) - -# Community - -- [Code of Conduct](./code-of-conduct.md) -- [Contributing to Zed](./contribute-to-zed.md) -- [Conversations](./conversations.md) -- [Feedback and Support](./feedback-and-support.md) diff --git a/docs/src/code-of-conduct.md b/docs/src/code-of-conduct.md deleted file mode 100644 index 6c168dcc7d..0000000000 --- a/docs/src/code-of-conduct.md +++ /dev/null @@ -1,127 +0,0 @@ -# Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. 
- -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -- Demonstrating empathy and kindness toward other people -- Being respectful of differing opinions, viewpoints, and experiences -- Giving and gracefully accepting constructive feedback -- Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -- Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -- The use of sexualized language or imagery, and sexual attention or - advances of any kind -- Trolling, insulting or derogatory comments, and personal or political attacks -- Public or private harassment -- Publishing others' private information, such as a physical or email - address, without their explicit permission -- Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. 
- -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -hi@zed.dev. -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. 
Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -[homepage]: https://www.contributor-covenant.org - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html). - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -For answers to common questions about this code of conduct, see [the Contributor Covenant FAQ](https://www.contributor-covenant.org/faq). -For translations, see [Contributor Covenant Translations](https://www.contributor-covenant.org/translations). diff --git a/docs/src/contribute-to-zed.md b/docs/src/contribute-to-zed.md deleted file mode 100644 index 14a189b0e5..0000000000 --- a/docs/src/contribute-to-zed.md +++ /dev/null @@ -1,15 +0,0 @@ -# Contributing to Zed - -Thank you for your interest in contributing to the Zed! Before making contributions, we recommend reading our [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md) guidelines. This document provides a detailed guide to contributing to Zed. - -### Issues - -If you're eager to dive in and start contributing immediately, check out the issues in the [issue tracker](https://github.com/zed-industries/zed/issues). - -## Public Roadmap - -If you're wanting to contribute by building out a feature, we recommend taking a look at our [#roadmap](https://zed.dev/roadmap). This roadmap documents, at a high level, the larger features we're planning to build out in the future. 
- -## Zed GitHub Repository - -Check [our codebase out on GitHub](https://github.com/zed-industries/zed). diff --git a/docs/src/conversations.md b/docs/src/conversations.md deleted file mode 100644 index bab2fd6d5d..0000000000 --- a/docs/src/conversations.md +++ /dev/null @@ -1,21 +0,0 @@ -# Conversations - -## Zed Channels - -Our Zed channel tree is public. You can find us hanging out and writing code across various channels. The root channel, [#zed](https://zed.dev/channel/zed-283), is a great place to ask questions and get to know other Zed users. - -## Discord Channel - -[Zed Community](https://discord.gg/zed-community) is our official Discord channel. We drop in pretty regularly to answer questions and chat with the community. - -## Twitter - -We use Twitter to highlight new Zed features and to share our blog posts. Follow us [@zeddotdev](https://x.com/zeddotdev). - -## YouTube - -We have a [YouTube channel](https://www.youtube.com/@zeddotdev) where we post longer-form videos about Zed. - -## Blog - -Our [blog](https://zed.dev/blog) gets frequent updates. We post about big releases, new features, and under-the-hood Zed tech. 
diff --git a/docs/src/development.md b/docs/src/development.md index ecd68a1181..96993389c2 100644 --- a/docs/src/development.md +++ b/docs/src/development.md @@ -15,5 +15,5 @@ If you'd like to develop collaboration features, additionally see: - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md) - [Releases](./development/releases.md) - [Debugging Crashes](./development/debugging-crashes.md) -- [Code of Conduct](./code-of-conduct.md) +- [Code of Conduct](https://zed.dev/code-of-conduct) - [Zed Contributor License](https://zed.dev/cla) diff --git a/docs/src/feedback-and-support.md b/docs/src/feedback-and-support.md deleted file mode 100644 index 2e4be92caa..0000000000 --- a/docs/src/feedback-and-support.md +++ /dev/null @@ -1,39 +0,0 @@ -# Feedback and Support - -## Community Support - -Our [Discord community](https://discord.gg/zed-community) is vibrant and a great place to ask questions and learn from others. Be sure to check out the `#support` channel. - -Use the [community forum](https://github.com/zed-industries/zed/discussions) to ask questions and learn from one another. We will be present in the forum and answering questions as well. - -## Frequently Asked Questions - -Our [FAQ](https://zed.dev/faq) is a great place to start for common questions about Zed. - -## Issue Tracking - -We track our issues at [`zed-industries/zed`](https://github.com/zed-industries/zed/issues). - -### Feature Requests - -Try to focus on the things that are most critical to you rather than exhaustively listing all features another editor you have used has. - -Command palette: `zed: request feature` - -### Bug Reports - -Try to add as much detail as possible, if it is not obvious to reproduce. Let us know how severe the problem is for you; is the issue more of a minor inconvenience or something that would prevent you from using Zed? 
- -Command palette: `zed: file bug report` - -## Feedback Channels - -In-app feedback can be submitted from within Zed via the feedback modal. - -Command palette: `feedback: give feedback` - -If you prefer to write up your thoughts as an email, you can send them to [hi@zed.dev](mailto:hi@zed.dev). - -## Merch Store - -We have a [merch store](https://zedindustries.creator-spring.com/) where you can buy Zed stickers, shirts, and more. From 9739da8de342d99640c1a38cef5b8fc2eba8a955 Mon Sep 17 00:00:00 2001 From: Jen Stehlik Date: Tue, 29 Oct 2024 14:54:21 +0100 Subject: [PATCH 46/87] Add Gleam icon (#19887) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I took a shot at creating an icon version of the Gleam logo in response to https://github.com/zed-industries/zed/pull/19529 Release Notes: - Added an icon for Gleam files. ![image](https://github.com/user-attachments/assets/97432ded-342f-4d87-8eb2-dc9145513d8c) Screenshot 2024-10-29 at 9 46 33 AM --------- Co-authored-by: Marshall Bowers --- assets/icons/file_icons/file_types.json | 4 ++++ assets/icons/file_icons/gleam.svg | 6 ++++++ 2 files changed, 10 insertions(+) create mode 100644 assets/icons/file_icons/gleam.svg diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json index a9fe4a2eff..ce5944e097 100644 --- a/assets/icons/file_icons/file_types.json +++ b/assets/icons/file_icons/file_types.json @@ -58,6 +58,7 @@ "gitignore": "vcs", "gitkeep": "vcs", "gitmodules": "vcs", + "gleam": "gleam", "go": "go", "gql": "graphql", "graphql": "graphql", @@ -264,6 +265,9 @@ "fsharp": { "icon": "icons/file_icons/fsharp.svg" }, + "gleam": { + "icon": "icons/file_icons/gleam.svg" + }, "go": { "icon": "icons/file_icons/go.svg" }, diff --git a/assets/icons/file_icons/gleam.svg b/assets/icons/file_icons/gleam.svg new file mode 100644 index 0000000000..9036ec15dc --- /dev/null +++ b/assets/icons/file_icons/gleam.svg @@ -0,0 +1,6 @@ + + + + + + From 
1356665ed30af26de3275f1e12777fa000c51f3b Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 29 Oct 2024 10:13:26 -0400 Subject: [PATCH 47/87] Update community links page url (#19899) See: - https://github.com/zed-industries/zed.dev/pull/786 Release Notes: - N/A --- crates/zed/resources/flatpak/zed.metainfo.xml.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/zed/resources/flatpak/zed.metainfo.xml.in b/crates/zed/resources/flatpak/zed.metainfo.xml.in index 6c0d5309ac..b8a88d9221 100644 --- a/crates/zed/resources/flatpak/zed.metainfo.xml.in +++ b/crates/zed/resources/flatpak/zed.metainfo.xml.in @@ -38,7 +38,7 @@ https://github.com/zed-industries/zed/issues https://zed.dev/faq https://zed.dev/docs/getting-started - https://zed.dev/feedback-and-support + https://zed.dev/community-links https://github.com/zed-industries/zed https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md From 5893e85708d05f58ca434e732759c7760bafbd14 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 29 Oct 2024 17:24:10 +0200 Subject: [PATCH 48/87] Ensure shared ssh project propagates buffer changes to all participants (#19907) Fixed the bug when shared ssh project did not account for client changing things in their buffers. Also ensures Prettier formatting workflow works for both ssh project owner and ssh project clients. 
Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- crates/collab/src/tests/integration_tests.rs | 6 +- .../remote_editing_collaboration_tests.rs | 197 +++++++++++++++++- crates/prettier/src/prettier.rs | 6 +- crates/project/src/lsp_store.rs | 3 +- crates/project/src/project.rs | 20 +- 5 files changed, 221 insertions(+), 11 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index c905c440cf..b1e8e56861 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -21,8 +21,8 @@ use language::{ language_settings::{ AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter, }, - tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig, - LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, + tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter, + Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, }; use live_kit_client::MacOSDisplay; use lsp::LanguageServerId; @@ -4461,7 +4461,7 @@ async fn test_prettier_formatting_buffer( }, ..Default::default() }, - Some(tree_sitter_rust::LANGUAGE.into()), + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), ))); let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "TypeScript", diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 9fe546ffcd..0e29bd5ef3 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -1,14 +1,27 @@ use crate::tests::TestServer; use call::ActiveCall; +use collections::HashSet; use fs::{FakeFs, Fs as _}; -use gpui::{BackgroundExecutor, Context as _, TestAppContext}; +use futures::StreamExt as _; +use gpui::{BackgroundExecutor, Context as _, 
TestAppContext, UpdateGlobal as _}; use http_client::BlockedHttpClient; -use language::{language_settings::language_settings, LanguageRegistry}; +use language::{ + language_settings::{ + language_settings, AllLanguageSettings, Formatter, FormatterList, PrettierSettings, + SelectedFormatter, + }, + tree_sitter_typescript, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, + LanguageRegistry, +}; use node_runtime::NodeRuntime; -use project::ProjectPath; +use project::{ + lsp_store::{FormatTarget, FormatTrigger}, + ProjectPath, +}; use remote::SshRemoteClient; use remote_server::{HeadlessAppState, HeadlessProject}; use serde_json::json; +use settings::SettingsStore; use std::{path::Path, sync::Arc}; #[gpui::test(iterations = 10)] @@ -304,3 +317,181 @@ async fn test_ssh_collaboration_git_branches( assert_eq!(server_branch.as_ref(), "totally-new-branch"); } + +#[gpui::test] +async fn test_ssh_collaboration_formatting_with_prettier( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + let buffer_text = "let one = \"two\""; + let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; + remote_fs + .insert_tree("/project", serde_json::json!({ "a.ts": buffer_text })) + .await; + + let test_plugin = "test_plugin"; + let ts_lang = Arc::new(Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..LanguageMatcher::default() + }, + 
..LanguageConfig::default() + }, + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + )); + client_a.language_registry().add(ts_lang.clone()); + client_b.language_registry().add(ts_lang.clone()); + + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let mut fake_language_servers = languages.register_fake_lsp( + "TypeScript", + FakeLspAdapter { + prettier_plugins: vec![test_plugin], + ..Default::default() + }, + ); + + // User A connects to the remote project via SSH. + server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let _headless_project = server_cx.new_model(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: NodeRuntime::unavailable(), + languages, + }, + cx, + ) + }); + + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; + let (project_a, worktree_id) = client_a + .build_ssh_project("/project", client_ssh, cx_a) + .await; + + // While the SSH worktree is being scanned, user A shares the remote project. + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // User B joins the project. 
+ let project_b = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + // Opens the buffer and formats it + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)) + .await + .expect("user B opens buffer for formatting"); + + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(SelectedFormatter::Auto); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + cx_b.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(SelectedFormatter::List(FormatterList( + vec![Formatter::LanguageServer { name: None }].into(), + ))); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + panic!( + "Unexpected: prettier should be preferred since it's enabled and language supports it" + ) + }); + + project_b + .update(cx_b, |project, cx| { + project.format( + HashSet::from_iter([buffer_b.clone()]), + true, + FormatTrigger::Save, + FormatTarget::Buffer, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + buffer_text.to_string() + "\n" + prettier_format_suffix, + "Prettier formatting was not applied to client buffer after client's request" + ); + + // User A opens and formats the same buffer too + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)) + .await + .expect("user A opens buffer for formatting"); + + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = 
Some(SelectedFormatter::Auto); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + project_a + .update(cx_a, |project, cx| { + project.format( + HashSet::from_iter([buffer_a.clone()]), + true, + FormatTrigger::Manual, + FormatTarget::Buffer, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix, + "Prettier formatting was not applied to client buffer after host's request" + ); +} diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index d2d56696a6..4dc5bca40f 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -14,14 +14,14 @@ use std::{ }; use util::paths::PathMatcher; -#[derive(Clone)] +#[derive(Debug, Clone)] pub enum Prettier { Real(RealPrettier), #[cfg(any(test, feature = "test-support"))] Test(TestPrettier), } -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct RealPrettier { default: bool, prettier_dir: PathBuf, @@ -29,7 +29,7 @@ pub struct RealPrettier { } #[cfg(any(test, feature = "test-support"))] -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct TestPrettier { prettier_dir: PathBuf, default: bool, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 40e87b55e5..fe39dc0914 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -675,6 +675,7 @@ impl LocalLspStore { } } +#[derive(Debug)] pub struct FormattableBuffer { handle: Model, abs_path: Option, @@ -5342,7 +5343,7 @@ impl LspStore { buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); } let trigger = FormatTrigger::from_proto(envelope.payload.trigger); - Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, FormatTarget::Buffer, cx)) + anyhow::Ok(this.format(buffers, false, trigger, FormatTarget::Buffer, cx)) 
})??; let project_transaction = format.await?; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7fd77fb0ad..f5a295a3a3 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -827,7 +827,7 @@ impl Project { ssh_proto.add_model_message_handler(Self::handle_toast); ssh_proto.add_model_request_handler(Self::handle_language_server_prompt_request); ssh_proto.add_model_message_handler(Self::handle_hide_toast); - ssh_proto.add_model_request_handler(BufferStore::handle_update_buffer); + ssh_proto.add_model_request_handler(Self::handle_update_buffer_from_ssh); BufferStore::init(&ssh_proto); LspStore::init(&ssh_proto); SettingsObserver::init(&ssh_proto); @@ -3653,6 +3653,24 @@ impl Project { })? } + async fn handle_update_buffer_from_ssh( + this: Model, + envelope: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let buffer_store = this.read_with(&cx, |this, cx| { + if let Some(remote_id) = this.remote_id() { + let mut payload = envelope.payload.clone(); + payload.project_id = remote_id; + cx.background_executor() + .spawn(this.client.request(payload)) + .detach_and_log_err(cx); + } + this.buffer_store.clone() + })?; + BufferStore::handle_update_buffer(buffer_store, envelope, cx).await + } + async fn handle_update_buffer( this: Model, envelope: TypedEnvelope, From 3fed738d2f85d374db93edac96d6220f41ab8d16 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 29 Oct 2024 10:12:34 -0600 Subject: [PATCH 49/87] Use same logic for skipping single instance check on Linux as on Mac/Win (#19446) Release Notes: - Linux: Now skips check which exits with "zed is already running" when in development mode or when run with `zed-local`, matching the behavior on Mac and Windows Co-authored-by: Nathan Sobo --- crates/zed/src/main.rs | 45 ++++++++++----------- crates/zed/src/zed/mac_only_instance.rs | 4 -- crates/zed/src/zed/windows_only_instance.rs | 8 ---- 3 files changed, 21 insertions(+), 36 deletions(-) diff --git 
a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 89ff72b5a9..83f30f3e6d 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -32,7 +32,7 @@ use node_runtime::{NodeBinaryOptions, NodeRuntime}; use parking_lot::Mutex; use project::project_settings::ProjectSettings; use recent_projects::{open_ssh_project, SshSettings}; -use release_channel::{AppCommitSha, AppVersion}; +use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use session::{AppSession, Session}; use settings::{ handle_settings_file_changes, watch_config_file, InvalidSettingsError, Settings, SettingsStore, @@ -164,32 +164,29 @@ fn main() { let (open_listener, mut open_rx) = OpenListener::new(); - #[cfg(target_os = "linux")] - { - if env::var("ZED_STATELESS").is_err() { - if crate::zed::listen_for_cli_connections(open_listener.clone()).is_err() { - println!("zed is already running"); - return; + let failed_single_instance_check = + if *db::ZED_STATELESS || *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { + false + } else { + #[cfg(target_os = "linux")] + { + crate::zed::listen_for_cli_connections(open_listener.clone()).is_err() } - } - } - #[cfg(target_os = "windows")] - { - use zed::windows_only_instance::*; - if !check_single_instance() { - println!("zed is already running"); - return; - } - } + #[cfg(target_os = "windows")] + { + !crate::zed::windows_only_instance::check_single_instance() + } - #[cfg(target_os = "macos")] - { - use zed::mac_only_instance::*; - if ensure_only_instance() != IsOnlyInstance::Yes { - println!("zed is already running"); - return; - } + #[cfg(target_os = "macos")] + { + use zed::mac_only_instance::*; + ensure_only_instance() != IsOnlyInstance::Yes + } + }; + if failed_single_instance_check { + println!("zed is already running"); + return; } let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); diff --git a/crates/zed/src/zed/mac_only_instance.rs b/crates/zed/src/zed/mac_only_instance.rs index 
2c8f564201..716c2224e3 100644 --- a/crates/zed/src/zed/mac_only_instance.rs +++ b/crates/zed/src/zed/mac_only_instance.rs @@ -87,10 +87,6 @@ pub enum IsOnlyInstance { } pub fn ensure_only_instance() -> IsOnlyInstance { - if *db::ZED_STATELESS || *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { - return IsOnlyInstance::Yes; - } - if check_got_handshake() { return IsOnlyInstance::No; } diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index e8d32e7ed0..2645650bfa 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -17,14 +17,6 @@ fn retrieve_app_instance_event_identifier() -> &'static str { } pub fn check_single_instance() -> bool { - if *db::ZED_STATELESS || *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { - return true; - } - - check_single_instance_event() -} - -fn check_single_instance_event() -> bool { unsafe { CreateEventW( None, From 3e2f1d733c75e3ea1604ba9750ea51675b93d1c5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 29 Oct 2024 09:16:38 -0700 Subject: [PATCH 50/87] Fix horizontal scroll caused by diagnostic block width error (#19856) Previously, when scrolling the diagnostics view with the mouse, we'd get a spurious horizontal scroll (even if the content was not overflowing horizontally) due to an error in the widths of the diagnostic blocks. Release Notes: - Fixed an issue where the project diagnostics view spuriously allowed horizontal scrolling by a small amount. 
--- crates/editor/src/editor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index df13f74806..81ee6a01de 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -14188,7 +14188,7 @@ pub fn diagnostic_block_renderer( .relative() .size_full() .pl(cx.gutter_dimensions.width) - .w(cx.max_width + cx.gutter_dimensions.width) + .w(cx.max_width - cx.gutter_dimensions.full_width()) .child( div() .flex() From 322aa41ad60e5f4dcd6ddf420169ef76cee5eadd Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 29 Oct 2024 12:31:51 -0400 Subject: [PATCH 51/87] Add support for self-hosted GitLab instances for Git permalinks (#19909) This PR adds support for self-hosted GitLab instances when generating Git permalinks. If the `origin` Git remote contains `gitlab` in the URL hostname we will then attempt to register it as a self-hosted GitLab instance. A note on this: I don't think relying on specific keywords is going to be a suitable long-term solution to detection. In reality the self-hosted instance could be hosted anywhere (e.g., `vcs.my-company.com`), so we will ultimately need a way to have the user indicate which Git provider they are using (perhaps via a setting). Closes https://github.com/zed-industries/zed/issues/18012. Release Notes: - Added support for self-hosted GitLab instances when generating Git permalinks. - The instance URL must have `gitlab` somewhere in the host in order to be recognized. 
--- Cargo.lock | 2 + crates/git/src/hosting_provider.rs | 6 + crates/git_hosting_providers/Cargo.toml | 1 + .../src/git_hosting_providers.rs | 33 ++++-- .../src/providers/gitlab.rs | 111 ++++++++++++++++-- crates/worktree/Cargo.toml | 1 + crates/worktree/src/worktree.rs | 11 ++ 7 files changed, 142 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1b3c4de81d..c04ec535a6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4915,6 +4915,7 @@ dependencies = [ "serde_json", "unindent", "url", + "util", ] [[package]] @@ -14730,6 +14731,7 @@ dependencies = [ "fuzzy", "git", "git2", + "git_hosting_providers", "gpui", "http_client", "ignore", diff --git a/crates/git/src/hosting_provider.rs b/crates/git/src/hosting_provider.rs index 988dae377f..72ed92e8ab 100644 --- a/crates/git/src/hosting_provider.rs +++ b/crates/git/src/hosting_provider.rs @@ -111,6 +111,12 @@ impl GitHostingProviderRegistry { cx.global::().0.clone() } + /// Returns the global [`GitHostingProviderRegistry`], if one is set. + pub fn try_global(cx: &AppContext) -> Option> { + cx.try_global::() + .map(|registry| registry.0.clone()) + } + /// Returns the global [`GitHostingProviderRegistry`]. /// /// Inserts a default [`GitHostingProviderRegistry`] if one does not yet exist. 
diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index b8ad1ed05d..eac30b72d9 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -22,6 +22,7 @@ regex.workspace = true serde.workspace = true serde_json.workspace = true url.workspace = true +util.workspace = true [dev-dependencies] unindent.workspace = true diff --git a/crates/git_hosting_providers/src/git_hosting_providers.rs b/crates/git_hosting_providers/src/git_hosting_providers.rs index 864faa9b49..2689d797f4 100644 --- a/crates/git_hosting_providers/src/git_hosting_providers.rs +++ b/crates/git_hosting_providers/src/git_hosting_providers.rs @@ -2,6 +2,7 @@ mod providers; use std::sync::Arc; +use git::repository::GitRepository; use git::GitHostingProviderRegistry; use gpui::AppContext; @@ -10,17 +11,27 @@ pub use crate::providers::*; /// Initializes the Git hosting providers. pub fn init(cx: &AppContext) { let provider_registry = GitHostingProviderRegistry::global(cx); - - // The providers are stored in a `BTreeMap`, so insertion order matters. - // GitHub comes first. - provider_registry.register_hosting_provider(Arc::new(Github)); - - // Then GitLab. - provider_registry.register_hosting_provider(Arc::new(Gitlab)); - - // Then the other providers, in the order they were added. - provider_registry.register_hosting_provider(Arc::new(Gitee)); provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - provider_registry.register_hosting_provider(Arc::new(Sourcehut)); provider_registry.register_hosting_provider(Arc::new(Codeberg)); + provider_registry.register_hosting_provider(Arc::new(Gitee)); + provider_registry.register_hosting_provider(Arc::new(Github)); + provider_registry.register_hosting_provider(Arc::new(Gitlab::new())); + provider_registry.register_hosting_provider(Arc::new(Sourcehut)); +} + +/// Registers additional Git hosting providers. 
+/// +/// These require information from the Git repository to construct, so their +/// registration is deferred until we have a Git repository initialized. +pub fn register_additional_providers( + provider_registry: Arc, + repository: Arc, +) { + let Some(origin_url) = repository.remote_url("origin") else { + return; + }; + + if let Ok(gitlab_self_hosted) = Gitlab::from_remote_url(&origin_url) { + provider_registry.register_hosting_provider(Arc::new(gitlab_self_hosted)); + } } diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index 36ee214cf9..a8b97182c0 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -1,16 +1,55 @@ +use anyhow::{anyhow, bail, Result}; use url::Url; +use util::maybe; use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; -pub struct Gitlab; +#[derive(Debug)] +pub struct Gitlab { + name: String, + base_url: Url, +} + +impl Gitlab { + pub fn new() -> Self { + Self { + name: "GitLab".to_string(), + base_url: Url::parse("https://gitlab.com").unwrap(), + } + } + + pub fn from_remote_url(remote_url: &str) -> Result { + let host = maybe!({ + if let Some(remote_url) = remote_url.strip_prefix("git@") { + if let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') { + return Some(host.to_string()); + } + } + + Url::parse(&remote_url) + .ok() + .and_then(|remote_url| remote_url.host_str().map(|host| host.to_string())) + }) + .ok_or_else(|| anyhow!("URL has no host"))?; + + if !host.contains("gitlab") { + bail!("not a GitLab URL"); + } + + Ok(Self { + name: "GitLab Self-Hosted".to_string(), + base_url: Url::parse(&format!("https://{}", host))?, + }) + } +} impl GitHostingProvider for Gitlab { fn name(&self) -> String { - "GitLab".to_string() + self.name.clone() } fn base_url(&self) -> Url { - Url::parse("https://gitlab.com").unwrap() + 
self.base_url.clone() } fn supports_avatars(&self) -> bool { @@ -26,10 +65,12 @@ impl GitHostingProvider for Gitlab { } fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@gitlab.com:") || url.starts_with("https://gitlab.com/") { + let host = self.base_url.host_str()?; + + if url.starts_with(&format!("git@{host}")) || url.starts_with(&format!("https://{host}/")) { let repo_with_owner = url - .trim_start_matches("git@gitlab.com:") - .trim_start_matches("https://gitlab.com/") + .trim_start_matches(&format!("git@{host}:")) + .trim_start_matches(&format!("https://{host}/")) .trim_end_matches(".git"); let (owner, repo) = repo_with_owner.split_once('/')?; @@ -79,6 +120,8 @@ impl GitHostingProvider for Gitlab { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] @@ -87,7 +130,7 @@ mod tests { owner: "zed-industries", repo: "zed", }; - let permalink = Gitlab.build_permalink( + let permalink = Gitlab::new().build_permalink( remote, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", @@ -106,7 +149,7 @@ mod tests { owner: "zed-industries", repo: "zed", }; - let permalink = Gitlab.build_permalink( + let permalink = Gitlab::new().build_permalink( remote, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", @@ -125,7 +168,7 @@ mod tests { owner: "zed-industries", repo: "zed", }; - let permalink = Gitlab.build_permalink( + let permalink = Gitlab::new().build_permalink( remote, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", @@ -144,7 +187,7 @@ mod tests { owner: "zed-industries", repo: "zed", }; - let permalink = Gitlab.build_permalink( + let permalink = Gitlab::new().build_permalink( remote, BuildPermalinkParams { sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", @@ -163,7 +206,7 @@ mod tests { owner: "zed-industries", repo: "zed", }; - let permalink = Gitlab.build_permalink( + let permalink = Gitlab::new().build_permalink( remote, BuildPermalinkParams 
{ sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", @@ -182,7 +225,7 @@ mod tests { owner: "zed-industries", repo: "zed", }; - let permalink = Gitlab.build_permalink( + let permalink = Gitlab::new().build_permalink( remote, BuildPermalinkParams { sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", @@ -194,4 +237,48 @@ mod tests { let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } + + #[test] + fn test_build_gitlab_self_hosted_permalink_from_ssh_url() { + let remote = ParsedGitRemote { + owner: "zed-industries", + repo: "zed", + }; + let gitlab = + Gitlab::from_remote_url("git@gitlab.some-enterprise.com:zed-industries/zed.git") + .unwrap(); + let permalink = gitlab.build_permalink( + remote, + BuildPermalinkParams { + sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", + path: "crates/editor/src/git/permalink.rs", + selection: None, + }, + ); + + let expected_url = "https://gitlab.some-enterprise.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + + #[test] + fn test_build_gitlab_self_hosted_permalink_from_https_url() { + let remote = ParsedGitRemote { + owner: "zed-industries", + repo: "zed", + }; + let gitlab = + Gitlab::from_remote_url("https://gitlab-instance.big-co.com/zed-industries/zed.git") + .unwrap(); + let permalink = gitlab.build_permalink( + remote, + BuildPermalinkParams { + sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", + path: "crates/zed/src/main.rs", + selection: None, + }, + ); + + let expected_url = "https://gitlab-instance.big-co.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } } diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 
9437358e1a..da3676f15c 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -29,6 +29,7 @@ fs.workspace = true futures.workspace = true fuzzy.workspace = true git.workspace = true +git_hosting_providers.workspace = true gpui.workspace = true ignore.workspace = true language.workspace = true diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index ba65eae87c..8114f2dd7b 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -19,6 +19,7 @@ use futures::{ FutureExt as _, Stream, StreamExt, }; use fuzzy::CharBag; +use git::GitHostingProviderRegistry; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, status::GitStatus, @@ -299,6 +300,7 @@ struct BackgroundScannerState { removed_entries: HashMap, changed_paths: Vec>, prev_snapshot: Snapshot, + git_hosting_provider_registry: Option>, } #[derive(Debug, Clone)] @@ -1004,6 +1006,7 @@ impl LocalWorktree { let share_private_files = self.share_private_files; let next_entry_id = self.next_entry_id.clone(); let fs = self.fs.clone(); + let git_hosting_provider_registry = GitHostingProviderRegistry::try_global(cx); let settings = self.settings.clone(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_executor().spawn({ @@ -1039,6 +1042,7 @@ impl LocalWorktree { paths_to_scan: Default::default(), removed_entries: Default::default(), changed_paths: Default::default(), + git_hosting_provider_registry, }), phase: BackgroundScannerPhase::InitialScan, share_private_files, @@ -2948,6 +2952,13 @@ impl BackgroundScannerState { log::trace!("constructed libgit2 repo in {:?}", t0.elapsed()); let work_directory = RepositoryWorkDirectory(work_dir_path.clone()); + if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() { + git_hosting_providers::register_additional_providers( + git_hosting_provider_registry, + repository.clone(), + ); + } + 
self.snapshot.repository_entries.insert( work_directory.clone(), RepositoryEntry { From 759d136fe60a7393e0857ed8d10e55dcb34bbdf7 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 29 Oct 2024 10:09:49 -0700 Subject: [PATCH 52/87] Update a few doc comments (#19911) Release Notes: - N/A --- crates/gpui/src/elements/list.rs | 7 ++++--- crates/remote/src/ssh_session.rs | 14 +++++++++----- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index d77c91e655..47f0a82774 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -1,8 +1,9 @@ //! A list element that can be used to render a large number of differently sized elements //! efficiently. Clients of this API need to ensure that elements outside of the scrolled -//! area do not change their height for this element to function correctly. In order to minimize -//! re-renders, this element's state is stored intrusively on your own views, so that your code -//! can coordinate directly with the list element's cached state. +//! area do not change their height for this element to function correctly. If your elements +//! do change height, notify the list element via [`ListState::splice`] or [`ListState::reset`]. +//! In order to minimize re-renders, this element's state is stored intrusively +//! on your own views, so that your code can coordinate directly with the list element's cached state. //! //! 
If all of your elements are the same height, see [`UniformList`] for a simpler API diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 16b7662871..d578e2eadd 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1023,7 +1023,7 @@ impl SshRemoteClient { server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server")); let connection: Arc = Arc::new(fake::FakeRemoteConnection { connection_options: opts.clone(), - server_cx: fake::SendableCx::new(server_cx.to_async()), + server_cx: fake::SendableCx::new(server_cx), server_channel: server_client.clone(), }); @@ -2251,7 +2251,7 @@ mod fake { }, select_biased, FutureExt, SinkExt, StreamExt, }; - use gpui::{AsyncAppContext, Task}; + use gpui::{AsyncAppContext, Task, TestAppContext}; use rpc::proto::Envelope; use super::{ @@ -2266,15 +2266,19 @@ mod fake { } pub(super) struct SendableCx(AsyncAppContext); - // safety: you can only get the other cx on the main thread. impl SendableCx { - pub(super) fn new(cx: AsyncAppContext) -> Self { - Self(cx) + // SAFETY: When run in test mode, GPUI is always single threaded. + pub(super) fn new(cx: &TestAppContext) -> Self { + Self(cx.to_async()) } + + // SAFETY: Enforce that we're on the main thread by requiring a valid AsyncAppContext fn get(&self, _: &AsyncAppContext) -> AsyncAppContext { self.0.clone() } } + + // SAFETY: There is no way to access a SendableCx from a different thread, see [`SendableCx::new`] and [`SendableCx::get`] unsafe impl Send for SendableCx {} unsafe impl Sync for SendableCx {} From cfa20ff22183f6f7c3f5fd81e5aeb29b663e213a Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 29 Oct 2024 11:21:10 -0600 Subject: [PATCH 53/87] Sketch in assistant edit button (#19705) Add an edit button to the assistant. This is totally hacked in for now, just to see how this would feel rendered simply in the UI. 
![CleanShot 2024-10-24 at 16 26 14@2x](https://github.com/user-attachments/assets/e630d078-78b7-42d7-93f1-cf61c00bd20e) cc @as-cii @danilo-leal Release Notes: - N/A --------- Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Co-authored-by: Richard Feldman --- assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + crates/assistant/src/assistant.rs | 1 + crates/assistant/src/assistant_panel.rs | 130 ++++++++++++++---- crates/assistant/src/context.rs | 47 ++++++- crates/assistant/src/inline_assistant.rs | 4 +- .../src/terminal_inline_assistant.rs | 4 +- crates/ui/src/components/keybinding.rs | 2 +- 8 files changed, 155 insertions(+), 35 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 4f55fa9772..0ba76fba3f 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -532,6 +532,7 @@ "context": "ContextEditor > Editor", "bindings": { "ctrl-enter": "assistant::Assist", + "ctrl-shift-enter": "assistant::Edit", "ctrl-s": "workspace::Save", "ctrl->": "assistant::QuoteSelection", "ctrl-<": "assistant::InsertIntoEditor", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index cb9a86bd0b..964af3ce3d 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -201,6 +201,7 @@ "context": "ContextEditor > Editor", "bindings": { "cmd-enter": "assistant::Assist", + "cmd-shift-enter": "assistant::Edit", "cmd-s": "workspace::Save", "cmd->": "assistant::QuoteSelection", "cmd-<": "assistant::InsertIntoEditor", diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index a48f6d6c29..c96358ae99 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -59,6 +59,7 @@ actions!( assistant, [ Assist, + Edit, Split, CopyCode, CycleMessageRole, diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs 
index b15026c1ea..f0b5a5d442 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -13,10 +13,11 @@ use crate::{ terminal_inline_assistant::TerminalInlineAssistant, Assist, AssistantPatch, AssistantPatchStatus, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, ContextStoreEvent, CopyCode, CycleMessageRole, - DeployHistory, DeployPromptLibrary, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, - Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, - NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, - RemoteContextMetadata, SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, + DeployHistory, DeployPromptLibrary, Edit, InlineAssistant, InsertDraggedFiles, + InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, + ModelSelector, NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, + RemoteContextMetadata, RequestType, SavedContextMetadata, Split, ToggleFocus, + ToggleModelSelector, }; use anyhow::Result; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection}; @@ -1588,23 +1589,11 @@ impl ContextEditor { } fn assist(&mut self, _: &Assist, cx: &mut ViewContext) { - let provider = LanguageModelRegistry::read_global(cx).active_provider(); - if provider - .as_ref() - .map_or(false, |provider| provider.must_accept_terms(cx)) - { - self.show_accept_terms = true; - cx.notify(); - return; - } + self.send_to_model(RequestType::Chat, cx); + } - if self.focus_active_patch(cx) { - return; - } - - self.last_error = None; - self.send_to_model(cx); - cx.notify(); + fn edit(&mut self, _: &Edit, cx: &mut ViewContext) { + self.send_to_model(RequestType::SuggestEdits, cx); } fn focus_active_patch(&mut self, cx: &mut ViewContext) -> bool { @@ -1622,8 +1611,27 @@ impl ContextEditor { false } - fn send_to_model(&mut self, cx: &mut ViewContext) { - if let 
Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) { + fn send_to_model(&mut self, request_type: RequestType, cx: &mut ViewContext) { + let provider = LanguageModelRegistry::read_global(cx).active_provider(); + if provider + .as_ref() + .map_or(false, |provider| provider.must_accept_terms(cx)) + { + self.show_accept_terms = true; + cx.notify(); + return; + } + + if self.focus_active_patch(cx) { + return; + } + + self.last_error = None; + + if let Some(user_message) = self + .context + .update(cx, |context, cx| context.assist(request_type, cx)) + { let new_selection = { let cursor = user_message .start @@ -1640,6 +1648,8 @@ impl ContextEditor { // Avoid scrolling to the new cursor position so the assistant's output is stable. cx.defer(|this, _| this.scroll_position = None); } + + cx.notify(); } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { @@ -3644,7 +3654,13 @@ impl ContextEditor { button.tooltip(move |_| tooltip.clone()) }) .layer(ElevationIndex::ModalSurface) - .child(Label::new("Send")) + .child(Label::new( + if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) { + "Chat" + } else { + "Send" + }, + )) .children( KeyBinding::for_action_in(&Assist, &focus_handle, cx) .map(|binding| binding.into_any_element()), @@ -3654,6 +3670,57 @@ impl ContextEditor { }) } + fn render_edit_button(&self, cx: &mut ViewContext) -> impl IntoElement { + let focus_handle = self.focus_handle(cx).clone(); + + let (style, tooltip) = match token_state(&self.context, cx) { + Some(TokenState::NoTokensLeft { .. }) => ( + ButtonStyle::Tinted(TintColor::Negative), + Some(Tooltip::text("Token limit reached", cx)), + ), + Some(TokenState::HasMoreTokens { + over_warn_threshold, + .. 
+ }) => { + let (style, tooltip) = if over_warn_threshold { + ( + ButtonStyle::Tinted(TintColor::Warning), + Some(Tooltip::text("Token limit is close to exhaustion", cx)), + ) + } else { + (ButtonStyle::Filled, None) + }; + (style, tooltip) + } + None => (ButtonStyle::Filled, None), + }; + + let provider = LanguageModelRegistry::read_global(cx).active_provider(); + + let has_configuration_error = configuration_error(cx).is_some(); + let needs_to_accept_terms = self.show_accept_terms + && provider + .as_ref() + .map_or(false, |provider| provider.must_accept_terms(cx)); + let disabled = has_configuration_error || needs_to_accept_terms; + + ButtonLike::new("edit_button") + .disabled(disabled) + .style(style) + .when_some(tooltip, |button, tooltip| { + button.tooltip(move |_| tooltip.clone()) + }) + .layer(ElevationIndex::ModalSurface) + .child(Label::new("Suggest Edits")) + .children( + KeyBinding::for_action_in(&Edit, &focus_handle, cx) + .map(|binding| binding.into_any_element()), + ) + .on_click(move |_event, cx| { + focus_handle.dispatch_action(&Edit, cx); + }) + } + fn render_last_error(&self, cx: &mut ViewContext) -> Option { let last_error = self.last_error.as_ref()?; @@ -3910,6 +3977,7 @@ impl Render for ContextEditor { .capture_action(cx.listener(ContextEditor::paste)) .capture_action(cx.listener(ContextEditor::cycle_message_role)) .capture_action(cx.listener(ContextEditor::confirm_command)) + .on_action(cx.listener(ContextEditor::edit)) .on_action(cx.listener(ContextEditor::assist)) .on_action(cx.listener(ContextEditor::split)) .size_full() @@ -3974,7 +4042,21 @@ impl Render for ContextEditor { h_flex() .w_full() .justify_end() - .child(div().child(self.render_send_button(cx))), + .when( + AssistantSettings::get_global(cx).are_live_diffs_enabled(cx), + |buttons| { + buttons + .items_center() + .gap_1p5() + .child(self.render_edit_button(cx)) + .child( + Label::new("or") + .size(LabelSize::Small) + .color(Color::Muted), + ) + }, + ) + 
.child(self.render_send_button(cx)), ), ), ) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 78237e51b2..f5e8174748 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -66,6 +66,14 @@ impl ContextId { } } +#[derive(Clone, Copy, Debug)] +pub enum RequestType { + /// Request a normal chat response from the model. + Chat, + /// Add a preamble to the message, which tells the model to return a structured response that suggests edits. + SuggestEdits, +} + #[derive(Clone, Debug)] pub enum ContextOperation { InsertMessage { @@ -1028,7 +1036,7 @@ impl Context { } pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext) { - let request = self.to_completion_request(cx); + let request = self.to_completion_request(RequestType::SuggestEdits, cx); // Conservatively assume SuggestEdits, since it takes more tokens. let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else { return; }; @@ -1171,7 +1179,7 @@ impl Context { } let request = { - let mut req = self.to_completion_request(cx); + let mut req = self.to_completion_request(RequestType::Chat, cx); // Skip the last message because it's likely to change and // therefore would be a waste to cache. req.messages.pop(); @@ -1859,7 +1867,11 @@ impl Context { }) } - pub fn assist(&mut self, cx: &mut ModelContext) -> Option { + pub fn assist( + &mut self, + request_type: RequestType, + cx: &mut ModelContext, + ) -> Option { let model_registry = LanguageModelRegistry::read_global(cx); let provider = model_registry.active_provider()?; let model = model_registry.active_model()?; @@ -1872,7 +1884,7 @@ impl Context { // Compute which messages to cache, including the last one. 
self.mark_cache_anchors(&model.cache_configuration(), false, cx); - let mut request = self.to_completion_request(cx); + let mut request = self.to_completion_request(request_type, cx); if cx.has_flag::() { let tool_registry = ToolRegistry::global(cx); @@ -2074,7 +2086,11 @@ impl Context { Some(user_message) } - pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest { + pub fn to_completion_request( + &self, + request_type: RequestType, + cx: &AppContext, + ) -> LanguageModelRequest { let buffer = self.buffer.read(cx); let mut contents = self.contents(cx).peekable(); @@ -2163,6 +2179,25 @@ impl Context { completion_request.messages.push(request_message); } + if let RequestType::SuggestEdits = request_type { + if let Ok(preamble) = self.prompt_builder.generate_workflow_prompt() { + let last_elem_index = completion_request.messages.len(); + + completion_request + .messages + .push(LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::Text(preamble)], + cache: false, + }); + + // The preamble message should be sent right before the last actual user message. 
+ completion_request + .messages + .swap(last_elem_index, last_elem_index.saturating_sub(1)); + } + } + completion_request } @@ -2477,7 +2512,7 @@ impl Context { return; } - let mut request = self.to_completion_request(cx); + let mut request = self.to_completion_request(RequestType::Chat, cx); request.messages.push(LanguageModelRequestMessage { role: Role::User, content: vec![ diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 9af8193605..4c79662cf1 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1,7 +1,7 @@ use crate::{ assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist, - CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff, + CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, RequestType, StreamingDiff, }; use anyhow::{anyhow, Context as _, Result}; use client::{telemetry::Telemetry, ErrorExt}; @@ -2234,7 +2234,7 @@ impl InlineAssist { .read(cx) .active_context(cx)? .read(cx) - .to_completion_request(cx), + .to_completion_request(RequestType::Chat, cx), ) } else { None diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 41b8d9eb88..3e472ae4a9 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -1,6 +1,6 @@ use crate::{ humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent, - ModelSelector, DEFAULT_CONTEXT_LINES, + ModelSelector, RequestType, DEFAULT_CONTEXT_LINES, }; use anyhow::{Context as _, Result}; use client::telemetry::Telemetry; @@ -251,7 +251,7 @@ impl TerminalInlineAssistant { .read(cx) .active_context(cx)? 
.read(cx) - .to_completion_request(cx), + .to_completion_request(RequestType::Chat, cx), ) }) } else { diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index cd45a11d9f..c1381e6fdf 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -184,7 +184,7 @@ pub struct KeyIcon { impl RenderOnce for KeyIcon { fn render(self, _cx: &mut WindowContext) -> impl IntoElement { Icon::new(self.icon) - .size(IconSize::Small) + .size(IconSize::XSmall) .color(Color::Muted) } } From 273cb1921f999e158a9de75ac8bb9fc5a9615c18 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 29 Oct 2024 11:22:41 -0600 Subject: [PATCH 54/87] Fix wrong UpdateWorktree chunk size being used in release mode (#19912) Release Notes: - Fixed slowness when collaborating Co-authored-by: Thorsten --- crates/language_model/Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 74a2ed0ed0..685b022340 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -38,7 +38,7 @@ menu.workspace = true ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } parking_lot.workspace = true -proto = { workspace = true, features = ["test-support"] } +proto.workspace = true project.workspace = true schemars.workspace = true serde.workspace = true @@ -62,6 +62,7 @@ env_logger.workspace = true language = { workspace = true, features = ["test-support"] } log.workspace = true project = { workspace = true, features = ["test-support"] } +proto = { workspace = true, features = ["test-support"] } rand.workspace = true text = { workspace = true, features = ["test-support"] } unindent.workspace = true From 21137d2ba7439b8045b664607459d822727e5121 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 29 Oct 2024 13:57:33 -0400 Subject: [PATCH 55/87] Delete /workflow 
(#19900) This a separate PR from https://github.com/zed-industries/zed/pull/19705 so we can revert it more easily if we want it back later. Release Notes: - Added "Suggest Edit" button to the assistant panel if `"enable_experimental_live_diffs": true` is set in the `"assistant"` section of `settings.json`. This button takes the place of the previous `/workflow` command, but it is experimental and may change! --------- Co-authored-by: Nathan Sobo Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> --- crates/assistant/src/assistant.rs | 18 ---- crates/assistant/src/slash_command.rs | 1 - .../src/slash_command/workflow_command.rs | 82 ------------------- 3 files changed, 101 deletions(-) delete mode 100644 crates/assistant/src/slash_command/workflow_command.rs diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index c96358ae99..c2857d06d4 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,12 +41,10 @@ use prompts::PromptLoadingParams; use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; -use slash_command::workflow_command::WorkflowSlashCommand; use slash_command::{ auto_command, cargo_workspace_command, context_server_command, default_command, delta_command, diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command, prompt_command, search_command, symbols_command, tab_command, terminal_command, - workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -445,22 +443,6 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); if let Some(prompt_builder) = prompt_builder { - cx.observe_global::({ - let slash_command_registry = slash_command_registry.clone(); - let prompt_builder = prompt_builder.clone(); - move |cx| { - if 
AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) { - slash_command_registry.register_command( - workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), - true, - ); - } else { - slash_command_registry.unregister_command_by_name(WorkflowSlashCommand::NAME); - } - } - }) - .detach(); - cx.observe_flag::({ let slash_command_registry = slash_command_registry.clone(); move |is_enabled, _cx| { diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index e430e35622..ed20791d95 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -34,7 +34,6 @@ pub mod search_command; pub mod symbols_command; pub mod tab_command; pub mod terminal_command; -pub mod workflow_command; pub(crate) struct SlashCommandCompletionProvider { cancel_flag: Mutex>, diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs deleted file mode 100644 index ca6ccde92e..0000000000 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ /dev/null @@ -1,82 +0,0 @@ -use std::sync::atomic::AtomicBool; -use std::sync::Arc; - -use anyhow::Result; -use assistant_slash_command::{ - ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, - SlashCommandResult, -}; -use gpui::{Task, WeakView}; -use language::{BufferSnapshot, LspAdapterDelegate}; -use ui::prelude::*; -use workspace::Workspace; - -use crate::prompts::PromptBuilder; - -pub(crate) struct WorkflowSlashCommand { - prompt_builder: Arc, -} - -impl WorkflowSlashCommand { - pub const NAME: &'static str = "workflow"; - - pub fn new(prompt_builder: Arc) -> Self { - Self { prompt_builder } - } -} - -impl SlashCommand for WorkflowSlashCommand { - fn name(&self) -> String { - Self::NAME.into() - } - - fn description(&self) -> String { - "Insert prompt to opt into the edit workflow".into() - } - - fn menu_text(&self) -> String { - self.description() - } - - fn 
requires_argument(&self) -> bool { - false - } - - fn complete_argument( - self: Arc, - _arguments: &[String], - _cancel: Arc, - _workspace: Option>, - _cx: &mut WindowContext, - ) -> Task>> { - Task::ready(Ok(Vec::new())) - } - - fn run( - self: Arc, - _arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, - _workspace: WeakView, - _delegate: Option>, - cx: &mut WindowContext, - ) -> Task { - let prompt_builder = self.prompt_builder.clone(); - cx.spawn(|_cx| async move { - let text = prompt_builder.generate_workflow_prompt()?; - let range = 0..text.len(); - - Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::Route, - label: "Workflow".into(), - metadata: None, - }], - run_commands_in_text: false, - } - .to_event_stream()) - }) - } -} From 9d12308d06edffbf60459b06b7ce1a5cf6628dd8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 29 Oct 2024 14:07:54 -0400 Subject: [PATCH 56/87] Fix Julia icon extension lookup (#19916) Release Notes: - Fixed a bug where the Julia icon was not displayed for Julia files. 
--- assets/icons/file_icons/file_types.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json index ce5944e097..fe293256b3 100644 --- a/assets/icons/file_icons/file_types.json +++ b/assets/icons/file_icons/file_types.json @@ -84,6 +84,7 @@ "j2k": "image", "java": "java", "jfif": "image", + "jl": "julia", "jp2": "image", "jpeg": "image", "jpg": "image", @@ -91,7 +92,6 @@ "json": "storage", "jsonc": "storage", "jsx": "react", - "julia": "julia", "jxl": "image", "kt": "kotlin", "ldf": "storage", From 6ea46623260f42587e1c4991bb8a335237a983de Mon Sep 17 00:00:00 2001 From: Kyle Kelley Date: Tue, 29 Oct 2024 11:30:07 -0700 Subject: [PATCH 57/87] Initial Notebook UI structure (#19756) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is the start of a notebook UI for Zed. `🔔 Note: This won't be useable yet when it is merged! Read below. 🔔` This is going to be behind a feature flag so that we can merge this initial PR and then make follow up PRs. Release notes will be produced in a future PR. 
Minimum checklist for merging this: * [x] All functionality behind the `notebooks` feature flag (with env var opt out) * [x] Open notebook files in the workspace * [x] Remove the "Open Notebook" button from title bar * [x] Incorporate text style refinements for cell editors * [x] Rely on `nbformat` crate for parsing the notebook into our in-memory format * [x] Move notebook to a `gpui::List` * [x] Hook up output rendering Release Notes: - N/A --------- Co-authored-by: Nate Butler Co-authored-by: Thorsten Ball --- Cargo.lock | 45 +- Cargo.toml | 3 +- assets/icons/list_x.svg | 7 + crates/feature_flags/src/feature_flags.rs | 6 + crates/repl/Cargo.toml | 3 + crates/repl/src/notebook.rs | 4 + crates/repl/src/notebook/cell.rs | 733 ++++++++++++++++++++++ crates/repl/src/notebook/notebook_ui.rs | 672 ++++++++++++++++++++ crates/repl/src/outputs.rs | 4 +- crates/repl/src/repl.rs | 1 + crates/ui/src/components/icon.rs | 7 + crates/zed/src/main.rs | 1 + crates/zed/src/zed.rs | 1 + 13 files changed, 1478 insertions(+), 9 deletions(-) create mode 100644 assets/icons/list_x.svg create mode 100644 crates/repl/src/notebook.rs create mode 100644 crates/repl/src/notebook/cell.rs create mode 100644 crates/repl/src/notebook/notebook_ui.rs diff --git a/Cargo.lock b/Cargo.lock index c04ec535a6..4d95eee098 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1586,7 +1586,7 @@ dependencies = [ "bitflags 2.6.0", "cexpr", "clang-sys", - "itertools 0.10.5", + "itertools 0.12.1", "lazy_static", "lazycell", "proc-macro2", @@ -5584,7 +5584,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.5.7", "tokio", "tower-service", "tracing", @@ -6154,6 +6154,20 @@ dependencies = [ "simple_asn1", ] +[[package]] +name = "jupyter-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a444fb3f87ee6885eb316028cc998c7d84811663ef95d78c419419423d5a054" +dependencies = [ + "anyhow", + "chrono", + "serde", + "serde_json", + 
"thiserror", + "uuid", +] + [[package]] name = "khronos-egl" version = "6.0.0" @@ -6474,7 +6488,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -7137,6 +7151,21 @@ dependencies = [ "tempfile", ] +[[package]] +name = "nbformat" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146074ad45cab20f5d98ccded164826158471f21d04f96e40b9872529e10979d" +dependencies = [ + "anyhow", + "chrono", + "jupyter-serde", + "serde", + "serde_json", + "thiserror", + "uuid", +] + [[package]] name = "ndk" version = "0.8.0" @@ -9579,6 +9608,7 @@ dependencies = [ "command_palette_hooks", "editor", "env_logger 0.11.5", + "feature_flags", "futures 0.3.30", "gpui", "http_client", @@ -9588,7 +9618,9 @@ dependencies = [ "languages", "log", "markdown_preview", + "menu", "multi_buffer", + "nbformat", "project", "runtimelib", "schemars", @@ -9927,9 +9959,9 @@ dependencies = [ [[package]] name = "runtimelib" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7d76d28b882a7b889ebb04e79bc2b160b3061821ea596ff0f4a838fc7a76db0" +checksum = "263588fe9593333c4bfde258c9021fc64e766ea434e070c6b67c7100536d6499" dependencies = [ "anyhow", "async-dispatcher", @@ -9941,6 +9973,7 @@ dependencies = [ "dirs 5.0.1", "futures 0.3.30", "glob", + "jupyter-serde", "rand 0.8.5", "ring 0.17.8", "serde", @@ -14126,7 +14159,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 0697cc0c0b..e269dd99ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -371,6 +371,7 @@ linkify = "0.10.0" log = { 
version = "0.4.16", features = ["kv_unstable_serde", "serde"] } markup5ever_rcdom = "0.3.0" nanoid = "0.4" +nbformat = "0.3.1" nix = "0.29" num-format = "0.4.4" once_cell = "1.19.0" @@ -402,7 +403,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f "stream", ] } rsa = "0.9.6" -runtimelib = { version = "0.15", default-features = false, features = [ +runtimelib = { version = "0.16.0", default-features = false, features = [ "async-dispatcher-runtime", ] } rustc-demangle = "0.1.23" diff --git a/assets/icons/list_x.svg b/assets/icons/list_x.svg new file mode 100644 index 0000000000..683f38ab5d --- /dev/null +++ b/assets/icons/list_x.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index fb4e192023..286acdfc98 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -59,6 +59,12 @@ impl FeatureFlag for ZedPro { const NAME: &'static str = "zed-pro"; } +pub struct NotebookFeatureFlag; + +impl FeatureFlag for NotebookFeatureFlag { + const NAME: &'static str = "notebooks"; +} + pub struct AutoCommand {} impl FeatureFlag for AutoCommand { const NAME: &'static str = "auto-command"; diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index 001bf157d5..f035878d33 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -21,13 +21,16 @@ client.workspace = true collections.workspace = true command_palette_hooks.workspace = true editor.workspace = true +feature_flags.workspace = true futures.workspace = true gpui.workspace = true image.workspace = true language.workspace = true log.workspace = true markdown_preview.workspace = true +menu.workspace = true multi_buffer.workspace = true +nbformat.workspace = true project.workspace = true runtimelib.workspace = true schemars.workspace = true diff --git a/crates/repl/src/notebook.rs b/crates/repl/src/notebook.rs new file mode 100644 index 
0000000000..9c6738f799 --- /dev/null +++ b/crates/repl/src/notebook.rs @@ -0,0 +1,4 @@ +mod cell; +mod notebook_ui; +pub use cell::*; +pub use notebook_ui::*; diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs new file mode 100644 index 0000000000..bb6b6fbf38 --- /dev/null +++ b/crates/repl/src/notebook/cell.rs @@ -0,0 +1,733 @@ +#![allow(unused, dead_code)] +use std::sync::Arc; + +use editor::{Editor, EditorMode, MultiBuffer}; +use futures::future::Shared; +use gpui::{prelude::*, AppContext, Hsla, Task, TextStyleRefinement, View}; +use language::{Buffer, Language, LanguageRegistry}; +use markdown_preview::{markdown_parser::parse_markdown, markdown_renderer::render_markdown_block}; +use nbformat::v4::{CellId, CellMetadata, CellType}; +use settings::Settings as _; +use theme::ThemeSettings; +use ui::{prelude::*, IconButtonShape}; +use util::ResultExt; + +use crate::{ + notebook::{CODE_BLOCK_INSET, GUTTER_WIDTH}, + outputs::{plain::TerminalOutput, user_error::ErrorView, Output}, +}; + +#[derive(Copy, Clone, PartialEq, PartialOrd)] +pub enum CellPosition { + First, + Middle, + Last, +} + +pub enum CellControlType { + RunCell, + RerunCell, + ClearCell, + CellOptions, + CollapseCell, + ExpandCell, +} + +impl CellControlType { + fn icon_name(&self) -> IconName { + match self { + CellControlType::RunCell => IconName::Play, + CellControlType::RerunCell => IconName::ArrowCircle, + CellControlType::ClearCell => IconName::ListX, + CellControlType::CellOptions => IconName::Ellipsis, + CellControlType::CollapseCell => IconName::ChevronDown, + CellControlType::ExpandCell => IconName::ChevronRight, + } + } +} + +pub struct CellControl { + button: IconButton, +} + +impl CellControl { + fn new(id: impl Into, control_type: CellControlType) -> Self { + let icon_name = control_type.icon_name(); + let id = id.into(); + let button = IconButton::new(id, icon_name) + .icon_size(IconSize::Small) + .shape(IconButtonShape::Square); + Self { button } + } +} + 
+impl Clickable for CellControl { + fn on_click(self, handler: impl Fn(&gpui::ClickEvent, &mut WindowContext) + 'static) -> Self { + let button = self.button.on_click(handler); + Self { button } + } + + fn cursor_style(self, _cursor_style: gpui::CursorStyle) -> Self { + self + } +} + +/// A notebook cell +#[derive(Clone)] +pub enum Cell { + Code(View), + Markdown(View), + Raw(View), +} + +fn convert_outputs(outputs: &Vec, cx: &mut WindowContext) -> Vec { + outputs + .into_iter() + .map(|output| match output { + nbformat::v4::Output::Stream { text, .. } => Output::Stream { + content: cx.new_view(|cx| TerminalOutput::from(&text.0, cx)), + }, + nbformat::v4::Output::DisplayData(display_data) => { + Output::new(&display_data.data, None, cx) + } + nbformat::v4::Output::ExecuteResult(execute_result) => { + Output::new(&execute_result.data, None, cx) + } + nbformat::v4::Output::Error(error) => Output::ErrorOutput(ErrorView { + ename: error.ename.clone(), + evalue: error.evalue.clone(), + traceback: cx.new_view(|cx| TerminalOutput::from(&error.traceback.join("\n"), cx)), + }), + }) + .collect() +} + +impl Cell { + pub fn load( + cell: &nbformat::v4::Cell, + languages: &Arc, + notebook_language: Shared>>>, + cx: &mut WindowContext, + ) -> Self { + match cell { + nbformat::v4::Cell::Markdown { + id, + metadata, + source, + attachments: _, + } => { + let source = source.join(""); + + let view = cx.new_view(|cx| { + let markdown_parsing_task = { + let languages = languages.clone(); + let source = source.clone(); + + cx.spawn(|this, mut cx| async move { + let parsed_markdown = cx + .background_executor() + .spawn(async move { + parse_markdown(&source, None, Some(languages)).await + }) + .await; + + this.update(&mut cx, |cell: &mut MarkdownCell, _| { + cell.parsed_markdown = Some(parsed_markdown); + }) + .log_err(); + }) + }; + + MarkdownCell { + markdown_parsing_task, + languages: languages.clone(), + id: id.clone(), + metadata: metadata.clone(), + source: source.clone(), + 
parsed_markdown: None, + selected: false, + cell_position: None, + } + }); + + Cell::Markdown(view) + } + nbformat::v4::Cell::Code { + id, + metadata, + execution_count, + source, + outputs, + } => Cell::Code(cx.new_view(|cx| { + let text = source.join(""); + + let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx)); + let multi_buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + + let editor_view = cx.new_view(|cx| { + let mut editor = Editor::new( + EditorMode::AutoHeight { max_lines: 1024 }, + multi_buffer, + None, + false, + cx, + ); + + let theme = ThemeSettings::get_global(cx); + + let refinement = TextStyleRefinement { + font_family: Some(theme.buffer_font.family.clone()), + font_size: Some(theme.buffer_font_size.into()), + color: Some(cx.theme().colors().editor_foreground), + background_color: Some(gpui::transparent_black()), + ..Default::default() + }; + + editor.set_text(text, cx); + editor.set_show_gutter(false, cx); + editor.set_text_style_refinement(refinement); + + // editor.set_read_only(true); + editor + }); + + let buffer = buffer.clone(); + let language_task = cx.spawn(|this, mut cx| async move { + let language = notebook_language.await; + + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(language.clone(), cx); + }); + }); + + CodeCell { + id: id.clone(), + metadata: metadata.clone(), + execution_count: *execution_count, + source: source.join(""), + editor: editor_view, + outputs: convert_outputs(outputs, cx), + selected: false, + language_task, + cell_position: None, + } + })), + nbformat::v4::Cell::Raw { + id, + metadata, + source, + } => Cell::Raw(cx.new_view(|_| RawCell { + id: id.clone(), + metadata: metadata.clone(), + source: source.join(""), + selected: false, + cell_position: None, + })), + } + } +} + +pub trait RenderableCell: Render { + const CELL_TYPE: CellType; + + fn id(&self) -> &CellId; + fn cell_type(&self) -> CellType; + fn metadata(&self) -> &CellMetadata; + fn source(&self) -> &String; 
+ fn selected(&self) -> bool; + fn set_selected(&mut self, selected: bool) -> &mut Self; + fn selected_bg_color(&self, cx: &ViewContext) -> Hsla { + if self.selected() { + let mut color = cx.theme().colors().icon_accent; + color.fade_out(0.9); + color + } else { + // TODO: this is wrong + cx.theme().colors().tab_bar_background + } + } + fn control(&self, _cx: &ViewContext) -> Option { + None + } + + fn cell_position_spacer( + &self, + is_first: bool, + cx: &ViewContext, + ) -> Option { + let cell_position = self.cell_position(); + + if (cell_position == Some(&CellPosition::First) && is_first) + || (cell_position == Some(&CellPosition::Last) && !is_first) + { + Some(div().flex().w_full().h(Spacing::XLarge.px(cx))) + } else { + None + } + } + + fn gutter(&self, cx: &ViewContext) -> impl IntoElement { + let is_selected = self.selected(); + + div() + .relative() + .h_full() + .w(px(GUTTER_WIDTH)) + .child( + div() + .w(px(GUTTER_WIDTH)) + .flex() + .flex_none() + .justify_center() + .h_full() + .child( + div() + .flex_none() + .w(px(1.)) + .h_full() + .when(is_selected, |this| this.bg(cx.theme().colors().icon_accent)) + .when(!is_selected, |this| this.bg(cx.theme().colors().border)), + ), + ) + .when_some(self.control(cx), |this, control| { + this.child( + div() + .absolute() + .top(px(CODE_BLOCK_INSET - 2.0)) + .left_0() + .flex() + .flex_none() + .w(px(GUTTER_WIDTH)) + .h(px(GUTTER_WIDTH + 12.0)) + .items_center() + .justify_center() + .bg(cx.theme().colors().tab_bar_background) + .child(control.button), + ) + }) + } + + fn cell_position(&self) -> Option<&CellPosition>; + fn set_cell_position(&mut self, position: CellPosition) -> &mut Self; +} + +pub trait RunnableCell: RenderableCell { + fn execution_count(&self) -> Option; + fn set_execution_count(&mut self, count: i32) -> &mut Self; + fn run(&mut self, cx: &mut ViewContext) -> (); +} + +pub struct MarkdownCell { + id: CellId, + metadata: CellMetadata, + source: String, + parsed_markdown: Option, + 
markdown_parsing_task: Task<()>, + selected: bool, + cell_position: Option, + languages: Arc, +} + +impl RenderableCell for MarkdownCell { + const CELL_TYPE: CellType = CellType::Markdown; + + fn id(&self) -> &CellId { + &self.id + } + + fn cell_type(&self) -> CellType { + CellType::Markdown + } + + fn metadata(&self) -> &CellMetadata { + &self.metadata + } + + fn source(&self) -> &String { + &self.source + } + + fn selected(&self) -> bool { + self.selected + } + + fn set_selected(&mut self, selected: bool) -> &mut Self { + self.selected = selected; + self + } + + fn control(&self, _: &ViewContext) -> Option { + None + } + + fn cell_position(&self) -> Option<&CellPosition> { + self.cell_position.as_ref() + } + + fn set_cell_position(&mut self, cell_position: CellPosition) -> &mut Self { + self.cell_position = Some(cell_position); + self + } +} + +impl Render for MarkdownCell { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let Some(parsed) = self.parsed_markdown.as_ref() else { + return div(); + }; + + let mut markdown_render_context = + markdown_preview::markdown_renderer::RenderContext::new(None, cx); + + v_flex() + .size_full() + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(true, cx)) + .child( + h_flex() + .w_full() + .pr_6() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter(cx)) + .child( + v_flex() + .size_full() + .flex_1() + .p_3() + .font_ui(cx) + .text_size(TextSize::Default.rems(cx)) + // + .children(parsed.children.iter().map(|child| { + div().relative().child(div().relative().child( + render_markdown_block(child, &mut markdown_render_context), + )) + })), + ), + ) + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(false, cx)) + } +} + +pub struct CodeCell { + id: CellId, + metadata: CellMetadata, + execution_count: Option, + 
source: String, + editor: View, + outputs: Vec, + selected: bool, + cell_position: Option, + language_task: Task<()>, +} + +impl CodeCell { + pub fn is_dirty(&self, cx: &AppContext) -> bool { + self.editor.read(cx).buffer().read(cx).is_dirty(cx) + } + pub fn has_outputs(&self) -> bool { + !self.outputs.is_empty() + } + + pub fn clear_outputs(&mut self) { + self.outputs.clear(); + } + + fn output_control(&self) -> Option { + if self.has_outputs() { + Some(CellControlType::ClearCell) + } else { + None + } + } + + pub fn gutter_output(&self, cx: &ViewContext) -> impl IntoElement { + let is_selected = self.selected(); + + div() + .relative() + .h_full() + .w(px(GUTTER_WIDTH)) + .child( + div() + .w(px(GUTTER_WIDTH)) + .flex() + .flex_none() + .justify_center() + .h_full() + .child( + div() + .flex_none() + .w(px(1.)) + .h_full() + .when(is_selected, |this| this.bg(cx.theme().colors().icon_accent)) + .when(!is_selected, |this| this.bg(cx.theme().colors().border)), + ), + ) + .when(self.has_outputs(), |this| { + this.child( + div() + .absolute() + .top(px(CODE_BLOCK_INSET - 2.0)) + .left_0() + .flex() + .flex_none() + .w(px(GUTTER_WIDTH)) + .h(px(GUTTER_WIDTH + 12.0)) + .items_center() + .justify_center() + .bg(cx.theme().colors().tab_bar_background) + .child(IconButton::new("control", IconName::Ellipsis)), + ) + }) + } +} + +impl RenderableCell for CodeCell { + const CELL_TYPE: CellType = CellType::Code; + + fn id(&self) -> &CellId { + &self.id + } + + fn cell_type(&self) -> CellType { + CellType::Code + } + + fn metadata(&self) -> &CellMetadata { + &self.metadata + } + + fn source(&self) -> &String { + &self.source + } + + fn control(&self, cx: &ViewContext) -> Option { + let cell_control = if self.has_outputs() { + CellControl::new("rerun-cell", CellControlType::RerunCell) + } else { + CellControl::new("run-cell", CellControlType::RunCell) + .on_click(cx.listener(move |this, _, cx| this.run(cx))) + }; + + Some(cell_control) + } + + fn selected(&self) -> bool { + 
self.selected + } + + fn set_selected(&mut self, selected: bool) -> &mut Self { + self.selected = selected; + self + } + + fn cell_position(&self) -> Option<&CellPosition> { + self.cell_position.as_ref() + } + + fn set_cell_position(&mut self, cell_position: CellPosition) -> &mut Self { + self.cell_position = Some(cell_position); + self + } +} + +impl RunnableCell for CodeCell { + fn run(&mut self, cx: &mut ViewContext) { + println!("Running code cell: {}", self.id); + } + + fn execution_count(&self) -> Option { + self.execution_count + .and_then(|count| if count > 0 { Some(count) } else { None }) + } + + fn set_execution_count(&mut self, count: i32) -> &mut Self { + self.execution_count = Some(count); + self + } +} + +impl Render for CodeCell { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let lines = self.source.lines().count(); + let height = lines as f32 * cx.line_height(); + + v_flex() + .size_full() + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(true, cx)) + // Editor portion + .child( + h_flex() + .w_full() + .pr_6() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter(cx)) + .child( + div().py_1p5().w_full().child( + div() + .flex() + .size_full() + .flex_1() + .py_3() + .px_5() + .rounded_lg() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_background) + .child(div().h(height).w_full().child(self.editor.clone())), + ), + ), + ) + // Output portion + .child( + h_flex() + .w_full() + .pr_6() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter_output(cx)) + .child( + div().py_1p5().w_full().child( + div() + .flex() + .size_full() + .flex_1() + .py_3() + .px_5() + .rounded_lg() + .border_1() + // .border_color(cx.theme().colors().border) + // .bg(cx.theme().colors().editor_background) + 
.child(div().w_full().children(self.outputs.iter().map( + |output| { + let content = match output { + Output::Plain { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Markdown { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Stream { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Image { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Message(message) => Some( + div().child(message.clone()).into_any_element(), + ), + Output::Table { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::ErrorOutput(error_view) => { + error_view.render(cx) + } + Output::ClearOutputWaitMarker => None, + }; + + div() + // .w_full() + // .mt_3() + // .p_3() + // .rounded_md() + // .bg(cx.theme().colors().editor_background) + // .border(px(1.)) + // .border_color(cx.theme().colors().border) + // .shadow_sm() + .children(content) + }, + ))), + ), + ), + ) + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(false, cx)) + } +} + +pub struct RawCell { + id: CellId, + metadata: CellMetadata, + source: String, + selected: bool, + cell_position: Option, +} + +impl RenderableCell for RawCell { + const CELL_TYPE: CellType = CellType::Raw; + + fn id(&self) -> &CellId { + &self.id + } + + fn cell_type(&self) -> CellType { + CellType::Raw + } + + fn metadata(&self) -> &CellMetadata { + &self.metadata + } + + fn source(&self) -> &String { + &self.source + } + + fn selected(&self) -> bool { + self.selected + } + + fn set_selected(&mut self, selected: bool) -> &mut Self { + self.selected = selected; + self + } + + fn cell_position(&self) -> Option<&CellPosition> { + self.cell_position.as_ref() + } + + fn set_cell_position(&mut self, cell_position: CellPosition) -> &mut Self { + self.cell_position = Some(cell_position); + self + } +} + +impl Render for RawCell { + fn render(&mut self, cx: &mut 
ViewContext) -> impl IntoElement { + v_flex() + .size_full() + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(true, cx)) + .child( + h_flex() + .w_full() + .pr_2() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter(cx)) + .child( + div() + .flex() + .size_full() + .flex_1() + .p_3() + .font_ui(cx) + .text_size(TextSize::Default.rems(cx)) + .child(self.source.clone()), + ), + ) + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(false, cx)) + } +} diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs new file mode 100644 index 0000000000..36d6e29385 --- /dev/null +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -0,0 +1,672 @@ +#![allow(unused, dead_code)] +use std::{path::PathBuf, sync::Arc}; + +use client::proto::ViewId; +use collections::HashMap; +use feature_flags::{FeatureFlagAppExt as _, NotebookFeatureFlag}; +use futures::FutureExt; +use gpui::{ + actions, list, prelude::*, AppContext, EventEmitter, FocusHandle, FocusableView, + ListScrollEvent, ListState, Model, Task, +}; +use language::LanguageRegistry; +use project::{Project, ProjectEntryId, ProjectPath}; +use ui::{prelude::*, Tooltip}; +use workspace::item::ItemEvent; +use workspace::{Item, ItemHandle, ProjectItem, ToolbarItemLocation}; +use workspace::{ToolbarItemEvent, ToolbarItemView}; + +use super::{Cell, CellPosition, RenderableCell}; + +use nbformat::v4::CellId; +use nbformat::v4::Metadata as NotebookMetadata; + +pub(crate) const DEFAULT_NOTEBOOK_FORMAT: i32 = 4; +pub(crate) const DEFAULT_NOTEBOOK_FORMAT_MINOR: i32 = 0; + +actions!( + notebook, + [ + OpenNotebook, + RunAll, + ClearOutputs, + MoveCellUp, + MoveCellDown, + AddMarkdownBlock, + AddCodeBlock, + ] +); + +pub(crate) const MAX_TEXT_BLOCK_WIDTH: f32 = 9999.0; +pub(crate) const SMALL_SPACING_SIZE: f32 
= 8.0; +pub(crate) const MEDIUM_SPACING_SIZE: f32 = 12.0; +pub(crate) const LARGE_SPACING_SIZE: f32 = 16.0; +pub(crate) const GUTTER_WIDTH: f32 = 19.0; +pub(crate) const CODE_BLOCK_INSET: f32 = MEDIUM_SPACING_SIZE; +pub(crate) const CONTROL_SIZE: f32 = 20.0; + +pub fn init(cx: &mut AppContext) { + if cx.has_flag::() || std::env::var("LOCAL_NOTEBOOK_DEV").is_ok() { + workspace::register_project_item::(cx); + } + + cx.observe_flag::({ + move |is_enabled, cx| { + if is_enabled { + workspace::register_project_item::(cx); + } else { + // todo: there is no way to unregister a project item, so if the feature flag + // gets turned off they need to restart Zed. + } + } + }) + .detach(); +} + +pub struct NotebookEditor { + languages: Arc, + + focus_handle: FocusHandle, + project: Model, + path: ProjectPath, + + remote_id: Option, + cell_list: ListState, + + metadata: NotebookMetadata, + nbformat: i32, + nbformat_minor: i32, + selected_cell_index: usize, + cell_order: Vec, + cell_map: HashMap, +} + +impl NotebookEditor { + pub fn new( + project: Model, + notebook_item: Model, + cx: &mut ViewContext, + ) -> Self { + let focus_handle = cx.focus_handle(); + + let notebook = notebook_item.read(cx).notebook.clone(); + + let languages = project.read(cx).languages().clone(); + + let metadata = notebook.metadata; + let nbformat = notebook.nbformat; + let nbformat_minor = notebook.nbformat_minor; + + let language_name = metadata + .language_info + .as_ref() + .map(|l| l.name.clone()) + .or(metadata + .kernelspec + .as_ref() + .and_then(|spec| spec.language.clone())); + + let notebook_language = if let Some(language_name) = language_name { + cx.spawn(|_, _| { + let languages = languages.clone(); + async move { languages.language_for_name(&language_name).await.ok() } + }) + .shared() + } else { + Task::ready(None).shared() + }; + + let languages = project.read(cx).languages().clone(); + let notebook_language = cx + .spawn(|_, _| { + // todo: pull from notebook metadata + const TODO: 
&'static str = "Python"; + let languages = languages.clone(); + async move { languages.language_for_name(TODO).await.ok() } + }) + .shared(); + + let mut cell_order = vec![]; + let mut cell_map = HashMap::default(); + + for (index, cell) in notebook.cells.iter().enumerate() { + let cell_id = cell.id(); + cell_order.push(cell_id.clone()); + cell_map.insert( + cell_id.clone(), + Cell::load(cell, &languages, notebook_language.clone(), cx), + ); + } + + let view = cx.view().downgrade(); + let cell_count = cell_order.len(); + let cell_order_for_list = cell_order.clone(); + let cell_map_for_list = cell_map.clone(); + + let cell_list = ListState::new( + cell_count, + gpui::ListAlignment::Top, + // TODO: This is a totally random number, + // not sure what this should be + px(3000.), + move |ix, cx| { + let cell_order_for_list = cell_order_for_list.clone(); + let cell_id = cell_order_for_list[ix].clone(); + if let Some(view) = view.upgrade() { + let cell_id = cell_id.clone(); + if let Some(cell) = cell_map_for_list.clone().get(&cell_id) { + view.update(cx, |view, cx| { + view.render_cell(ix, cell, cx).into_any_element() + }) + } else { + div().into_any() + } + } else { + div().into_any() + } + }, + ); + + Self { + languages: languages.clone(), + focus_handle, + project, + path: notebook_item.read(cx).project_path.clone(), + remote_id: None, + cell_list, + selected_cell_index: 0, + metadata, + nbformat, + nbformat_minor, + cell_order: cell_order.clone(), + cell_map: cell_map.clone(), + } + } + + fn has_outputs(&self, cx: &ViewContext) -> bool { + self.cell_map.values().any(|cell| { + if let Cell::Code(code_cell) = cell { + code_cell.read(cx).has_outputs() + } else { + false + } + }) + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.cell_map.values().any(|cell| { + if let Cell::Code(code_cell) = cell { + code_cell.read(cx).is_dirty(cx) + } else { + false + } + }) + } + + fn clear_outputs(&mut self, cx: &mut ViewContext) { + for cell in self.cell_map.values() { + if 
let Cell::Code(code_cell) = cell { + code_cell.update(cx, |cell, _cx| { + cell.clear_outputs(); + }); + } + } + } + + fn run_cells(&mut self, cx: &mut ViewContext) { + println!("Cells would all run here, if that was implemented!"); + } + + fn open_notebook(&mut self, _: &OpenNotebook, _cx: &mut ViewContext) { + println!("Open notebook triggered"); + } + + fn move_cell_up(&mut self, cx: &mut ViewContext) { + println!("Move cell up triggered"); + } + + fn move_cell_down(&mut self, cx: &mut ViewContext) { + println!("Move cell down triggered"); + } + + fn add_markdown_block(&mut self, cx: &mut ViewContext) { + println!("Add markdown block triggered"); + } + + fn add_code_block(&mut self, cx: &mut ViewContext) { + println!("Add code block triggered"); + } + + fn cell_count(&self) -> usize { + self.cell_map.len() + } + + fn selected_index(&self) -> usize { + self.selected_cell_index + } + + pub fn set_selected_index( + &mut self, + index: usize, + jump_to_index: bool, + cx: &mut ViewContext, + ) { + // let previous_index = self.selected_cell_index; + self.selected_cell_index = index; + let current_index = self.selected_cell_index; + + // in the future we may have some `on_cell_change` event that we want to fire here + + if jump_to_index { + self.jump_to_cell(current_index, cx); + } + } + + pub fn select_next(&mut self, _: &menu::SelectNext, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + let index = self.selected_index(); + let ix = if index == count - 1 { + count - 1 + } else { + index + 1 + }; + self.set_selected_index(ix, true, cx); + cx.notify(); + } + } + + pub fn select_previous(&mut self, _: &menu::SelectPrev, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + let index = self.selected_index(); + let ix = if index == 0 { 0 } else { index - 1 }; + self.set_selected_index(ix, true, cx); + cx.notify(); + } + } + + pub fn select_first(&mut self, _: &menu::SelectFirst, cx: &mut ViewContext) { + let count = 
self.cell_count(); + if count > 0 { + self.set_selected_index(0, true, cx); + cx.notify(); + } + } + + pub fn select_last(&mut self, _: &menu::SelectLast, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + self.set_selected_index(count - 1, true, cx); + cx.notify(); + } + } + + fn jump_to_cell(&mut self, index: usize, _cx: &mut ViewContext) { + self.cell_list.scroll_to_reveal_item(index); + } + + fn button_group(cx: &ViewContext) -> Div { + v_flex() + .gap(Spacing::Small.rems(cx)) + .items_center() + .w(px(CONTROL_SIZE + 4.0)) + .overflow_hidden() + .rounded(px(5.)) + .bg(cx.theme().colors().title_bar_background) + .p_px() + .border_1() + .border_color(cx.theme().colors().border) + } + + fn render_notebook_control( + id: impl Into, + icon: IconName, + _cx: &ViewContext, + ) -> IconButton { + let id: ElementId = ElementId::Name(id.into()); + IconButton::new(id, icon).width(px(CONTROL_SIZE).into()) + } + + fn render_notebook_controls(&self, cx: &ViewContext) -> impl IntoElement { + let has_outputs = self.has_outputs(cx); + + v_flex() + .max_w(px(CONTROL_SIZE + 4.0)) + .items_center() + .gap(Spacing::XXLarge.rems(cx)) + .justify_between() + .flex_none() + .h_full() + .py(Spacing::XLarge.px(cx)) + .child( + v_flex() + .gap(Spacing::Large.rems(cx)) + .child( + Self::button_group(cx) + .child( + Self::render_notebook_control("run-all-cells", IconName::Play, cx) + .tooltip(move |cx| { + Tooltip::for_action("Execute all cells", &RunAll, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(RunAll)); + }), + ) + .child( + Self::render_notebook_control( + "clear-all-outputs", + IconName::ListX, + cx, + ) + .disabled(!has_outputs) + .tooltip(move |cx| { + Tooltip::for_action("Clear all outputs", &ClearOutputs, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(ClearOutputs)); + }), + ), + ) + .child( + Self::button_group(cx) + .child( + Self::render_notebook_control( + "move-cell-up", + IconName::ArrowUp, + cx, + ) + .tooltip(move 
|cx| { + Tooltip::for_action("Move cell up", &MoveCellUp, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(MoveCellUp)); + }), + ) + .child( + Self::render_notebook_control( + "move-cell-down", + IconName::ArrowDown, + cx, + ) + .tooltip(move |cx| { + Tooltip::for_action("Move cell down", &MoveCellDown, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(MoveCellDown)); + }), + ), + ) + .child( + Self::button_group(cx) + .child( + Self::render_notebook_control( + "new-markdown-cell", + IconName::Plus, + cx, + ) + .tooltip(move |cx| { + Tooltip::for_action("Add markdown block", &AddMarkdownBlock, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(AddMarkdownBlock)); + }), + ) + .child( + Self::render_notebook_control("new-code-cell", IconName::Code, cx) + .tooltip(move |cx| { + Tooltip::for_action("Add code block", &AddCodeBlock, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(AddCodeBlock)); + }), + ), + ), + ) + .child( + v_flex() + .gap(Spacing::Large.rems(cx)) + .items_center() + .child(Self::render_notebook_control( + "more-menu", + IconName::Ellipsis, + cx, + )) + .child( + Self::button_group(cx) + .child(IconButton::new("repl", IconName::ReplNeutral)), + ), + ) + } + + fn cell_position(&self, index: usize) -> CellPosition { + match index { + 0 => CellPosition::First, + index if index == self.cell_count() - 1 => CellPosition::Last, + _ => CellPosition::Middle, + } + } + + fn render_cell( + &self, + index: usize, + cell: &Cell, + cx: &mut ViewContext, + ) -> impl IntoElement { + let cell_position = self.cell_position(index); + + let is_selected = index == self.selected_cell_index; + + match cell { + Cell::Code(cell) => { + cell.update(cx, |cell, _cx| { + cell.set_selected(is_selected) + .set_cell_position(cell_position); + }); + cell.clone().into_any_element() + } + Cell::Markdown(cell) => { + cell.update(cx, |cell, _cx| { + cell.set_selected(is_selected) + .set_cell_position(cell_position); + }); + 
cell.clone().into_any_element() + } + Cell::Raw(cell) => { + cell.update(cx, |cell, _cx| { + cell.set_selected(is_selected) + .set_cell_position(cell_position); + }); + cell.clone().into_any_element() + } + } + } +} + +impl Render for NotebookEditor { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .key_context("notebook") + .track_focus(&self.focus_handle) + .on_action(cx.listener(|this, &OpenNotebook, cx| this.open_notebook(&OpenNotebook, cx))) + .on_action(cx.listener(|this, &ClearOutputs, cx| this.clear_outputs(cx))) + .on_action(cx.listener(|this, &RunAll, cx| this.run_cells(cx))) + .on_action(cx.listener(|this, &MoveCellUp, cx| this.move_cell_up(cx))) + .on_action(cx.listener(|this, &MoveCellDown, cx| this.move_cell_down(cx))) + .on_action(cx.listener(|this, &AddMarkdownBlock, cx| this.add_markdown_block(cx))) + .on_action(cx.listener(|this, &AddCodeBlock, cx| this.add_code_block(cx))) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .flex() + .items_start() + .size_full() + .overflow_hidden() + .px(Spacing::XLarge.px(cx)) + .gap(Spacing::XLarge.px(cx)) + .bg(cx.theme().colors().tab_bar_background) + .child( + v_flex() + .id("notebook-cells") + .flex_1() + .size_full() + .overflow_y_scroll() + .child(list(self.cell_list.clone()).size_full()), + ) + .child(self.render_notebook_controls(cx)) + } +} + +impl FocusableView for NotebookEditor { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +pub struct NotebookItem { + path: PathBuf, + project_path: ProjectPath, + notebook: nbformat::v4::Notebook, +} + +impl project::Item for NotebookItem { + fn try_open( + project: &Model, + path: &ProjectPath, + cx: &mut AppContext, + ) -> Option>>> { + let path = path.clone(); + let project = project.clone(); + + if path.path.extension().unwrap_or_default() == "ipynb" 
{ + Some(cx.spawn(|mut cx| async move { + let abs_path = project + .read_with(&cx, |project, cx| project.absolute_path(&path, cx))? + .ok_or_else(|| anyhow::anyhow!("Failed to find the absolute path"))?; + + let file_content = std::fs::read_to_string(abs_path.clone())?; + let notebook = nbformat::parse_notebook(&file_content); + + let notebook = match notebook { + Ok(nbformat::Notebook::V4(notebook)) => notebook, + Ok(nbformat::Notebook::Legacy(legacy_notebook)) => { + // todo!(): Decide if we want to mutate the notebook by including Cell IDs + // and any other conversions + let notebook = nbformat::upgrade_legacy_notebook(legacy_notebook)?; + notebook + } + Err(e) => { + anyhow::bail!("Failed to parse notebook: {:?}", e); + } + }; + + cx.new_model(|_| NotebookItem { + path: abs_path, + project_path: path, + notebook, + }) + })) + } else { + None + } + } + + fn entry_id(&self, _: &AppContext) -> Option { + None + } + + fn project_path(&self, _: &AppContext) -> Option { + Some(self.project_path.clone()) + } +} + +impl EventEmitter<()> for NotebookEditor {} + +// pub struct NotebookControls { +// pane_focused: bool, +// active_item: Option>, +// // subscription: Option, +// } + +// impl NotebookControls { +// pub fn new() -> Self { +// Self { +// pane_focused: false, +// active_item: Default::default(), +// // subscription: Default::default(), +// } +// } +// } + +// impl EventEmitter for NotebookControls {} + +// impl Render for NotebookControls { +// fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { +// div().child("notebook controls") +// } +// } + +// impl ToolbarItemView for NotebookControls { +// fn set_active_pane_item( +// &mut self, +// active_pane_item: Option<&dyn workspace::ItemHandle>, +// cx: &mut ViewContext, +// ) -> workspace::ToolbarItemLocation { +// cx.notify(); +// self.active_item = None; + +// let Some(item) = active_pane_item else { +// return ToolbarItemLocation::Hidden; +// }; + +// ToolbarItemLocation::PrimaryLeft +// } + +// 
fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext) { +// self.pane_focused = pane_focused; +// } +// } + +impl Item for NotebookEditor { + type Event = (); + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + let path = self.path.path.clone(); + + path.file_stem() + .map(|stem| stem.to_string_lossy().into_owned()) + .map(SharedString::from) + } + + fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + Some(IconName::Book.into()) + } + + fn show_toolbar(&self) -> bool { + false + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + // self.is_dirty(cx) + false + } +} + +// TODO: Implement this to allow us to persist to the database, etc: +// impl SerializableItem for NotebookEditor {} + +impl ProjectItem for NotebookEditor { + type Item = NotebookItem; + + fn for_project_item( + project: Model, + item: Model, + cx: &mut ViewContext, + ) -> Self + where + Self: Sized, + { + Self::new(project, item, cx) + } +} diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index 95fcb98ae3..b705a15568 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -56,7 +56,7 @@ use table::TableView; pub mod plain; use plain::TerminalOutput; -mod user_error; +pub(crate) mod user_error; use user_error::ErrorView; use workspace::Workspace; @@ -201,7 +201,7 @@ impl Output { ) } - fn render( + pub fn render( &self, workspace: WeakView, diff --git a/crates/repl/src/repl.rs b/crates/repl/src/repl.rs index b5b791665b..75a3da6456 100644 --- a/crates/repl/src/repl.rs +++ b/crates/repl/src/repl.rs @@ -1,6 +1,7 @@ mod components; mod jupyter_settings; mod kernels; +pub mod notebook; mod outputs; mod repl_editor; mod repl_sessions_ui; diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index 0727f7ed9d..890476f5fe 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -212,6 +212,7 @@ pub enum IconName { LineHeight, Link, ListTree, + ListX, MagnifyingGlass, 
MailOpen, Maximize, @@ -291,6 +292,12 @@ pub enum IconName { ZedXCopilot, } +impl From for Icon { + fn from(icon: IconName) -> Self { + Icon::new(icon) + } +} + #[derive(IntoElement)] pub struct Icon { path: SharedString, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 83f30f3e6d..998289f920 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -418,6 +418,7 @@ fn main() { app_state.languages.set_theme(cx.theme().clone()); editor::init(cx); image_viewer::init(cx); + repl::notebook::init(cx); diagnostics::init(cx); audio::init(Assets, cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 7b630489cf..a5621cfbd8 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3505,6 +3505,7 @@ mod tests { app_state.client.telemetry().clone(), cx, ); + repl::notebook::init(cx); tasks_ui::init(cx); initialize_workspace(app_state.clone(), prompt_builder, cx); search::init(cx); From fc5cde943443b0ea700d7e9f9178463e08d39f36 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 29 Oct 2024 12:35:34 -0600 Subject: [PATCH 58/87] Fix quotes in Rust (#19914) Release Notes: - (preview only) Fixed quote-autoclose in Rust --- crates/editor/src/editor.rs | 30 +++++++++++++-------------- crates/languages/src/rust/config.toml | 6 +++--- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 81ee6a01de..40cdf27f2e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3244,9 +3244,21 @@ impl Editor { } if enabled && pair.start.ends_with(text.as_ref()) { - bracket_pair = Some(pair.clone()); - is_bracket_pair_start = true; - break; + let prefix_len = pair.start.len() - text.len(); + let preceding_text_matches_prefix = prefix_len == 0 + || (selection.start.column >= (prefix_len as u32) + && snapshot.contains_str_at( + Point::new( + selection.start.row, + selection.start.column - (prefix_len as u32), + ), + &pair.start[..prefix_len], + )); + 
if preceding_text_matches_prefix { + bracket_pair = Some(pair.clone()); + is_bracket_pair_start = true; + break; + } } if pair.end.as_str() == text.as_ref() { bracket_pair = Some(pair.clone()); @@ -3263,8 +3275,6 @@ impl Editor { self.use_auto_surround && snapshot_settings.use_auto_surround; if selection.is_empty() { if is_bracket_pair_start { - let prefix_len = bracket_pair.start.len() - text.len(); - // If the inserted text is a suffix of an opening bracket and the // selection is preceded by the rest of the opening bracket, then // insert the closing bracket. @@ -3272,15 +3282,6 @@ impl Editor { .chars_at(selection.start) .next() .map_or(true, |c| scope.should_autoclose_before(c)); - let preceding_text_matches_prefix = prefix_len == 0 - || (selection.start.column >= (prefix_len as u32) - && snapshot.contains_str_at( - Point::new( - selection.start.row, - selection.start.column - (prefix_len as u32), - ), - &bracket_pair.start[..prefix_len], - )); let is_closing_quote = if bracket_pair.end == bracket_pair.start && bracket_pair.start.len() == 1 @@ -3299,7 +3300,6 @@ impl Editor { if autoclose && bracket_pair.close && following_text_allows_autoclose - && preceding_text_matches_prefix && !is_closing_quote { let anchor = snapshot.anchor_before(selection.end); diff --git a/crates/languages/src/rust/config.toml b/crates/languages/src/rust/config.toml index 81b9c1e2d9..96207904f5 100644 --- a/crates/languages/src/rust/config.toml +++ b/crates/languages/src/rust/config.toml @@ -5,9 +5,9 @@ line_comments = ["// ", "/// ", "//! 
"] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, - { start = "r#\"", end = "\"#", close = true, newline = true }, - { start = "r##\"", end = "\"##", close = true, newline = true }, - { start = "r###\"", end = "\"###", close = true, newline = true }, + { start = "r#\"", end = "\"#", close = true, newline = true, not_in = ["string", "comment"] }, + { start = "r##\"", end = "\"##", close = true, newline = true, not_in = ["string", "comment"] }, + { start = "r###\"", end = "\"###", close = true, newline = true, not_in = ["string", "comment"] }, { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] }, From f3b7f5944d9acf324d8dd93b436bf6e60675c45b Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 29 Oct 2024 12:30:55 -0700 Subject: [PATCH 59/87] Fix a rare crash on startup (#19922) Release Notes: - Fixed a rare crash that could happen when certain SQL statements are prepared --- crates/sqlez/src/statement.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/sqlez/src/statement.rs b/crates/sqlez/src/statement.rs index 462f902239..f1d89919ec 100644 --- a/crates/sqlez/src/statement.rs +++ b/crates/sqlez/src/statement.rs @@ -58,13 +58,13 @@ impl<'a> Statement<'a> { &mut remaining_sql_ptr, ); - remaining_sql = CStr::from_ptr(remaining_sql_ptr); - statement.raw_statements.push(raw_statement); - connection.last_error().with_context(|| { format!("Prepare call failed for query:\n{}", query.as_ref()) })?; + remaining_sql = CStr::from_ptr(remaining_sql_ptr); + statement.raw_statements.push(raw_statement); + if !connection.can_write() && sqlite3_stmt_readonly(raw_statement) == 0 { let sql = CStr::from_ptr(sqlite3_sql(raw_statement)); From 9818835c9d7df477c1a4fa06f70921606a7a3484 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 29 Oct 2024 
13:02:21 -0700 Subject: [PATCH 60/87] Fix the log spam from the BlameBuffer request (#19921) Release Notes: - N/A --- crates/editor/src/git/blame.rs | 20 ++++++++------- crates/project/src/buffer_store.rs | 40 ++++++++++++++++++++---------- crates/project/src/project.rs | 13 +--------- crates/proto/proto/zed.proto | 14 ++++++++--- 4 files changed, 49 insertions(+), 38 deletions(-) diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 1ac1345305..9dfc379ae7 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -368,12 +368,15 @@ impl GitBlame { .spawn({ let snapshot = snapshot.clone(); async move { - let Blame { + let Some(Blame { entries, permalinks, messages, remote_url, - } = blame.await?; + }) = blame.await? + else { + return Ok(None); + }; let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row); let commit_details = parse_commit_messages( @@ -385,13 +388,16 @@ impl GitBlame { ) .await; - anyhow::Ok((entries, commit_details)) + anyhow::Ok(Some((entries, commit_details))) } }) .await; this.update(&mut cx, |this, cx| match result { - Ok((entries, commit_details)) => { + Ok(None) => { + // Nothing to do, e.g. no repository found + } + Ok(Some((entries, commit_details))) => { this.buffer_edits = buffer_edits; this.buffer_snapshot = snapshot; this.entries = entries; @@ -410,11 +416,7 @@ impl GitBlame { } else { // If we weren't triggered by a user, we just log errors in the background, instead of sending // notifications. - // Except for `NoRepositoryError`, which can happen often if a user has inline-blame turned on - // and opens a non-git file. 
- if error.downcast_ref::().is_none() { - log::error!("failed to get git blame data: {error:?}"); - } + log::error!("failed to get git blame data: {error:?}"); } }), }) diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 8948ed6ee7..5d8fb3ab39 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -1,7 +1,7 @@ use crate::{ search::SearchQuery, worktree_store::{WorktreeStore, WorktreeStoreEvent}, - Item, NoRepositoryError, ProjectPath, + Item, ProjectPath, }; use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; use anyhow::{anyhow, Context as _, Result}; @@ -1118,7 +1118,7 @@ impl BufferStore { buffer: &Model, version: Option, cx: &AppContext, - ) -> Task> { + ) -> Task>> { let buffer = buffer.read(cx); let Some(file) = File::from_dyn(buffer.file()) else { return Task::ready(Err(anyhow!("buffer has no file"))); @@ -1130,7 +1130,7 @@ impl BufferStore { let blame_params = maybe!({ let (repo_entry, local_repo_entry) = match worktree.repo_for_path(&file.path) { Some(repo_for_path) => repo_for_path, - None => anyhow::bail!(NoRepositoryError {}), + None => return Ok(None), }; let relative_path = repo_entry @@ -1144,13 +1144,16 @@ impl BufferStore { None => buffer.as_rope().clone(), }; - anyhow::Ok((repo, relative_path, content)) + anyhow::Ok(Some((repo, relative_path, content))) }); cx.background_executor().spawn(async move { - let (repo, relative_path, content) = blame_params?; + let Some((repo, relative_path, content)) = blame_params? 
else { + return Ok(None); + }; repo.blame(&relative_path, content) .with_context(|| format!("Failed to blame {:?}", relative_path.0)) + .map(Some) }) } Worktree::Remote(worktree) => { @@ -2112,7 +2115,13 @@ fn is_not_found_error(error: &anyhow::Error) -> bool { .is_some_and(|err| err.kind() == io::ErrorKind::NotFound) } -fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBufferResponse { +fn serialize_blame_buffer_response(blame: Option) -> proto::BlameBufferResponse { + let Some(blame) = blame else { + return proto::BlameBufferResponse { + blame_response: None, + }; + }; + let entries = blame .entries .into_iter() @@ -2154,14 +2163,19 @@ fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBuff .collect::>(); proto::BlameBufferResponse { - entries, - messages, - permalinks, - remote_url: blame.remote_url, + blame_response: Some(proto::blame_buffer_response::BlameResponse { + entries, + messages, + permalinks, + remote_url: blame.remote_url, + }), } } -fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> git::blame::Blame { +fn deserialize_blame_buffer_response( + response: proto::BlameBufferResponse, +) -> Option { + let response = response.blame_response?; let entries = response .entries .into_iter() @@ -2202,10 +2216,10 @@ fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> gi }) .collect::>(); - Blame { + Some(Blame { entries, permalinks, messages, remote_url: response.remote_url, - } + }) } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f5a295a3a3..788de66996 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3420,7 +3420,7 @@ impl Project { buffer: &Model, version: Option, cx: &AppContext, - ) -> Task> { + ) -> Task>> { self.buffer_store.read(cx).blame_buffer(buffer, version, cx) } @@ -4273,17 +4273,6 @@ impl Completion { } } -#[derive(Debug)] -pub struct NoRepositoryError {} - -impl 
std::fmt::Display for NoRepositoryError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "no git repository for worktree found") - } -} - -impl std::error::Error for NoRepositoryError {} - pub fn sort_worktree_entries(entries: &mut [Entry]) { entries.sort_by(|entry_a, entry_b| { compare_paths( diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index d78795eed9..439531ccb3 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -2117,10 +2117,16 @@ message CommitPermalink { } message BlameBufferResponse { - repeated BlameEntry entries = 1; - repeated CommitMessage messages = 2; - repeated CommitPermalink permalinks = 3; - optional string remote_url = 4; + message BlameResponse { + repeated BlameEntry entries = 1; + repeated CommitMessage messages = 2; + repeated CommitPermalink permalinks = 3; + optional string remote_url = 4; + } + + optional BlameResponse blame_response = 5; + + reserved 1 to 4; } message MultiLspQuery { From d310a1269f5da96f6e2fe7cfaa7c27ea3eaf3c7e Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 29 Oct 2024 14:02:32 -0600 Subject: [PATCH 61/87] SSH Remoting: Fix diagnostic summary syncing (#19923) Co-Authored-By: Mikayla Release Notes: - SSH Remoting: Fix diagnostics summary over collab Co-authored-by: Mikayla --- crates/project/src/lsp_store.rs | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index fe39dc0914..6387fc65d2 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4044,6 +4044,20 @@ impl LspStore { .or_default() .insert(server_id, summary); } + if let Some((downstream_client, project_id)) = &this.downstream_client { + downstream_client + .send(proto::UpdateDiagnosticSummary { + project_id: *project_id, + worktree_id: worktree_id.to_proto(), + summary: Some(proto::DiagnosticSummary { + path: 
project_path.path.to_string_lossy().to_string(), + language_server_id: server_id.0 as u64, + error_count: summary.error_count as u32, + warning_count: summary.warning_count as u32, + }), + }) + .log_err(); + } cx.emit(LspStoreEvent::DiagnosticsUpdated { language_server_id: LanguageServerId(message.language_server_id as usize), path: project_path, @@ -5915,7 +5929,6 @@ impl LspStore { let adapter = adapter.clone(); if let Some(this) = this.upgrade() { adapter.process_diagnostics(&mut params); - // Everything else has to be on the server, Can we make it on the client? this.update(&mut cx, |this, cx| { this.update_diagnostics( server_id, From 5b7fa05a870c3fcf8889f3f485670e47811fed35 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 29 Oct 2024 16:19:05 -0400 Subject: [PATCH 62/87] Make Git remote URL parsing more robust (#19924) This PR improves the parsing of Git remote URLs in order to make features that depend on them more robust. Previously we were just treating these as plain strings and doing one-off shotgun parsing to massage them into the right format. This meant that we weren't accounting for edge cases in URL structure. One of these cases was HTTPS Git URLs containing a username, which can arise when using GitHub Enterprise (see https://github.com/zed-industries/zed/issues/11160). We now have a `RemoteUrl` typed to represent a parsed Git remote URL and use the `Url` parser to parse it. Release Notes: - Improved the parsing of Git remote URLs to support additional scenarios. 
--- crates/git/src/git.rs | 16 ++-- crates/git/src/hosting_provider.rs | 10 +-- crates/git/src/remote.rs | 85 +++++++++++++++++++ .../src/providers/bitbucket.rs | 53 +++++++----- .../src/providers/codeberg.rs | 49 ++++++----- .../src/providers/gitee.rs | 54 +++++++----- .../src/providers/github.rs | 69 +++++++++------ .../src/providers/gitlab.rs | 64 +++++++------- .../src/providers/sourcehut.rs | 65 ++++++++------ 9 files changed, 304 insertions(+), 161 deletions(-) create mode 100644 crates/git/src/remote.rs diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index fb204fba82..cf07b74ac5 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -1,4 +1,10 @@ +pub mod blame; +pub mod commit; +pub mod diff; mod hosting_provider; +mod remote; +pub mod repository; +pub mod status; use anyhow::{anyhow, Context, Result}; use serde::{Deserialize, Serialize}; @@ -7,15 +13,9 @@ use std::fmt; use std::str::FromStr; use std::sync::LazyLock; -pub use git2 as libgit; - pub use crate::hosting_provider::*; - -pub mod blame; -pub mod commit; -pub mod diff; -pub mod repository; -pub mod status; +pub use crate::remote::*; +pub use git2 as libgit; pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies")); diff --git a/crates/git/src/hosting_provider.rs b/crates/git/src/hosting_provider.rs index 72ed92e8ab..4afbcf42a4 100644 --- a/crates/git/src/hosting_provider.rs +++ b/crates/git/src/hosting_provider.rs @@ -69,7 +69,7 @@ pub trait GitHostingProvider { /// Returns a formatted range of line numbers to be placed in a permalink URL. 
fn format_line_numbers(&self, start_line: u32, end_line: u32) -> String; - fn parse_remote_url<'a>(&self, url: &'a str) -> Option>; + fn parse_remote_url(&self, url: &str) -> Option; fn extract_pull_request( &self, @@ -159,10 +159,10 @@ impl GitHostingProviderRegistry { } } -#[derive(Debug)] -pub struct ParsedGitRemote<'a> { - pub owner: &'a str, - pub repo: &'a str, +#[derive(Debug, PartialEq)] +pub struct ParsedGitRemote { + pub owner: Arc, + pub repo: Arc, } pub fn parse_git_remote_url( diff --git a/crates/git/src/remote.rs b/crates/git/src/remote.rs new file mode 100644 index 0000000000..430836fcf3 --- /dev/null +++ b/crates/git/src/remote.rs @@ -0,0 +1,85 @@ +use derive_more::Deref; +use url::Url; + +/// The URL to a Git remote. +#[derive(Debug, PartialEq, Eq, Clone, Deref)] +pub struct RemoteUrl(Url); + +impl std::str::FromStr for RemoteUrl { + type Err = url::ParseError; + + fn from_str(input: &str) -> Result { + if input.starts_with("git@") { + // Rewrite remote URLs like `git@github.com:user/repo.git` to `ssh://git@github.com/user/repo.git` + let ssh_url = input.replacen(':', "/", 1).replace("git@", "ssh://git@"); + Ok(RemoteUrl(Url::parse(&ssh_url)?)) + } else { + Ok(RemoteUrl(Url::parse(input)?)) + } + } +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_parsing_valid_remote_urls() { + let valid_urls = vec![ + ( + "https://github.com/octocat/zed.git", + "https", + "github.com", + "/octocat/zed.git", + ), + ( + "git@github.com:octocat/zed.git", + "ssh", + "github.com", + "/octocat/zed.git", + ), + ( + "ssh://git@github.com/octocat/zed.git", + "ssh", + "github.com", + "/octocat/zed.git", + ), + ( + "file:///path/to/local/zed", + "file", + "", + "/path/to/local/zed", + ), + ]; + + for (input, expected_scheme, expected_host, expected_path) in valid_urls { + let parsed = input.parse::().expect("failed to parse URL"); + let url = parsed.0; + assert_eq!( + url.scheme(), + expected_scheme, + "unexpected 
scheme for {input:?}", + ); + assert_eq!( + url.host_str().unwrap_or(""), + expected_host, + "unexpected host for {input:?}", + ); + assert_eq!(url.path(), expected_path, "unexpected path for {input:?}"); + } + } + + #[test] + fn test_parsing_invalid_remote_urls() { + let invalid_urls = vec!["not_a_url", "http://"]; + + for url in invalid_urls { + assert!( + url.parse::().is_err(), + "expected \"{url}\" to not parse as a Git remote URL", + ); + } + } +} diff --git a/crates/git_hosting_providers/src/providers/bitbucket.rs b/crates/git_hosting_providers/src/providers/bitbucket.rs index 50c453442f..da95f256da 100644 --- a/crates/git_hosting_providers/src/providers/bitbucket.rs +++ b/crates/git_hosting_providers/src/providers/bitbucket.rs @@ -1,6 +1,11 @@ +use std::str::FromStr; + use url::Url; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; pub struct Bitbucket; @@ -25,18 +30,22 @@ impl GitHostingProvider for Bitbucket { format!("lines-{start_line}:{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.contains("bitbucket.org") { - let (_, repo_with_owner) = url.trim_end_matches(".git").split_once("bitbucket.org")?; - let (owner, repo) = repo_with_owner - .trim_start_matches('/') - .trim_start_matches(':') - .split_once('/')?; + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "bitbucket.org" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -88,8 +97,8 @@ mod tests { let url = 
"https://thorstenballzed@bitbucket.org/thorstenzed/testingrepo.git"; let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner, "thorstenzed"); - assert_eq!(parsed.repo, "testingrepo"); + assert_eq!(parsed.owner.as_ref(), "thorstenzed"); + assert_eq!(parsed.repo.as_ref(), "testingrepo"); } #[test] @@ -99,8 +108,8 @@ mod tests { let url = "https://bitbucket.org/thorstenzed/testingrepo.git"; let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner, "thorstenzed"); - assert_eq!(parsed.repo, "testingrepo"); + assert_eq!(parsed.owner.as_ref(), "thorstenzed"); + assert_eq!(parsed.repo.as_ref(), "testingrepo"); } #[test] @@ -110,15 +119,15 @@ mod tests { let url = "git@bitbucket.org:thorstenzed/testingrepo.git"; let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner, "thorstenzed"); - assert_eq!(parsed.repo, "testingrepo"); + assert_eq!(parsed.owner.as_ref(), "thorstenzed"); + assert_eq!(parsed.repo.as_ref(), "testingrepo"); } #[test] fn test_build_bitbucket_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "thorstenzed", - repo: "testingrepo", + owner: "thorstenzed".into(), + repo: "testingrepo".into(), }; let permalink = Bitbucket.build_permalink( remote, @@ -136,8 +145,8 @@ mod tests { #[test] fn test_build_bitbucket_permalink_from_ssh_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "thorstenzed", - repo: "testingrepo", + owner: "thorstenzed".into(), + repo: "testingrepo".into(), }; let permalink = Bitbucket.build_permalink( remote, @@ -156,8 +165,8 @@ mod tests { #[test] fn test_build_bitbucket_permalink_from_ssh_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "thorstenzed", - repo: "testingrepo", + owner: "thorstenzed".into(), + repo: 
"testingrepo".into(), }; let permalink = Bitbucket.build_permalink( remote, diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index 3f6a016f68..afd1c564aa 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -1,3 +1,4 @@ +use std::str::FromStr; use std::sync::Arc; use anyhow::{bail, Context, Result}; @@ -9,6 +10,7 @@ use url::Url; use git::{ BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote, + RemoteUrl, }; #[derive(Debug, Deserialize)] @@ -103,19 +105,22 @@ impl GitHostingProvider for Codeberg { format!("L{start_line}-L{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@codeberg.org:") || url.starts_with("https://codeberg.org/") { - let repo_with_owner = url - .trim_start_matches("git@codeberg.org:") - .trim_start_matches("https://codeberg.org/") - .trim_end_matches(".git"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "codeberg.org" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -175,8 +180,8 @@ mod tests { #[test] fn test_build_codeberg_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Codeberg.build_permalink( remote, @@ -194,8 +199,8 @@ mod tests { #[test] fn test_build_codeberg_permalink_from_ssh_url_single_line_selection() { let remote = ParsedGitRemote { - owner: 
"rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Codeberg.build_permalink( remote, @@ -213,8 +218,8 @@ mod tests { #[test] fn test_build_codeberg_permalink_from_ssh_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Codeberg.build_permalink( remote, @@ -232,8 +237,8 @@ mod tests { #[test] fn test_build_codeberg_permalink_from_https_url() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Codeberg.build_permalink( remote, @@ -251,8 +256,8 @@ mod tests { #[test] fn test_build_codeberg_permalink_from_https_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Codeberg.build_permalink( remote, @@ -270,8 +275,8 @@ mod tests { #[test] fn test_build_codeberg_permalink_from_https_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Codeberg.build_permalink( remote, diff --git a/crates/git_hosting_providers/src/providers/gitee.rs b/crates/git_hosting_providers/src/providers/gitee.rs index 34d1da262d..2333964e16 100644 --- a/crates/git_hosting_providers/src/providers/gitee.rs +++ b/crates/git_hosting_providers/src/providers/gitee.rs @@ -1,6 +1,11 @@ +use std::str::FromStr; + use url::Url; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; pub struct Gitee; @@ -25,19 +30,22 @@ impl GitHostingProvider for Gitee { format!("L{start_line}-{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a 
str) -> Option> { - if url.starts_with("git@gitee.com:") || url.starts_with("https://gitee.com/") { - let repo_with_owner = url - .trim_start_matches("git@gitee.com:") - .trim_start_matches("https://gitee.com/") - .trim_end_matches(".git"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "gitee.com" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -81,8 +89,8 @@ mod tests { #[test] fn test_build_gitee_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", + owner: "libkitten".into(), + repo: "zed".into(), }; let permalink = Gitee.build_permalink( remote, @@ -100,8 +108,8 @@ mod tests { #[test] fn test_build_gitee_permalink_from_ssh_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", + owner: "libkitten".into(), + repo: "zed".into(), }; let permalink = Gitee.build_permalink( remote, @@ -119,8 +127,8 @@ mod tests { #[test] fn test_build_gitee_permalink_from_ssh_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", + owner: "libkitten".into(), + repo: "zed".into(), }; let permalink = Gitee.build_permalink( remote, @@ -138,8 +146,8 @@ mod tests { #[test] fn test_build_gitee_permalink_from_https_url() { let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", + owner: "libkitten".into(), + repo: "zed".into(), }; let permalink = Gitee.build_permalink( remote, @@ -157,8 +165,8 @@ mod tests { #[test] fn test_build_gitee_permalink_from_https_url_single_line_selection() { let remote = ParsedGitRemote { - owner: 
"libkitten", - repo: "zed", + owner: "libkitten".into(), + repo: "zed".into(), }; let permalink = Gitee.build_permalink( remote, @@ -176,8 +184,8 @@ mod tests { #[test] fn test_build_gitee_permalink_from_https_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", + owner: "libkitten".into(), + repo: "zed".into(), }; let permalink = Gitee.build_permalink( remote, diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 4078025fa0..1b9d200a7c 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -1,3 +1,4 @@ +use std::str::FromStr; use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; @@ -10,7 +11,7 @@ use url::Url; use git::{ BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote, - PullRequest, + PullRequest, RemoteUrl, }; fn pull_request_number_regex() -> &'static Regex { @@ -107,19 +108,22 @@ impl GitHostingProvider for Github { format!("L{start_line}-L{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@github.com:") || url.starts_with("https://github.com/") { - let repo_with_owner = url - .trim_start_matches("git@github.com:") - .trim_start_matches("https://github.com/") - .trim_end_matches(".git"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "github.com" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -198,11 +202,26 @@ mod tests { use super::*; + #[test] 
+ fn test_parse_remote_url_given_https_url_with_username() { + let parsed_remote = Github + .parse_remote_url("https://jlannister@github.com/some-org/some-repo.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "some-org".into(), + repo: "some-repo".into(), + } + ); + } + #[test] fn test_build_github_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, @@ -220,8 +239,8 @@ mod tests { #[test] fn test_build_github_permalink_from_ssh_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, @@ -239,8 +258,8 @@ mod tests { #[test] fn test_build_github_permalink_from_ssh_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, @@ -258,8 +277,8 @@ mod tests { #[test] fn test_build_github_permalink_from_https_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, @@ -277,8 +296,8 @@ mod tests { #[test] fn test_build_github_permalink_from_https_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, @@ -296,8 +315,8 @@ mod tests { #[test] fn test_build_github_permalink_from_https_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, @@ -315,8 +334,8 @@ mod 
tests { #[test] fn test_github_pull_requests() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let message = "This does not contain a pull request"; diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index a8b97182c0..bf97fd4d67 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -1,8 +1,13 @@ +use std::str::FromStr; + use anyhow::{anyhow, bail, Result}; use url::Url; use util::maybe; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; #[derive(Debug)] pub struct Gitlab { @@ -64,21 +69,22 @@ impl GitHostingProvider for Gitlab { format!("L{start_line}-{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - let host = self.base_url.host_str()?; + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - if url.starts_with(&format!("git@{host}")) || url.starts_with(&format!("https://{host}/")) { - let repo_with_owner = url - .trim_start_matches(&format!("git@{host}:")) - .trim_start_matches(&format!("https://{host}/")) - .trim_end_matches(".git"); - - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != self.base_url.host_str()? 
{ + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -127,8 +133,8 @@ mod tests { #[test] fn test_build_gitlab_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Gitlab::new().build_permalink( remote, @@ -146,8 +152,8 @@ mod tests { #[test] fn test_build_gitlab_permalink_from_ssh_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Gitlab::new().build_permalink( remote, @@ -165,8 +171,8 @@ mod tests { #[test] fn test_build_gitlab_permalink_from_ssh_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Gitlab::new().build_permalink( remote, @@ -184,8 +190,8 @@ mod tests { #[test] fn test_build_gitlab_permalink_from_https_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Gitlab::new().build_permalink( remote, @@ -203,8 +209,8 @@ mod tests { #[test] fn test_build_gitlab_permalink_from_https_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Gitlab::new().build_permalink( remote, @@ -222,8 +228,8 @@ mod tests { #[test] fn test_build_gitlab_permalink_from_https_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = 
Gitlab::new().build_permalink( remote, @@ -241,8 +247,8 @@ mod tests { #[test] fn test_build_gitlab_self_hosted_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let gitlab = Gitlab::from_remote_url("git@gitlab.some-enterprise.com:zed-industries/zed.git") @@ -263,8 +269,8 @@ mod tests { #[test] fn test_build_gitlab_self_hosted_permalink_from_https_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let gitlab = Gitlab::from_remote_url("https://gitlab-instance.big-co.com/zed-industries/zed.git") diff --git a/crates/git_hosting_providers/src/providers/sourcehut.rs b/crates/git_hosting_providers/src/providers/sourcehut.rs index 623b23ab6c..99ab53c8a3 100644 --- a/crates/git_hosting_providers/src/providers/sourcehut.rs +++ b/crates/git_hosting_providers/src/providers/sourcehut.rs @@ -1,6 +1,11 @@ +use std::str::FromStr; + use url::Url; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; pub struct Sourcehut; @@ -25,21 +30,27 @@ impl GitHostingProvider for Sourcehut { format!("L{start_line}-{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@git.sr.ht:") || url.starts_with("https://git.sr.ht/") { - // sourcehut indicates a repo with '.git' suffix as a separate repo. - // For example, "git@git.sr.ht:~username/repo" and "git@git.sr.ht:~username/repo.git" - // are two distinct repositories. 
- let repo_with_owner = url - .trim_start_matches("git@git.sr.ht:~") - .trim_start_matches("https://git.sr.ht/~"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "git.sr.ht" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + // We don't trim the `.git` suffix here like we do elsewhere, as + // sourcehut treats a repo with `.git` suffix as a separate repo. + // + // For example, `git@git.sr.ht:~username/repo` and `git@git.sr.ht:~username/repo.git` + // are two distinct repositories. + let repo = path_segments.next()?; + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -83,8 +94,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Sourcehut.build_permalink( remote, @@ -102,8 +113,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_ssh_url_with_git_prefix() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed.git", + owner: "rajveermalviya".into(), + repo: "zed.git".into(), }; let permalink = Sourcehut.build_permalink( remote, @@ -121,8 +132,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_ssh_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Sourcehut.build_permalink( remote, @@ -140,8 +151,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_ssh_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; 
let permalink = Sourcehut.build_permalink( remote, @@ -159,8 +170,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_https_url() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Sourcehut.build_permalink( remote, @@ -178,8 +189,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_https_url_single_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Sourcehut.build_permalink( remote, @@ -197,8 +208,8 @@ mod tests { #[test] fn test_build_sourcehut_permalink_from_https_url_multi_line_selection() { let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", + owner: "rajveermalviya".into(), + repo: "zed".into(), }; let permalink = Sourcehut.build_permalink( remote, From fb97e462dec8238c02059e78bf7e00b3ca44dbf7 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 29 Oct 2024 14:43:34 -0600 Subject: [PATCH 63/87] Better handle interrupted connections for shared SSH (#19925) Co-Authored-By: Mikayla --- crates/remote/src/ssh_session.rs | 34 ++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index d578e2eadd..ff6dfd7751 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1288,6 +1288,7 @@ impl SshRemoteConnection { ) -> Result { use futures::AsyncWriteExt as _; use futures::{io::BufReader, AsyncBufReadExt as _}; + use smol::net::unix::UnixStream; use smol::{fs::unix::PermissionsExt as _, net::unix::UnixListener}; use util::ResultExt as _; @@ -1304,6 +1305,9 @@ impl SshRemoteConnection { let listener = UnixListener::bind(&askpass_socket).context("failed to create askpass socket")?; + let (askpass_kill_master_tx, askpass_kill_master_rx) = oneshot::channel::(); + let mut kill_tx = 
Some(askpass_kill_master_tx); + let askpass_task = cx.spawn({ let delegate = delegate.clone(); |mut cx| async move { @@ -1327,6 +1331,11 @@ impl SshRemoteConnection { .log_err() { stream.write_all(password.as_bytes()).await.log_err(); + } else { + if let Some(kill_tx) = kill_tx.take() { + kill_tx.send(stream).log_err(); + break; + } } } } @@ -1347,6 +1356,7 @@ impl SshRemoteConnection { // the connection and keep it open, allowing other ssh commands to reuse it // via a control socket. let socket_path = temp_dir.path().join("ssh.sock"); + let mut master_process = process::Command::new("ssh") .stdin(Stdio::null()) .stdout(Stdio::piped()) @@ -1369,20 +1379,28 @@ impl SshRemoteConnection { // Wait for this ssh process to close its stdout, indicating that authentication // has completed. - let stdout = master_process.stdout.as_mut().unwrap(); + let mut stdout = master_process.stdout.take().unwrap(); let mut output = Vec::new(); let connection_timeout = Duration::from_secs(10); let result = select_biased! { _ = askpass_opened_rx.fuse() => { - // If the askpass script has opened, that means the user is typing - // their password, in which case we don't want to timeout anymore, - // since we know a connection has been established. - stdout.read_to_end(&mut output).await?; - Ok(()) + select_biased! { + stream = askpass_kill_master_rx.fuse() => { + master_process.kill().ok(); + drop(stream); + Err(anyhow!("SSH connection canceled")) + } + // If the askpass script has opened, that means the user is typing + // their password, in which case we don't want to timeout anymore, + // since we know a connection has been established. 
+ result = stdout.read_to_end(&mut output).fuse() => { + result?; + Ok(()) + } + } } - result = stdout.read_to_end(&mut output).fuse() => { - result?; + _ = stdout.read_to_end(&mut output).fuse() => { Ok(()) } _ = futures::FutureExt::fuse(smol::Timer::after(connection_timeout)) => { From 518f6b529b2698208587cd5c42b2279f2d8ca585 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 29 Oct 2024 13:47:43 -0700 Subject: [PATCH 64/87] Fix missing diagnostic and text highlights after blocks (#19920) Release Notes: - Fixed an issue where diagnostic underlines and certain text highlights were not rendered correctly below block decorations such as the inline assistant prompt. Co-authored-by: Antonio Co-authored-by: Richard --- crates/editor/src/display_map.rs | 128 ++++++++++++++++++++- crates/editor/src/display_map/inlay_map.rs | 16 +++ crates/language/src/buffer.rs | 4 + 3 files changed, 146 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index c176213682..c11cbec328 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1260,16 +1260,21 @@ pub mod tests { use super::*; use crate::{movement, test::marked_display_snapshot}; use block_map::BlockPlacement; - use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla}; + use gpui::{ + div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla, Rgba, + }; use language::{ language_settings::{AllLanguageSettings, AllLanguageSettingsContent}, - Buffer, Language, LanguageConfig, LanguageMatcher, + Buffer, Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageConfig, + LanguageMatcher, }; + use lsp::LanguageServerId; use project::Project; use rand::{prelude::*, Rng}; use settings::SettingsStore; use smol::stream::StreamExt; use std::{env, sync::Arc}; + use text::PointUtf16; use theme::{LoadThemes, SyntaxTheme}; use unindent::Unindent as _; use util::test::{marked_text_ranges, 
sample_text}; @@ -1924,6 +1929,125 @@ pub mod tests { ); } + #[gpui::test] + async fn test_chunks_with_diagnostics_across_blocks(cx: &mut gpui::TestAppContext) { + cx.background_executor + .set_block_on_ticks(usize::MAX..=usize::MAX); + + let text = r#" + struct A { + b: usize; + } + const c: usize = 1; + "# + .unindent(); + + cx.update(|cx| init_test(cx, |_| {})); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx)); + + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics( + LanguageServerId(0), + DiagnosticSet::new( + [DiagnosticEntry { + range: PointUtf16::new(0, 0)..PointUtf16::new(2, 1), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + group_id: 1, + message: "hi".into(), + ..Default::default() + }, + }], + buffer, + ), + cx, + ) + }); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + + let map = cx.new_model(|cx| { + DisplayMap::new( + buffer, + font("Courier"), + px(16.0), + None, + true, + 1, + 1, + 0, + FoldPlaceholder::test(), + cx, + ) + }); + + let black = gpui::black().to_rgb(); + let red = gpui::red().to_rgb(); + + // Insert a block in the middle of a multi-line diagnostic. 
+ map.update(cx, |map, cx| { + map.highlight_text( + TypeId::of::(), + vec![ + buffer_snapshot.anchor_before(Point::new(3, 9)) + ..buffer_snapshot.anchor_after(Point::new(3, 14)), + buffer_snapshot.anchor_before(Point::new(3, 17)) + ..buffer_snapshot.anchor_after(Point::new(3, 18)), + ], + red.into(), + ); + map.insert_blocks( + [BlockProperties { + placement: BlockPlacement::Below( + buffer_snapshot.anchor_before(Point::new(1, 0)), + ), + height: 1, + style: BlockStyle::Sticky, + render: Box::new(|_| div().into_any()), + priority: 0, + }], + cx, + ) + }); + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + let mut chunks = Vec::<(String, Option, Rgba)>::new(); + for chunk in snapshot.chunks(DisplayRow(0)..DisplayRow(5), true, Default::default()) { + let color = chunk + .highlight_style + .and_then(|style| style.color) + .map_or(black, |color| color.to_rgb()); + if let Some((last_chunk, last_severity, last_color)) = chunks.last_mut() { + if *last_severity == chunk.diagnostic_severity && *last_color == color { + last_chunk.push_str(chunk.text); + continue; + } + } + + chunks.push((chunk.text.to_string(), chunk.diagnostic_severity, color)); + } + + assert_eq!( + chunks, + [ + ( + "struct A {\n b: usize;\n".into(), + Some(DiagnosticSeverity::ERROR), + black + ), + ("\n".into(), None, black), + ("}".into(), Some(DiagnosticSeverity::ERROR), black), + ("\nconst c: ".into(), None, black), + ("usize".into(), None, red), + (" = ".into(), None, black), + ("1".into(), None, red), + (";\n".into(), None, black), + ] + ); + } + // todo(linux) fails due to pixel differences in text rendering #[cfg(target_os = "macos")] #[gpui::test] diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index d4e39f2df9..673b9383bc 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -255,6 +255,22 @@ impl<'a> InlayChunks<'a> { self.buffer_chunk = None; self.output_offset = 
new_range.start; self.max_output_offset = new_range.end; + + let mut highlight_endpoints = Vec::new(); + if let Some(text_highlights) = self.highlights.text_highlights { + if !text_highlights.is_empty() { + self.snapshot.apply_text_highlights( + &mut self.transforms, + &new_range, + text_highlights, + &mut highlight_endpoints, + ); + self.transforms.seek(&new_range.start, Bias::Right, &()); + highlight_endpoints.sort(); + } + } + self.highlight_endpoints = highlight_endpoints.into_iter().peekable(); + self.active_highlights.clear(); } pub fn offset(&self) -> InlayOffset { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 62f2f370b0..b41ca08c2d 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -4103,6 +4103,10 @@ impl<'a> BufferChunks<'a> { diagnostic_endpoints .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); *diagnostics = diagnostic_endpoints.into_iter().peekable(); + self.hint_depth = 0; + self.error_depth = 0; + self.warning_depth = 0; + self.information_depth = 0; } } } From 90edb7189fa7575039411fb182c1e29dfbe2f5af Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 29 Oct 2024 17:24:32 -0400 Subject: [PATCH 65/87] git_hosting_providers: Clean up tests (#19927) This PR cleans up the tests for the various Git hosting providers. These tests had rotted a bit over time, to the point that some of them weren't even testing what they claimed anymore. 
Release Notes: - N/A --- Cargo.lock | 2 +- crates/git_hosting_providers/Cargo.toml | 2 +- .../src/providers/bitbucket.rs | 104 ++++++------ .../src/providers/codeberg.rs | 128 ++++++-------- .../src/providers/gitee.rs | 128 ++++++-------- .../src/providers/github.rs | 123 +++++++------- .../src/providers/gitlab.rs | 156 ++++++++--------- .../src/providers/sourcehut.rs | 158 ++++++++---------- 8 files changed, 356 insertions(+), 445 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4d95eee098..9c73baad36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4909,11 +4909,11 @@ dependencies = [ "git", "gpui", "http_client", + "indoc", "pretty_assertions", "regex", "serde", "serde_json", - "unindent", "url", "util", ] diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index eac30b72d9..be0ca56eef 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -25,6 +25,6 @@ url.workspace = true util.workspace = true [dev-dependencies] -unindent.workspace = true +indoc.workspace = true serde_json.workspace = true pretty_assertions.workspace = true diff --git a/crates/git_hosting_providers/src/providers/bitbucket.rs b/crates/git_hosting_providers/src/providers/bitbucket.rs index da95f256da..59be1713e7 100644 --- a/crates/git_hosting_providers/src/providers/bitbucket.rs +++ b/crates/git_hosting_providers/src/providers/bitbucket.rs @@ -84,53 +84,62 @@ impl GitHostingProvider for Bitbucket { #[cfg(test)] mod tests { - use std::sync::Arc; - - use git::{parse_git_remote_url, GitHostingProviderRegistry}; + use pretty_assertions::assert_eq; use super::*; #[test] - fn test_parse_git_remote_url_bitbucket_https_with_username() { - let provider_registry = Arc::new(GitHostingProviderRegistry::new()); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - let url = "https://thorstenballzed@bitbucket.org/thorstenzed/testingrepo.git"; - let (provider, parsed) = 
parse_git_remote_url(provider_registry, url).unwrap(); - assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner.as_ref(), "thorstenzed"); - assert_eq!(parsed.repo.as_ref(), "testingrepo"); + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Bitbucket + .parse_remote_url("git@bitbucket.org:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_parse_git_remote_url_bitbucket_https_without_username() { - let provider_registry = Arc::new(GitHostingProviderRegistry::new()); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - let url = "https://bitbucket.org/thorstenzed/testingrepo.git"; - let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); - assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner.as_ref(), "thorstenzed"); - assert_eq!(parsed.repo.as_ref(), "testingrepo"); + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Bitbucket + .parse_remote_url("https://bitbucket.org/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_parse_git_remote_url_bitbucket_git() { - let provider_registry = Arc::new(GitHostingProviderRegistry::new()); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - let url = "git@bitbucket.org:thorstenzed/testingrepo.git"; - let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); - assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner.as_ref(), "thorstenzed"); - assert_eq!(parsed.repo.as_ref(), "testingrepo"); + fn test_parse_remote_url_given_https_url_with_username() { + let parsed_remote = Bitbucket + .parse_remote_url("https://thorstenballzed@bitbucket.org/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + 
owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_bitbucket_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "thorstenzed".into(), - repo: "testingrepo".into(), - }; + fn test_build_bitbucket_permalink() { let permalink = Bitbucket.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "f00b4r", path: "main.rs", @@ -138,18 +147,17 @@ mod tests { }, ); - let expected_url = "https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs"; + let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_bitbucket_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "thorstenzed".into(), - repo: "testingrepo".into(), - }; + fn test_build_bitbucket_permalink_with_single_line_selection() { let permalink = Bitbucket.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "f00b4r", path: "main.rs", @@ -157,19 +165,17 @@ mod tests { }, ); - let expected_url = - "https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs#lines-7"; + let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_bitbucket_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "thorstenzed".into(), - repo: "testingrepo".into(), - }; + fn test_build_bitbucket_permalink_with_multi_line_selection() { let permalink = Bitbucket.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "f00b4r", path: "main.rs", @@ -178,7 +184,7 @@ mod tests { ); let expected_url = - 
"https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs#lines-24:48"; + "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-24:48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index afd1c564aa..cb917823c5 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -175,16 +175,47 @@ impl GitHostingProvider for Codeberg { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_codeberg_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Codeberg + .parse_remote_url("git@codeberg.org:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Codeberg + .parse_remote_url("https://codeberg.org/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_build_codeberg_permalink() { let permalink = Codeberg.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -192,18 +223,17 @@ mod tests { }, ); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs"; + let expected_url = 
"https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_codeberg_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; + fn test_build_codeberg_permalink_with_single_line_selection() { let permalink = Codeberg.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -211,18 +241,17 @@ mod tests { }, ); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7"; + let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_codeberg_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; + fn test_build_codeberg_permalink_with_multi_line_selection() { let permalink = Codeberg.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -230,64 +259,7 @@ mod tests { }, ); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_codeberg_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: 
"zed".into(), - }; - let permalink = Codeberg.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", - selection: None, - }, - ); - - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_codeberg_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; - let permalink = Codeberg.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", - selection: Some(6..6), - }, - ); - - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_codeberg_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; - let permalink = Codeberg.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", - selection: Some(23..47), - }, - ); - - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs#L24-L48"; + let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/gitee.rs b/crates/git_hosting_providers/src/providers/gitee.rs index 2333964e16..5090cd0d74 100644 --- a/crates/git_hosting_providers/src/providers/gitee.rs +++ 
b/crates/git_hosting_providers/src/providers/gitee.rs @@ -84,16 +84,47 @@ impl GitHostingProvider for Gitee { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_gitee_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "libkitten".into(), - repo: "zed".into(), - }; + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Gitee + .parse_remote_url("git@gitee.com:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Gitee + .parse_remote_url("https://gitee.com/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_build_gitee_permalink() { let permalink = Gitee.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", path: "crates/editor/src/git/permalink.rs", @@ -101,18 +132,17 @@ mod tests { }, ); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs"; + let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_gitee_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten".into(), - repo: "zed".into(), - }; + fn test_build_gitee_permalink_with_single_line_selection() { let permalink = Gitee.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", 
path: "crates/editor/src/git/permalink.rs", @@ -120,18 +150,17 @@ mod tests { }, ); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7"; + let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_gitee_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten".into(), - repo: "zed".into(), - }; + fn test_build_gitee_permalink_with_multi_line_selection() { let permalink = Gitee.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", path: "crates/editor/src/git/permalink.rs", @@ -139,64 +168,7 @@ mod tests { }, ); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_gitee_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "libkitten".into(), - repo: "zed".into(), - }; - let permalink = Gitee.build_permalink( - remote, - BuildPermalinkParams { - sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/zed/src/main.rs", - selection: None, - }, - ); - - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_gitee_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten".into(), - repo: "zed".into(), - }; - let permalink = Gitee.build_permalink( - remote, - BuildPermalinkParams { - sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: 
"crates/zed/src/main.rs", - selection: Some(6..6), - }, - ); - - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_gitee_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten".into(), - repo: "zed".into(), - }; - let permalink = Gitee.build_permalink( - remote, - BuildPermalinkParams { - sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/zed/src/main.rs", - selection: Some(23..47), - }, - ); - - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L24-48"; + let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 1b9d200a7c..cbd1cc73a8 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -197,11 +197,41 @@ impl GitHostingProvider for Github { #[cfg(test)] mod tests { - // TODO: Replace with `indoc`. 
- use unindent::Unindent; + use indoc::indoc; + use pretty_assertions::assert_eq; use super::*; + #[test] + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Github + .parse_remote_url("git@github.com:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Github + .parse_remote_url("https://github.com/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + #[test] fn test_parse_remote_url_given_https_url_with_username() { let parsed_remote = Github @@ -237,51 +267,12 @@ mod tests { } #[test] - fn test_build_github_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; + fn test_build_github_permalink() { let permalink = Github.build_permalink( - remote, - BuildPermalinkParams { - sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", - path: "crates/editor/src/git/permalink.rs", - selection: Some(6..6), + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), }, - ); - - let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_github_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; - let permalink = Github.build_permalink( - remote, - BuildPermalinkParams { - sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", - path: "crates/editor/src/git/permalink.rs", - selection: Some(23..47), - }, - ); - - let expected_url = 
"https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-L48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_github_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; - let permalink = Github.build_permalink( - remote, BuildPermalinkParams { sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", path: "crates/zed/src/main.rs", @@ -294,40 +285,38 @@ mod tests { } #[test] - fn test_build_github_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; + fn test_build_github_permalink_with_single_line_selection() { let permalink = Github.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", + sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", + path: "crates/editor/src/git/permalink.rs", selection: Some(6..6), }, ); - let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L7"; + let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_github_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; + fn test_build_github_permalink_with_multi_line_selection() { let permalink = Github.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", + sha: 
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", + path: "crates/editor/src/git/permalink.rs", selection: Some(23..47), }, ); - let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-L48"; + let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-L48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } @@ -342,7 +331,7 @@ mod tests { assert!(Github.extract_pull_request(&remote, message).is_none()); // Pull request number at end of first line - let message = r#" + let message = indoc! {r#" project panel: do not expand collapsed worktrees on "collapse all entries" (#10687) Fixes #10597 @@ -351,7 +340,7 @@ mod tests { - Fixed "project panel: collapse all entries" expanding collapsed worktrees. "# - .unindent(); + }; assert_eq!( Github @@ -363,12 +352,12 @@ mod tests { ); // Pull request number in middle of line, which we want to ignore - let message = r#" + let message = indoc! {r#" Follow-up to #10687 to fix problems See the original PR, this is a fix. 
"# - .unindent(); + }; assert_eq!(Github.extract_pull_request(&remote, &message), None); } } diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index bf97fd4d67..1e7bdbb88b 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -131,13 +131,60 @@ mod tests { use super::*; #[test] - fn test_build_gitlab_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Gitlab::new() + .parse_remote_url("git@gitlab.com:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Gitlab::new() + .parse_remote_url("https://gitlab.com/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_ssh_url() { + let remote_url = "git@gitlab.my-enterprise.com:zed-industries/zed.git"; + + let parsed_remote = Gitlab::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_build_gitlab_permalink() { let permalink = Gitlab::new().build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -150,13 +197,12 @@ mod tests { } #[test] - fn test_build_gitlab_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: 
"zed-industries".into(), - repo: "zed".into(), - }; + fn test_build_gitlab_permalink_with_single_line_selection() { let permalink = Gitlab::new().build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -169,13 +215,12 @@ mod tests { } #[test] - fn test_build_gitlab_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; + fn test_build_gitlab_permalink_with_multi_line_selection() { let permalink = Gitlab::new().build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -187,74 +232,16 @@ mod tests { assert_eq!(permalink.to_string(), expected_url.to_string()) } - #[test] - fn test_build_gitlab_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; - let permalink = Gitlab::new().build_permalink( - remote, - BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", - selection: None, - }, - ); - - let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_gitlab_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; - let permalink = Gitlab::new().build_permalink( - remote, - BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", - selection: Some(6..6), - }, - ); - - let expected_url = 
"https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_gitlab_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; - let permalink = Gitlab::new().build_permalink( - remote, - BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", - selection: Some(23..47), - }, - ); - - let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - #[test] fn test_build_gitlab_self_hosted_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; let gitlab = Gitlab::from_remote_url("git@gitlab.some-enterprise.com:zed-industries/zed.git") .unwrap(); let permalink = gitlab.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -268,15 +255,14 @@ mod tests { #[test] fn test_build_gitlab_self_hosted_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "zed-industries".into(), - repo: "zed".into(), - }; let gitlab = Gitlab::from_remote_url("https://gitlab-instance.big-co.com/zed-industries/zed.git") .unwrap(); let permalink = gitlab.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", path: "crates/zed/src/main.rs", diff --git a/crates/git_hosting_providers/src/providers/sourcehut.rs b/crates/git_hosting_providers/src/providers/sourcehut.rs index 99ab53c8a3..a2dd14a345 100644 --- 
a/crates/git_hosting_providers/src/providers/sourcehut.rs +++ b/crates/git_hosting_providers/src/providers/sourcehut.rs @@ -39,7 +39,7 @@ impl GitHostingProvider for Sourcehut { } let mut path_segments = url.path_segments()?; - let owner = path_segments.next()?; + let owner = path_segments.next()?.trim_start_matches('~'); // We don't trim the `.git` suffix here like we do elsewhere, as // sourcehut treats a repo with `.git` suffix as a separate repo. // @@ -89,16 +89,62 @@ impl GitHostingProvider for Sourcehut { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_sourcehut_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Sourcehut + .parse_remote_url("git@git.sr.ht:~zed-industries/zed") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_ssh_url_with_git_suffix() { + let parsed_remote = Sourcehut + .parse_remote_url("git@git.sr.ht:~zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed.git".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Sourcehut + .parse_remote_url("https://git.sr.ht/~zed-industries/zed") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_build_sourcehut_permalink() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -106,18 +152,17 @@ mod tests { }, ); - let expected_url = 
"https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; + let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_sourcehut_permalink_from_ssh_url_with_git_prefix() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed.git".into(), - }; + fn test_build_sourcehut_permalink_with_git_suffix() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed.git".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -125,18 +170,17 @@ mod tests { }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; + let expected_url = "https://git.sr.ht/~zed-industries/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_sourcehut_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; + fn test_build_sourcehut_permalink_with_single_line_selection() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -144,18 +188,17 @@ mod tests { }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7"; + let expected_url = 
"https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_sourcehut_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; + fn test_build_sourcehut_permalink_with_multi_line_selection() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", @@ -163,64 +206,7 @@ mod tests { }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_sourcehut_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; - let permalink = Sourcehut.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", - selection: None, - }, - ); - - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_sourcehut_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; - let permalink = Sourcehut.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", - selection: Some(6..6), - }, - ); - - let expected_url = 
"https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_sourcehut_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya".into(), - repo: "zed".into(), - }; - let permalink = Sourcehut.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", - selection: Some(23..47), - }, - ); - - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs#L24-48"; + let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } From 63524a2354a84be67ab1f48cdb7bec57c8954427 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Tue, 29 Oct 2024 21:16:45 -0400 Subject: [PATCH 66/87] Add missing full-size styles for panes (#19935) As we don't use scrolling flex layouts directly in panes that often, the methods that would normally be applied to containers that should fill the space weren't applied here. Should help un-stuck #19872's layout issue, but I'm merging this change separately in case it creates some other layout issue in panes. 
Release Notes: - N/A --- crates/workspace/src/pane.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 01a1f0271e..97672fd245 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2696,6 +2696,7 @@ impl Render for Pane { .flex_1() .relative() .group("") + .overflow_hidden() .on_drag_move::(cx.listener(Self::handle_drag_move)) .on_drag_move::(cx.listener(Self::handle_drag_move)) .when(is_local, |div| { @@ -2704,6 +2705,8 @@ impl Render for Pane { .map(|div| { if let Some(item) = self.active_item() { div.v_flex() + .size_full() + .overflow_hidden() .child(self.toolbar.clone()) .child(item.to_any()) } else { From a5f52f0f04b06b9b09aa4b97aac6aaae19e0440c Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Tue, 29 Oct 2024 22:30:58 -0400 Subject: [PATCH 67/87] Use theme families to refine user themes (#19936) This PR changes the way we load user themes into the ThemeRegistry. Rather than directly pass a theme family's themes to `insert_user_themes`, instead we use the new `refine_theme_family` and `ThemeFamily::refine_theme`. This PR should have net zero change to themes today, but sets up enabling theme variables. We need to do it this way so each theme has access to its family when it is refined. 
Release Notes: - N/A --- crates/theme/src/registry.rs | 92 ++-------------------------- crates/theme/src/theme.rs | 112 ++++++++++++++++++++++++++++++++++- 2 files changed, 114 insertions(+), 90 deletions(-) diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index 9f95d19937..73e8fe8c66 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -6,16 +6,11 @@ use collections::HashMap; use derive_more::{Deref, DerefMut}; use fs::Fs; use futures::StreamExt; -use gpui::{AppContext, AssetSource, Global, HighlightStyle, SharedString}; +use gpui::{AppContext, AssetSource, Global, SharedString}; use parking_lot::RwLock; -use refineable::Refineable; use util::ResultExt; -use crate::{ - try_parse_color, AccentColors, Appearance, AppearanceContent, PlayerColors, StatusColors, - SyntaxTheme, SystemColors, Theme, ThemeColors, ThemeContent, ThemeFamily, ThemeFamilyContent, - ThemeStyles, -}; +use crate::{refine_theme_family, Appearance, Theme, ThemeFamily, ThemeFamilyContent}; /// The metadata for a theme. #[derive(Debug, Clone)] @@ -97,89 +92,12 @@ impl ThemeRegistry { #[allow(unused)] fn insert_user_theme_families(&self, families: impl IntoIterator) { for family in families.into_iter() { - self.insert_user_themes(family.themes); + let refined_family = refine_theme_family(family); + + self.insert_themes(refined_family.themes); } } - /// Inserts user themes into the registry. 
- pub fn insert_user_themes(&self, themes: impl IntoIterator) { - self.insert_themes(themes.into_iter().map(|user_theme| { - let mut theme_colors = match user_theme.appearance { - AppearanceContent::Light => ThemeColors::light(), - AppearanceContent::Dark => ThemeColors::dark(), - }; - theme_colors.refine(&user_theme.style.theme_colors_refinement()); - - let mut status_colors = match user_theme.appearance { - AppearanceContent::Light => StatusColors::light(), - AppearanceContent::Dark => StatusColors::dark(), - }; - status_colors.refine(&user_theme.style.status_colors_refinement()); - - let mut player_colors = match user_theme.appearance { - AppearanceContent::Light => PlayerColors::light(), - AppearanceContent::Dark => PlayerColors::dark(), - }; - player_colors.merge(&user_theme.style.players); - - let mut accent_colors = match user_theme.appearance { - AppearanceContent::Light => AccentColors::light(), - AppearanceContent::Dark => AccentColors::dark(), - }; - accent_colors.merge(&user_theme.style.accents); - - let syntax_highlights = user_theme - .style - .syntax - .iter() - .map(|(syntax_token, highlight)| { - ( - syntax_token.clone(), - HighlightStyle { - color: highlight - .color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - background_color: highlight - .background_color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - font_style: highlight.font_style.map(Into::into), - font_weight: highlight.font_weight.map(Into::into), - ..Default::default() - }, - ) - }) - .collect::>(); - let syntax_theme = - SyntaxTheme::merge(Arc::new(SyntaxTheme::default()), syntax_highlights); - - let window_background_appearance = user_theme - .style - .window_background_appearance - .map(Into::into) - .unwrap_or_default(); - - Theme { - id: uuid::Uuid::new_v4().to_string(), - name: user_theme.name.into(), - appearance: match user_theme.appearance { - AppearanceContent::Light => Appearance::Light, - AppearanceContent::Dark => Appearance::Dark, - }, - 
styles: ThemeStyles { - system: SystemColors::default(), - window_background_appearance, - accents: accent_colors, - colors: theme_colors, - status: status_colors, - player: player_colors, - syntax: syntax_theme, - }, - } - })); - } - /// Removes the themes with the given names from the registry. pub fn remove_user_themes(&self, themes_to_remove: &[SharedString]) { self.state diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index c62359242d..307ea6b287 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -29,10 +29,11 @@ pub use settings::*; pub use styles::*; use gpui::{ - px, AppContext, AssetSource, Hsla, Pixels, SharedString, WindowAppearance, - WindowBackgroundAppearance, + px, AppContext, AssetSource, HighlightStyle, Hsla, Pixels, Refineable, SharedString, + WindowAppearance, WindowBackgroundAppearance, }; use serde::Deserialize; +use uuid::Uuid; /// Defines window border radius for platforms that use client side decorations. pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); @@ -137,7 +138,112 @@ pub struct ThemeFamily { pub scales: ColorScales, } -impl ThemeFamily {} +impl ThemeFamily { + // This is on ThemeFamily because we will have variables here we will need + // in the future to resolve @references. + /// Refines ThemeContent into a theme, merging its contents with the base theme. 
+ pub fn refine_theme(&self, theme: &ThemeContent) -> Theme { + let appearance = match theme.appearance { + AppearanceContent::Light => Appearance::Light, + AppearanceContent::Dark => Appearance::Dark, + }; + + let mut refined_theme_colors = match theme.appearance { + AppearanceContent::Light => ThemeColors::light(), + AppearanceContent::Dark => ThemeColors::dark(), + }; + refined_theme_colors.refine(&theme.style.theme_colors_refinement()); + + let mut refined_status_colors = match theme.appearance { + AppearanceContent::Light => StatusColors::light(), + AppearanceContent::Dark => StatusColors::dark(), + }; + refined_status_colors.refine(&theme.style.status_colors_refinement()); + + let mut refined_player_colors = match theme.appearance { + AppearanceContent::Light => PlayerColors::light(), + AppearanceContent::Dark => PlayerColors::dark(), + }; + refined_player_colors.merge(&theme.style.players); + + let mut refined_accent_colors = match theme.appearance { + AppearanceContent::Light => AccentColors::light(), + AppearanceContent::Dark => AccentColors::dark(), + }; + refined_accent_colors.merge(&theme.style.accents); + + let syntax_highlights = theme + .style + .syntax + .iter() + .map(|(syntax_token, highlight)| { + ( + syntax_token.clone(), + HighlightStyle { + color: highlight + .color + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + background_color: highlight + .background_color + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + font_style: highlight.font_style.map(Into::into), + font_weight: highlight.font_weight.map(Into::into), + ..Default::default() + }, + ) + }) + .collect::>(); + let syntax_theme = SyntaxTheme::merge(Arc::new(SyntaxTheme::default()), syntax_highlights); + + let window_background_appearance = theme + .style + .window_background_appearance + .map(Into::into) + .unwrap_or_default(); + + Theme { + id: uuid::Uuid::new_v4().to_string(), + name: theme.name.clone().into(), + appearance, + styles: ThemeStyles { + 
system: SystemColors::default(), + window_background_appearance, + accents: refined_accent_colors, + colors: refined_theme_colors, + status: refined_status_colors, + player: refined_player_colors, + syntax: syntax_theme, + }, + } + } +} + +/// Refines a [ThemeFamilyContent] and its [ThemeContent]s into a [ThemeFamily]. +pub fn refine_theme_family(theme_family_content: ThemeFamilyContent) -> ThemeFamily { + let id = Uuid::new_v4().to_string(); + let name = theme_family_content.name.clone(); + let author = theme_family_content.author.clone(); + + let mut theme_family = ThemeFamily { + id: id.clone(), + name: name.clone().into(), + author: author.clone().into(), + themes: vec![], + scales: default_color_scales(), + }; + + let refined_themes = theme_family_content + .themes + .iter() + .map(|theme_content| theme_family.refine_theme(theme_content)) + .collect(); + + theme_family.themes = refined_themes; + + theme_family +} /// A theme is the primary mechanism for defining the appearance of the UI. 
#[derive(Clone, PartialEq)] From b3f0ba14304f7aa8d780dbbdf8bfe7f20dd08bda Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 29 Oct 2024 23:54:00 -0700 Subject: [PATCH 68/87] Implement panic reporting saving and uploads (#19932) TODO: - [x] check that the app version is well formatted for zed.dev Release Notes: - N/A --------- Co-authored-by: Trace --- Cargo.lock | 5 + crates/gpui/src/app.rs | 44 +++++- crates/proto/proto/zed.proto | 13 +- crates/proto/src/proto.rs | 9 +- crates/remote_server/Cargo.toml | 8 +- crates/remote_server/src/unix.rs | 98 ++++++++++++- .../telemetry_events/src/telemetry_events.rs | 4 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 4 +- crates/zed/src/reliability.rs | 137 ++++++++++++++---- 10 files changed, 278 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9c73baad36..f473a0f74d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9553,6 +9553,7 @@ dependencies = [ "async-watch", "backtrace", "cargo_toml", + "chrono", "clap", "client", "clock", @@ -9572,6 +9573,8 @@ dependencies = [ "node_runtime", "paths", "project", + "proto", + "release_channel", "remote", "reqwest_client", "rpc", @@ -9581,6 +9584,7 @@ dependencies = [ "settings", "shellexpand 2.1.2", "smol", + "telemetry_events", "toml 0.8.19", "util", "worktree", @@ -15092,6 +15096,7 @@ dependencies = [ "project", "project_panel", "project_symbols", + "proto", "quick_action_bar", "recent_projects", "release_channel", diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 096f495a88..ffbc757369 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -217,6 +217,7 @@ pub(crate) type KeystrokeObserver = type QuitHandler = Box LocalBoxFuture<'static, ()> + 'static>; type ReleaseListener = Box; type NewViewListener = Box; +type NewModelListener = Box; /// Contains the state of the full application, and passed as a reference to a variety of callbacks. 
/// Other contexts such as [ModelContext], [WindowContext], and [ViewContext] deref to this type, making it the most general context type. @@ -237,6 +238,7 @@ pub struct AppContext { http_client: Arc, pub(crate) globals_by_type: FxHashMap>, pub(crate) entities: EntityMap, + pub(crate) new_model_observers: SubscriberSet, pub(crate) new_view_observers: SubscriberSet, pub(crate) windows: SlotMap>, pub(crate) window_handles: FxHashMap, @@ -296,6 +298,7 @@ impl AppContext { globals_by_type: FxHashMap::default(), entities, new_view_observers: SubscriberSet::new(), + new_model_observers: SubscriberSet::new(), window_handles: FxHashMap::default(), windows: SlotMap::with_key(), keymap: Rc::new(RefCell::new(Keymap::default())), @@ -1016,6 +1019,7 @@ impl AppContext { activate(); subscription } + /// Arrange for the given function to be invoked whenever a view of the specified type is created. /// The function will be passed a mutable reference to the view along with an appropriate context. pub fn observe_new_views( @@ -1035,6 +1039,31 @@ impl AppContext { ) } + pub(crate) fn new_model_observer(&self, key: TypeId, value: NewModelListener) -> Subscription { + let (subscription, activate) = self.new_model_observers.insert(key, value); + activate(); + subscription + } + + /// Arrange for the given function to be invoked whenever a view of the specified type is created. + /// The function will be passed a mutable reference to the view along with an appropriate context. + pub fn observe_new_models( + &self, + on_new: impl 'static + Fn(&mut T, &mut ModelContext), + ) -> Subscription { + self.new_model_observer( + TypeId::of::(), + Box::new(move |any_model: AnyModel, cx: &mut AppContext| { + any_model + .downcast::() + .unwrap() + .update(cx, |model_state, cx| { + on_new(model_state, cx); + }) + }), + ) + } + /// Observe the release of a model or view. The callback is invoked after the model or view /// has no more strong references but before it has been dropped. 
pub fn observe_release( @@ -1346,8 +1375,21 @@ impl Context for AppContext { ) -> Model { self.update(|cx| { let slot = cx.entities.reserve(); + let model = slot.clone(); let entity = build_model(&mut ModelContext::new(cx, slot.downgrade())); - cx.entities.insert(slot, entity) + cx.entities.insert(slot, entity); + + // Non-generic part to avoid leaking SubscriberSet to invokers of `new_view`. + fn notify_observers(cx: &mut AppContext, tid: TypeId, model: AnyModel) { + cx.new_model_observers.clone().retain(&tid, |observer| { + let any_model = model.clone(); + (observer)(any_model, cx); + true + }); + } + notify_observers(cx, TypeId::of::(), AnyModel::from(model.clone())); + + model }) } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 439531ccb3..5dce1fbda6 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -289,7 +289,10 @@ message Envelope { ActiveToolchainResponse active_toolchain_response = 277; GetPathMetadata get_path_metadata = 278; - GetPathMetadataResponse get_path_metadata_response = 279; // current max + GetPathMetadataResponse get_path_metadata_response = 279; + + GetPanicFiles get_panic_files = 280; + GetPanicFilesResponse get_panic_files_response = 281; // current max } reserved 87 to 88; @@ -2489,5 +2492,11 @@ message UpdateGitBranch { uint64 project_id = 1; string branch_name = 2; ProjectPath repository = 3; - +} + +message GetPanicFiles { +} + +message GetPanicFilesResponse { + repeated string file_contents = 2; } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 4bae2d9931..8ff10a6056 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -363,7 +363,9 @@ messages!( (ActiveToolchain, Foreground), (ActiveToolchainResponse, Foreground), (GetPathMetadata, Background), - (GetPathMetadataResponse, Background) + (GetPathMetadataResponse, Background), + (GetPanicFiles, Background), + (GetPanicFilesResponse, Background), ); request_messages!( @@ -483,7 
+485,8 @@ request_messages!( (ListToolchains, ListToolchainsResponse), (ActivateToolchain, Ack), (ActiveToolchain, ActiveToolchainResponse), - (GetPathMetadata, GetPathMetadataResponse) + (GetPathMetadata, GetPathMetadataResponse), + (GetPanicFiles, GetPanicFilesResponse) ); entity_messages!( @@ -566,7 +569,7 @@ entity_messages!( ListToolchains, ActivateToolchain, ActiveToolchain, - GetPathMetadata + GetPathMetadata, ); entity_messages!( diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index d2623d5f47..92ddbee094 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -22,9 +22,10 @@ debug-embed = ["dep:rust-embed"] test-support = ["fs/test-support"] [dependencies] -async-watch.workspace = true anyhow.workspace = true +async-watch.workspace = true backtrace = "0.3" +chrono.workspace = true clap.workspace = true client.workspace = true env_logger.workspace = true @@ -39,8 +40,10 @@ languages.workspace = true log.workspace = true lsp.workspace = true node_runtime.workspace = true -project.workspace = true paths = { workspace = true } +project.workspace = true +proto.workspace = true +release_channel.workspace = true remote.workspace = true reqwest_client.workspace = true rpc.workspace = true @@ -50,6 +53,7 @@ serde_json.workspace = true settings.workspace = true shellexpand.workspace = true smol.workspace = true +telemetry_events.workspace = true util.workspace = true worktree.workspace = true diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index f6f98a41c1..a4add3354e 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -1,12 +1,13 @@ use crate::headless_project::HeadlessAppState; use crate::HeadlessProject; use anyhow::{anyhow, Context, Result}; -use client::ProxySettings; +use chrono::Utc; +use client::{telemetry, ProxySettings}; use fs::{Fs, RealFs}; use futures::channel::mpsc; use futures::{select, select_biased, AsyncRead, AsyncWrite, 
AsyncWriteExt, FutureExt, SinkExt}; use git::GitHostingProviderRegistry; -use gpui::{AppContext, Context as _, ModelContext, UpdateGlobal as _}; +use gpui::{AppContext, Context as _, Model, ModelContext, UpdateGlobal as _}; use http_client::{read_proxy_from_env, Uri}; use language::LanguageRegistry; use node_runtime::{NodeBinaryOptions, NodeRuntime}; @@ -21,19 +22,23 @@ use remote::{ }; use reqwest_client::ReqwestClient; use rpc::proto::{self, Envelope, SSH_PROJECT_ID}; +use rpc::{AnyProtoClient, TypedEnvelope}; use settings::{watch_config_file, Settings, SettingsStore}; use smol::channel::{Receiver, Sender}; use smol::io::AsyncReadExt; use smol::Async; use smol::{net::unix::UnixListener, stream::StreamExt as _}; +use std::ffi::OsStr; use std::ops::ControlFlow; +use std::{env, thread}; use std::{ io::Write, mem, path::{Path, PathBuf}, sync::Arc, }; +use telemetry_events::LocationData; use util::ResultExt; fn init_logging_proxy() { @@ -131,16 +136,97 @@ fn init_panic_hook() { backtrace.drain(0..=ix); } + let thread = thread::current(); + let thread_name = thread.name().unwrap_or(""); + log::error!( "panic occurred: {}\nBacktrace:\n{}", - payload, - backtrace.join("\n") + &payload, + (&backtrace).join("\n") ); + let panic_data = telemetry_events::Panic { + thread: thread_name.into(), + payload: payload.clone(), + location_data: info.location().map(|location| LocationData { + file: location.file().into(), + line: location.line(), + }), + app_version: format!( + "remote-server-{}", + option_env!("ZED_COMMIT_SHA").unwrap_or(&env!("ZED_PKG_VERSION")) + ), + release_channel: release_channel::RELEASE_CHANNEL.display_name().into(), + os_name: telemetry::os_name(), + os_version: Some(telemetry::os_version()), + architecture: env::consts::ARCH.into(), + panicked_on: Utc::now().timestamp_millis(), + backtrace, + system_id: None, // Set on SSH client + installation_id: None, // Set on SSH client + session_id: "".to_string(), // Set on SSH client + }; + + if let 
Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() { + let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string(); + let panic_file_path = paths::logs_dir().join(format!("zed-{timestamp}.panic")); + let panic_file = std::fs::OpenOptions::new() + .append(true) + .create(true) + .open(&panic_file_path) + .log_err(); + if let Some(mut panic_file) = panic_file { + writeln!(&mut panic_file, "{panic_data_json}").log_err(); + panic_file.flush().log_err(); + } + } + std::process::abort(); })); } +fn handle_panic_requests(project: &Model, client: &Arc) { + let client: AnyProtoClient = client.clone().into(); + client.add_request_handler( + project.downgrade(), + |_, _: TypedEnvelope, _cx| async move { + let mut children = smol::fs::read_dir(paths::logs_dir()).await?; + let mut panic_files = Vec::new(); + while let Some(child) = children.next().await { + let child = child?; + let child_path = child.path(); + + if child_path.extension() != Some(OsStr::new("panic")) { + continue; + } + let filename = if let Some(filename) = child_path.file_name() { + filename.to_string_lossy() + } else { + continue; + }; + + if !filename.starts_with("zed") { + continue; + } + + let file_contents = smol::fs::read_to_string(&child_path) + .await + .context("error reading panic file")?; + + panic_files.push(file_contents); + + // We've done what we can, delete the file + std::fs::remove_file(child_path) + .context("error removing panic") + .log_err(); + } + anyhow::Ok(proto::GetPanicFilesResponse { + file_contents: panic_files, + }) + }, + ); +} + struct ServerListeners { stdin: UnixListener, stdout: UnixListener, @@ -368,7 +454,7 @@ pub fn execute_run( HeadlessProject::new( HeadlessAppState { - session, + session: session.clone(), fs, http_client, node_runtime, @@ -378,6 +464,8 @@ pub fn execute_run( ) }); + handle_panic_requests(&project, &session); + mem::forget(project); }); log::info!("gpui app is shut down. 
quitting."); diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 47e66a46a7..26db3cf8d8 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -222,13 +222,13 @@ pub struct HangReport { pub installation_id: Option, } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct LocationData { pub file: String, pub line: u32, } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct Panic { /// The name of the thread that panicked pub thread: String, diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index e2a3f2be36..272d423f24 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -77,6 +77,7 @@ profiling.workspace = true project.workspace = true project_panel.workspace = true project_symbols.workspace = true +proto.workspace = true quick_action_bar.workspace = true recent_projects.workspace = true release_channel.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 998289f920..f366323ff5 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -329,7 +329,7 @@ fn main() { telemetry.start( system_id.as_ref().map(|id| id.to_string()), installation_id.as_ref().map(|id| id.to_string()), - session_id, + session_id.clone(), cx, ); @@ -365,7 +365,9 @@ fn main() { auto_update::init(client.http_client(), cx); reliability::init( client.http_client(), + system_id.as_ref().map(|id| id.to_string()), installation_id.clone().map(|id| id.to_string()), + session_id.clone(), cx, ); diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 9d76a6c47f..b02afb8c0d 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -1,13 +1,14 @@ use anyhow::{Context, Result}; use backtrace::{self, Backtrace}; use chrono::Utc; -use client::telemetry; +use client::{telemetry, 
TelemetrySettings}; use db::kvp::KEY_VALUE_STORE; use gpui::{AppContext, SemanticVersion}; use http_client::{HttpRequestExt, Method}; use http_client::{self, HttpClient, HttpClientWithUrl}; use paths::{crashes_dir, crashes_retired_dir}; +use project::Project; use release_channel::ReleaseChannel; use release_channel::RELEASE_CHANNEL; use settings::Settings; @@ -21,6 +22,7 @@ use std::{io::Write, panic, sync::atomic::AtomicU32, thread}; use telemetry_events::LocationData; use telemetry_events::Panic; use telemetry_events::PanicRequest; +use url::Url; use util::ResultExt; use crate::stdout_is_a_pty; @@ -133,13 +135,73 @@ pub fn init_panic_hook( pub fn init( http_client: Arc, + system_id: Option, installation_id: Option, + session_id: String, cx: &mut AppContext, ) { #[cfg(target_os = "macos")] monitor_main_thread_hangs(http_client.clone(), installation_id.clone(), cx); - upload_panics_and_crashes(http_client, installation_id, cx) + let Some(panic_report_url) = http_client + .build_zed_api_url("/telemetry/panics", &[]) + .log_err() + else { + return; + }; + + upload_panics_and_crashes( + http_client.clone(), + panic_report_url.clone(), + installation_id.clone(), + cx, + ); + + cx.observe_new_models(move |project: &mut Project, cx| { + let http_client = http_client.clone(); + let panic_report_url = panic_report_url.clone(); + let session_id = session_id.clone(); + let installation_id = installation_id.clone(); + let system_id = system_id.clone(); + + if let Some(ssh_client) = project.ssh_client() { + ssh_client.update(cx, |client, cx| { + if TelemetrySettings::get_global(cx).diagnostics { + let request = client.proto_client().request(proto::GetPanicFiles {}); + cx.background_executor() + .spawn(async move { + let panic_files = request.await?; + for file in panic_files.file_contents { + let panic: Option = serde_json::from_str(&file) + .log_err() + .or_else(|| { + file.lines() + .next() + .and_then(|line| serde_json::from_str(line).ok()) + }) + .unwrap_or_else(|| { + 
log::error!("failed to deserialize panic file {:?}", file); + None + }); + + if let Some(mut panic) = panic { + panic.session_id = session_id.clone(); + panic.system_id = system_id.clone(); + panic.installation_id = installation_id.clone(); + + upload_panic(&http_client, &panic_report_url, panic, &mut None) + .await?; + } + } + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + }) + } + }) + .detach(); } #[cfg(target_os = "macos")] @@ -346,16 +408,18 @@ pub fn monitor_main_thread_hangs( fn upload_panics_and_crashes( http: Arc, + panic_report_url: Url, installation_id: Option, cx: &AppContext, ) { let telemetry_settings = *client::TelemetrySettings::get_global(cx); cx.background_executor() .spawn(async move { - let most_recent_panic = upload_previous_panics(http.clone(), telemetry_settings) - .await - .log_err() - .flatten(); + let most_recent_panic = + upload_previous_panics(http.clone(), &panic_report_url, telemetry_settings) + .await + .log_err() + .flatten(); upload_previous_crashes(http, most_recent_panic, installation_id, telemetry_settings) .await .log_err() @@ -366,9 +430,9 @@ fn upload_panics_and_crashes( /// Uploads panics via `zed.dev`. 
async fn upload_previous_panics( http: Arc, + panic_report_url: &Url, telemetry_settings: client::TelemetrySettings, -) -> Result> { - let panic_report_url = http.build_zed_api_url("/telemetry/panics", &[])?; +) -> anyhow::Result> { let mut children = smol::fs::read_dir(paths::logs_dir()).await?; let mut most_recent_panic = None; @@ -396,7 +460,7 @@ async fn upload_previous_panics( .context("error reading panic file")?; let panic: Option = serde_json::from_str(&panic_file_content) - .ok() + .log_err() .or_else(|| { panic_file_content .lines() @@ -409,26 +473,8 @@ async fn upload_previous_panics( }); if let Some(panic) = panic { - most_recent_panic = Some((panic.panicked_on, panic.payload.clone())); - - let json_bytes = serde_json::to_vec(&PanicRequest { panic }).unwrap(); - - let Some(checksum) = client::telemetry::calculate_json_checksum(&json_bytes) else { + if !upload_panic(&http, &panic_report_url, panic, &mut most_recent_panic).await? { continue; - }; - - let Ok(request) = http_client::Request::builder() - .method(Method::POST) - .uri(panic_report_url.as_ref()) - .header("x-zed-checksum", checksum) - .body(json_bytes.into()) - else { - continue; - }; - - let response = http.send(request).await.context("error sending panic")?; - if !response.status().is_success() { - log::error!("Error uploading panic to server: {}", response.status()); } } } @@ -438,9 +484,42 @@ async fn upload_previous_panics( .context("error removing panic") .log_err(); } - Ok::<_, anyhow::Error>(most_recent_panic) + Ok(most_recent_panic) } +async fn upload_panic( + http: &Arc, + panic_report_url: &Url, + panic: telemetry_events::Panic, + most_recent_panic: &mut Option<(i64, String)>, +) -> Result { + *most_recent_panic = Some((panic.panicked_on, panic.payload.clone())); + + let json_bytes = serde_json::to_vec(&PanicRequest { + panic: panic.clone(), + }) + .unwrap(); + + let Some(checksum) = client::telemetry::calculate_json_checksum(&json_bytes) else { + return Ok(false); + }; + + let 
Ok(request) = http_client::Request::builder() + .method(Method::POST) + .uri(panic_report_url.as_ref()) + .header("x-zed-checksum", checksum) + .body(json_bytes.into()) + else { + return Ok(false); + }; + + let response = http.send(request).await.context("error sending panic")?; + if !response.status().is_success() { + log::error!("Error uploading panic to server: {}", response.status()); + } + + Ok(true) +} const LAST_CRASH_UPLOADED: &str = "LAST_CRASH_UPLOADED"; /// upload crashes from apple's diagnostic reports to our server. From 4431ef1870d097c22b8a94827abffe2493314519 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 30 Oct 2024 10:59:03 +0100 Subject: [PATCH 69/87] Speed up point translation in the Rope (#19913) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This pull request introduces an index of Unicode codepoints, newlines and UTF-16 codepoints. Benchmarks worth a thousand words: ``` push/4096 time: [467.06 µs 470.07 µs 473.24 µs] thrpt: [8.2543 MiB/s 8.3100 MiB/s 8.3635 MiB/s] change: time: [-4.1462% -3.0990% -2.0527%] (p = 0.00 < 0.05) thrpt: [+2.0957% +3.1981% +4.3255%] Performance has improved. Found 3 outliers among 100 measurements (3.00%) 1 (1.00%) low mild 2 (2.00%) high mild push/65536 time: [1.4650 ms 1.4796 ms 1.4922 ms] thrpt: [41.885 MiB/s 42.242 MiB/s 42.664 MiB/s] change: time: [-3.2871% -2.3489% -1.4555%] (p = 0.00 < 0.05) thrpt: [+1.4770% +2.4054% +3.3988%] Performance has improved. Found 6 outliers among 100 measurements (6.00%) 3 (3.00%) low severe 3 (3.00%) low mild append/4096 time: [729.00 ns 730.57 ns 732.14 ns] thrpt: [5.2103 GiB/s 5.2215 GiB/s 5.2327 GiB/s] change: time: [-81.884% -81.836% -81.790%] (p = 0.00 < 0.05) thrpt: [+449.16% +450.53% +452.01%] Performance has improved. 
Found 11 outliers among 100 measurements (11.00%) 3 (3.00%) low mild 6 (6.00%) high mild 2 (2.00%) high severe append/65536 time: [504.44 ns 505.58 ns 506.77 ns] thrpt: [120.44 GiB/s 120.72 GiB/s 121.00 GiB/s] change: time: [-94.833% -94.807% -94.782%] (p = 0.00 < 0.05) thrpt: [+1816.3% +1825.8% +1835.5%] Performance has improved. Found 4 outliers among 100 measurements (4.00%) 3 (3.00%) high mild 1 (1.00%) high severe slice/4096 time: [29.661 µs 29.733 µs 29.816 µs] thrpt: [131.01 MiB/s 131.38 MiB/s 131.70 MiB/s] change: time: [-48.833% -48.533% -48.230%] (p = 0.00 < 0.05) thrpt: [+93.161% +94.298% +95.440%] Performance has improved. slice/65536 time: [588.00 µs 590.22 µs 592.17 µs] thrpt: [105.54 MiB/s 105.89 MiB/s 106.29 MiB/s] change: time: [-45.599% -45.347% -45.099%] (p = 0.00 < 0.05) thrpt: [+82.147% +82.971% +83.821%] Performance has improved. Found 2 outliers among 100 measurements (2.00%) 1 (1.00%) low severe 1 (1.00%) high mild bytes_in_range/4096 time: [3.8630 µs 3.8811 µs 3.8994 µs] thrpt: [1001.8 MiB/s 1006.5 MiB/s 1011.2 MiB/s] change: time: [+0.0600% +0.6000% +1.1833%] (p = 0.03 < 0.05) thrpt: [-1.1695% -0.5964% -0.0600%] Change within noise threshold. bytes_in_range/65536 time: [98.178 µs 98.545 µs 98.931 µs] thrpt: [631.75 MiB/s 634.23 MiB/s 636.60 MiB/s] change: time: [-0.6513% +0.7537% +2.2265%] (p = 0.30 > 0.05) thrpt: [-2.1780% -0.7481% +0.6555%] No change in performance detected. Found 11 outliers among 100 measurements (11.00%) 8 (8.00%) high mild 3 (3.00%) high severe chars/4096 time: [878.91 ns 879.45 ns 880.06 ns] thrpt: [4.3346 GiB/s 4.3376 GiB/s 4.3403 GiB/s] change: time: [+9.1679% +9.4000% +9.6304%] (p = 0.00 < 0.05) thrpt: [-8.7844% -8.5923% -8.3979%] Performance has regressed. 
Found 8 outliers among 100 measurements (8.00%) 1 (1.00%) low severe 1 (1.00%) low mild 3 (3.00%) high mild 3 (3.00%) high severe chars/65536 time: [15.615 µs 15.691 µs 15.757 µs] thrpt: [3.8735 GiB/s 3.8899 GiB/s 3.9087 GiB/s] change: time: [+5.4902% +5.9345% +6.4044%] (p = 0.00 < 0.05) thrpt: [-6.0190% -5.6021% -5.2045%] Performance has regressed. Found 2 outliers among 100 measurements (2.00%) 2 (2.00%) low mild clip_point/4096 time: [29.677 µs 29.835 µs 30.019 µs] thrpt: [130.13 MiB/s 130.93 MiB/s 131.63 MiB/s] change: time: [-46.306% -45.866% -45.436%] (p = 0.00 < 0.05) thrpt: [+83.272% +84.728% +86.240%] Performance has improved. Found 11 outliers among 100 measurements (11.00%) 3 (3.00%) high mild 8 (8.00%) high severe clip_point/65536 time: [1.5933 ms 1.6116 ms 1.6311 ms] thrpt: [38.318 MiB/s 38.782 MiB/s 39.226 MiB/s] change: time: [-30.388% -29.598% -28.717%] (p = 0.00 < 0.05) thrpt: [+40.286% +42.040% +43.653%] Performance has improved. Found 3 outliers among 100 measurements (3.00%) 3 (3.00%) high mild running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 7 filtered out; finished in 0.00s point_to_offset/4096 time: [14.493 µs 14.591 µs 14.707 µs] thrpt: [265.61 MiB/s 267.72 MiB/s 269.52 MiB/s] change: time: [-71.990% -71.787% -71.588%] (p = 0.00 < 0.05) thrpt: [+251.96% +254.45% +257.01%] Performance has improved. Found 9 outliers among 100 measurements (9.00%) 5 (5.00%) high mild 4 (4.00%) high severe point_to_offset/65536 time: [700.72 µs 713.75 µs 727.26 µs] thrpt: [85.939 MiB/s 87.566 MiB/s 89.194 MiB/s] change: time: [-61.778% -61.015% -60.256%] (p = 0.00 < 0.05) thrpt: [+151.61% +156.51% +161.63%] Performance has improved. ``` Calling `Rope::chars` got slightly slower but I don't think it's a big issue (we don't really call `chars` for an entire `Rope`). In a future pull request, I want to use the tab index (which we're not yet using) and the char index to make `TabMap` a lot faster. 
Release Notes: - N/A --- Cargo.lock | 1 + Cargo.toml | 1 + crates/rope/Cargo.toml | 1 + crates/rope/benches/rope_benchmark.rs | 19 + crates/rope/src/chunk.rs | 878 ++++++++++++++++++++++++++ crates/rope/src/rope.rs | 446 ++++--------- crates/rope/src/unclipped.rs | 6 +- crates/sum_tree/Cargo.toml | 2 +- 8 files changed, 1029 insertions(+), 325 deletions(-) create mode 100644 crates/rope/src/chunk.rs diff --git a/Cargo.lock b/Cargo.lock index f473a0f74d..56d538a883 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9898,6 +9898,7 @@ dependencies = [ "gpui", "log", "rand 0.8.5", + "rayon", "smallvec", "sum_tree", "unicode-segmentation", diff --git a/Cargo.toml b/Cargo.toml index e269dd99ea..d67f78dc2b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -392,6 +392,7 @@ prost-build = "0.9" prost-types = "0.9" pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" +rayon = "1.8" regex = "1.5" repair_json = "0.1.0" reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = [ diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 309ceaf0bf..13f5979732 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -14,6 +14,7 @@ path = "src/rope.rs" [dependencies] arrayvec = "0.7.1" log.workspace = true +rayon.workspace = true smallvec.workspace = true sum_tree.workspace = true unicode-segmentation.workspace = true diff --git a/crates/rope/benches/rope_benchmark.rs b/crates/rope/benches/rope_benchmark.rs index 1f95559d77..01811c0c86 100644 --- a/crates/rope/benches/rope_benchmark.rs +++ b/crates/rope/benches/rope_benchmark.rs @@ -171,6 +171,25 @@ fn rope_benchmarks(c: &mut Criterion) { }); } group.finish(); + + let mut group = c.benchmark_group("point_to_offset"); + for size in sizes.iter() { + group.throughput(Throughput::Bytes(*size as u64)); + group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { + let rope = 
generate_random_rope(rng.clone(), *size); + + b.iter_batched( + || generate_random_rope_points(rng.clone(), &rope), + |offsets| { + for offset in offsets.iter() { + black_box(rope.point_to_offset(*offset)); + } + }, + BatchSize::SmallInput, + ); + }); + } + group.finish(); } criterion_group!(benches, rope_benchmarks); diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs new file mode 100644 index 0000000000..0490c5a9cd --- /dev/null +++ b/crates/rope/src/chunk.rs @@ -0,0 +1,878 @@ +use crate::{OffsetUtf16, Point, PointUtf16, TextSummary, Unclipped}; +use arrayvec::ArrayString; +use std::{cmp, ops::Range}; +use sum_tree::Bias; +use unicode_segmentation::GraphemeCursor; +use util::debug_panic; + +pub(crate) const MIN_BASE: usize = if cfg!(test) { 6 } else { 64 }; +pub(crate) const MAX_BASE: usize = MIN_BASE * 2; + +#[derive(Clone, Debug, Default)] +pub struct Chunk { + chars: u128, + chars_utf16: u128, + newlines: u128, + pub text: ArrayString, +} + +impl Chunk { + #[inline(always)] + pub fn new(text: &str) -> Self { + let mut this = Chunk::default(); + this.push_str(text); + this + } + + #[inline(always)] + pub fn push_str(&mut self, text: &str) { + for (char_ix, c) in text.char_indices() { + let ix = self.text.len() + char_ix; + self.chars |= 1 << ix; + self.chars_utf16 |= 1 << ix; + self.chars_utf16 |= (c.len_utf16() as u128) << ix; + self.newlines |= ((c == '\n') as u128) << ix; + } + self.text.push_str(text); + } + + #[inline(always)] + pub fn append(&mut self, slice: ChunkSlice) { + if slice.is_empty() { + return; + }; + + let base_ix = self.text.len(); + self.chars |= slice.chars << base_ix; + self.chars_utf16 |= slice.chars_utf16 << base_ix; + self.newlines |= slice.newlines << base_ix; + self.text.push_str(&slice.text); + } + + #[inline(always)] + pub fn as_slice(&self) -> ChunkSlice { + ChunkSlice { + chars: self.chars, + chars_utf16: self.chars_utf16, + newlines: self.newlines, + text: &self.text, + } + } + + #[inline(always)] + pub fn 
slice(&self, range: Range) -> ChunkSlice { + self.as_slice().slice(range) + } +} + +#[derive(Clone, Copy, Debug)] +pub struct ChunkSlice<'a> { + chars: u128, + chars_utf16: u128, + newlines: u128, + text: &'a str, +} + +impl<'a> Into for ChunkSlice<'a> { + fn into(self) -> Chunk { + Chunk { + chars: self.chars, + chars_utf16: self.chars_utf16, + newlines: self.newlines, + text: self.text.try_into().unwrap(), + } + } +} + +impl<'a> ChunkSlice<'a> { + #[inline(always)] + pub fn is_empty(self) -> bool { + self.text.is_empty() + } + + #[inline(always)] + pub fn is_char_boundary(self, offset: usize) -> bool { + self.text.is_char_boundary(offset) + } + + #[inline(always)] + pub fn split_at(self, mid: usize) -> (ChunkSlice<'a>, ChunkSlice<'a>) { + if mid == MAX_BASE { + let left = self; + let right = ChunkSlice { + chars: 0, + chars_utf16: 0, + newlines: 0, + text: "", + }; + (left, right) + } else { + let mask = if mid == MAX_BASE { + u128::MAX + } else { + (1u128 << mid) - 1 + }; + let (left_text, right_text) = self.text.split_at(mid); + let left = ChunkSlice { + chars: self.chars & mask, + chars_utf16: self.chars_utf16 & mask, + newlines: self.newlines & mask, + text: left_text, + }; + let right = ChunkSlice { + chars: self.chars >> mid, + chars_utf16: self.chars_utf16 >> mid, + newlines: self.newlines >> mid, + text: right_text, + }; + (left, right) + } + } + + #[inline(always)] + pub fn slice(self, range: Range) -> Self { + let mask = if range.end == MAX_BASE { + u128::MAX + } else { + (1u128 << range.end) - 1 + }; + if range.start == MAX_BASE { + Self { + chars: 0, + chars_utf16: 0, + newlines: 0, + text: "", + } + } else { + Self { + chars: (self.chars & mask) >> range.start, + chars_utf16: (self.chars_utf16 & mask) >> range.start, + newlines: (self.newlines & mask) >> range.start, + text: &self.text[range], + } + } + } + + #[inline(always)] + pub fn text_summary(&self) -> TextSummary { + let (longest_row, longest_row_chars) = self.longest_row(); + TextSummary { + 
len: self.len(), + len_utf16: self.len_utf16(), + lines: self.lines(), + first_line_chars: self.first_line_chars(), + last_line_chars: self.last_line_chars(), + last_line_len_utf16: self.last_line_len_utf16(), + longest_row, + longest_row_chars, + } + } + + /// Get length in bytes + #[inline(always)] + pub fn len(&self) -> usize { + self.text.len() + } + + /// Get length in UTF-16 code units + #[inline(always)] + pub fn len_utf16(&self) -> OffsetUtf16 { + OffsetUtf16(self.chars_utf16.count_ones() as usize) + } + + /// Get point representing number of lines and length of last line + #[inline(always)] + pub fn lines(&self) -> Point { + let row = self.newlines.count_ones(); + let column = self.newlines.leading_zeros() - (u128::BITS - self.text.len() as u32); + Point::new(row, column) + } + + /// Get number of chars in first line + #[inline(always)] + pub fn first_line_chars(&self) -> u32 { + if self.newlines == 0 { + self.chars.count_ones() + } else { + let mask = (1u128 << self.newlines.trailing_zeros()) - 1; + (self.chars & mask).count_ones() + } + } + + /// Get number of chars in last line + #[inline(always)] + pub fn last_line_chars(&self) -> u32 { + if self.newlines == 0 { + self.chars.count_ones() + } else { + let mask = !(u128::MAX >> self.newlines.leading_zeros()); + (self.chars & mask).count_ones() + } + } + + /// Get number of UTF-16 code units in last line + #[inline(always)] + pub fn last_line_len_utf16(&self) -> u32 { + if self.newlines == 0 { + self.chars_utf16.count_ones() + } else { + let mask = !(u128::MAX >> self.newlines.leading_zeros()); + (self.chars_utf16 & mask).count_ones() + } + } + + /// Get the longest row in the chunk and its length in characters. 
+ #[inline(always)] + pub fn longest_row(&self) -> (u32, u32) { + let mut chars = self.chars; + let mut newlines = self.newlines; + let mut row = 0; + let mut longest_row = 0; + let mut longest_row_chars = 0; + while newlines > 0 { + let newline_ix = newlines.trailing_zeros(); + let row_chars = (chars & ((1 << newline_ix) - 1)).count_ones() as u8; + if row_chars > longest_row_chars { + longest_row = row; + longest_row_chars = row_chars; + } + + newlines >>= newline_ix; + newlines >>= 1; + chars >>= newline_ix; + chars >>= 1; + row += 1; + } + + let row_chars = chars.count_ones() as u8; + if row_chars > longest_row_chars { + (row, row_chars as u32) + } else { + (longest_row, longest_row_chars as u32) + } + } + + #[inline(always)] + pub fn offset_to_point(&self, offset: usize) -> Point { + let mask = if offset == MAX_BASE { + u128::MAX + } else { + (1u128 << offset) - 1 + }; + let row = (self.newlines & mask).count_ones(); + let newline_ix = u128::BITS - (self.newlines & mask).leading_zeros(); + let column = (offset - newline_ix as usize) as u32; + Point::new(row, column) + } + + #[inline(always)] + pub fn point_to_offset(&self, point: Point) -> usize { + if point.row > self.lines().row { + debug_panic!( + "point {:?} extends beyond rows for string {:?}", + point, + self.text + ); + return self.len(); + } + + let row_offset_range = self.offset_range_for_row(point.row); + if point.column > row_offset_range.len() as u32 { + debug_panic!( + "point {:?} extends beyond row for string {:?}", + point, + self.text + ); + row_offset_range.end + } else { + row_offset_range.start + point.column as usize + } + } + + #[inline(always)] + pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { + let mask = if offset == MAX_BASE { + u128::MAX + } else { + (1u128 << offset) - 1 + }; + OffsetUtf16((self.chars_utf16 & mask).count_ones() as usize) + } + + #[inline(always)] + pub fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize { + if target.0 == 0 { + 0 + } 
else { + let ix = nth_set_bit(self.chars_utf16, target.0) + 1; + if ix == MAX_BASE { + MAX_BASE + } else { + let utf8_additional_len = cmp::min( + (self.chars_utf16 >> ix).trailing_zeros() as usize, + self.text.len() - ix, + ); + ix + utf8_additional_len + } + } + } + + #[inline(always)] + pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { + let mask = if offset == MAX_BASE { + u128::MAX + } else { + (1u128 << offset) - 1 + }; + let row = (self.newlines & mask).count_ones(); + let newline_ix = u128::BITS - (self.newlines & mask).leading_zeros(); + let column = if newline_ix as usize == MAX_BASE { + 0 + } else { + ((self.chars_utf16 & mask) >> newline_ix).count_ones() + }; + PointUtf16::new(row, column) + } + + #[inline(always)] + pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 { + self.offset_to_point_utf16(self.point_to_offset(point)) + } + + #[inline(always)] + pub fn point_utf16_to_offset(&self, point: PointUtf16, clip: bool) -> usize { + let lines = self.lines(); + if point.row > lines.row { + if !clip { + debug_panic!( + "point {:?} is beyond this chunk's extent {:?}", + point, + self.text + ); + } + return self.len(); + } + + let row_offset_range = self.offset_range_for_row(point.row); + let line = self.slice(row_offset_range.clone()); + if point.column > line.last_line_len_utf16() { + if !clip { + debug_panic!( + "point {:?} is beyond the end of the line in chunk {:?}", + point, + self.text + ); + } + return line.len(); + } + + let mut offset = row_offset_range.start; + if point.column > 0 { + offset += line.offset_utf16_to_offset(OffsetUtf16(point.column as usize)); + if !self.text.is_char_boundary(offset) { + offset -= 1; + while !self.text.is_char_boundary(offset) { + offset -= 1; + } + if !clip { + debug_panic!( + "point {:?} is within character in chunk {:?}", + point, + self.text, + ); + } + } + } + offset + } + + #[inline(always)] + pub fn unclipped_point_utf16_to_point(&self, point: Unclipped) -> Point { + let max_point 
= self.lines(); + if point.0.row > max_point.row { + return max_point; + } + + let row_offset_range = self.offset_range_for_row(point.0.row); + let line = self.slice(row_offset_range.clone()); + if point.0.column == 0 { + Point::new(point.0.row, 0) + } else if point.0.column >= line.len_utf16().0 as u32 { + Point::new(point.0.row, line.len() as u32) + } else { + let mut column = line.offset_utf16_to_offset(OffsetUtf16(point.0.column as usize)); + while !line.text.is_char_boundary(column) { + column -= 1; + } + Point::new(point.0.row, column as u32) + } + } + + #[inline(always)] + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + let max_point = self.lines(); + if point.row > max_point.row { + return max_point; + } + + let line = self.slice(self.offset_range_for_row(point.row)); + if point.column == 0 { + point + } else if point.column >= line.len() as u32 { + Point::new(point.row, line.len() as u32) + } else { + let mut column = point.column as usize; + let bytes = line.text.as_bytes(); + if bytes[column - 1] < 128 && bytes[column] < 128 { + return Point::new(point.row, column as u32); + } + + let mut grapheme_cursor = GraphemeCursor::new(column, bytes.len(), true); + loop { + if line.is_char_boundary(column) + && grapheme_cursor.is_boundary(line.text, 0).unwrap_or(false) + { + break; + } + + match bias { + Bias::Left => column -= 1, + Bias::Right => column += 1, + } + grapheme_cursor.set_cursor(column); + } + Point::new(point.row, column as u32) + } + } + + #[inline(always)] + pub fn clip_point_utf16(&self, point: Unclipped, bias: Bias) -> PointUtf16 { + let max_point = self.lines(); + if point.0.row > max_point.row { + PointUtf16::new(max_point.row, self.last_line_len_utf16()) + } else { + let line = self.slice(self.offset_range_for_row(point.0.row)); + let column = line.clip_offset_utf16(OffsetUtf16(point.0.column as usize), bias); + PointUtf16::new(point.0.row, column.0 as u32) + } + } + + #[inline(always)] + pub fn clip_offset_utf16(&self, 
target: OffsetUtf16, bias: Bias) -> OffsetUtf16 { + if target == OffsetUtf16::default() { + OffsetUtf16::default() + } else if target >= self.len_utf16() { + self.len_utf16() + } else { + let mut offset = self.offset_utf16_to_offset(target); + while !self.text.is_char_boundary(offset) { + if bias == Bias::Left { + offset -= 1; + } else { + offset += 1; + } + } + self.offset_to_offset_utf16(offset) + } + } + + #[inline(always)] + fn offset_range_for_row(&self, row: u32) -> Range { + let row_start = if row > 0 { + nth_set_bit(self.newlines, row as usize) + 1 + } else { + 0 + }; + let row_len = if row_start == MAX_BASE { + 0 + } else { + cmp::min( + (self.newlines >> row_start).trailing_zeros(), + (self.text.len() - row_start) as u32, + ) + }; + row_start..row_start + row_len as usize + } +} + +/// Finds the n-th bit that is set to 1. +#[inline(always)] +fn nth_set_bit(v: u128, n: usize) -> usize { + let low = v as u64; + let high = (v >> 64) as u64; + + let low_count = low.count_ones() as usize; + if n > low_count { + 64 + nth_set_bit_u64(high, (n - low_count) as u64) as usize + } else { + nth_set_bit_u64(low, n as u64) as usize + } +} + +#[inline(always)] +fn nth_set_bit_u64(v: u64, mut n: u64) -> u64 { + let v = v.reverse_bits(); + let mut s: u64 = 64; + + // Parallel bit count intermediates + let a = v - ((v >> 1) & (u64::MAX / 3)); + let b = (a & (u64::MAX / 5)) + ((a >> 2) & (u64::MAX / 5)); + let c = (b + (b >> 4)) & (u64::MAX / 0x11); + let d = (c + (c >> 8)) & (u64::MAX / 0x101); + + // Branchless select + let t = (d >> 32) + (d >> 48); + s -= (t.wrapping_sub(n) & 256) >> 3; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (d >> (s - 16)) & 0xff; + s -= (t.wrapping_sub(n) & 256) >> 4; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (c >> (s - 8)) & 0xf; + s -= (t.wrapping_sub(n) & 256) >> 5; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (b >> (s - 4)) & 0x7; + s -= (t.wrapping_sub(n) & 256) >> 6; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (a >> (s - 
2)) & 0x3; + s -= (t.wrapping_sub(n) & 256) >> 7; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (v >> (s - 1)) & 0x1; + s -= (t.wrapping_sub(n) & 256) >> 8; + + 65 - s - 1 +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::prelude::*; + use util::RandomCharIter; + + #[gpui::test(iterations = 100)] + fn test_random_chunks(mut rng: StdRng) { + let chunk_len = rng.gen_range(0..=MAX_BASE); + let text = RandomCharIter::new(&mut rng) + .take(chunk_len) + .collect::(); + let mut ix = chunk_len; + while !text.is_char_boundary(ix) { + ix -= 1; + } + let text = &text[..ix]; + + log::info!("Chunk: {:?}", text); + let chunk = Chunk::new(&text); + verify_chunk(chunk.as_slice(), text); + + for _ in 0..10 { + let mut start = rng.gen_range(0..=chunk.text.len()); + let mut end = rng.gen_range(start..=chunk.text.len()); + while !chunk.text.is_char_boundary(start) { + start -= 1; + } + while !chunk.text.is_char_boundary(end) { + end -= 1; + } + let range = start..end; + log::info!("Range: {:?}", range); + let text_slice = &text[range.clone()]; + let chunk_slice = chunk.slice(range); + verify_chunk(chunk_slice, text_slice); + } + } + + #[gpui::test(iterations = 1000)] + fn test_nth_set_bit_random(mut rng: StdRng) { + let set_count = rng.gen_range(0..=128); + let mut set_bits = (0..128).choose_multiple(&mut rng, set_count); + set_bits.sort(); + let mut n = 0; + for ix in set_bits.iter().copied() { + n |= 1 << ix; + } + + for (mut ix, position) in set_bits.into_iter().enumerate() { + ix += 1; + assert_eq!( + nth_set_bit(n, ix), + position, + "nth_set_bit({:0128b}, {})", + n, + ix + ); + } + } + + fn verify_chunk(chunk: ChunkSlice<'_>, text: &str) { + let mut offset = 0; + let mut offset_utf16 = OffsetUtf16(0); + let mut point = Point::zero(); + let mut point_utf16 = PointUtf16::zero(); + + log::info!("Verifying chunk {:?}", text); + assert_eq!(chunk.offset_to_point(0), Point::zero()); + + for c in text.chars() { + let expected_point = chunk.offset_to_point(offset); + 
assert_eq!(point, expected_point, "mismatch at offset {}", offset); + assert_eq!( + chunk.point_to_offset(point), + offset, + "mismatch at point {:?}", + point + ); + assert_eq!( + chunk.offset_to_offset_utf16(offset), + offset_utf16, + "mismatch at offset {}", + offset + ); + assert_eq!( + chunk.offset_utf16_to_offset(offset_utf16), + offset, + "mismatch at offset_utf16 {:?}", + offset_utf16 + ); + assert_eq!( + chunk.point_to_point_utf16(point), + point_utf16, + "mismatch at point {:?}", + point + ); + assert_eq!( + chunk.point_utf16_to_offset(point_utf16, false), + offset, + "mismatch at point_utf16 {:?}", + point_utf16 + ); + assert_eq!( + chunk.unclipped_point_utf16_to_point(Unclipped(point_utf16)), + point, + "mismatch for unclipped_point_utf16_to_point at {:?}", + point_utf16 + ); + + assert_eq!( + chunk.clip_point(point, Bias::Left), + point, + "incorrect left clip at {:?}", + point + ); + assert_eq!( + chunk.clip_point(point, Bias::Right), + point, + "incorrect right clip at {:?}", + point + ); + + for i in 1..c.len_utf8() { + let test_point = Point::new(point.row, point.column + i as u32); + assert_eq!( + chunk.clip_point(test_point, Bias::Left), + point, + "incorrect left clip within multi-byte char at {:?}", + test_point + ); + assert_eq!( + chunk.clip_point(test_point, Bias::Right), + Point::new(point.row, point.column + c.len_utf8() as u32), + "incorrect right clip within multi-byte char at {:?}", + test_point + ); + } + + for i in 1..c.len_utf16() { + let test_point = Unclipped(PointUtf16::new( + point_utf16.row, + point_utf16.column + i as u32, + )); + assert_eq!( + chunk.unclipped_point_utf16_to_point(test_point), + point, + "incorrect unclipped_point_utf16_to_point within multi-byte char at {:?}", + test_point + ); + assert_eq!( + chunk.clip_point_utf16(test_point, Bias::Left), + point_utf16, + "incorrect left clip_point_utf16 within multi-byte char at {:?}", + test_point + ); + assert_eq!( + chunk.clip_point_utf16(test_point, Bias::Right), + 
PointUtf16::new(point_utf16.row, point_utf16.column + c.len_utf16() as u32), + "incorrect right clip_point_utf16 within multi-byte char at {:?}", + test_point + ); + + let test_offset = OffsetUtf16(offset_utf16.0 + i); + assert_eq!( + chunk.clip_offset_utf16(test_offset, Bias::Left), + offset_utf16, + "incorrect left clip_offset_utf16 within multi-byte char at {:?}", + test_offset + ); + assert_eq!( + chunk.clip_offset_utf16(test_offset, Bias::Right), + OffsetUtf16(offset_utf16.0 + c.len_utf16()), + "incorrect right clip_offset_utf16 within multi-byte char at {:?}", + test_offset + ); + } + + if c == '\n' { + point.row += 1; + point.column = 0; + point_utf16.row += 1; + point_utf16.column = 0; + } else { + point.column += c.len_utf8() as u32; + point_utf16.column += c.len_utf16() as u32; + } + + offset += c.len_utf8(); + offset_utf16.0 += c.len_utf16(); + } + + let final_point = chunk.offset_to_point(offset); + assert_eq!(point, final_point, "mismatch at final offset {}", offset); + assert_eq!( + chunk.point_to_offset(point), + offset, + "mismatch at point {:?}", + point + ); + assert_eq!( + chunk.offset_to_offset_utf16(offset), + offset_utf16, + "mismatch at offset {}", + offset + ); + assert_eq!( + chunk.offset_utf16_to_offset(offset_utf16), + offset, + "mismatch at offset_utf16 {:?}", + offset_utf16 + ); + assert_eq!( + chunk.point_to_point_utf16(point), + point_utf16, + "mismatch at final point {:?}", + point + ); + assert_eq!( + chunk.point_utf16_to_offset(point_utf16, false), + offset, + "mismatch at final point_utf16 {:?}", + point_utf16 + ); + assert_eq!( + chunk.unclipped_point_utf16_to_point(Unclipped(point_utf16)), + point, + "mismatch for unclipped_point_utf16_to_point at final point {:?}", + point_utf16 + ); + assert_eq!( + chunk.clip_point(point, Bias::Left), + point, + "incorrect left clip at final point {:?}", + point + ); + assert_eq!( + chunk.clip_point(point, Bias::Right), + point, + "incorrect right clip at final point {:?}", + point + ); + 
assert_eq!( + chunk.clip_point_utf16(Unclipped(point_utf16), Bias::Left), + point_utf16, + "incorrect left clip_point_utf16 at final point {:?}", + point_utf16 + ); + assert_eq!( + chunk.clip_point_utf16(Unclipped(point_utf16), Bias::Right), + point_utf16, + "incorrect right clip_point_utf16 at final point {:?}", + point_utf16 + ); + assert_eq!( + chunk.clip_offset_utf16(offset_utf16, Bias::Left), + offset_utf16, + "incorrect left clip_offset_utf16 at final offset {:?}", + offset_utf16 + ); + assert_eq!( + chunk.clip_offset_utf16(offset_utf16, Bias::Right), + offset_utf16, + "incorrect right clip_offset_utf16 at final offset {:?}", + offset_utf16 + ); + + // Verify length methods + assert_eq!(chunk.len(), text.len()); + assert_eq!( + chunk.len_utf16().0, + text.chars().map(|c| c.len_utf16()).sum::() + ); + + // Verify line counting + let lines = chunk.lines(); + let mut newline_count = 0; + let mut last_line_len = 0; + for c in text.chars() { + if c == '\n' { + newline_count += 1; + last_line_len = 0; + } else { + last_line_len += c.len_utf8() as u32; + } + } + assert_eq!(lines, Point::new(newline_count, last_line_len)); + + // Verify first/last line chars + if !text.is_empty() { + let first_line = text.split('\n').next().unwrap(); + assert_eq!(chunk.first_line_chars(), first_line.chars().count() as u32); + + let last_line = text.split('\n').last().unwrap(); + assert_eq!(chunk.last_line_chars(), last_line.chars().count() as u32); + assert_eq!( + chunk.last_line_len_utf16(), + last_line.chars().map(|c| c.len_utf16() as u32).sum::() + ); + } + + // Verify longest row + let (longest_row, longest_chars) = chunk.longest_row(); + let mut max_chars = 0; + let mut current_row = 0; + let mut current_chars = 0; + let mut max_row = 0; + + for c in text.chars() { + if c == '\n' { + if current_chars > max_chars { + max_chars = current_chars; + max_row = current_row; + } + current_row += 1; + current_chars = 0; + } else { + current_chars += 1; + } + } + + if current_chars > 
max_chars { + max_chars = current_chars; + max_row = current_row; + } + + assert_eq!((max_row, max_chars as u32), (longest_row, longest_chars)); + } +} diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 68ff7d5c69..89cb1e7b63 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -1,9 +1,11 @@ +mod chunk; mod offset_utf16; mod point; mod point_utf16; mod unclipped; -use arrayvec::ArrayString; +use chunk::{Chunk, ChunkSlice}; +use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; use smallvec::SmallVec; use std::{ cmp, fmt, io, mem, @@ -11,20 +13,12 @@ use std::{ str, }; use sum_tree::{Bias, Dimension, SumTree}; -use unicode_segmentation::GraphemeCursor; -use util::debug_panic; pub use offset_utf16::OffsetUtf16; pub use point::Point; pub use point_utf16::PointUtf16; pub use unclipped::Unclipped; -#[cfg(test)] -const CHUNK_BASE: usize = 6; - -#[cfg(not(test))] -const CHUNK_BASE: usize = 64; - #[derive(Clone, Default)] pub struct Rope { chunks: SumTree, @@ -36,18 +30,25 @@ impl Rope { } pub fn append(&mut self, rope: Rope) { - let mut chunks = rope.chunks.cursor::<()>(&()); - chunks.next(&()); - if let Some(chunk) = chunks.item() { - if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE) - || chunk.0.len() < CHUNK_BASE + if let Some(chunk) = rope.chunks.first() { + if self + .chunks + .last() + .map_or(false, |c| c.text.len() < chunk::MIN_BASE) + || chunk.text.len() < chunk::MIN_BASE { - self.push(&chunk.0); + self.push_chunk(chunk.as_slice()); + + let mut chunks = rope.chunks.cursor::<()>(&()); chunks.next(&()); + chunks.next(&()); + self.chunks.append(chunks.suffix(&()), &()); + self.check_invariants(); + return; } } - self.chunks.append(chunks.suffix(&()), &()); + self.chunks.append(rope.chunks.clone(), &()); self.check_invariants(); } @@ -77,11 +78,13 @@ impl Rope { pub fn push(&mut self, mut text: &str) { self.chunks.update_last( |last_chunk| { - let split_ix = if last_chunk.0.len() + text.len() <= 2 * 
CHUNK_BASE { + let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE { text.len() } else { - let mut split_ix = - cmp::min(CHUNK_BASE.saturating_sub(last_chunk.0.len()), text.len()); + let mut split_ix = cmp::min( + chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), + text.len(), + ); while !text.is_char_boundary(split_ix) { split_ix += 1; } @@ -89,7 +92,7 @@ impl Rope { }; let (suffix, remainder) = text.split_at(split_ix); - last_chunk.0.push_str(suffix); + last_chunk.push_str(suffix); text = remainder; }, &(), @@ -101,12 +104,12 @@ impl Rope { let mut new_chunks = SmallVec::<[_; 16]>::new(); while !text.is_empty() { - let mut split_ix = cmp::min(2 * CHUNK_BASE, text.len()); + let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); while !text.is_char_boundary(split_ix) { split_ix -= 1; } let (chunk, remainder) = text.split_at(split_ix); - new_chunks.push(Chunk(ArrayString::from(chunk).unwrap())); + new_chunks.push(chunk); text = remainder; } @@ -116,9 +119,11 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - self.chunks.par_extend(new_chunks.into_vec(), &()); + self.chunks + .par_extend(new_chunks.into_vec().into_par_iter().map(Chunk::new), &()); } else { - self.chunks.extend(new_chunks, &()); + self.chunks + .extend(new_chunks.into_iter().map(Chunk::new), &()); } self.check_invariants(); @@ -135,7 +140,7 @@ impl Rope { // a chunk ends with 3 bytes of a 4-byte character. These 3 bytes end up being stored in the following chunk, thus wasting // 3 bytes of storage in current chunk. // For example, a 1024-byte string can occupy between 32 (full ASCII, 1024/32) and 36 (full 4-byte UTF-8, 1024 / 29 rounded up) chunks. 
- const MIN_CHUNK_SIZE: usize = 2 * CHUNK_BASE - 3; + const MIN_CHUNK_SIZE: usize = chunk::MAX_BASE - 3; // We also round up the capacity up by one, for a good measure; we *really* don't want to realloc here, as we assume that the # of characters // we're working with there is large. @@ -143,12 +148,12 @@ impl Rope { let mut new_chunks = Vec::with_capacity(capacity); while !text.is_empty() { - let mut split_ix = cmp::min(2 * CHUNK_BASE, text.len()); + let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); while !text.is_char_boundary(split_ix) { split_ix -= 1; } let (chunk, remainder) = text.split_at(split_ix); - new_chunks.push(Chunk(ArrayString::from(chunk).unwrap())); + new_chunks.push(chunk); text = remainder; } @@ -158,13 +163,44 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - self.chunks.par_extend(new_chunks, &()); + self.chunks + .par_extend(new_chunks.into_par_iter().map(Chunk::new), &()); } else { - self.chunks.extend(new_chunks, &()); + self.chunks + .extend(new_chunks.into_iter().map(Chunk::new), &()); } self.check_invariants(); } + + fn push_chunk(&mut self, mut chunk: ChunkSlice) { + self.chunks.update_last( + |last_chunk| { + let split_ix = if last_chunk.text.len() + chunk.len() <= chunk::MAX_BASE { + chunk.len() + } else { + let mut split_ix = cmp::min( + chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), + chunk.len(), + ); + while !chunk.is_char_boundary(split_ix) { + split_ix += 1; + } + split_ix + }; + + let (suffix, remainder) = chunk.split_at(split_ix); + last_chunk.append(suffix); + chunk = remainder; + }, + &(), + ); + + if !chunk.is_empty() { + self.chunks.push(chunk.into(), &()); + } + } + pub fn push_front(&mut self, text: &str) { let suffix = mem::replace(self, Rope::from(text)); self.append(suffix); @@ -178,7 +214,7 @@ impl Rope { let mut chunks = self.chunks.cursor::<()>(&()).peekable(); while let Some(chunk) = chunks.next() { if 
chunks.peek().is_some() { - assert!(chunk.0.len() + 3 >= CHUNK_BASE); + assert!(chunk.text.len() + 3 >= chunk::MIN_BASE); } } } @@ -250,7 +286,7 @@ impl Rope { let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Default::default(), |chunk| { - chunk.offset_to_offset_utf16(overshoot) + chunk.as_slice().offset_to_offset_utf16(overshoot) }) } @@ -263,7 +299,7 @@ impl Rope { let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Default::default(), |chunk| { - chunk.offset_utf16_to_offset(overshoot) + chunk.as_slice().offset_utf16_to_offset(overshoot) }) } @@ -275,9 +311,9 @@ impl Rope { cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 - + cursor - .item() - .map_or(Point::zero(), |chunk| chunk.offset_to_point(overshoot)) + + cursor.item().map_or(Point::zero(), |chunk| { + chunk.as_slice().offset_to_point(overshoot) + }) } pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { @@ -289,7 +325,7 @@ impl Rope { let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(PointUtf16::zero(), |chunk| { - chunk.offset_to_point_utf16(overshoot) + chunk.as_slice().offset_to_point_utf16(overshoot) }) } @@ -302,7 +338,7 @@ impl Rope { let overshoot = point - cursor.start().0; cursor.start().1 + cursor.item().map_or(PointUtf16::zero(), |chunk| { - chunk.point_to_point_utf16(overshoot) + chunk.as_slice().point_to_point_utf16(overshoot) }) } @@ -316,7 +352,7 @@ impl Rope { cursor.start().1 + cursor .item() - .map_or(0, |chunk| chunk.point_to_offset(overshoot)) + .map_or(0, |chunk| chunk.as_slice().point_to_offset(overshoot)) } pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { @@ -335,9 +371,9 @@ impl Rope { cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 - + cursor - .item() - .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot, clip)) + + cursor.item().map_or(0, |chunk| { 
+ chunk.as_slice().point_utf16_to_offset(overshoot, clip) + }) } pub fn unclipped_point_utf16_to_point(&self, point: Unclipped) -> Point { @@ -349,7 +385,7 @@ impl Rope { let overshoot = Unclipped(point.0 - cursor.start().0); cursor.start().1 + cursor.item().map_or(Point::zero(), |chunk| { - chunk.unclipped_point_utf16_to_point(overshoot) + chunk.as_slice().unclipped_point_utf16_to_point(overshoot) }) } @@ -358,7 +394,7 @@ impl Rope { cursor.seek(&offset, Bias::Left, &()); if let Some(chunk) = cursor.item() { let mut ix = offset - cursor.start(); - while !chunk.0.is_char_boundary(ix) { + while !chunk.text.is_char_boundary(ix) { match bias { Bias::Left => { ix -= 1; @@ -381,7 +417,7 @@ impl Rope { cursor.seek(&offset, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = offset - cursor.start(); - *cursor.start() + chunk.clip_offset_utf16(overshoot, bias) + *cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias) } else { self.summary().len_utf16 } @@ -392,7 +428,7 @@ impl Rope { cursor.seek(&point, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = point - cursor.start(); - *cursor.start() + chunk.clip_point(overshoot, bias) + *cursor.start() + chunk.as_slice().clip_point(overshoot, bias) } else { self.summary().lines } @@ -403,7 +439,7 @@ impl Rope { cursor.seek(&point.0, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = Unclipped(point.0 - cursor.start()); - *cursor.start() + chunk.clip_point_utf16(overshoot, bias) + *cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias) } else { self.summary().lines_utf16() } @@ -500,7 +536,7 @@ impl<'a> Cursor<'a> { if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); - slice.push(&start_chunk.0[start_ix..end_ix]); + slice.push_chunk(start_chunk.slice(start_ix..end_ix)); } if end_offset > self.chunks.end(&()) { @@ 
-510,7 +546,7 @@ impl<'a> Cursor<'a> { }); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); - slice.push(&end_chunk.0[..end_ix]); + slice.push_chunk(end_chunk.slice(0..end_ix)); } } @@ -525,9 +561,7 @@ impl<'a> Cursor<'a> { if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); - summary.add_assign(&D::from_text_summary(&TextSummary::from( - &start_chunk.0[start_ix..end_ix], - ))); + summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix))); } if end_offset > self.chunks.end(&()) { @@ -535,9 +569,7 @@ impl<'a> Cursor<'a> { summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &())); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); - summary.add_assign(&D::from_text_summary(&TextSummary::from( - &end_chunk.0[..end_ix], - ))); + summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix))); } } @@ -678,11 +710,11 @@ impl<'a> Chunks<'a> { if let Some(chunk) = self.chunks.item() { let mut end_ix = self.offset - *self.chunks.start(); - if chunk.0.as_bytes()[end_ix - 1] == b'\n' { + if chunk.text.as_bytes()[end_ix - 1] == b'\n' { end_ix -= 1; } - if let Some(newline_ix) = chunk.0[..end_ix].rfind('\n') { + if let Some(newline_ix) = chunk.text[..end_ix].rfind('\n') { self.offset = *self.chunks.start() + newline_ix + 1; if self.offset_is_valid() { return true; @@ -694,7 +726,7 @@ impl<'a> Chunks<'a> { .search_backward(|summary| summary.text.lines.row > 0, &()); self.offset = *self.chunks.start(); if let Some(chunk) = self.chunks.item() { - if let Some(newline_ix) = chunk.0.rfind('\n') { + if let Some(newline_ix) = chunk.text.rfind('\n') { self.offset += newline_ix + 1; if self.offset_is_valid() { if self.offset == self.chunks.end(&()) { @@ -731,7 +763,7 @@ impl<'a> Chunks<'a> { slice_start..slice_end }; - Some(&chunk.0[slice_range]) + 
Some(&chunk.text[slice_range]) } pub fn lines(self) -> Lines<'a> { @@ -798,7 +830,7 @@ impl<'a> Bytes<'a> { } let start = self.range.start.saturating_sub(chunk_start); let end = self.range.end - chunk_start; - Some(&chunk.0.as_bytes()[start..chunk.0.len().min(end)]) + Some(&chunk.text.as_bytes()[start..chunk.text.len().min(end)]) } } @@ -902,265 +934,13 @@ impl<'a> Lines<'a> { } } -#[derive(Clone, Debug, Default)] -struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>); - -impl Chunk { - fn offset_to_offset_utf16(&self, target: usize) -> OffsetUtf16 { - let mut offset = 0; - let mut offset_utf16 = OffsetUtf16(0); - for ch in self.0.chars() { - if offset >= target { - break; - } - - offset += ch.len_utf8(); - offset_utf16.0 += ch.len_utf16(); - } - offset_utf16 - } - - fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize { - let mut offset_utf16 = OffsetUtf16(0); - let mut offset = 0; - for ch in self.0.chars() { - if offset_utf16 >= target { - break; - } - - offset += ch.len_utf8(); - offset_utf16.0 += ch.len_utf16(); - } - offset - } - - fn offset_to_point(&self, target: usize) -> Point { - let mut offset = 0; - let mut point = Point::new(0, 0); - for ch in self.0.chars() { - if offset >= target { - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - } else { - point.column += ch.len_utf8() as u32; - } - offset += ch.len_utf8(); - } - point - } - - fn offset_to_point_utf16(&self, target: usize) -> PointUtf16 { - let mut offset = 0; - let mut point = PointUtf16::new(0, 0); - for ch in self.0.chars() { - if offset >= target { - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - } else { - point.column += ch.len_utf16() as u32; - } - offset += ch.len_utf8(); - } - point - } - - fn point_to_offset(&self, target: Point) -> usize { - let mut offset = 0; - let mut point = Point::new(0, 0); - - for ch in self.0.chars() { - if point >= target { - if point > target { - debug_panic!("point {target:?} is inside of character {ch:?}"); - 
} - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - - if point.row > target.row { - debug_panic!( - "point {target:?} is beyond the end of a line with length {}", - point.column - ); - break; - } - } else { - point.column += ch.len_utf8() as u32; - } - - offset += ch.len_utf8(); - } - - offset - } - - fn point_to_point_utf16(&self, target: Point) -> PointUtf16 { - let mut point = Point::zero(); - let mut point_utf16 = PointUtf16::new(0, 0); - for ch in self.0.chars() { - if point >= target { - break; - } - - if ch == '\n' { - point_utf16.row += 1; - point_utf16.column = 0; - point.row += 1; - point.column = 0; - } else { - point_utf16.column += ch.len_utf16() as u32; - point.column += ch.len_utf8() as u32; - } - } - point_utf16 - } - - fn point_utf16_to_offset(&self, target: PointUtf16, clip: bool) -> usize { - let mut offset = 0; - let mut point = PointUtf16::new(0, 0); - - for ch in self.0.chars() { - if point == target { - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - - if point.row > target.row { - if !clip { - debug_panic!( - "point {target:?} is beyond the end of a line with length {}", - point.column - ); - } - // Return the offset of the newline - return offset; - } - } else { - point.column += ch.len_utf16() as u32; - } - - if point > target { - if !clip { - debug_panic!("point {target:?} is inside of codepoint {ch:?}"); - } - // Return the offset of the codepoint which we have landed within, bias left - return offset; - } - - offset += ch.len_utf8(); - } - - offset - } - - fn unclipped_point_utf16_to_point(&self, target: Unclipped) -> Point { - let mut point = Point::zero(); - let mut point_utf16 = PointUtf16::zero(); - - for ch in self.0.chars() { - if point_utf16 == target.0 { - break; - } - - if point_utf16 > target.0 { - // If the point is past the end of a line or inside of a code point, - // return the last valid point before the target. 
- return point; - } - - if ch == '\n' { - point_utf16 += PointUtf16::new(1, 0); - point += Point::new(1, 0); - } else { - point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32); - point += Point::new(0, ch.len_utf8() as u32); - } - } - - point - } - - fn clip_point(&self, target: Point, bias: Bias) -> Point { - for (row, line) in self.0.split('\n').enumerate() { - if row == target.row as usize { - let bytes = line.as_bytes(); - let mut column = target.column.min(bytes.len() as u32) as usize; - if column == 0 - || column == bytes.len() - || (bytes[column - 1] < 128 && bytes[column] < 128) - { - return Point::new(row as u32, column as u32); - } - - let mut grapheme_cursor = GraphemeCursor::new(column, bytes.len(), true); - loop { - if line.is_char_boundary(column) - && grapheme_cursor.is_boundary(line, 0).unwrap_or(false) - { - break; - } - - match bias { - Bias::Left => column -= 1, - Bias::Right => column += 1, - } - grapheme_cursor.set_cursor(column); - } - return Point::new(row as u32, column as u32); - } - } - unreachable!() - } - - fn clip_point_utf16(&self, target: Unclipped, bias: Bias) -> PointUtf16 { - for (row, line) in self.0.split('\n').enumerate() { - if row == target.0.row as usize { - let mut code_units = line.encode_utf16(); - let mut column = code_units.by_ref().take(target.0.column as usize).count(); - if char::decode_utf16(code_units).next().transpose().is_err() { - match bias { - Bias::Left => column -= 1, - Bias::Right => column += 1, - } - } - return PointUtf16::new(row as u32, column as u32); - } - } - unreachable!() - } - - fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 { - let mut code_units = self.0.encode_utf16(); - let mut offset = code_units.by_ref().take(target.0).count(); - if char::decode_utf16(code_units).next().transpose().is_err() { - match bias { - Bias::Left => offset -= 1, - Bias::Right => offset += 1, - } - } - OffsetUtf16(offset) - } -} - impl sum_tree::Item for Chunk { type Summary = 
ChunkSummary; fn summary(&self, _cx: &()) -> Self::Summary { - ChunkSummary::from(self.0.as_str()) + ChunkSummary { + text: self.as_slice().text_summary(), + } } } @@ -1169,14 +949,6 @@ pub struct ChunkSummary { text: TextSummary, } -impl<'a> From<&'a str> for ChunkSummary { - fn from(text: &'a str) -> Self { - Self { - text: TextSummary::from(text), - } - } -} - impl sum_tree::Summary for ChunkSummary { type Context = (); @@ -1323,6 +1095,7 @@ impl std::ops::AddAssign for TextSummary { pub trait TextDimension: 'static + for<'a> Dimension<'a, ChunkSummary> { fn from_text_summary(summary: &TextSummary) -> Self; + fn from_chunk(chunk: ChunkSlice) -> Self; fn add_assign(&mut self, other: &Self); } @@ -1334,6 +1107,10 @@ impl TextDimension for (D1, D2) { ) } + fn from_chunk(chunk: ChunkSlice) -> Self { + (D1::from_chunk(chunk), D2::from_chunk(chunk)) + } + fn add_assign(&mut self, other: &Self) { self.0.add_assign(&other.0); self.1.add_assign(&other.1); @@ -1355,6 +1132,10 @@ impl TextDimension for TextSummary { summary.clone() } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.text_summary() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1375,6 +1156,10 @@ impl TextDimension for usize { summary.len } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.len() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1395,6 +1180,10 @@ impl TextDimension for OffsetUtf16 { summary.len_utf16 } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.len_utf16() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1415,6 +1204,10 @@ impl TextDimension for Point { summary.lines } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.lines() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1435,6 +1228,13 @@ impl TextDimension for PointUtf16 { summary.lines_utf16() } + fn from_chunk(chunk: ChunkSlice) -> Self { + PointUtf16 { + row: chunk.lines().row, + column: chunk.last_line_len_utf16(), + } + } + fn 
add_assign(&mut self, other: &Self) { *self += other; } @@ -1919,7 +1719,7 @@ mod tests { fn text(&self) -> String { let mut text = String::new(); for chunk in self.chunks.cursor::<()>(&()) { - text.push_str(&chunk.0); + text.push_str(&chunk.text); } text } diff --git a/crates/rope/src/unclipped.rs b/crates/rope/src/unclipped.rs index b3427e2cb9..679901875c 100644 --- a/crates/rope/src/unclipped.rs +++ b/crates/rope/src/unclipped.rs @@ -1,4 +1,4 @@ -use crate::{ChunkSummary, TextDimension, TextSummary}; +use crate::{chunk::ChunkSlice, ChunkSummary, TextDimension, TextSummary}; use std::ops::{Add, AddAssign, Sub, SubAssign}; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -27,6 +27,10 @@ impl TextDimension for Unclipped { Unclipped(T::from_text_summary(summary)) } + fn from_chunk(chunk: ChunkSlice) -> Self { + Unclipped(T::from_chunk(chunk)) + } + fn add_assign(&mut self, other: &Self) { TextDimension::add_assign(&mut self.0, &other.0); } diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index b370e6df18..06ca955767 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] arrayvec = "0.7.1" -rayon = "1.8" +rayon.workspace = true log.workspace = true [dev-dependencies] From 774a8bf039376449e0a787d6cfafd9385c960adf Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 30 Oct 2024 11:40:04 +0100 Subject: [PATCH 70/87] inline blame: Fix default setting for inline blame (#19943) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow-up to #19759. Fixes the default value. 
cc @pjtatlow 😄 Release Notes: - N/A --- crates/project/src/project_settings.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 42d8ae6dac..52594ce5b0 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -161,8 +161,9 @@ pub struct InlineBlameSettings { const fn true_value() -> bool { true } + const fn false_value() -> bool { - true + false } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] From f6cd97f6fd298ee6b41f26c449a92aa3e394ed31 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 30 Oct 2024 13:27:11 +0100 Subject: [PATCH 71/87] remote dev: Allow canceling language server work in editor (#19946) Release Notes: - Added ability to cancel language server work in remote development. Demo: https://github.com/user-attachments/assets/c9ca91a5-617f-4886-a458-87c563c5a247 --- crates/editor/src/editor.rs | 2 +- crates/project/src/lsp_store.rs | 194 ++++++++++++------ crates/proto/proto/zed.proto | 24 ++- crates/proto/src/proto.rs | 5 +- .../remote_server/src/remote_editing_tests.rs | 166 +++++++++++++++ 5 files changed, 325 insertions(+), 66 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 40cdf27f2e..50d367d730 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10460,7 +10460,7 @@ impl Editor { fn cancel_language_server_work( &mut self, - _: &CancelLanguageServerWork, + _: &actions::CancelLanguageServerWork, cx: &mut ViewContext, ) { if let Some(project) = self.project.clone() { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6387fc65d2..e04577e551 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -787,6 +787,7 @@ impl LspStore { pub fn init(client: &AnyProtoClient) { client.add_model_request_handler(Self::handle_multi_lsp_query); 
client.add_model_request_handler(Self::handle_restart_language_servers); + client.add_model_request_handler(Self::handle_cancel_language_server_work); client.add_model_message_handler(Self::handle_start_language_server); client.add_model_message_handler(Self::handle_update_language_server); client.add_model_message_handler(Self::handle_language_server_log); @@ -4118,7 +4119,7 @@ impl LspStore { LanguageServerProgress { title: payload.title, is_disk_based_diagnostics_progress: false, - is_cancellable: false, + is_cancellable: payload.is_cancellable.unwrap_or(false), message: payload.message, percentage: payload.percentage.map(|p| p as usize), last_update_at: cx.background_executor().now(), @@ -4134,7 +4135,7 @@ impl LspStore { LanguageServerProgress { title: None, is_disk_based_diagnostics_progress: false, - is_cancellable: false, + is_cancellable: payload.is_cancellable.unwrap_or(false), message: payload.message, percentage: payload.percentage.map(|p| p as usize), last_update_at: cx.background_executor().now(), @@ -4635,6 +4636,7 @@ impl LspStore { token, message: report.message, percentage: report.percentage, + is_cancellable: report.cancellable, }, ), }) @@ -4668,6 +4670,7 @@ impl LspStore { title: progress.title, message: progress.message, percentage: progress.percentage.map(|p| p as u32), + is_cancellable: Some(progress.is_cancellable), }), }) } @@ -4698,6 +4701,9 @@ impl LspStore { if progress.percentage.is_some() { entry.percentage = progress.percentage; } + if progress.is_cancellable != entry.is_cancellable { + entry.is_cancellable = progress.is_cancellable; + } cx.notify(); return true; } @@ -5168,22 +5174,52 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { this.update(&mut cx, |this, cx| { - let buffers: Vec<_> = envelope - .payload - .buffer_ids - .into_iter() - .flat_map(|buffer_id| { - this.buffer_store - .read(cx) - .get(BufferId::new(buffer_id).log_err()?) 
- }) - .collect(); - this.restart_language_servers_for_buffers(buffers, cx) + let buffers = this.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx); + this.restart_language_servers_for_buffers(buffers, cx); })?; Ok(proto::Ack {}) } + pub async fn handle_cancel_language_server_work( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + this.update(&mut cx, |this, cx| { + if let Some(work) = envelope.payload.work { + match work { + proto::cancel_language_server_work::Work::Buffers(buffers) => { + let buffers = + this.buffer_ids_to_buffers(buffers.buffer_ids.into_iter(), cx); + this.cancel_language_server_work_for_buffers(buffers, cx); + } + proto::cancel_language_server_work::Work::LanguageServerWork(work) => { + let server_id = LanguageServerId::from_proto(work.language_server_id); + this.cancel_language_server_work(server_id, work.token, cx); + } + } + } + })?; + + Ok(proto::Ack {}) + } + + fn buffer_ids_to_buffers( + &mut self, + buffer_ids: impl Iterator, + cx: &mut ModelContext, + ) -> Vec> { + buffer_ids + .into_iter() + .flat_map(|buffer_id| { + self.buffer_store + .read(cx) + .get(BufferId::new(buffer_id).log_err()?) 
+ }) + .collect::>() + } + async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -6728,16 +6764,89 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - let servers = buffers - .into_iter() - .flat_map(|buffer| { - self.language_server_ids_for_buffer(buffer.read(cx), cx) - .into_iter() - }) - .collect::>(); + if let Some((client, project_id)) = self.upstream_client() { + let request = client.request(proto::CancelLanguageServerWork { + project_id, + work: Some(proto::cancel_language_server_work::Work::Buffers( + proto::cancel_language_server_work::Buffers { + buffer_ids: buffers + .into_iter() + .map(|b| b.read(cx).remote_id().to_proto()) + .collect(), + }, + )), + }); + cx.background_executor() + .spawn(request) + .detach_and_log_err(cx); + } else { + let servers = buffers + .into_iter() + .flat_map(|buffer| { + self.language_server_ids_for_buffer(buffer.read(cx), cx) + .into_iter() + }) + .collect::>(); - for server_id in servers { - self.cancel_language_server_work(server_id, None, cx); + for server_id in servers { + self.cancel_language_server_work(server_id, None, cx); + } + } + } + + pub(crate) fn cancel_language_server_work( + &mut self, + server_id: LanguageServerId, + token_to_cancel: Option, + cx: &mut ModelContext, + ) { + if let Some(local) = self.as_local() { + let status = self.language_server_statuses.get(&server_id); + let server = local.language_servers.get(&server_id); + if let Some((LanguageServerState::Running { server, .. 
}, status)) = server.zip(status) + { + for (token, progress) in &status.pending_work { + if let Some(token_to_cancel) = token_to_cancel.as_ref() { + if token != token_to_cancel { + continue; + } + } + if progress.is_cancellable { + server + .notify::( + WorkDoneProgressCancelParams { + token: lsp::NumberOrString::String(token.clone()), + }, + ) + .ok(); + } + + if progress.is_cancellable { + server + .notify::( + WorkDoneProgressCancelParams { + token: lsp::NumberOrString::String(token.clone()), + }, + ) + .ok(); + } + } + } + } else if let Some((client, project_id)) = self.upstream_client() { + let request = client.request(proto::CancelLanguageServerWork { + project_id, + work: Some( + proto::cancel_language_server_work::Work::LanguageServerWork( + proto::cancel_language_server_work::LanguageServerWork { + language_server_id: server_id.to_proto(), + token: token_to_cancel, + }, + ), + ), + }); + cx.background_executor() + .spawn(request) + .detach_and_log_err(cx); } } @@ -6868,47 +6977,6 @@ impl LspStore { } } - pub(crate) fn cancel_language_server_work( - &mut self, - server_id: LanguageServerId, - token_to_cancel: Option, - _cx: &mut ModelContext, - ) { - let Some(local) = self.as_local() else { - return; - }; - let status = self.language_server_statuses.get(&server_id); - let server = local.language_servers.get(&server_id); - if let Some((LanguageServerState::Running { server, .. 
}, status)) = server.zip(status) { - for (token, progress) in &status.pending_work { - if let Some(token_to_cancel) = token_to_cancel.as_ref() { - if token != token_to_cancel { - continue; - } - } - if progress.is_cancellable { - server - .notify::( - WorkDoneProgressCancelParams { - token: lsp::NumberOrString::String(token.clone()), - }, - ) - .ok(); - } - - if progress.is_cancellable { - server - .notify::( - WorkDoneProgressCancelParams { - token: lsp::NumberOrString::String(token.clone()), - }, - ) - .ok(); - } - } - } - } - pub fn wait_for_remote_buffer( &mut self, id: BufferId, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 5dce1fbda6..90fbc397f1 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -292,7 +292,9 @@ message Envelope { GetPathMetadataResponse get_path_metadata_response = 279; GetPanicFiles get_panic_files = 280; - GetPanicFilesResponse get_panic_files_response = 281; // current max + GetPanicFilesResponse get_panic_files_response = 281; + + CancelLanguageServerWork cancel_language_server_work = 282; // current max } reserved 87 to 88; @@ -1257,12 +1259,14 @@ message LspWorkStart { optional string title = 4; optional string message = 2; optional uint32 percentage = 3; + optional bool is_cancellable = 5; } message LspWorkProgress { string token = 1; optional string message = 2; optional uint32 percentage = 3; + optional bool is_cancellable = 4; } message LspWorkEnd { @@ -2500,3 +2504,21 @@ message GetPanicFiles { message GetPanicFilesResponse { repeated string file_contents = 2; } + +message CancelLanguageServerWork { + uint64 project_id = 1; + + oneof work { + Buffers buffers = 2; + LanguageServerWork language_server_work = 3; + } + + message Buffers { + repeated uint64 buffer_ids = 2; + } + + message LanguageServerWork { + uint64 language_server_id = 1; + optional string token = 2; + } +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 8ff10a6056..ca0403ed72 
100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -366,6 +366,7 @@ messages!( (GetPathMetadataResponse, Background), (GetPanicFiles, Background), (GetPanicFilesResponse, Background), + (CancelLanguageServerWork, Foreground), ); request_messages!( @@ -486,7 +487,8 @@ request_messages!( (ActivateToolchain, Ack), (ActiveToolchain, ActiveToolchainResponse), (GetPathMetadata, GetPathMetadataResponse), - (GetPanicFiles, GetPanicFilesResponse) + (GetPanicFiles, GetPanicFilesResponse), + (CancelLanguageServerWork, Ack), ); entity_messages!( @@ -570,6 +572,7 @@ entity_messages!( ActivateToolchain, ActiveToolchain, GetPathMetadata, + CancelLanguageServerWork, ); entity_messages!( diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index c7d3a3c97f..2554aa48d2 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -528,6 +528,172 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext }) } +#[gpui::test] +async fn test_remote_cancel_language_server_work( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; + + fs.insert_tree( + "/code/project1/.zed", + json!({ + "settings.json": r#" + { + "languages": {"Rust":{"language_servers":["rust-analyzer"]}}, + "lsp": { + "rust-analyzer": { + "binary": { + "path": "~/.cargo/bin/rust-analyzer" + } + } + } + }"# + }), + ) + .await; + + cx.update_model(&project, |project, _| { + project.languages().register_test_language(LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".into()], + ..Default::default() + }, + 
..Default::default() + }); + project.languages().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + ..Default::default() + }, + ) + }); + + let mut fake_lsp = server_cx.update(|cx| { + headless.read(cx).languages.register_fake_language_server( + LanguageServerName("rust-analyzer".into()), + Default::default(), + None, + ) + }); + + cx.run_until_parked(); + + let worktree_id = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap() + .0 + .read_with(cx, |worktree, _| worktree.id()); + + cx.run_until_parked(); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let mut fake_lsp = fake_lsp.next().await.unwrap(); + + // Cancelling all language server work for a given buffer + { + // Two operations, one cancellable and one not. + fake_lsp + .start_progress_with( + "another-token", + lsp::WorkDoneProgressBegin { + cancellable: Some(false), + ..Default::default() + }, + ) + .await; + + let progress_token = "the-progress-token"; + fake_lsp + .start_progress_with( + progress_token, + lsp::WorkDoneProgressBegin { + cancellable: Some(true), + ..Default::default() + }, + ) + .await; + + cx.executor().run_until_parked(); + + project.update(cx, |project, cx| { + project.cancel_language_server_work_for_buffers([buffer.clone()], cx) + }); + + cx.executor().run_until_parked(); + + // Verify the cancellation was received on the server side + let cancel_notification = fake_lsp + .receive_notification::() + .await; + assert_eq!( + cancel_notification.token, + lsp::NumberOrString::String(progress_token.into()) + ); + } + + // Cancelling work by server_id and token + { + let server_id = fake_lsp.server.server_id(); + let progress_token = "the-progress-token"; + + fake_lsp + .start_progress_with( + progress_token, + lsp::WorkDoneProgressBegin { + cancellable: 
Some(true), + ..Default::default() + }, + ) + .await; + + cx.executor().run_until_parked(); + + project.update(cx, |project, cx| { + project.cancel_language_server_work(server_id, Some(progress_token.into()), cx) + }); + + cx.executor().run_until_parked(); + + // Verify the cancellation was received on the server side + let cancel_notification = fake_lsp + .receive_notification::() + .await; + assert_eq!( + cancel_notification.token, + lsp::NumberOrString::String(progress_token.into()) + ); + } +} + #[gpui::test] async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); From 0ba40bdfb8b6200583a42793fa159161c4e73089 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 30 Oct 2024 13:41:28 +0100 Subject: [PATCH 72/87] remote dev: Always upload binary in development mode (#19953) Release Notes: - N/A --- crates/remote/src/ssh_session.rs | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index ff6dfd7751..0f6c90de43 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1736,13 +1736,19 @@ impl SshRemoteConnection { } } - if self.is_binary_in_use(dst_path).await? { - log::info!("server binary is opened by another process. not updating"); - delegate.set_status( - Some("Skipping update of remote development server, since it's still in use"), - cx, - ); - return Ok(()); + if cfg!(not(debug_assertions)) { + // When we're not in dev mode, we don't want to switch out the binary if it's + // still open. + // In dev mode, that's fine, since we often kill Zed processes with Ctrl-C and want + // to still replace the binary. + if self.is_binary_in_use(dst_path).await? { + log::info!("server binary is opened by another process. 
not updating"); + delegate.set_status( + Some("Skipping update of remote development server, since it's still in use"), + cx, + ); + return Ok(()); + } } let upload_binary_over_ssh = self.socket.connection_options.upload_binary_over_ssh; From d49cd0019f4d2789c87eab651bc7bd2cbd8004e3 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 30 Oct 2024 14:49:47 +0200 Subject: [PATCH 73/87] Log prettier errors on failures (#19951) Closes https://github.com/zed-industries/zed/issues/11987 Release Notes: - Fixed prettier not reporting failures in the status panel on formatting and installation errors --- Cargo.lock | 1 + crates/activity_indicator/Cargo.toml | 1 + .../src/activity_indicator.rs | 22 ++- crates/prettier/src/prettier.rs | 6 +- crates/project/src/lsp_store.rs | 55 +++++--- crates/proto/src/error.rs | 14 +- crates/remote/src/ssh_session.rs | 132 ++++++++++-------- 7 files changed, 146 insertions(+), 85 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 56d538a883..266c1922bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16,6 +16,7 @@ dependencies = [ "project", "smallvec", "ui", + "util", "workspace", ] diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 9761a08238..b4fb2ec5b0 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -23,6 +23,7 @@ language.workspace = true project.workspace = true smallvec.workspace = true ui.workspace = true +util.workspace = true workspace.workspace = true [dev-dependencies] diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 8020e0665a..90410d534c 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -13,7 +13,8 @@ use language::{ use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId}; use smallvec::SmallVec; use std::{cmp::Reverse, fmt::Write, sync::Arc, 
time::Duration}; -use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle}; +use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip}; +use util::truncate_and_trailoff; use workspace::{item::ItemHandle, StatusItemView, Workspace}; actions!(activity_indicator, [ShowErrorMessage]); @@ -446,6 +447,8 @@ impl ActivityIndicator { impl EventEmitter for ActivityIndicator {} +const MAX_MESSAGE_LEN: usize = 50; + impl Render for ActivityIndicator { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let result = h_flex() @@ -456,6 +459,7 @@ impl Render for ActivityIndicator { return result; }; let this = cx.view().downgrade(); + let truncate_content = content.message.len() > MAX_MESSAGE_LEN; result.gap_2().child( PopoverMenu::new("activity-indicator-popover") .trigger( @@ -464,7 +468,21 @@ impl Render for ActivityIndicator { .id("activity-indicator-status") .gap_2() .children(content.icon) - .child(Label::new(content.message).size(LabelSize::Small)) + .map(|button| { + if truncate_content { + button + .child( + Label::new(truncate_and_trailoff( + &content.message, + MAX_MESSAGE_LEN, + )) + .size(LabelSize::Small), + ) + .tooltip(move |cx| Tooltip::text(&content.message, cx)) + } else { + button.child(Label::new(content.message).size(LabelSize::Small)) + } + }) .when_some(content.on_click, |this, handler| { this.on_click(cx.listener(move |this, _, cx| { handler(this, cx); diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 4dc5bca40f..d7b13c9992 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -329,11 +329,7 @@ impl Prettier { })? 
.context("prettier params calculation")?; - let response = local - .server - .request::(params) - .await - .context("prettier format request")?; + let response = local.server.request::(params).await?; let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(response.text, cx))?; Ok(diff_task.await) } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index e04577e551..fe4127d536 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -29,6 +29,7 @@ use gpui::{ Task, WeakModel, }; use http_client::HttpClient; +use itertools::Itertools as _; use language::{ language_settings::{ language_settings, FormatOnSave, Formatter, LanguageSettings, SelectedFormatter, @@ -144,7 +145,6 @@ pub struct LocalLspStore { HashMap)>, prettier_store: Model, current_lsp_settings: HashMap, - last_formatting_failure: Option, _subscription: gpui::Subscription, } @@ -563,9 +563,7 @@ impl LocalLspStore { })?; prettier_store::format_with_prettier(&prettier, &buffer.handle, cx) .await - .transpose() - .ok() - .flatten() + .transpose()? 
} Formatter::External { command, arguments } => { Self::format_via_external_command(buffer, command, arguments.as_deref(), cx) @@ -705,6 +703,7 @@ impl LspStoreMode { pub struct LspStore { mode: LspStoreMode, + last_formatting_failure: Option, downstream_client: Option<(AnyProtoClient, u64)>, nonce: u128, buffer_store: Model, @@ -907,7 +906,6 @@ impl LspStore { language_server_watcher_registrations: Default::default(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), buffers_being_formatted: Default::default(), - last_formatting_failure: None, prettier_store, environment, http_client, @@ -917,6 +915,7 @@ impl LspStore { this.as_local_mut().unwrap().shutdown_language_servers(cx) }), }), + last_formatting_failure: None, downstream_client: None, buffer_store, worktree_store, @@ -977,6 +976,7 @@ impl LspStore { upstream_project_id: project_id, }), downstream_client: None, + last_formatting_failure: None, buffer_store, worktree_store, languages: languages.clone(), @@ -5265,9 +5265,9 @@ impl LspStore { .map(language::proto::serialize_transaction), }) } + pub fn last_formatting_failure(&self) -> Option<&str> { - self.as_local() - .and_then(|local| local.last_formatting_failure.as_deref()) + self.last_formatting_failure.as_deref() } pub fn environment_for_buffer( @@ -5338,23 +5338,16 @@ impl LspStore { cx.clone(), ) .await; - lsp_store.update(&mut cx, |lsp_store, _| { - let local = lsp_store.as_local_mut().unwrap(); - match &result { - Ok(_) => local.last_formatting_failure = None, - Err(error) => { - local.last_formatting_failure.replace(error.to_string()); - } - } + lsp_store.update_last_formatting_failure(&result); })?; result }) } else if let Some((client, project_id)) = self.upstream_client() { let buffer_store = self.buffer_store(); - cx.spawn(move |_, mut cx| async move { - let response = client + cx.spawn(move |lsp_store, mut cx| async move { + let result = client .request(proto::FormatBuffers { project_id, trigger: trigger as i32, @@ -5365,13 
+5358,21 @@ impl LspStore { }) .collect::>()?, }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; + .await + .and_then(|result| result.transaction.context("missing transaction")); + lsp_store.update(&mut cx, |lsp_store, _| { + lsp_store.update_last_formatting_failure(&result); + })?; + + let transaction_response = result?; buffer_store .update(&mut cx, |buffer_store, cx| { - buffer_store.deserialize_project_transaction(response, push_to_history, cx) + buffer_store.deserialize_project_transaction( + transaction_response, + push_to_history, + cx, + ) })? .await }) @@ -7366,6 +7367,18 @@ impl LspStore { lsp_action, }) } + + fn update_last_formatting_failure(&mut self, formatting_result: &anyhow::Result) { + match &formatting_result { + Ok(_) => self.last_formatting_failure = None, + Err(error) => { + let error_string = format!("{error:#}"); + log::error!("Formatting failed: {error_string}"); + self.last_formatting_failure + .replace(error_string.lines().join(" ")); + } + } + } } impl EventEmitter for LspStore {} diff --git a/crates/proto/src/error.rs b/crates/proto/src/error.rs index 8a87d6fdc9..680056fc1c 100644 --- a/crates/proto/src/error.rs +++ b/crates/proto/src/error.rs @@ -104,7 +104,19 @@ impl ErrorExt for anyhow::Error { if let Some(rpc_error) = self.downcast_ref::() { rpc_error.to_proto() } else { - ErrorCode::Internal.message(format!("{}", self)).to_proto() + ErrorCode::Internal + .message( + format!("{self:#}") + .lines() + .fold(String::new(), |mut message, line| { + if !message.is_empty() { + message.push(' '); + } + message.push_str(line); + message + }), + ) + .to_proto() } } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 0f6c90de43..a69f0330ff 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -2017,77 +2017,97 @@ impl ChannelClient { mut incoming_rx: mpsc::UnboundedReceiver, cx: &AsyncAppContext, ) -> Task> { - cx.spawn(|cx| { - async move { - let 
peer_id = PeerId { owner_id: 0, id: 0 }; - while let Some(incoming) = incoming_rx.next().await { - let Some(this) = this.upgrade() else { - return anyhow::Ok(()); - }; - if let Some(ack_id) = incoming.ack_id { - let mut buffer = this.buffer.lock(); - while buffer.front().is_some_and(|msg| msg.id <= ack_id) { - buffer.pop_front(); + cx.spawn(|cx| async move { + let peer_id = PeerId { owner_id: 0, id: 0 }; + while let Some(incoming) = incoming_rx.next().await { + let Some(this) = this.upgrade() else { + return anyhow::Ok(()); + }; + if let Some(ack_id) = incoming.ack_id { + let mut buffer = this.buffer.lock(); + while buffer.front().is_some_and(|msg| msg.id <= ack_id) { + buffer.pop_front(); + } + } + if let Some(proto::envelope::Payload::FlushBufferedMessages(_)) = &incoming.payload + { + log::debug!( + "{}:ssh message received. name:FlushBufferedMessages", + this.name + ); + { + let buffer = this.buffer.lock(); + for envelope in buffer.iter() { + this.outgoing_tx + .lock() + .unbounded_send(envelope.clone()) + .ok(); } } - if let Some(proto::envelope::Payload::FlushBufferedMessages(_)) = - &incoming.payload - { - log::debug!("{}:ssh message received. 
name:FlushBufferedMessages", this.name); - { - let buffer = this.buffer.lock(); - for envelope in buffer.iter() { - this.outgoing_tx.lock().unbounded_send(envelope.clone()).ok(); - } + let mut envelope = proto::Ack {}.into_envelope(0, Some(incoming.id), None); + envelope.id = this.next_message_id.fetch_add(1, SeqCst); + this.outgoing_tx.lock().unbounded_send(envelope).ok(); + continue; + } + + this.max_received.store(incoming.id, SeqCst); + + if let Some(request_id) = incoming.responding_to { + let request_id = MessageId(request_id); + let sender = this.response_channels.lock().remove(&request_id); + if let Some(sender) = sender { + let (tx, rx) = oneshot::channel(); + if incoming.payload.is_some() { + sender.send((incoming, tx)).ok(); } - let mut envelope = proto::Ack{}.into_envelope(0, Some(incoming.id), None); - envelope.id = this.next_message_id.fetch_add(1, SeqCst); - this.outgoing_tx.lock().unbounded_send(envelope).ok(); - continue; + rx.await.ok(); } - - this.max_received.store(incoming.id, SeqCst); - - if let Some(request_id) = incoming.responding_to { - let request_id = MessageId(request_id); - let sender = this.response_channels.lock().remove(&request_id); - if let Some(sender) = sender { - let (tx, rx) = oneshot::channel(); - if incoming.payload.is_some() { - sender.send((incoming, tx)).ok(); - } - rx.await.ok(); - } - } else if let Some(envelope) = - build_typed_envelope(peer_id, Instant::now(), incoming) - { - let type_name = envelope.payload_type_name(); - if let Some(future) = ProtoMessageHandlerSet::handle_message( - &this.message_handlers, - envelope, - this.clone().into(), - cx.clone(), - ) { - log::debug!("{}:ssh message received. 
name:{type_name}", this.name); - cx.foreground_executor().spawn(async move { + } else if let Some(envelope) = + build_typed_envelope(peer_id, Instant::now(), incoming) + { + let type_name = envelope.payload_type_name(); + if let Some(future) = ProtoMessageHandlerSet::handle_message( + &this.message_handlers, + envelope, + this.clone().into(), + cx.clone(), + ) { + log::debug!("{}:ssh message received. name:{type_name}", this.name); + cx.foreground_executor() + .spawn(async move { match future.await { Ok(_) => { - log::debug!("{}:ssh message handled. name:{type_name}", this.name); + log::debug!( + "{}:ssh message handled. name:{type_name}", + this.name + ); } Err(error) => { log::error!( - "{}:error handling message. type:{type_name}, error:{error}", this.name, + "{}:error handling message. type:{}, error:{}", + this.name, + type_name, + format!("{error:#}").lines().fold( + String::new(), + |mut message, line| { + if !message.is_empty() { + message.push(' '); + } + message.push_str(line); + message + } + ) ); } } - }).detach() - } else { - log::error!("{}:unhandled ssh message name:{type_name}", this.name); - } + }) + .detach() + } else { + log::error!("{}:unhandled ssh message name:{type_name}", this.name); } } - anyhow::Ok(()) } + anyhow::Ok(()) }) } From 83e2889d63b5df4b390d9fff66641f2c500cbc8a Mon Sep 17 00:00:00 2001 From: Lukas Geiger Date: Wed, 30 Oct 2024 14:12:32 +0000 Subject: [PATCH 74/87] Fix notebook cell-height when soft-wrapping lines (#19933) --- crates/repl/src/notebook/cell.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index bb6b6fbf38..f86f969a96 100644 --- a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -553,9 +553,6 @@ impl RunnableCell for CodeCell { impl Render for CodeCell { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let lines = self.source.lines().count(); - let height = lines as f32 * 
cx.line_height(); - v_flex() .size_full() // TODO: Move base cell render into trait impl so we don't have to repeat this @@ -582,7 +579,7 @@ impl Render for CodeCell { .border_1() .border_color(cx.theme().colors().border) .bg(cx.theme().colors().editor_background) - .child(div().h(height).w_full().child(self.editor.clone())), + .child(div().w_full().child(self.editor.clone())), ), ), ) From c8003c0697df7cc2352f912a0df27b2757fffdbf Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 30 Oct 2024 15:21:51 +0100 Subject: [PATCH 75/87] Take a mutable context when resolving selections (#19948) This is a behavior-preserving change, but lays the groundwork for expanding selections when the cursor lands inside of a "replace" block. Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 67 ++--- crates/assistant/src/inline_assistant.rs | 31 ++- crates/collab/src/tests/following_tests.rs | 7 +- crates/diagnostics/src/items.rs | 9 +- crates/editor/src/editor.rs | 19 +- crates/editor/src/element.rs | 248 +++++++++--------- crates/editor/src/linked_editing_ranges.rs | 2 +- crates/editor/src/selections_collection.rs | 37 ++- crates/go_to_line/src/cursor_position.rs | 50 ++-- crates/go_to_line/src/go_to_line.rs | 4 +- crates/language_tools/src/syntax_tree_view.rs | 14 +- .../src/markdown_preview_view.rs | 4 +- crates/outline/src/outline.rs | 8 +- crates/outline_panel/src/outline_panel.rs | 8 +- crates/repl/src/repl_editor.rs | 20 +- crates/vim/src/state.rs | 2 +- crates/vim/src/vim.rs | 15 +- 17 files changed, 288 insertions(+), 257 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index f0b5a5d442..19d92bbc02 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -1677,8 +1677,10 @@ impl ContextEditor { }); } - fn cursors(&self, cx: &AppContext) -> Vec { - let selections = self.editor.read(cx).selections.all::(cx); + fn cursors(&self, cx: &mut 
WindowContext) -> Vec { + let selections = self + .editor + .update(cx, |editor, cx| editor.selections.all::(cx)); selections .into_iter() .map(|selection| selection.head()) @@ -2385,7 +2387,9 @@ impl ContextEditor { } fn update_active_patch(&mut self, cx: &mut ViewContext) { - let newest_cursor = self.editor.read(cx).selections.newest::(cx).head(); + let newest_cursor = self.editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); let context = self.context.read(cx); let new_patch = context.patch_containing(newest_cursor, cx).cloned(); @@ -2792,39 +2796,40 @@ impl ContextEditor { ) -> Option<(String, bool)> { const CODE_FENCE_DELIMITER: &'static str = "```"; - let context_editor = context_editor_view.read(cx).editor.read(cx); + let context_editor = context_editor_view.read(cx).editor.clone(); + context_editor.update(cx, |context_editor, cx| { + if context_editor.selections.newest::(cx).is_empty() { + let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let (_, _, snapshot) = snapshot.as_singleton()?; - if context_editor.selections.newest::(cx).is_empty() { - let snapshot = context_editor.buffer().read(cx).snapshot(cx); - let (_, _, snapshot) = snapshot.as_singleton()?; + let head = context_editor.selections.newest::(cx).head(); + let offset = snapshot.point_to_offset(head); - let head = context_editor.selections.newest::(cx).head(); - let offset = snapshot.point_to_offset(head); + let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; + let mut text = snapshot + .text_for_range(surrounding_code_block_range) + .collect::(); - let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; - let mut text = snapshot - .text_for_range(surrounding_code_block_range) - .collect::(); + // If there is no newline trailing the closing three-backticks, then + // tree-sitter-md extends the range of the content node to include + // the backticks. 
+ if text.ends_with(CODE_FENCE_DELIMITER) { + text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + } - // If there is no newline trailing the closing three-backticks, then - // tree-sitter-md extends the range of the content node to include - // the backticks. - if text.ends_with(CODE_FENCE_DELIMITER) { - text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + (!text.is_empty()).then_some((text, true)) + } else { + let anchor = context_editor.selections.newest_anchor(); + let text = context_editor + .buffer() + .read(cx) + .read(cx) + .text_for_range(anchor.range()) + .collect::(); + + (!text.is_empty()).then_some((text, false)) } - - (!text.is_empty()).then_some((text, true)) - } else { - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); - - (!text.is_empty()).then_some((text, false)) - } + }) } fn insert_selection( diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 4c79662cf1..fdf00c8b04 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -189,11 +189,16 @@ impl InlineAssistant { initial_prompt: Option, cx: &mut WindowContext, ) { - let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); + let (snapshot, initial_selections) = editor.update(cx, |editor, cx| { + ( + editor.buffer().read(cx).snapshot(cx), + editor.selections.all::(cx), + ) + }); let mut selections = Vec::>::new(); let mut newest_selection = None; - for mut selection in editor.read(cx).selections.all::(cx) { + for mut selection in initial_selections { if selection.end > selection.start { selection.start.column = 0; // If the selection ends at the start of the line, we don't want to include it. 
@@ -566,10 +571,13 @@ impl InlineAssistant { return; }; - let editor = editor.read(cx); - if editor.selections.count() == 1 { - let selection = editor.selections.newest::(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); + if editor.read(cx).selections.count() == 1 { + let (selection, buffer) = editor.update(cx, |editor, cx| { + ( + editor.selections.newest::(cx), + editor.buffer().read(cx).snapshot(cx), + ) + }); for assist_id in &editor_assists.assist_ids { let assist = &self.assists[assist_id]; let assist_range = assist.range.to_offset(&buffer); @@ -594,10 +602,13 @@ impl InlineAssistant { return; }; - let editor = editor.read(cx); - if editor.selections.count() == 1 { - let selection = editor.selections.newest::(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); + if editor.read(cx).selections.count() == 1 { + let (selection, buffer) = editor.update(cx, |editor, cx| { + ( + editor.selections.newest::(cx), + editor.buffer().read(cx).snapshot(cx), + ) + }); let mut closest_assist_fallback = None; for assist_id in &editor_assists.assist_ids { let assist = &self.assists[assist_id]; diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 1367bf49c0..d708194f58 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1957,9 +1957,10 @@ async fn test_following_to_channel_notes_without_a_shared_project( }); channel_notes_1_b.update(cx_b, |notes, cx| { assert_eq!(notes.channel(cx).unwrap().name, "channel-1"); - let editor = notes.editor.read(cx); - assert_eq!(editor.text(cx), "Hello from A."); - assert_eq!(editor.selections.ranges::(cx), &[3..4]); + notes.editor.update(cx, |editor, cx| { + assert_eq!(editor.text(cx), "Hello from A."); + assert_eq!(editor.selections.ranges::(cx), &[3..4]); + }) }); // Client A opens the notes for channel 2. 
diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 72a4ac9bcf..2c580c44de 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -136,11 +136,12 @@ impl DiagnosticIndicator { } fn update(&mut self, editor: View, cx: &mut ViewContext) { - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx); - let cursor_position = editor.selections.newest::(cx).head(); + let (buffer, cursor_position) = editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let cursor_position = editor.selections.newest::(cx).head(); + (buffer, cursor_position) + }); let new_diagnostic = buffer - .snapshot(cx) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position, false) .filter(|entry| !entry.range.is_empty()) .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 50d367d730..2e88df6b92 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9629,8 +9629,8 @@ impl Editor { let Some(provider) = self.semantics_provider.clone() else { return Task::ready(Ok(Navigated::No)); }; - let buffer = self.buffer.read(cx); let head = self.selections.newest::(cx).head(); + let buffer = self.buffer.read(cx); let (buffer, head) = if let Some(text_anchor) = buffer.text_anchor_for_position(head, cx) { text_anchor } else { @@ -9937,8 +9937,8 @@ impl Editor { _: &FindAllReferences, cx: &mut ViewContext, ) -> Option>> { - let multi_buffer = self.buffer.read(cx); let selection = self.selections.newest::(cx); + let multi_buffer = self.buffer.read(cx); let head = selection.head(); let multi_buffer_snapshot = multi_buffer.snapshot(cx); @@ -10345,8 +10345,9 @@ impl Editor { self.show_local_selections = true; if moving_cursor { - let rename_editor = rename.editor.read(cx); - let cursor_in_rename_editor = rename_editor.selections.newest::(cx).head(); + let cursor_in_rename_editor = 
rename.editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); // Update the selection to match the position of the selection inside // the rename editor. @@ -11592,9 +11593,9 @@ impl Editor { } pub fn copy_file_location(&mut self, _: &CopyFileLocation, cx: &mut ViewContext) { + let selection = self.selections.newest::(cx).start.row + 1; if let Some(file) = self.target_file(cx) { if let Some(path) = file.path().to_str() { - let selection = self.selections.newest::(cx).start.row + 1; cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); } } @@ -12370,9 +12371,10 @@ impl Editor { return; }; + let selections = self.selections.all::(cx); let buffer = self.buffer.read(cx); let mut new_selections_by_buffer = HashMap::default(); - for selection in self.selections.all::(cx) { + for selection in selections { for (buffer, range, _) in buffer.range_to_buffer_ranges(selection.start..selection.end, cx) { @@ -12417,6 +12419,7 @@ impl Editor { } fn open_excerpts_common(&mut self, split: bool, cx: &mut ViewContext) { + let selections = self.selections.all::(cx); let buffer = self.buffer.read(cx); if buffer.is_singleton() { cx.propagate(); @@ -12429,7 +12432,7 @@ impl Editor { }; let mut new_selections_by_buffer = HashMap::default(); - for selection in self.selections.all::(cx) { + for selection in selections { for (mut buffer_handle, mut range, _) in buffer.range_to_buffer_ranges(selection.range(), cx) { @@ -12545,7 +12548,7 @@ impl Editor { fn selection_replacement_ranges( &self, range: Range, - cx: &AppContext, + cx: &mut AppContext, ) -> Vec> { let selections = self.selections.all::(cx); let newest_selection = selections diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 489fe4c5ed..ac4d5d2340 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -824,129 +824,131 @@ impl EditorElement { let mut selections: Vec<(PlayerColor, Vec)> = Vec::new(); let mut active_rows = 
BTreeMap::new(); let mut newest_selection_head = None; - let editor = self.editor.read(cx); + self.editor.update(cx, |editor, cx| { + if editor.show_local_selections { + let mut local_selections: Vec> = editor + .selections + .disjoint_in_range(start_anchor..end_anchor, cx); + local_selections.extend(editor.selections.pending(cx)); + let mut layouts = Vec::new(); + let newest = editor.selections.newest(cx); + for selection in local_selections.drain(..) { + let is_empty = selection.start == selection.end; + let is_newest = selection == newest; - if editor.show_local_selections { - let mut local_selections: Vec> = editor - .selections - .disjoint_in_range(start_anchor..end_anchor, cx); - local_selections.extend(editor.selections.pending(cx)); - let mut layouts = Vec::new(); - let newest = editor.selections.newest(cx); - for selection in local_selections.drain(..) { - let is_empty = selection.start == selection.end; - let is_newest = selection == newest; + let layout = SelectionLayout::new( + selection, + editor.selections.line_mode, + editor.cursor_shape, + &snapshot.display_snapshot, + is_newest, + editor.leader_peer_id.is_none(), + None, + ); + if is_newest { + newest_selection_head = Some(layout.head); + } - let layout = SelectionLayout::new( - selection, - editor.selections.line_mode, - editor.cursor_shape, - &snapshot.display_snapshot, - is_newest, - editor.leader_peer_id.is_none(), - None, - ); - if is_newest { - newest_selection_head = Some(layout.head); + for row in cmp::max(layout.active_rows.start.0, start_row.0) + ..=cmp::min(layout.active_rows.end.0, end_row.0) + { + let contains_non_empty_selection = + active_rows.entry(DisplayRow(row)).or_insert(!is_empty); + *contains_non_empty_selection |= !is_empty; + } + layouts.push(layout); } - for row in cmp::max(layout.active_rows.start.0, start_row.0) - ..=cmp::min(layout.active_rows.end.0, end_row.0) - { - let contains_non_empty_selection = - active_rows.entry(DisplayRow(row)).or_insert(!is_empty); - 
*contains_non_empty_selection |= !is_empty; - } - layouts.push(layout); + let player = if editor.read_only(cx) { + cx.theme().players().read_only() + } else { + self.style.local_player + }; + + selections.push((player, layouts)); } - let player = if editor.read_only(cx) { - cx.theme().players().read_only() - } else { - self.style.local_player - }; - - selections.push((player, layouts)); - } - - if let Some(collaboration_hub) = &editor.collaboration_hub { - // When following someone, render the local selections in their color. - if let Some(leader_id) = editor.leader_peer_id { - if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id) { - if let Some(participant_index) = collaboration_hub - .user_participant_indices(cx) - .get(&collaborator.user_id) + if let Some(collaboration_hub) = &editor.collaboration_hub { + // When following someone, render the local selections in their color. + if let Some(leader_id) = editor.leader_peer_id { + if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id) { - if let Some((local_selection_style, _)) = selections.first_mut() { - *local_selection_style = cx - .theme() - .players() - .color_for_participant(participant_index.0); + if let Some(participant_index) = collaboration_hub + .user_participant_indices(cx) + .get(&collaborator.user_id) + { + if let Some((local_selection_style, _)) = selections.first_mut() { + *local_selection_style = cx + .theme() + .players() + .color_for_participant(participant_index.0); + } } } } - } - let mut remote_selections = HashMap::default(); - for selection in snapshot.remote_selections_in_range( - &(start_anchor..end_anchor), - collaboration_hub.as_ref(), - cx, - ) { - let selection_style = Self::get_participant_color(selection.participant_index, cx); + let mut remote_selections = HashMap::default(); + for selection in snapshot.remote_selections_in_range( + &(start_anchor..end_anchor), + collaboration_hub.as_ref(), + cx, + ) { + let selection_style = + 
Self::get_participant_color(selection.participant_index, cx); - // Don't re-render the leader's selections, since the local selections - // match theirs. - if Some(selection.peer_id) == editor.leader_peer_id { - continue; + // Don't re-render the leader's selections, since the local selections + // match theirs. + if Some(selection.peer_id) == editor.leader_peer_id { + continue; + } + let key = HoveredCursor { + replica_id: selection.replica_id, + selection_id: selection.selection.id, + }; + + let is_shown = + editor.show_cursor_names || editor.hovered_cursors.contains_key(&key); + + remote_selections + .entry(selection.replica_id) + .or_insert((selection_style, Vec::new())) + .1 + .push(SelectionLayout::new( + selection.selection, + selection.line_mode, + selection.cursor_shape, + &snapshot.display_snapshot, + false, + false, + if is_shown { selection.user_name } else { None }, + )); } - let key = HoveredCursor { - replica_id: selection.replica_id, - selection_id: selection.selection.id, + + selections.extend(remote_selections.into_values()); + } else if !editor.is_focused(cx) && editor.show_cursor_when_unfocused { + let player = if editor.read_only(cx) { + cx.theme().players().read_only() + } else { + self.style.local_player }; - - let is_shown = - editor.show_cursor_names || editor.hovered_cursors.contains_key(&key); - - remote_selections - .entry(selection.replica_id) - .or_insert((selection_style, Vec::new())) - .1 - .push(SelectionLayout::new( - selection.selection, - selection.line_mode, - selection.cursor_shape, - &snapshot.display_snapshot, - false, - false, - if is_shown { selection.user_name } else { None }, - )); + let layouts = snapshot + .buffer_snapshot + .selections_in_range(&(start_anchor..end_anchor), true) + .map(move |(_, line_mode, cursor_shape, selection)| { + SelectionLayout::new( + selection, + line_mode, + cursor_shape, + &snapshot.display_snapshot, + false, + false, + None, + ) + }) + .collect::>(); + selections.push((player, layouts)); } 
- - selections.extend(remote_selections.into_values()); - } else if !editor.is_focused(cx) && editor.show_cursor_when_unfocused { - let player = if editor.read_only(cx) { - cx.theme().players().read_only() - } else { - self.style.local_player - }; - let layouts = snapshot - .buffer_snapshot - .selections_in_range(&(start_anchor..end_anchor), true) - .map(move |(_, line_mode, cursor_shape, selection)| { - SelectionLayout::new( - selection, - line_mode, - cursor_shape, - &snapshot.display_snapshot, - false, - false, - None, - ) - }) - .collect::>(); - selections.push((player, layouts)); - } + }); (selections, active_rows, newest_selection_head) } @@ -1848,23 +1850,25 @@ impl EditorElement { return Vec::new(); } - let editor = self.editor.read(cx); - let newest_selection_head = newest_selection_head.unwrap_or_else(|| { - let newest = editor.selections.newest::(cx); - SelectionLayout::new( - newest, - editor.selections.line_mode, - editor.cursor_shape, - &snapshot.display_snapshot, - true, - true, - None, - ) - .head + let (newest_selection_head, is_relative) = self.editor.update(cx, |editor, cx| { + let newest_selection_head = newest_selection_head.unwrap_or_else(|| { + let newest = editor.selections.newest::(cx); + SelectionLayout::new( + newest, + editor.selections.line_mode, + editor.cursor_shape, + &snapshot.display_snapshot, + true, + true, + None, + ) + .head + }); + let is_relative = editor.should_use_relative_line_numbers(cx); + (newest_selection_head, is_relative) }); let font_size = self.style.text.font_size.to_pixels(cx.rem_size()); - let is_relative = editor.should_use_relative_line_numbers(cx); let relative_to = if is_relative { Some(newest_selection_head.row()) } else { diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index d3e4002173..853f014ddb 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -41,9 +41,9 @@ pub(super) fn 
refresh_linked_ranges(this: &mut Editor, cx: &mut ViewContext(cx); let buffer = this.buffer.read(cx); let mut applicable_selections = vec![]; - let selections = this.selections.all::(cx); let snapshot = buffer.snapshot(cx); for selection in selections { let cursor_position = selection.head(); diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index c85e60fdaa..8e1c12b8cd 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -8,14 +8,14 @@ use std::{ use collections::HashMap; use gpui::{AppContext, Model, Pixels}; use itertools::Itertools; -use language::{Bias, Point, Selection, SelectionGoal, TextDimension, ToPoint}; +use language::{Bias, Point, Selection, SelectionGoal, TextDimension}; use util::post_inc; use crate::{ display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint}, movement::TextLayoutDetails, Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode, - ToOffset, + ToOffset, ToPoint, }; #[derive(Debug, Clone)] @@ -96,7 +96,7 @@ impl SelectionsCollection { pub fn pending>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Option> { self.pending_anchor() .as_ref() @@ -107,7 +107,7 @@ impl SelectionsCollection { self.pending.as_ref().map(|pending| pending.mode.clone()) } - pub fn all<'a, D>(&self, cx: &AppContext) -> Vec> + pub fn all<'a, D>(&self, cx: &mut AppContext) -> Vec> where D: 'a + TextDimension + Ord + Sub, { @@ -194,7 +194,7 @@ impl SelectionsCollection { pub fn disjoint_in_range<'a, D>( &self, range: Range, - cx: &AppContext, + cx: &mut AppContext, ) -> Vec> where D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, @@ -239,9 +239,10 @@ impl SelectionsCollection { pub fn newest>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { - resolve(self.newest_anchor(), &self.buffer(cx)) + let buffer = self.buffer(cx); + self.newest_anchor().map(|p| p.summary::(&buffer)) } pub fn 
newest_display(&self, cx: &mut AppContext) -> Selection { @@ -262,9 +263,10 @@ impl SelectionsCollection { pub fn oldest>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { - resolve(self.oldest_anchor(), &self.buffer(cx)) + let buffer = self.buffer(cx); + self.oldest_anchor().map(|p| p.summary::(&buffer)) } pub fn first_anchor(&self) -> Selection { @@ -276,14 +278,14 @@ impl SelectionsCollection { pub fn first>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { self.all(cx).first().unwrap().clone() } pub fn last>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { self.all(cx).last().unwrap().clone() } @@ -298,7 +300,7 @@ impl SelectionsCollection { #[cfg(any(test, feature = "test-support"))] pub fn ranges + std::fmt::Debug>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Vec> { self.all::(cx) .iter() @@ -475,7 +477,7 @@ impl<'a> MutableSelectionsCollection<'a> { where T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub + std::marker::Copy, { - let mut selections = self.all(self.cx); + let mut selections = self.collection.all(self.cx); let mut start = range.start.to_offset(&self.buffer()); let mut end = range.end.to_offset(&self.buffer()); let reversed = if start > end { @@ -649,6 +651,7 @@ impl<'a> MutableSelectionsCollection<'a> { let mut changed = false; let display_map = self.display_map(); let selections = self + .collection .all::(self.cx) .into_iter() .map(|selection| { @@ -676,6 +679,7 @@ impl<'a> MutableSelectionsCollection<'a> { let mut changed = false; let snapshot = self.buffer().clone(); let selections = self + .collection .all::(self.cx) .into_iter() .map(|selection| { @@ -869,10 +873,3 @@ where goal: s.goal, }) } - -fn resolve>( - selection: &Selection, - buffer: &MultiBufferSnapshot, -) -> Selection { - selection.map(|p| p.summary::(buffer)) -} diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 63e0f2b079..80be035770 100644 --- 
a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -37,34 +37,34 @@ impl CursorPosition { } fn update_position(&mut self, editor: View, cx: &mut ViewContext) { - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); + editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); - self.selected_count = Default::default(); - self.selected_count.selections = editor.selections.count(); - let mut last_selection: Option> = None; - for selection in editor.selections.all::(cx) { - self.selected_count.characters += buffer - .text_for_range(selection.start..selection.end) - .map(|t| t.chars().count()) - .sum::(); - if last_selection - .as_ref() - .map_or(true, |last_selection| selection.id > last_selection.id) - { - last_selection = Some(selection); - } - } - for selection in editor.selections.all::(cx) { - if selection.end != selection.start { - self.selected_count.lines += (selection.end.row - selection.start.row) as usize; - if selection.end.column != 0 { - self.selected_count.lines += 1; + self.selected_count = Default::default(); + self.selected_count.selections = editor.selections.count(); + let mut last_selection: Option> = None; + for selection in editor.selections.all::(cx) { + self.selected_count.characters += buffer + .text_for_range(selection.start..selection.end) + .map(|t| t.chars().count()) + .sum::(); + if last_selection + .as_ref() + .map_or(true, |last_selection| selection.id > last_selection.id) + { + last_selection = Some(selection); } } - } - self.position = last_selection.map(|s| s.head().to_point(&buffer)); - + for selection in editor.selections.all::(cx) { + if selection.end != selection.start { + self.selected_count.lines += (selection.end.row - selection.start.row) as usize; + if selection.end.column != 0 { + self.selected_count.lines += 1; + } + } + } + self.position = last_selection.map(|s| s.head().to_point(&buffer)); + }); cx.notify(); } diff --git 
a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 0e9482b759..805c1f0d52 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -56,8 +56,8 @@ impl GoToLine { } pub fn new(active_editor: View, cx: &mut ViewContext) -> Self { - let editor = active_editor.read(cx); - let cursor = editor.selections.last::(cx).head(); + let cursor = + active_editor.update(cx, |editor, cx| editor.selections.last::(cx).head()); let line = cursor.row + 1; let column = cursor.column + 1; diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index e2c4903e19..b9c960c9c3 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -128,12 +128,14 @@ impl SyntaxTreeView { fn editor_updated(&mut self, did_reparse: bool, cx: &mut ViewContext) -> Option<()> { // Find which excerpt the cursor is in, and the position within that excerpted buffer. let editor_state = self.editor.as_mut()?; - let editor = &editor_state.editor.read(cx); - let selection_range = editor.selections.last::(cx).range(); - let multibuffer = editor.buffer().read(cx); - let (buffer, range, excerpt_id) = multibuffer - .range_to_buffer_ranges(selection_range, cx) - .pop()?; + let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| { + let selection_range = editor.selections.last::(cx).range(); + editor + .buffer() + .read(cx) + .range_to_buffer_ranges(selection_range, cx) + .pop() + })?; // If the cursor has moved into a different excerpt, retrieve a new syntax layer // from that buffer. 
diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 81145afa3f..7e8cc42dcf 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -301,8 +301,8 @@ impl MarkdownPreviewView { this.parse_markdown_from_active_editor(true, cx); } EditorEvent::SelectionsChanged { .. } => { - let editor = editor.read(cx); - let selection_range = editor.selections.last::(cx).range(); + let selection_range = + editor.update(cx, |editor, cx| editor.selections.last::(cx).range()); this.selected_block = this.get_block_index_under_cursor(selection_range); this.list_state.scroll_to_reveal_item(this.selected_block); cx.notify(); diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 1d82d06ad8..18965fe048 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -194,9 +194,11 @@ impl PickerDelegate for OutlineViewDelegate { }) .collect(); - let editor = self.active_editor.read(cx); - let cursor_offset = editor.selections.newest::(cx).head(); - let buffer = editor.buffer().read(cx).snapshot(cx); + let (buffer, cursor_offset) = self.active_editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let cursor_offset = editor.selections.newest::(cx).head(); + (buffer, cursor_offset) + }); selected_index = self .outline .items diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 83eb7347ce..10ca2b0712 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -2410,11 +2410,9 @@ impl OutlinePanel { editor: &View, cx: &mut ViewContext, ) -> Option { - let selection = editor - .read(cx) - .selections - .newest::(cx) - .head(); + let selection = editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); let editor_snapshot = editor.update(cx, |editor, cx| 
editor.snapshot(cx)); let multi_buffer = editor.read(cx).buffer(); let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 6c86257f30..e07958d0e4 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; use editor::Editor; -use gpui::{prelude::*, AppContext, Entity, View, WeakView, WindowContext}; +use gpui::{prelude::*, Entity, View, WeakView, WindowContext}; use language::{BufferSnapshot, Language, LanguageName, Point}; use crate::repl_store::ReplStore; @@ -103,7 +103,7 @@ pub enum SessionSupport { Unsupported, } -pub fn session(editor: WeakView, cx: &mut AppContext) -> SessionSupport { +pub fn session(editor: WeakView, cx: &mut WindowContext) -> SessionSupport { let store = ReplStore::global(cx); let entity_id = editor.entity_id(); @@ -311,17 +311,21 @@ fn language_supported(language: &Arc) -> bool { } } -fn get_language(editor: WeakView, cx: &mut AppContext) -> Option> { - let editor = editor.upgrade()?; - let selection = editor.read(cx).selections.newest::(cx); - let buffer = editor.read(cx).buffer().read(cx).snapshot(cx); - buffer.language_at(selection.head()).cloned() +fn get_language(editor: WeakView, cx: &mut WindowContext) -> Option> { + editor + .update(cx, |editor, cx| { + let selection = editor.selections.newest::(cx); + let buffer = editor.buffer().read(cx).snapshot(cx); + buffer.language_at(selection.head()).cloned() + }) + .ok() + .flatten() } #[cfg(test)] mod tests { use super::*; - use gpui::Context; + use gpui::{AppContext, Context}; use indoc::indoc; use language::{Buffer, Language, LanguageConfig, LanguageRegistry}; diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index b61cb405e1..f9dfcdd2c3 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -281,7 +281,7 @@ impl VimGlobals { &mut self, register: Option, editor: Option<&mut Editor>, 
- cx: &ViewContext, + cx: &mut ViewContext, ) -> Option { let Some(register) = register.filter(|reg| *reg != '"') else { let setting = VimSettings::get_global(cx).use_system_clipboard; diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 86a52aca25..6ec708d8b8 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -620,9 +620,11 @@ impl Vim { let Some(editor) = self.editor() else { return; }; + let newest_selection_empty = editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).is_empty() + }); let editor = editor.read(cx); let editor_mode = editor.mode(); - let newest_selection_empty = editor.selections.newest::(cx).is_empty(); if editor_mode == EditorMode::Full && !newest_selection_empty @@ -717,11 +719,12 @@ impl Vim { globals.recorded_count = None; let selections = self.editor().map(|editor| { - let editor = editor.read(cx); - ( - editor.selections.oldest::(cx), - editor.selections.newest::(cx), - ) + editor.update(cx, |editor, cx| { + ( + editor.selections.oldest::(cx), + editor.selections.newest::(cx), + ) + }) }); if let Some((oldest, newest)) = selections { From 5dee43b05cd83d6746b8c8dee5ef8d455190dbf6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 30 Oct 2024 11:00:06 -0400 Subject: [PATCH 76/87] dart: Extract to zed-extensions/dart repository (#19959) This PR extracts the Dart extension to the [zed-extensions/dart](https://github.com/zed-extensions/dart) repository. 
Release Notes: - N/A --- Cargo.lock | 7 - Cargo.toml | 1 - docs/src/languages/dart.md | 2 +- extensions/dart/Cargo.toml | 16 -- extensions/dart/LICENSE-APACHE | 1 - extensions/dart/README.md | 6 - extensions/dart/extension.toml | 16 -- extensions/dart/languages/dart/brackets.scm | 6 - extensions/dart/languages/dart/config.toml | 15 - extensions/dart/languages/dart/highlights.scm | 269 ------------------ extensions/dart/languages/dart/indents.scm | 3 - extensions/dart/languages/dart/outline.scm | 18 -- extensions/dart/languages/dart/runnables.scm | 45 --- extensions/dart/languages/dart/tasks.json | 26 -- extensions/dart/src/dart.rs | 162 ----------- 15 files changed, 1 insertion(+), 592 deletions(-) delete mode 100644 extensions/dart/Cargo.toml delete mode 120000 extensions/dart/LICENSE-APACHE delete mode 100644 extensions/dart/README.md delete mode 100644 extensions/dart/extension.toml delete mode 100644 extensions/dart/languages/dart/brackets.scm delete mode 100644 extensions/dart/languages/dart/config.toml delete mode 100644 extensions/dart/languages/dart/highlights.scm delete mode 100644 extensions/dart/languages/dart/indents.scm delete mode 100644 extensions/dart/languages/dart/outline.scm delete mode 100644 extensions/dart/languages/dart/runnables.scm delete mode 100644 extensions/dart/languages/dart/tasks.json delete mode 100644 extensions/dart/src/dart.rs diff --git a/Cargo.lock b/Cargo.lock index 266c1922bb..039016a50a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15173,13 +15173,6 @@ dependencies = [ "zed_extension_api 0.1.0", ] -[[package]] -name = "zed_dart" -version = "0.1.2" -dependencies = [ - "zed_extension_api 0.1.0", -] - [[package]] name = "zed_deno" version = "0.0.2" diff --git a/Cargo.toml b/Cargo.toml index d67f78dc2b..fda3254cc5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -138,7 +138,6 @@ members = [ "extensions/astro", "extensions/clojure", "extensions/csharp", - "extensions/dart", "extensions/deno", "extensions/elixir", "extensions/elm", diff 
--git a/docs/src/languages/dart.md b/docs/src/languages/dart.md index 6571166b6d..32f312e5dd 100644 --- a/docs/src/languages/dart.md +++ b/docs/src/languages/dart.md @@ -1,6 +1,6 @@ # Dart -Dart support is available through the [Dart extension](https://github.com/zed-industries/zed/tree/main/extensions/dart). +Dart support is available through the [Dart extension](https://github.com/zed-extensions/dart). - Tree Sitter: [UserNobody14/tree-sitter-dart](https://github.com/UserNobody14/tree-sitter-dart) - Language Server: [dart language-server](https://github.com/dart-lang/sdk) diff --git a/extensions/dart/Cargo.toml b/extensions/dart/Cargo.toml deleted file mode 100644 index 8d50e620cc..0000000000 --- a/extensions/dart/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_dart" -version = "0.1.2" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/dart.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/dart/LICENSE-APACHE b/extensions/dart/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3..0000000000 --- a/extensions/dart/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/dart/README.md b/extensions/dart/README.md deleted file mode 100644 index bf6976dd60..0000000000 --- a/extensions/dart/README.md +++ /dev/null @@ -1,6 +0,0 @@ -## Roadmap - -1. Add `dart run` command. -2. Add `dart test` command. -3. Add `flutter test --name` command, to allow running a single test or group of tests. -4. Auto hot reload Flutter app when files change. diff --git a/extensions/dart/extension.toml b/extensions/dart/extension.toml deleted file mode 100644 index 684580e7c0..0000000000 --- a/extensions/dart/extension.toml +++ /dev/null @@ -1,16 +0,0 @@ -id = "dart" -name = "Dart" -description = "Dart support." 
-version = "0.1.2" -schema_version = 1 -authors = ["Abdullah Alsigar ", "Flo ", "ybbond "] -repository = "https://github.com/zed-industries/zed" - -[language_servers.dart] -name = "Dart LSP" -language = "Dart" -languages = ["Dart"] - -[grammars.dart] -repository = "https://github.com/UserNobody14/tree-sitter-dart" -commit = "6da46473ab8accb13da48113f4634e729a71d335" diff --git a/extensions/dart/languages/dart/brackets.scm b/extensions/dart/languages/dart/brackets.scm deleted file mode 100644 index 8d96f95f86..0000000000 --- a/extensions/dart/languages/dart/brackets.scm +++ /dev/null @@ -1,6 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("\"" @open "\"" @close) -("'" @open "'" @close) diff --git a/extensions/dart/languages/dart/config.toml b/extensions/dart/languages/dart/config.toml deleted file mode 100644 index 15420c74f0..0000000000 --- a/extensions/dart/languages/dart/config.toml +++ /dev/null @@ -1,15 +0,0 @@ -name = "Dart" -grammar = "dart" -path_suffixes = ["dart"] -line_comments = ["// ", "/// "] -autoclose_before = ";:.,=}])>" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "<", end = ">", close = true, newline = false}, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["string"] }, - { start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] }, - { start = "`", end = "`", close = true, newline = false, not_in = ["string", "comment"] }, -] diff --git a/extensions/dart/languages/dart/highlights.scm b/extensions/dart/languages/dart/highlights.scm deleted file mode 100644 index e75a86cd79..0000000000 --- a/extensions/dart/languages/dart/highlights.scm +++ /dev/null @@ -1,269 +0,0 @@ -(dotted_identifier_list) 
@string - -; Methods -; -------------------- -(super) @function - -(function_expression_body (identifier) @type) -; ((identifier)(selector (argument_part)) @function) - -(((identifier) @function (#match? @function "^_?[a-z]")) - . (selector . (argument_part))) @function - -; Annotations -; -------------------- -(annotation - name: (identifier) @attribute) - -; Operators and Tokens -; -------------------- -(template_substitution - "$" @punctuation.special - "{" @punctuation.special - "}" @punctuation.special) @none - -(template_substitution - "$" @punctuation.special - (identifier_dollar_escaped) @variable) @none - -(escape_sequence) @string.escape - -[ - "@" - "=>" - ".." - "??" - "==" - "?" - ":" - "&&" - "%" - "<" - ">" - "=" - ">=" - "<=" - "||" - (multiplicative_operator) - (increment_operator) - (is_operator) - (prefix_operator) - (equality_operator) - (additive_operator) - ] @operator - -[ - "(" - ")" - "[" - "]" - "{" - "}" - ] @punctuation.bracket - -; Delimiters -; -------------------- -[ - ";" - "." - "," - ] @punctuation.delimiter - -; Types -; -------------------- -(class_definition - name: (identifier) @type) -(constructor_signature - name: (identifier) @type) -(scoped_identifier - scope: (identifier) @type) -(function_signature - name: (identifier) @function.method) - -(getter_signature - (identifier) @function.method) - -(setter_signature - name: (identifier) @function.method) -(enum_declaration - name: (identifier) @type) -(enum_constant - name: (identifier) @type) -(void_type) @type - -((scoped_identifier - scope: (identifier) @type - name: (identifier) @type) - (#match? @type "^[a-zA-Z]")) - -(type_identifier) @type - -(type_alias - (type_identifier) @type.definition) - -; Variables -; -------------------- -; var keyword -(inferred_type) @keyword - -((identifier) @type - (#match? 
@type "^_?[A-Z].*[a-z]")) - -("Function" @type) - -; properties -(unconditional_assignable_selector - (identifier) @property) - -(conditional_assignable_selector - (identifier) @property) - -; assignments -(assignment_expression - left: (assignable_expression) @variable) - -(this) @variable.builtin - -; Parameters -; -------------------- -(formal_parameter - name: (identifier) @variable.parameter) - -(named_argument - (label - (identifier) @variable.parameter)) - -; Literals -; -------------------- -[ - (hex_integer_literal) - (decimal_integer_literal) - (decimal_floating_point_literal) - ; TODO: inaccessible nodes - ; (octal_integer_literal) - ; (hex_floating_point_literal) - ] @number - -(symbol_literal) @string.special.symbol - -(string_literal) @string -(true) @boolean -(false) @boolean -(null_literal) @constant.builtin - -(comment) @comment - -(documentation_comment) @comment.documentation - -; Keywords -; -------------------- -[ - "import" - "library" - "export" - "as" - "show" - "hide" - ] @keyword.import - -; Reserved words (cannot be used as identifiers) -[ - (case_builtin) - "late" - "required" - "extension" - "on" - "class" - "enum" - "extends" - "in" - "is" - "new" - "super" - "with" - ] @keyword - -"return" @keyword.return - -; Built in identifiers: -; alone these are marked as keywords -[ - "deferred" - "factory" - "get" - "implements" - "interface" - "library" - "operator" - "mixin" - "part" - "set" - "typedef" - ] @keyword - -[ - "async" - "async*" - "sync*" - "await" - "yield" -] @keyword.coroutine - -[ - (const_builtin) - (final_builtin) - "abstract" - "covariant" - "dynamic" - "external" - "static" - "final" - "base" - "sealed" - ] @type.qualifier - -; when used as an identifier: -((identifier) @variable.builtin - (#any-of? 
@variable.builtin - "abstract" - "as" - "covariant" - "deferred" - "dynamic" - "export" - "external" - "factory" - "Function" - "get" - "implements" - "import" - "interface" - "library" - "operator" - "mixin" - "part" - "set" - "static" - "typedef")) - -[ - "if" - "else" - "switch" - "default" -] @keyword.conditional - -[ - "try" - "throw" - "catch" - "finally" - (break_statement) -] @keyword.exception - -[ - "do" - "while" - "continue" - "for" -] @keyword.repeat diff --git a/extensions/dart/languages/dart/indents.scm b/extensions/dart/languages/dart/indents.scm deleted file mode 100644 index 112b414aa4..0000000000 --- a/extensions/dart/languages/dart/indents.scm +++ /dev/null @@ -1,3 +0,0 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/extensions/dart/languages/dart/outline.scm b/extensions/dart/languages/dart/outline.scm deleted file mode 100644 index 4d6f8c1cb7..0000000000 --- a/extensions/dart/languages/dart/outline.scm +++ /dev/null @@ -1,18 +0,0 @@ -(class_definition - "class" @context - name: (_) @name) @item - -(function_signature - name: (_) @name) @item - -(getter_signature - "get" @context - name: (_) @name) @item - -(setter_signature - "set" @context - name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item diff --git a/extensions/dart/languages/dart/runnables.scm b/extensions/dart/languages/dart/runnables.scm deleted file mode 100644 index 509cd38d05..0000000000 --- a/extensions/dart/languages/dart/runnables.scm +++ /dev/null @@ -1,45 +0,0 @@ -; Flutter main -( - ( - (import_or_export - (library_import - (import_specification - ("import" - (configurable_uri - (uri - (string_literal) @_import - (#match? @_import "package:flutter/(material|widgets|cupertino).dart") - (#not-match? @_import "package:flutter_test/flutter_test.dart") - (#not-match? @_import "package:test/test.dart") - )))))) - ( - (function_signature - name: (_) @run - ) - (#eq? @run "main") - ) - (#set! 
tag flutter-main) - ) -) - -; Flutter test main -( - ( - (import_or_export - (library_import - (import_specification - ("import" - (configurable_uri - (uri - (string_literal) @_import - (#match? @_import "package:flutter_test/flutter_test.dart") - )))))) - ( - (function_signature - name: (_) @run - ) - (#eq? @run "main") - ) - (#set! tag flutter-test-main) - ) -) diff --git a/extensions/dart/languages/dart/tasks.json b/extensions/dart/languages/dart/tasks.json deleted file mode 100644 index c177d04cec..0000000000 --- a/extensions/dart/languages/dart/tasks.json +++ /dev/null @@ -1,26 +0,0 @@ -[ - { - "label": "flutter run", - "command": "flutter", - "args": ["run"], - "tags": ["flutter-main"] - }, - { - "label": "fvm flutter run", - "command": "fvm flutter", - "args": ["run"], - "tags": ["flutter-main"] - }, - { - "label": "flutter test $ZED_STEM", - "command": "flutter", - "args": ["test", "$ZED_FILE"], - "tags": ["flutter-test-main"] - }, - { - "label": "fvm flutter test $ZED_STEM", - "command": "fvm flutter", - "args": ["test", "$ZED_FILE"], - "tags": ["flutter-test-main"] - } -] diff --git a/extensions/dart/src/dart.rs b/extensions/dart/src/dart.rs deleted file mode 100644 index 38a2cf25a6..0000000000 --- a/extensions/dart/src/dart.rs +++ /dev/null @@ -1,162 +0,0 @@ -use zed::lsp::CompletionKind; -use zed::settings::LspSettings; -use zed::{CodeLabel, CodeLabelSpan}; -use zed_extension_api::{self as zed, serde_json, Result}; - -struct DartBinary { - pub path: String, - pub args: Option>, -} - -struct DartExtension; - -impl DartExtension { - fn language_server_binary( - &mut self, - _language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary_settings = LspSettings::for_worktree("dart", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.binary); - let binary_args = binary_settings - .as_ref() - .and_then(|binary_settings| binary_settings.arguments.clone()); - - if let Some(path) = 
binary_settings.and_then(|binary_settings| binary_settings.path) { - return Ok(DartBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = worktree.which("dart") { - return Ok(DartBinary { - path, - args: binary_args, - }); - } - - Err( - "dart must be installed from dart.dev/get-dart or pointed to by the LSP binary settings" - .to_string(), - ) - } -} - -impl zed::Extension for DartExtension { - fn new() -> Self { - Self - } - - fn language_server_command( - &mut self, - language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let dart_binary = self.language_server_binary(language_server_id, worktree)?; - - Ok(zed::Command { - command: dart_binary.path, - args: dart_binary.args.unwrap_or_else(|| { - vec!["language-server".to_string(), "--protocol=lsp".to_string()] - }), - env: Default::default(), - }) - } - - fn language_server_workspace_configuration( - &mut self, - _language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result> { - let settings = LspSettings::for_worktree("dart", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.settings.clone()) - .unwrap_or_default(); - - Ok(Some(serde_json::json!({ - "dart": settings - }))) - } - - fn label_for_completion( - &self, - _language_server_id: &zed::LanguageServerId, - completion: zed::lsp::Completion, - ) -> Option { - let arrow = " → "; - - match completion.kind? 
{ - CompletionKind::Class => Some(CodeLabel { - filter_range: (0..completion.label.len()).into(), - spans: vec![CodeLabelSpan::literal( - completion.label, - Some("type".into()), - )], - code: String::new(), - }), - CompletionKind::Function | CompletionKind::Constructor | CompletionKind::Method => { - let mut parts = completion.detail.as_ref()?.split(arrow); - let (name, _) = completion.label.split_once('(')?; - let parameter_list = parts.next()?; - let return_type = parts.next()?; - let fn_name = " a"; - let fat_arrow = " => "; - let call_expr = "();"; - - let code = - format!("{return_type}{fn_name}{parameter_list}{fat_arrow}{name}{call_expr}"); - - let parameter_list_start = return_type.len() + fn_name.len(); - - Some(CodeLabel { - spans: vec![ - CodeLabelSpan::code_range( - code.len() - call_expr.len() - name.len()..code.len() - call_expr.len(), - ), - CodeLabelSpan::code_range( - parameter_list_start..parameter_list_start + parameter_list.len(), - ), - CodeLabelSpan::literal(arrow, None), - CodeLabelSpan::code_range(0..return_type.len()), - ], - filter_range: (0..name.len()).into(), - code, - }) - } - CompletionKind::Property => { - let class_start = "class A {"; - let get = " get "; - let property_end = " => a; }"; - let ty = completion.detail?; - let name = completion.label; - - let code = format!("{class_start}{ty}{get}{name}{property_end}"); - let name_start = class_start.len() + ty.len() + get.len(); - - Some(CodeLabel { - spans: vec![ - CodeLabelSpan::code_range(name_start..name_start + name.len()), - CodeLabelSpan::literal(arrow, None), - CodeLabelSpan::code_range(class_start.len()..class_start.len() + ty.len()), - ], - filter_range: (0..name.len()).into(), - code, - }) - } - CompletionKind::Variable => { - let name = completion.label; - - Some(CodeLabel { - filter_range: (0..name.len()).into(), - spans: vec![CodeLabelSpan::literal(name, Some("variable".into()))], - code: String::new(), - }) - } - _ => None, - } - } -} - 
-zed::register_extension!(DartExtension); From 662a4440cc8ca42dfd00af79bd8a22a19f2086ce Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 30 Oct 2024 11:06:39 -0400 Subject: [PATCH 77/87] v0.161.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 039016a50a..319d4e983d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15033,7 +15033,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.160.0" +version = "0.161.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 272d423f24..2e6fd1b237 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.160.0" +version = "0.161.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 515fd7b75f532d530809f3749cdc975482a8c610 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 30 Oct 2024 11:16:44 -0400 Subject: [PATCH 78/87] git_hosting_providers: Fix support for GitLab remotes containing subgroups (#19962) This PR fixes the support for GitLab remote URLs containing subgroups. Reported in https://github.com/zed-industries/zed/issues/18012#issuecomment-2446206256. 
Release Notes: - N/A --- .../src/providers/gitlab.rs | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index 1e7bdbb88b..7910379ef0 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -77,9 +77,9 @@ impl GitHostingProvider for Gitlab { return None; } - let mut path_segments = url.path_segments()?; - let owner = path_segments.next()?; - let repo = path_segments.next()?.trim_end_matches(".git"); + let mut path_segments = url.path_segments()?.collect::>(); + let repo = path_segments.pop()?.trim_end_matches(".git"); + let owner = path_segments.join("/"); Some(ParsedGitRemote { owner: owner.into(), @@ -178,6 +178,23 @@ mod tests { ); } + #[test] + fn test_parse_remote_url_given_self_hosted_https_url_with_subgroup() { + let remote_url = "https://gitlab.my-enterprise.com/group/subgroup/zed.git"; + let parsed_remote = Gitlab::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "group/subgroup".into(), + repo: "zed".into(), + } + ); + } + #[test] fn test_build_gitlab_permalink() { let permalink = Gitlab::new().build_permalink( From c564a4a26cb94904c5f849cd5bb5eaaf3f881692 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Wed, 30 Oct 2024 11:38:43 -0400 Subject: [PATCH 79/87] Require /file or /tab when using Suggest Edits (#19960) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Now if you try to do Suggest Edits without a file context, you see this (and it doesn't run the query). 
Screenshot 2024-10-30 at 10 51 24 AM Release Notes: - N/A --------- Co-authored-by: Antonio --- .../{edit_workflow.hbs => suggest_edits.hbs} | 0 crates/assistant/src/assistant_panel.rs | 42 ++++++++++++++++++- crates/assistant/src/context.rs | 21 ++++++++-- crates/assistant/src/prompts.rs | 2 +- docs/src/assistant/prompting.md | 4 +- 5 files changed, 62 insertions(+), 7 deletions(-) rename assets/prompts/{edit_workflow.hbs => suggest_edits.hbs} (100%) diff --git a/assets/prompts/edit_workflow.hbs b/assets/prompts/suggest_edits.hbs similarity index 100% rename from assets/prompts/edit_workflow.hbs rename to assets/prompts/suggest_edits.hbs diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 19d92bbc02..3d498d94eb 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -1462,6 +1462,7 @@ type MessageHeader = MessageMetadata; #[derive(Clone)] enum AssistError { + FileRequired, PaymentRequired, MaxMonthlySpendReached, Message(SharedString), @@ -1628,7 +1629,10 @@ impl ContextEditor { self.last_error = None; - if let Some(user_message) = self + if request_type == RequestType::SuggestEdits && !self.context.read(cx).contains_files(cx) { + self.last_error = Some(AssistError::FileRequired); + cx.notify(); + } else if let Some(user_message) = self .context .update(cx, |context, cx| context.assist(request_type, cx)) { @@ -3740,6 +3744,7 @@ impl ContextEditor { .elevation_2(cx) .occlude() .child(match last_error { + AssistError::FileRequired => self.render_file_required_error(cx), AssistError::PaymentRequired => self.render_payment_required_error(cx), AssistError::MaxMonthlySpendReached => { self.render_max_monthly_spend_reached_error(cx) @@ -3752,6 +3757,41 @@ impl ContextEditor { ) } + fn render_file_required_error(&self, cx: &mut ViewContext) -> AnyElement { + v_flex() + .gap_0p5() + .child( + h_flex() + .gap_1p5() + .items_center() + 
.child(Icon::new(IconName::Warning).color(Color::Warning)) + .child( + Label::new("Suggest Edits needs a file to edit").weight(FontWeight::MEDIUM), + ), + ) + .child( + div() + .id("error-message") + .max_h_24() + .overflow_y_scroll() + .child(Label::new( + "To include files, type /file or /tab in your prompt.", + )), + ) + .child( + h_flex() + .justify_end() + .mt_1() + .child(Button::new("dismiss", "Dismiss").on_click(cx.listener( + |this, _, cx| { + this.last_error = None; + cx.notify(); + }, + ))), + ) + .into_any() + } + fn render_payment_required_error(&self, cx: &mut ViewContext) -> AnyElement { const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used."; diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index f5e8174748..a1de9d3b40 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -2,8 +2,9 @@ mod context_tests; use crate::{ - prompts::PromptBuilder, slash_command::SlashCommandLine, AssistantEdit, AssistantPatch, - AssistantPatchStatus, MessageId, MessageStatus, + prompts::PromptBuilder, + slash_command::{file_command::FileCommandMetadata, SlashCommandLine}, + AssistantEdit, AssistantPatch, AssistantPatchStatus, MessageId, MessageStatus, }; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ @@ -66,7 +67,7 @@ impl ContextId { } } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum RequestType { /// Request a normal chat response from the model. 
Chat, @@ -989,6 +990,20 @@ impl Context { &self.slash_command_output_sections } + pub fn contains_files(&self, cx: &AppContext) -> bool { + let buffer = self.buffer.read(cx); + self.slash_command_output_sections.iter().any(|section| { + section.is_valid(buffer) + && section + .metadata + .as_ref() + .and_then(|metadata| { + serde_json::from_value::(metadata.clone()).ok() + }) + .is_some() + }) + } + pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> { self.pending_tool_uses_by_id.values().collect() } diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 2d0829086c..50fee242ea 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -311,7 +311,7 @@ impl PromptBuilder { } pub fn generate_workflow_prompt(&self) -> Result { - self.handlebars.lock().render("edit_workflow", &()) + self.handlebars.lock().render("suggest_edits", &()) } pub fn generate_project_slash_command_prompt( diff --git a/docs/src/assistant/prompting.md b/docs/src/assistant/prompting.md index 0dca671b47..18bda28bbf 100644 --- a/docs/src/assistant/prompting.md +++ b/docs/src/assistant/prompting.md @@ -137,7 +137,7 @@ Zed has the following internal prompt templates: - `content_prompt.hbs`: Used for generating content in the editor. - `terminal_assistant_prompt.hbs`: Used for the terminal assistant feature. -- `edit_workflow.hbs`: Used for generating the edit workflow prompt. +- `suggest_edits.hbs`: Used for generating the model instructions for the XML Suggest Edits should return. - `step_resolution.hbs`: Used for generating the step resolution prompt. At this point it is unknown if we will expand templates further to be user-creatable. @@ -215,7 +215,7 @@ The following templates can be overridden: given system information and latest terminal output if relevant. ``` -3. `edit_workflow.hbs`: Used for generating the edit workflow prompt. +3. 
`suggest_edits.hbs`: Used for generating the model instructions for the XML Suggest Edits should return. 4. `step_resolution.hbs`: Used for generating the step resolution prompt. From f84f3ffeb7fc1f661f4af4d7c4fd37b8f5e736ff Mon Sep 17 00:00:00 2001 From: Gherman Date: Wed, 30 Oct 2024 15:43:44 +0000 Subject: [PATCH 80/87] docs: Add `linkedProjects` section to Rust docs (#19954) Related to #19897 Adds a section about multi-project workspaces and how to configure rust-analyzer to diagnose them even if the cargo workspace does not list them Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- docs/src/languages/rust.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 330b5fa9d0..2af91f1fdc 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -163,6 +163,23 @@ Here's a snippet for Zed settings.json (the language server will restart automat } ``` +### Multi-project workspaces + +If you want rust-analyzer to analyze multiple Rust projects in the same folder that are not listed in `[members]` in the Cargo workspace, +you can list them in `linkedProjects` in the local project settings: + +```json +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "linkedProjects": ["./path/to/a/Cargo.toml", "./path/to/b/Cargo.toml"] + } + } + } +} +``` + ### Snippets There's a way get custom completion items from rust-analyzer, that will transform the code according to the snippet body: From 7bc4cb98688009826f66781b1cc8f36ad9049da9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 12:11:07 -0400 Subject: [PATCH 81/87] Update Rust crate hyper to v0.14.31 (#19323) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [hyper](https://hyper.rs) 
([source](https://redirect.github.com/hyperium/hyper)) | workspace.dependencies | patch | `0.14.30` -> `0.14.31` | --- ### Release Notes
hyperium/hyper (hyper) ### [`v0.14.31`](https://redirect.github.com/hyperium/hyper/releases/tag/v0.14.31) [Compare Source](https://redirect.github.com/hyperium/hyper/compare/v0.14.30...v0.14.31) #### Bug Fixes - **http1:** improve performance of parsing sequentially partial messages ([97b595e](https://redirect.github.com/hyperium/hyper/commit/97b595e5892c239a195b199f9e7910f582351c44))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 319d4e983d..5b54937bd2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -854,7 +854,7 @@ dependencies = [ "chrono", "futures-util", "http-types", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls 0.24.2", "serde", "serde_json", @@ -1350,7 +1350,7 @@ dependencies = [ "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", @@ -1441,7 +1441,7 @@ dependencies = [ "headers", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "itoa", "matchit", "memchr", @@ -2366,7 +2366,7 @@ dependencies = [ "clickhouse-derive", "clickhouse-rs-cityhash-sys", "futures 0.3.30", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-tls", "lz4", "sealed", @@ -2569,7 +2569,7 @@ dependencies = [ "gpui", "hex", "http_client", - "hyper 0.14.30", + "hyper 0.14.31", "indoc", "jsonwebtoken", "language", @@ -5570,9 +5570,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.30" +version = "0.14.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" 
dependencies = [ "bytes 1.7.2", "futures-channel", @@ -5585,7 +5585,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.7", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -5620,7 +5620,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "log", "rustls 0.21.12", "rustls-native-certs 0.6.3", @@ -5653,7 +5653,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.7.2", - "hyper 0.14.30", + "hyper 0.14.31", "native-tls", "tokio", "tokio-native-tls", @@ -6489,7 +6489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -9660,7 +9660,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-tls", "ipnet", "js-sys", @@ -13452,7 +13452,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "log", "mime", "mime_guess", From cf7b0c8971b53e3587f450827cea62a1d2bf98ae Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 30 Oct 2024 19:09:14 +0200 Subject: [PATCH 82/87] Add scrollbars to outline panel (#19969) Part of https://github.com/zed-industries/zed/issues/15324 ![image](https://github.com/user-attachments/assets/4f32d585-9bd2-46be-8234-3658a71906ee) Repeats the approach used in the project panel. 
Release Notes: - Added scrollbars to outline panel --------- Co-authored-by: Nate Butler --- assets/settings/default.json | 17 + crates/outline_panel/src/outline_panel.rs | 864 ++++++++++++------ .../src/outline_panel_settings.rs | 20 + docs/src/configuring-zed.md | 3 + 4 files changed, 613 insertions(+), 291 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 748a4b12d1..5295052215 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -414,6 +414,23 @@ // 2. Never show indent guides: // "never" "show": "always" + }, + /// Scrollbar-related settings + "scrollbar": { + /// When to show the scrollbar in the project panel. + /// This setting can take four values: + /// + /// 1. null (default): Inherit editor settings + /// 2. Show the scrollbar if there's important information or + /// follow the system's configured behavior (default): + /// "auto" + /// 3. Match the system's configured behavior: + /// "system" + /// 4. Always show the scrollbar: + /// "always" + /// 5. 
Never show the scrollbar: + /// "never" + "show": null } }, "collaboration_panel": { diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 10ca2b0712..6ffac21021 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -5,7 +5,7 @@ use std::{ cmp, hash::Hash, ops::Range, - path::{Path, PathBuf}, + path::{Path, PathBuf, MAIN_SEPARATOR_STR}, sync::{atomic::AtomicBool, Arc, OnceLock}, time::Duration, u32, @@ -17,9 +17,9 @@ use db::kvp::KEY_VALUE_STORE; use editor::{ display_map::ToDisplayPoint, items::{entry_git_aware_label_color, entry_label_color}, - scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor}, - AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, EditorMode, ExcerptId, ExcerptRange, - MultiBufferSnapshot, RangeToAnchorExt, + scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor, ScrollbarAutoHide}, + AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, EditorMode, EditorSettings, ExcerptId, + ExcerptRange, MultiBufferSnapshot, RangeToAnchorExt, ShowScrollbar, }; use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; @@ -27,8 +27,9 @@ use gpui::{ actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, Div, ElementId, EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, - IntoElement, KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, - Render, SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, + IntoElement, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, Model, MouseButton, + MouseDownEvent, ParentElement, Pixels, Point, Render, SharedString, Stateful, + StatefulInteractiveElement as _, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext, WeakView, 
WindowContext, }; use itertools::Itertools; @@ -51,7 +52,8 @@ use workspace::{ ui::{ h_flex, v_flex, ActiveTheme, ButtonCommon, Clickable, Color, ContextMenu, FluentBuilder, HighlightedLabel, Icon, IconButton, IconButtonShape, IconName, IconSize, Label, - LabelCommon, ListItem, Selectable, Spacing, StyledExt, StyledTypography, Tooltip, + LabelCommon, ListItem, Scrollbar, ScrollbarState, Selectable, Spacing, StyledExt, + StyledTypography, Tooltip, }, OpenInTerminal, WeakItemHandle, Workspace, }; @@ -116,6 +118,11 @@ pub struct OutlinePanel { cached_entries: Vec, filter_editor: View, mode: ItemsDisplayMode, + show_scrollbar: bool, + vertical_scrollbar_state: ScrollbarState, + horizontal_scrollbar_state: ScrollbarState, + hide_scrollbar_task: Option>, + max_width_item_index: Option, } enum ItemsDisplayMode { @@ -624,6 +631,9 @@ impl OutlinePanel { let focus_handle = cx.focus_handle(); let focus_subscription = cx.on_focus(&focus_handle, Self::focus_in); + let focus_out_subscription = cx.on_focus_out(&focus_handle, |outline_panel, _, cx| { + outline_panel.hide_scrollbar(cx); + }); let workspace_subscription = cx.subscribe( &workspace .weak_handle() @@ -674,6 +684,8 @@ impl OutlinePanel { } }); + let scroll_handle = UniformListScrollHandle::new(); + let mut outline_panel = Self { mode: ItemsDisplayMode::Outline, active: false, @@ -681,7 +693,14 @@ impl OutlinePanel { workspace: workspace_handle, project, fs: workspace.app_state().fs.clone(), - scroll_handle: UniformListScrollHandle::new(), + show_scrollbar: !Self::should_autohide_scrollbar(cx), + hide_scrollbar_task: None, + vertical_scrollbar_state: ScrollbarState::new(scroll_handle.clone()) + .parent_view(cx.view()), + horizontal_scrollbar_state: ScrollbarState::new(scroll_handle.clone()) + .parent_view(cx.view()), + max_width_item_index: None, + scroll_handle, focus_handle, filter_editor, fs_entries: Vec::new(), @@ -705,6 +724,7 @@ impl OutlinePanel { settings_subscription, icons_subscription, focus_subscription, + 
focus_out_subscription, workspace_subscription, filter_update_subscription, ], @@ -1606,16 +1626,11 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; - let excerpt_range = range.context.to_point(&buffer_snapshot); - let label_element = Label::new(format!( - "Lines {}- {}", - excerpt_range.start.row + 1, - excerpt_range.end.row + 1, - )) - .single_line() - .color(color) - .into_any_element(); + let label = self.excerpt_label(buffer_id, range, cx)?; + let label_element = Label::new(label) + .single_line() + .color(color) + .into_any_element(); Some(self.entry_element( PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, range.clone())), @@ -1628,6 +1643,21 @@ impl OutlinePanel { )) } + fn excerpt_label( + &self, + buffer_id: BufferId, + range: &ExcerptRange, + cx: &AppContext, + ) -> Option { + let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; + let excerpt_range = range.context.to_point(&buffer_snapshot); + Some(format!( + "Lines {}- {}", + excerpt_range.start.row + 1, + excerpt_range.end.row + 1, + )) + } + fn render_outline( &self, buffer_id: BufferId, @@ -2793,10 +2823,11 @@ impl OutlinePanel { else { return; }; - let new_cached_entries = new_cached_entries.await; + let (new_cached_entries, max_width_item_index) = new_cached_entries.await; outline_panel .update(&mut cx, |outline_panel, cx| { outline_panel.cached_entries = new_cached_entries; + outline_panel.max_width_item_index = max_width_item_index; if outline_panel.selected_entry.is_invalidated() { if let Some(new_selected_entry) = outline_panel.active_editor().and_then(|active_editor| { @@ -2819,11 +2850,10 @@ impl OutlinePanel { is_singleton: bool, query: Option, cx: &mut ViewContext<'_, Self>, - ) -> Task> { + ) -> Task<(Vec, Option)> { let project = self.project.clone(); cx.spawn(|outline_panel, mut cx| async move { - let mut entries = Vec::new(); - let mut match_candidates = Vec::new(); + let mut 
generation_state = GenerationState::default(); let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; @@ -2943,8 +2973,7 @@ impl OutlinePanel { folded_dirs, ); outline_panel.push_entry( - &mut entries, - &mut match_candidates, + &mut generation_state, track_matches, new_folded_dirs, folded_depth, @@ -2981,8 +3010,7 @@ impl OutlinePanel { .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, + &mut generation_state, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3006,8 +3034,7 @@ impl OutlinePanel { .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, + &mut generation_state, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3042,8 +3069,7 @@ impl OutlinePanel { && (should_add || (query.is_some() && folded_dirs_entry.is_none())) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, + &mut generation_state, track_matches, PanelEntry::Fs(entry.clone()), depth, @@ -3055,8 +3081,7 @@ impl OutlinePanel { ItemsDisplayMode::Search(_) => { if is_singleton || query.is_some() || (should_add && is_expanded) { outline_panel.add_search_entries( - &mut entries, - &mut match_candidates, + &mut generation_state, entry.clone(), depth, query.clone(), @@ -3082,14 +3107,13 @@ impl OutlinePanel { }; if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider { outline_panel.add_excerpt_entries( + &mut generation_state, buffer_id, entry_excerpts, depth, track_matches, is_singleton, query.as_deref(), - &mut entries, - &mut match_candidates, cx, ); } @@ -3098,13 +3122,12 @@ impl OutlinePanel { if is_singleton && matches!(entry, FsEntry::File(..) 
| FsEntry::ExternalFile(..)) - && !entries.iter().any(|item| { + && !generation_state.entries.iter().any(|item| { matches!(item.entry, PanelEntry::Outline(..) | PanelEntry::Search(_)) }) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, + &mut generation_state, track_matches, PanelEntry::Fs(entry.clone()), 0, @@ -3121,8 +3144,7 @@ impl OutlinePanel { .map_or(true, |parent| parent.expanded); if parent_expanded || query.is_some() { outline_panel.push_entry( - &mut entries, - &mut match_candidates, + &mut generation_state, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3131,15 +3153,20 @@ impl OutlinePanel { } } }) else { - return Vec::new(); + return (Vec::new(), None); }; let Some(query) = query else { - return entries; + return ( + generation_state.entries, + generation_state + .max_width_estimate_and_index + .map(|(_, index)| index), + ); }; let mut matched_ids = match_strings( - &match_candidates, + &generation_state.match_candidates, &query, true, usize::MAX, @@ -3152,7 +3179,7 @@ impl OutlinePanel { .collect::>(); let mut id = 0; - entries.retain_mut(|cached_entry| { + generation_state.entries.retain_mut(|cached_entry| { let retain = match matched_ids.remove(&id) { Some(string_match) => { cached_entry.string_match = Some(string_match); @@ -3164,15 +3191,19 @@ impl OutlinePanel { retain }); - entries + ( + generation_state.entries, + generation_state + .max_width_estimate_and_index + .map(|(_, index)| index), + ) }) } #[allow(clippy::too_many_arguments)] fn push_entry( &self, - entries: &mut Vec, - match_candidates: &mut Vec, + state: &mut GenerationState, track_matches: bool, entry: PanelEntry, depth: usize, @@ -3192,13 +3223,13 @@ impl OutlinePanel { }; if track_matches { - let id = entries.len(); + let id = state.entries.len(); match &entry { PanelEntry::Fs(fs_entry) => { if let Some(file_name) = self.relative_path(fs_entry, cx).as_deref().map(file_name) { - match_candidates.push(StringMatchCandidate { + 
state.match_candidates.push(StringMatchCandidate { id, string: file_name.to_string(), char_bag: file_name.chars().collect(), @@ -3208,7 +3239,7 @@ impl OutlinePanel { PanelEntry::FoldedDirs(worktree_id, entries) => { let dir_names = self.dir_names_string(entries, *worktree_id, cx); { - match_candidates.push(StringMatchCandidate { + state.match_candidates.push(StringMatchCandidate { id, string: dir_names.clone(), char_bag: dir_names.chars().collect(), @@ -3217,7 +3248,7 @@ impl OutlinePanel { } PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Outline(_, _, outline) => { - match_candidates.push(StringMatchCandidate { + state.match_candidates.push(StringMatchCandidate { id, string: outline.text.clone(), char_bag: outline.text.chars().collect(), @@ -3226,7 +3257,7 @@ impl OutlinePanel { OutlineEntry::Excerpt(..) => {} }, PanelEntry::Search(new_search_entry) => { - match_candidates.push(StringMatchCandidate { + state.match_candidates.push(StringMatchCandidate { id, char_bag: new_search_entry.render_data.context_text.chars().collect(), string: new_search_entry.render_data.context_text.clone(), @@ -3234,7 +3265,16 @@ impl OutlinePanel { } } } - entries.push(CachedEntry { + + let width_estimate = self.width_estimate(depth, &entry, cx); + if Some(width_estimate) + > state + .max_width_estimate_and_index + .map(|(estimate, _)| estimate) + { + state.max_width_estimate_and_index = Some((width_estimate, state.entries.len())); + } + state.entries.push(CachedEntry { depth, entry, string_match: None, @@ -3369,14 +3409,13 @@ impl OutlinePanel { #[allow(clippy::too_many_arguments)] fn add_excerpt_entries( &self, + state: &mut GenerationState, buffer_id: BufferId, entries_to_add: &[ExcerptId], parent_depth: usize, track_matches: bool, is_singleton: bool, query: Option<&str>, - entries: &mut Vec, - match_candidates: &mut Vec, cx: &mut ViewContext, ) { if let Some(excerpts) = self.excerpts.get(&buffer_id) { @@ -3386,8 +3425,7 @@ impl OutlinePanel { }; let 
excerpt_depth = parent_depth + 1; self.push_entry( - entries, - match_candidates, + state, track_matches, PanelEntry::Outline(OutlineEntry::Excerpt( buffer_id, @@ -3401,8 +3439,7 @@ impl OutlinePanel { let mut outline_base_depth = excerpt_depth + 1; if is_singleton { outline_base_depth = 0; - entries.clear(); - match_candidates.clear(); + state.clear(); } else if query.is_none() && self .collapsed_entries @@ -3413,8 +3450,7 @@ impl OutlinePanel { for outline in excerpt.iter_outlines() { self.push_entry( - entries, - match_candidates, + state, track_matches, PanelEntry::Outline(OutlineEntry::Outline( buffer_id, @@ -3432,8 +3468,7 @@ impl OutlinePanel { #[allow(clippy::too_many_arguments)] fn add_search_entries( &mut self, - entries: &mut Vec, - match_candidates: &mut Vec, + state: &mut GenerationState, parent_entry: FsEntry, parent_depth: usize, filter_query: Option, @@ -3464,7 +3499,8 @@ impl OutlinePanel { || related_excerpts.contains(&match_range.end.excerpt_id) }); - let previous_search_matches = entries + let previous_search_matches = state + .entries .iter() .skip_while(|entry| { if let PanelEntry::Fs(entry) = &entry.entry { @@ -3519,8 +3555,7 @@ impl OutlinePanel { .collect::>(); for new_search_entry in new_search_entries { self.push_entry( - entries, - match_candidates, + state, filter_query.is_some(), PanelEntry::Search(new_search_entry), depth, @@ -3589,6 +3624,430 @@ impl OutlinePanel { self.autoscroll(cx); cx.notify(); } + + fn render_vertical_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) + || !(self.show_scrollbar || self.vertical_scrollbar_state.is_dragging()) + { + return None; + } + Some( + div() + .occlude() + .id("project-panel-vertical-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + 
cx.listener(|outline_panel, _, cx| { + if !outline_panel.vertical_scrollbar_state.is_dragging() + && !outline_panel.focus_handle.contains_focused(cx) + { + outline_panel.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .h_full() + .absolute() + .right_1() + .top_1() + .bottom_0() + .w(px(12.)) + .cursor_default() + .children(Scrollbar::vertical(self.vertical_scrollbar_state.clone())), + ) + } + + fn render_horizontal_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) + || !(self.show_scrollbar || self.horizontal_scrollbar_state.is_dragging()) + { + return None; + } + + let scroll_handle = self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|size| size.contents.width > size.item.width)? + .contents + .width + .0 as f64; + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + + Some( + div() + .occlude() + .id("project-panel-horizontal-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + cx.listener(|outline_panel, _, cx| { + if !outline_panel.horizontal_scrollbar_state.is_dragging() + && !outline_panel.focus_handle.contains_focused(cx) + { + outline_panel.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .w_full() + .absolute() + .right_1() + .left_1() + .bottom_0() + .h(px(12.)) + .cursor_default() + .when(self.width.is_some(), |this| { + this.children(Scrollbar::horizontal( + self.horizontal_scrollbar_state.clone(), + )) + }), + ) + } + + fn should_show_scrollbar(cx: &AppContext) -> bool { + let show = OutlinePanelSettings::get_global(cx) + .scrollbar + 
.show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => true, + ShowScrollbar::Always => true, + ShowScrollbar::Never => false, + } + } + + fn should_autohide_scrollbar(cx: &AppContext) -> bool { + let show = OutlinePanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => cx + .try_global::() + .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0), + ShowScrollbar::Always => false, + ShowScrollbar::Never => true, + } + } + + fn hide_scrollbar(&mut self, cx: &mut ViewContext) { + const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1); + if !Self::should_autohide_scrollbar(cx) { + return; + } + self.hide_scrollbar_task = Some(cx.spawn(|panel, mut cx| async move { + cx.background_executor() + .timer(SCROLLBAR_SHOW_INTERVAL) + .await; + panel + .update(&mut cx, |panel, cx| { + panel.show_scrollbar = false; + cx.notify(); + }) + .log_err(); + })) + } + + fn width_estimate(&self, depth: usize, entry: &PanelEntry, cx: &AppContext) -> u64 { + let item_text_chars = match entry { + PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => self + .buffer_snapshot_for_id(*buffer_id, cx) + .and_then(|snapshot| { + Some(snapshot.file()?.path().file_name()?.to_string_lossy().len()) + }) + .unwrap_or_default(), + PanelEntry::Fs(FsEntry::Directory(_, directory)) => directory + .path + .file_name() + .map(|name| name.to_string_lossy().len()) + .unwrap_or_default(), + PanelEntry::Fs(FsEntry::File(_, file, _, _)) => file + .path + .file_name() + .map(|name| name.to_string_lossy().len()) + .unwrap_or_default(), + PanelEntry::FoldedDirs(_, dirs) => { + dirs.iter() + .map(|dir| { + dir.path + .file_name() + .map(|name| name.to_string_lossy().len()) + .unwrap_or_default() + }) + .sum::() + + dirs.len().saturating_sub(1) * 
MAIN_SEPARATOR_STR.len() + } + PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, _, range)) => self + .excerpt_label(*buffer_id, range, cx) + .map(|label| label.len()) + .unwrap_or_default(), + PanelEntry::Outline(OutlineEntry::Outline(_, _, outline)) => outline.text.len(), + PanelEntry::Search(search) => search.render_data.context_text.len(), + }; + + (item_text_chars + depth) as u64 + } + + fn render_main_contents( + &mut self, + query: Option, + show_indent_guides: bool, + indent_size: f32, + cx: &mut ViewContext<'_, Self>, + ) -> Div { + let contents = if self.cached_entries.is_empty() { + let header = if self.updating_fs_entries { + "Loading outlines" + } else if query.is_some() { + "No matches for query" + } else { + "No outlines available" + }; + + v_flex() + .flex_1() + .justify_center() + .size_full() + .child(h_flex().justify_center().child(Label::new(header))) + .when_some(query.clone(), |panel, query| { + panel.child(h_flex().justify_center().child(Label::new(query))) + }) + .child( + h_flex() + .pt(Spacing::Small.rems(cx)) + .justify_center() + .child({ + let keystroke = match self.position(cx) { + DockPosition::Left => { + cx.keystroke_text_for(&workspace::ToggleLeftDock) + } + DockPosition::Bottom => { + cx.keystroke_text_for(&workspace::ToggleBottomDock) + } + DockPosition::Right => { + cx.keystroke_text_for(&workspace::ToggleRightDock) + } + }; + Label::new(format!("Toggle this panel with {keystroke}")) + }), + ) + } else { + let list_contents = { + let items_len = self.cached_entries.len(); + let multi_buffer_snapshot = self + .active_editor() + .map(|editor| editor.read(cx).buffer().read(cx).snapshot(cx)); + uniform_list(cx.view().clone(), "entries", items_len, { + move |outline_panel, range, cx| { + let entries = outline_panel.cached_entries.get(range); + entries + .map(|entries| entries.to_vec()) + .unwrap_or_default() + .into_iter() + .filter_map(|cached_entry| match cached_entry.entry { + PanelEntry::Fs(entry) => 
Some(outline_panel.render_entry( + &entry, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )), + PanelEntry::FoldedDirs(worktree_id, entries) => { + Some(outline_panel.render_folded_dirs( + worktree_id, + &entries, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )) + } + PanelEntry::Outline(OutlineEntry::Excerpt( + buffer_id, + excerpt_id, + excerpt, + )) => outline_panel.render_excerpt( + buffer_id, + excerpt_id, + &excerpt, + cached_entry.depth, + cx, + ), + PanelEntry::Outline(OutlineEntry::Outline( + buffer_id, + excerpt_id, + outline, + )) => Some(outline_panel.render_outline( + buffer_id, + excerpt_id, + &outline, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )), + PanelEntry::Search(SearchEntry { + match_range, + render_data, + kind, + .. + }) => Some(outline_panel.render_search_match( + multi_buffer_snapshot.as_ref(), + &match_range, + &render_data, + kind, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )), + }) + .collect() + } + }) + .with_sizing_behavior(ListSizingBehavior::Infer) + .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) + .with_width_from_item(self.max_width_item_index) + .track_scroll(self.scroll_handle.clone()) + .when(show_indent_guides, |list| { + list.with_decoration( + ui::indent_guides( + cx.view().clone(), + px(indent_size), + IndentGuideColors::panel(cx), + |outline_panel, range, _| { + let entries = outline_panel.cached_entries.get(range); + if let Some(entries) = entries { + entries.into_iter().map(|item| item.depth).collect() + } else { + smallvec::SmallVec::new() + } + }, + ) + .with_render_fn( + cx.view().clone(), + move |outline_panel, params, _| { + const LEFT_OFFSET: f32 = 14.; + + let indent_size = params.indent_size; + let item_height = params.item_height; + let active_indent_guide_ix = find_active_indent_guide_ix( + outline_panel, + ¶ms.indent_guides, + ); + + params + .indent_guides + .into_iter() + .enumerate() 
+ .map(|(ix, layout)| { + let bounds = Bounds::new( + point( + px(layout.offset.x as f32) * indent_size + + px(LEFT_OFFSET), + px(layout.offset.y as f32) * item_height, + ), + size(px(1.), px(layout.length as f32) * item_height), + ); + ui::RenderedIndentGuide { + bounds, + layout, + is_active: active_indent_guide_ix == Some(ix), + hitbox: None, + } + }) + .collect() + }, + ), + ) + }) + }; + + v_flex() + .flex_shrink() + .size_full() + .child(list_contents.size_full().flex_shrink()) + .children(self.render_vertical_scrollbar(cx)) + .when_some(self.render_horizontal_scrollbar(cx), |this, scrollbar| { + this.pb_4().child(scrollbar) + }) + } + .children(self.context_menu.as_ref().map(|(menu, position, _)| { + deferred( + anchored() + .position(*position) + .anchor(gpui::AnchorCorner::TopLeft) + .child(menu.clone()), + ) + .with_priority(1) + })); + + v_flex().w_full().flex_1().overflow_hidden().child(contents) + } + + fn render_filter_footer(&mut self, pinned: bool, cx: &mut ViewContext<'_, Self>) -> Div { + v_flex().flex_none().child(horizontal_separator(cx)).child( + h_flex() + .p_2() + .w_full() + .child(self.filter_editor.clone()) + .child( + div().child( + IconButton::new( + "outline-panel-menu", + if pinned { + IconName::Unpin + } else { + IconName::Pin + }, + ) + .tooltip(move |cx| { + Tooltip::text( + if pinned { + "Unpin Outline" + } else { + "Pin Active Outline" + }, + cx, + ) + }) + .shape(IconButtonShape::Square) + .on_click(cx.listener(|outline_panel, _, cx| { + outline_panel.toggle_active_editor_pin(&ToggleActiveEditorPin, cx); + })), + ), + ), + ) + } } fn workspace_active_editor( @@ -3741,17 +4200,34 @@ impl EventEmitter for OutlinePanel {} impl Render for OutlinePanel { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let project = self.project.read(cx); + let (is_local, is_via_ssh) = self + .project + .read_with(cx, |project, _| (project.is_local(), project.is_via_ssh())); let query = self.query(cx); let pinned = self.pinned; let 
settings = OutlinePanelSettings::get_global(cx); let indent_size = settings.indent_size; let show_indent_guides = settings.indent_guides.show == ShowIndentGuides::Always; - let outline_panel = v_flex() + let search_query = match &self.mode { + ItemsDisplayMode::Search(search_query) => Some(search_query), + _ => None, + }; + + v_flex() .id("outline-panel") .size_full() + .overflow_hidden() .relative() + .on_hover(cx.listener(|this, hovered, cx| { + if *hovered { + this.show_scrollbar = true; + this.hide_scrollbar_task.take(); + cx.notify(); + } else if !this.focus_handle.contains_focused(cx) { + this.hide_scrollbar(cx); + } + })) .key_context(self.dispatch_context(cx)) .on_action(cx.listener(Self::open)) .on_action(cx.listener(Self::cancel)) @@ -3769,10 +4245,10 @@ impl Render for OutlinePanel { .on_action(cx.listener(Self::toggle_active_editor_pin)) .on_action(cx.listener(Self::unfold_directory)) .on_action(cx.listener(Self::fold_directory)) - .when(project.is_local(), |el| { + .when(is_local, |el| { el.on_action(cx.listener(Self::reveal_in_finder)) }) - .when(project.is_local() || project.is_via_ssh(), |el| { + .when(is_local || is_via_ssh, |el| { el.on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( @@ -3785,229 +4261,20 @@ impl Render for OutlinePanel { } }), ) - .track_focus(&self.focus_handle(cx)); - - if self.cached_entries.is_empty() { - let header = if self.updating_fs_entries { - "Loading outlines" - } else if query.is_some() { - "No matches for query" - } else { - "No outlines available" - }; - - outline_panel.child( - v_flex() - .justify_center() - .size_full() - .child(h_flex().justify_center().child(Label::new(header))) - .when_some(query.clone(), |panel, query| { - panel.child(h_flex().justify_center().child(Label::new(query))) - }) - .child( - h_flex() - .pt(Spacing::Small.rems(cx)) - .justify_center() - .child({ - let keystroke = match self.position(cx) { - DockPosition::Left => { - cx.keystroke_text_for(&workspace::ToggleLeftDock) - 
} - DockPosition::Bottom => { - cx.keystroke_text_for(&workspace::ToggleBottomDock) - } - DockPosition::Right => { - cx.keystroke_text_for(&workspace::ToggleRightDock) - } - }; - Label::new(format!("Toggle this panel with {keystroke}")) - }), - ), - ) - } else { - let search_query = match &self.mode { - ItemsDisplayMode::Search(search_query) => Some(search_query), - _ => None, - }; - outline_panel - .when_some(search_query, |outline_panel, search_state| { - outline_panel.child( - div() - .mx_2() - .child( - Label::new(format!("Searching: '{}'", search_state.query)) - .color(Color::Muted), - ) - .child(horizontal_separator(cx)), - ) - }) - .child({ - let items_len = self.cached_entries.len(); - let multi_buffer_snapshot = self - .active_editor() - .map(|editor| editor.read(cx).buffer().read(cx).snapshot(cx)); - uniform_list(cx.view().clone(), "entries", items_len, { - move |outline_panel, range, cx| { - let entries = outline_panel.cached_entries.get(range); - entries - .map(|entries| entries.to_vec()) - .unwrap_or_default() - .into_iter() - .filter_map(|cached_entry| match cached_entry.entry { - PanelEntry::Fs(entry) => Some(outline_panel.render_entry( - &entry, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), - PanelEntry::FoldedDirs(worktree_id, entries) => { - Some(outline_panel.render_folded_dirs( - worktree_id, - &entries, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )) - } - PanelEntry::Outline(OutlineEntry::Excerpt( - buffer_id, - excerpt_id, - excerpt, - )) => outline_panel.render_excerpt( - buffer_id, - excerpt_id, - &excerpt, - cached_entry.depth, - cx, - ), - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - outline, - )) => Some(outline_panel.render_outline( - buffer_id, - excerpt_id, - &outline, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), - PanelEntry::Search(SearchEntry { - match_range, - render_data, - kind, - .. 
- }) => Some(outline_panel.render_search_match( - multi_buffer_snapshot.as_ref(), - &match_range, - &render_data, - kind, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), - }) - .collect() - } - }) - .size_full() - .track_scroll(self.scroll_handle.clone()) - .when(show_indent_guides, |list| { - list.with_decoration( - ui::indent_guides( - cx.view().clone(), - px(indent_size), - IndentGuideColors::panel(cx), - |outline_panel, range, _| { - let entries = outline_panel.cached_entries.get(range); - if let Some(entries) = entries { - entries.into_iter().map(|item| item.depth).collect() - } else { - smallvec::SmallVec::new() - } - }, - ) - .with_render_fn( - cx.view().clone(), - move |outline_panel, params, _| { - const LEFT_OFFSET: f32 = 14.; - - let indent_size = params.indent_size; - let item_height = params.item_height; - let active_indent_guide_ix = find_active_indent_guide_ix( - outline_panel, - ¶ms.indent_guides, - ); - - params - .indent_guides - .into_iter() - .enumerate() - .map(|(ix, layout)| { - let bounds = Bounds::new( - point( - px(layout.offset.x as f32) * indent_size - + px(LEFT_OFFSET), - px(layout.offset.y as f32) * item_height, - ), - size( - px(1.), - px(layout.length as f32) * item_height, - ), - ); - ui::RenderedIndentGuide { - bounds, - layout, - is_active: active_indent_guide_ix == Some(ix), - hitbox: None, - } - }) - .collect() - }, - ), + .track_focus(&self.focus_handle(cx)) + .when_some(search_query, |outline_panel, search_state| { + outline_panel.child( + v_flex() + .child( + Label::new(format!("Searching: '{}'", search_state.query)) + .color(Color::Muted) + .mx_2(), ) - }) - }) - } - .children(self.context_menu.as_ref().map(|(menu, position, _)| { - deferred( - anchored() - .position(*position) - .anchor(gpui::AnchorCorner::TopLeft) - .child(menu.clone()), - ) - .with_priority(1) - })) - .child( - v_flex().child(horizontal_separator(cx)).child( - h_flex().p_2().child(self.filter_editor.clone()).child( - div().child( - 
IconButton::new( - "outline-panel-menu", - if pinned { - IconName::Unpin - } else { - IconName::Pin - }, - ) - .tooltip(move |cx| { - Tooltip::text( - if pinned { - "Unpin Outline" - } else { - "Pin Active Outline" - }, - cx, - ) - }) - .shape(IconButtonShape::Square) - .on_click(cx.listener(|outline_panel, _, cx| { - outline_panel.toggle_active_editor_pin(&ToggleActiveEditorPin, cx); - })), - ), - ), - ), - ) + .child(horizontal_separator(cx)), + ) + }) + .child(self.render_main_contents(query, show_indent_guides, indent_size, cx)) + .child(self.render_filter_footer(pinned, cx)) } } @@ -4108,6 +4375,21 @@ fn horizontal_separator(cx: &mut WindowContext) -> Div { div().mx_2().border_primary(cx).border_t_1() } +#[derive(Debug, Default)] +struct GenerationState { + entries: Vec, + match_candidates: Vec, + max_width_estimate_and_index: Option<(u64, usize)>, +} + +impl GenerationState { + fn clear(&mut self) { + self.entries.clear(); + self.match_candidates.clear(); + self.max_width_estimate_and_index = None; + } +} + #[cfg(test)] mod tests { use gpui::{TestAppContext, VisualTestContext, WindowHandle}; diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index d658a55793..2759424c6a 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -1,3 +1,4 @@ +use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -29,6 +30,23 @@ pub struct OutlinePanelSettings { pub indent_guides: IndentGuidesSettings, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, + pub scrollbar: ScrollbarSettings, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ScrollbarSettings { + /// When to show the scrollbar in the project panel. 
+ /// + /// Default: inherits editor scrollbar settings + pub show: Option, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ScrollbarSettingsContent { + /// When to show the scrollbar in the project panel. + /// + /// Default: inherits editor scrollbar settings + pub show: Option>, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -85,6 +103,8 @@ pub struct OutlinePanelSettingsContent { pub auto_fold_dirs: Option, /// Settings related to indent guides in the outline panel. pub indent_guides: Option, + /// Scrollbar-related settings + pub scrollbar: Option, } impl Settings for OutlinePanelSettings { diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index e1c4f698a5..d8105b4537 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2271,6 +2271,9 @@ Run the `theme selector: toggle` action in the command palette to see a current "auto_fold_dirs": true, "indent_guides": { "show": "always" + }, + "scrollbar": { + "show": null } } ``` From ce5222f1df5d01f09e8707ae3b5288638905227f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 30 Oct 2024 11:26:54 -0600 Subject: [PATCH 83/87] Add KeyContextView (#19872) Release Notes: - Added `cmd-shift-p debug: Open Key Context View` to help debug custom key bindings https://github.com/user-attachments/assets/de273c97-5b27-45aa-9ff1-f943b0ed7dfe --- Cargo.lock | 2 + crates/gpui/src/keymap.rs | 12 + crates/gpui/src/keymap/binding.rs | 5 + crates/gpui/src/keymap/context.rs | 34 ++- crates/gpui/src/platform/keystroke.rs | 26 ++ crates/gpui/src/window.rs | 26 +- crates/language_tools/Cargo.toml | 2 + crates/language_tools/src/key_context_view.rs | 280 ++++++++++++++++++ crates/language_tools/src/language_tools.rs | 2 + crates/zed/src/zed.rs | 3 +- crates/zed/src/zed/app_menus.rs | 5 +- crates/zed_actions/src/lib.rs | 1 + 12 files changed, 390 insertions(+), 8 deletions(-) create mode 100644 
crates/language_tools/src/key_context_view.rs diff --git a/Cargo.lock b/Cargo.lock index 5b54937bd2..9a30984724 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6346,6 +6346,7 @@ dependencies = [ "env_logger 0.11.5", "futures 0.3.30", "gpui", + "itertools 0.13.0", "language", "lsp", "project", @@ -6357,6 +6358,7 @@ dependencies = [ "ui", "util", "workspace", + "zed_actions", ] [[package]] diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index 3eaf6ff3a3..9a0c054a07 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -75,6 +75,18 @@ impl Keymap { .filter(move |binding| binding.action().partial_eq(action)) } + /// all bindings for input returns all bindings that might match the input + /// (without checking context) + pub fn all_bindings_for_input(&self, input: &[Keystroke]) -> Vec { + self.bindings() + .rev() + .filter_map(|binding| { + binding.match_keystrokes(input).filter(|pending| !pending)?; + Some(binding.clone()) + }) + .collect() + } + /// bindings_for_input returns a list of bindings that match the given input, /// and a boolean indicating whether or not more bindings might match if /// the input was longer. 
diff --git a/crates/gpui/src/keymap/binding.rs b/crates/gpui/src/keymap/binding.rs index c61210ce25..2fff62c7b6 100644 --- a/crates/gpui/src/keymap/binding.rs +++ b/crates/gpui/src/keymap/binding.rs @@ -69,6 +69,11 @@ impl KeyBinding { pub fn action(&self) -> &dyn Action { self.action.as_ref() } + + /// Get the predicate used to match this binding + pub fn predicate(&self) -> Option<&KeyBindingContextPredicate> { + self.context_predicate.as_ref() + } } impl std::fmt::Debug for KeyBinding { diff --git a/crates/gpui/src/keymap/context.rs b/crates/gpui/src/keymap/context.rs index 2990bff196..fccc02886b 100644 --- a/crates/gpui/src/keymap/context.rs +++ b/crates/gpui/src/keymap/context.rs @@ -11,9 +11,12 @@ use std::fmt; pub struct KeyContext(SmallVec<[ContextEntry; 1]>); #[derive(Clone, Debug, Eq, PartialEq, Hash)] -struct ContextEntry { - key: SharedString, - value: Option, +/// An entry in a KeyContext +pub struct ContextEntry { + /// The key (or name if no value) + pub key: SharedString, + /// The value + pub value: Option, } impl<'a> TryFrom<&'a str> for KeyContext { @@ -39,6 +42,17 @@ impl KeyContext { context } + /// Returns the primary context entry (usually the name of the component) + pub fn primary(&self) -> Option<&ContextEntry> { + self.0.iter().find(|p| p.value.is_none()) + } + + /// Returns everything except the primary context entry. + pub fn secondary(&self) -> impl Iterator { + let primary = self.primary(); + self.0.iter().filter(move |&p| Some(p) != primary) + } + /// Parse a key context from a string. 
/// The key context format is very simple: /// - either a single identifier, such as `StatusBar` @@ -178,6 +192,20 @@ pub enum KeyBindingContextPredicate { ), } +impl fmt::Display for KeyBindingContextPredicate { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Identifier(name) => write!(f, "{}", name), + Self::Equal(left, right) => write!(f, "{} == {}", left, right), + Self::NotEqual(left, right) => write!(f, "{} != {}", left, right), + Self::Not(pred) => write!(f, "!{}", pred), + Self::Child(parent, child) => write!(f, "{} > {}", parent, child), + Self::And(left, right) => write!(f, "({} && {})", left, right), + Self::Or(left, right) => write!(f, "({} || {})", left, right), + } + } +} + impl KeyBindingContextPredicate { /// Parse a string in the same format as the keymap's context field. /// diff --git a/crates/gpui/src/platform/keystroke.rs b/crates/gpui/src/platform/keystroke.rs index 6e0da7dac2..38000f4fb1 100644 --- a/crates/gpui/src/platform/keystroke.rs +++ b/crates/gpui/src/platform/keystroke.rs @@ -121,6 +121,32 @@ impl Keystroke { }) } + /// Produces a representation of this key that Parse can understand. + pub fn unparse(&self) -> String { + let mut str = String::new(); + if self.modifiers.control { + str.push_str("ctrl-"); + } + if self.modifiers.alt { + str.push_str("alt-"); + } + if self.modifiers.platform { + #[cfg(target_os = "macos")] + str.push_str("cmd-"); + + #[cfg(target_os = "linux")] + str.push_str("super-"); + + #[cfg(target_os = "windows")] + str.push_str("win-"); + } + if self.modifiers.shift { + str.push_str("shift-"); + } + str.push_str(&self.key); + str + } + /// Returns true if this keystroke left /// the ime system in an incomplete state. 
pub fn is_ime_in_progress(&self) -> bool { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 2d896f2ee8..e4bea94da0 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3324,17 +3324,18 @@ impl<'a> WindowContext<'a> { return; } - self.pending_input_changed(); self.propagate_event = true; for binding in match_result.bindings { self.dispatch_action_on_node(node_id, binding.action.as_ref()); if !self.propagate_event { self.dispatch_keystroke_observers(event, Some(binding.action)); + self.pending_input_changed(); return; } } - self.finish_dispatch_key_event(event, dispatch_path) + self.finish_dispatch_key_event(event, dispatch_path); + self.pending_input_changed(); } fn finish_dispatch_key_event( @@ -3664,6 +3665,22 @@ impl<'a> WindowContext<'a> { receiver } + /// Returns the current context stack. + pub fn context_stack(&self) -> Vec { + let dispatch_tree = &self.window.rendered_frame.dispatch_tree; + let node_id = self + .window + .focus + .and_then(|focus_id| dispatch_tree.focusable_node_id(focus_id)) + .unwrap_or_else(|| dispatch_tree.root_node_id()); + + dispatch_tree + .dispatch_path(node_id) + .iter() + .filter_map(move |&node_id| dispatch_tree.node(node_id).context.clone()) + .collect() + } + /// Returns all available actions for the focused element. pub fn available_actions(&self) -> Vec> { let node_id = self @@ -3704,6 +3721,11 @@ impl<'a> WindowContext<'a> { ) } + /// Returns key bindings that invoke the given action on the currently focused element. + pub fn all_bindings_for_input(&self, input: &[Keystroke]) -> Vec { + RefCell::borrow(&self.keymap).all_bindings_for_input(input) + } + /// Returns any bindings that would invoke the given action on the given focus handle if it were focused. 
pub fn bindings_for_action_in( &self, diff --git a/crates/language_tools/Cargo.toml b/crates/language_tools/Cargo.toml index d85f5a6e52..285e128eac 100644 --- a/crates/language_tools/Cargo.toml +++ b/crates/language_tools/Cargo.toml @@ -19,6 +19,7 @@ copilot.workspace = true editor.workspace = true futures.workspace = true gpui.workspace = true +itertools.workspace = true language.workspace = true lsp.workspace = true project.workspace = true @@ -28,6 +29,7 @@ theme.workspace = true tree-sitter.workspace = true ui.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/language_tools/src/key_context_view.rs b/crates/language_tools/src/key_context_view.rs new file mode 100644 index 0000000000..19f6de2a84 --- /dev/null +++ b/crates/language_tools/src/key_context_view.rs @@ -0,0 +1,280 @@ +use gpui::{ + actions, Action, AppContext, EventEmitter, FocusHandle, FocusableView, + KeyBindingContextPredicate, KeyContext, Keystroke, MouseButton, Render, Subscription, +}; +use itertools::Itertools; +use serde_json::json; +use ui::{ + div, h_flex, px, v_flex, ButtonCommon, Clickable, FluentBuilder, InteractiveElement, Label, + LabelCommon, LabelSize, ParentElement, SharedString, StatefulInteractiveElement, Styled, + ViewContext, VisualContext, WindowContext, +}; +use ui::{Button, ButtonStyle}; +use workspace::Item; +use workspace::Workspace; + +actions!(debug, [OpenKeyContextView]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(|workspace: &mut Workspace, _| { + workspace.register_action(|workspace, _: &OpenKeyContextView, cx| { + let key_context_view = cx.new_view(KeyContextView::new); + workspace.add_item_to_active_pane(Box::new(key_context_view), None, true, cx) + }); + }) + .detach(); +} + +struct KeyContextView { + pending_keystrokes: Option>, + last_keystrokes: Option, + last_possibilities: Vec<(SharedString, SharedString, Option)>, + 
context_stack: Vec, + focus_handle: FocusHandle, + _subscriptions: [Subscription; 2], +} + +impl KeyContextView { + pub fn new(cx: &mut ViewContext) -> Self { + let sub1 = cx.observe_keystrokes(|this, e, cx| { + let mut pending = this.pending_keystrokes.take().unwrap_or_default(); + pending.push(e.keystroke.clone()); + let mut possibilities = cx.all_bindings_for_input(&pending); + possibilities.reverse(); + this.context_stack = cx.context_stack(); + this.last_keystrokes = Some( + json!(pending.iter().map(|p| p.unparse()).join(" ")) + .to_string() + .into(), + ); + this.last_possibilities = possibilities + .into_iter() + .map(|binding| { + let match_state = if let Some(predicate) = binding.predicate() { + if this.matches(predicate) { + if this.action_matches(&e.action, binding.action()) { + Some(true) + } else { + Some(false) + } + } else { + None + } + } else { + if this.action_matches(&e.action, binding.action()) { + Some(true) + } else { + Some(false) + } + }; + let predicate = if let Some(predicate) = binding.predicate() { + format!("{}", predicate) + } else { + "".to_string() + }; + let mut name = binding.action().name(); + if name == "zed::NoAction" { + name = "(null)" + } + + ( + name.to_owned().into(), + json!(predicate).to_string().into(), + match_state, + ) + }) + .collect(); + }); + let sub2 = cx.observe_pending_input(|this, cx| { + this.pending_keystrokes = cx + .pending_input_keystrokes() + .map(|k| k.iter().cloned().collect()); + if this.pending_keystrokes.is_some() { + this.last_keystrokes.take(); + } + cx.notify(); + }); + + Self { + context_stack: Vec::new(), + pending_keystrokes: None, + last_keystrokes: None, + last_possibilities: Vec::new(), + focus_handle: cx.focus_handle(), + _subscriptions: [sub1, sub2], + } + } +} + +impl EventEmitter<()> for KeyContextView {} + +impl FocusableView for KeyContextView { + fn focus_handle(&self, _: &AppContext) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} +impl KeyContextView { + fn 
set_context_stack(&mut self, stack: Vec, cx: &mut ViewContext) { + self.context_stack = stack; + cx.notify() + } + + fn matches(&self, predicate: &KeyBindingContextPredicate) -> bool { + let mut stack = self.context_stack.clone(); + while !stack.is_empty() { + if predicate.eval(&stack) { + return true; + } + stack.pop(); + } + false + } + + fn action_matches(&self, a: &Option>, b: &dyn Action) -> bool { + if let Some(last_action) = a { + last_action.partial_eq(b) + } else { + b.name() == "zed::NoAction" + } + } +} + +impl Item for KeyContextView { + type Event = (); + + fn to_item_events(_: &Self::Event, _: impl FnMut(workspace::item::ItemEvent)) {} + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some("Keyboard Context".into()) + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + None + } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(Self::new)) + } +} + +impl Render for KeyContextView { + fn render(&mut self, cx: &mut ViewContext) -> impl ui::IntoElement { + use itertools::Itertools; + v_flex() + .id("key-context-view") + .overflow_scroll() + .size_full() + .max_h_full() + .pt_4() + .pl_4() + .track_focus(&self.focus_handle) + .key_context("KeyContextView") + .on_mouse_up_out( + MouseButton::Left, + cx.listener(|this, _, cx| { + this.last_keystrokes.take(); + this.set_context_stack(cx.context_stack(), cx); + }), + ) + .on_mouse_up_out( + MouseButton::Right, + cx.listener(|_, _, cx| { + cx.defer(|this, cx| { + this.last_keystrokes.take(); + this.set_context_stack(cx.context_stack(), cx); + }); + }), + ) + .child(Label::new("Keyboard Context").size(LabelSize::Large)) + .child(Label::new("This view lets you determine the current context stack for creating custom key bindings in Zed. 
When a keyboard shortcut is triggered, it also shows all the possible contexts it could have triggered in, and which one matched.")) + .child( + h_flex() + .mt_4() + .gap_4() + .child( + Button::new("default", "Open Documentation") + .style(ButtonStyle::Filled) + .on_click(|_, cx| cx.open_url("https://zed.dev/docs/key-bindings")), + ) + .child( + Button::new("default", "View default keymap") + .style(ButtonStyle::Filled) + .key_binding(ui::KeyBinding::for_action( + &zed_actions::OpenDefaultKeymap, + cx, + )) + .on_click(|_, cx| { + cx.dispatch_action(workspace::SplitRight.boxed_clone()); + cx.dispatch_action(zed_actions::OpenDefaultKeymap.boxed_clone()); + }), + ) + .child( + Button::new("default", "Edit your keymap") + .style(ButtonStyle::Filled) + .key_binding(ui::KeyBinding::for_action(&zed_actions::OpenKeymap, cx)) + .on_click(|_, cx| { + cx.dispatch_action(workspace::SplitRight.boxed_clone()); + cx.dispatch_action(zed_actions::OpenKeymap.boxed_clone()); + }), + ), + ) + .child( + Label::new("Current Context Stack") + .size(LabelSize::Large) + .mt_8(), + ) + .children({ + cx.context_stack().iter().enumerate().map(|(i, context)| { + let primary = context.primary().map(|e| e.key.clone()).unwrap_or_default(); + let secondary = context + .secondary() + .map(|e| { + if let Some(value) = e.value.as_ref() { + format!("{}={}", e.key, value) + } else { + e.key.to_string() + } + }) + .join(" "); + Label::new(format!("{} {}", primary, secondary)).ml(px(12. 
* (i + 1) as f32)) + }) + }) + .child(Label::new("Last Keystroke").mt_4().size(LabelSize::Large)) + .when_some(self.pending_keystrokes.as_ref(), |el, keystrokes| { + el.child( + Label::new(format!( + "Waiting for more input: {}", + keystrokes.iter().map(|k| k.unparse()).join(" ") + )) + .ml(px(12.)), + ) + }) + .when_some(self.last_keystrokes.as_ref(), |el, keystrokes| { + el.child(Label::new(format!("Typed: {}", keystrokes)).ml_4()) + .children( + self.last_possibilities + .iter() + .map(|(name, predicate, state)| { + let (text, color) = match state { + Some(true) => ("(match)", ui::Color::Success), + Some(false) => ("(low precedence)", ui::Color::Hint), + None => ("(no match)", ui::Color::Error), + }; + h_flex() + .gap_2() + .ml_8() + .child(div().min_w(px(200.)).child(Label::new(name.clone()))) + .child(Label::new(predicate.clone())) + .child(Label::new(text).color(color)) + }), + ) + }) + } +} diff --git a/crates/language_tools/src/language_tools.rs b/crates/language_tools/src/language_tools.rs index 0a1f31f03f..b7a4694cd4 100644 --- a/crates/language_tools/src/language_tools.rs +++ b/crates/language_tools/src/language_tools.rs @@ -1,3 +1,4 @@ +mod key_context_view; mod lsp_log; mod syntax_tree_view; @@ -12,4 +13,5 @@ pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView}; pub fn init(cx: &mut AppContext) { lsp_log::init(cx); syntax_tree_view::init(cx); + key_context_view::init(cx); } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index a5621cfbd8..bbe24bdaaf 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -68,7 +68,6 @@ actions!( Hide, HideOthers, Minimize, - OpenDefaultKeymap, OpenDefaultSettings, OpenProjectSettings, OpenProjectTasks, @@ -474,7 +473,7 @@ pub fn initialize_workspace( .register_action(open_project_tasks_file) .register_action( move |workspace: &mut Workspace, - _: &OpenDefaultKeymap, + _: &zed_actions::OpenDefaultKeymap, cx: &mut ViewContext| { open_bundled_file( workspace, diff --git 
a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index 34c19932dd..5c01724ba7 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -18,7 +18,10 @@ pub fn app_menus() -> Vec { MenuItem::action("Open Settings", super::OpenSettings), MenuItem::action("Open Key Bindings", zed_actions::OpenKeymap), MenuItem::action("Open Default Settings", super::OpenDefaultSettings), - MenuItem::action("Open Default Key Bindings", super::OpenDefaultKeymap), + MenuItem::action( + "Open Default Key Bindings", + zed_actions::OpenDefaultKeymap, + ), MenuItem::action("Open Project Settings", super::OpenProjectSettings), MenuItem::action("Select Theme...", theme_selector::Toggle::default()), ], diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index cedacb6d84..7ea5c923c2 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -26,6 +26,7 @@ actions!( zed, [ OpenSettings, + OpenDefaultKeymap, OpenAccountSettings, OpenServerSettings, Quit, From 4f9217bca0cf5d782f65560d9b49efe92f5e9345 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 30 Oct 2024 11:28:25 -0600 Subject: [PATCH 84/87] Support zed://ssh (#19970) Closes: #15070 Release Notes: - Added support for `zed://ssh//` --- crates/zed/src/zed/open_listener.rs | 3 +++ docs/src/remote-development.md | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index f1cfc43a6a..b37bc78dce 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -47,6 +47,9 @@ impl OpenRequest { this.parse_file_path(file) } else if let Some(file) = url.strip_prefix("zed://file") { this.parse_file_path(file) + } else if let Some(file) = url.strip_prefix("zed://ssh") { + let ssh_url = "ssh:/".to_string() + file; + this.parse_ssh_file_path(&ssh_url, cx)? } else if url.starts_with("ssh://") { this.parse_ssh_file_path(&url, cx)? 
} else if let Some(request_path) = parse_zed_link(&url, cx) { diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index e2bf1c5b66..771be830bc 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -23,7 +23,7 @@ On your local machine, Zed runs its UI, talks to language models, uses Tree-sitt 1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server. > **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos. -For simple cases where you don't need any SSH arguments, you can run `zed ssh://[@][:]/` to open a remote folder/file directly. +For simple cases where you don't need any SSH arguments, you can run `zed ssh://[@][:]/` to open a remote folder/file directly. If you'd like to hotlink into an SSH project, use a link of the format: `zed://ssh/[@][:]/`. 
## Supported platforms From bd187883da1d0da7120b6fd8d63cc0b7cb1c7c78 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 30 Oct 2024 11:55:55 -0600 Subject: [PATCH 85/87] Migration to remove dev servers (#19639) Depends on #19638 Release Notes: - None --- .../20221109000000_test_schema.sql | 28 +----------- .../20241023201725_remove_dev_servers.sql | 6 +++ crates/collab/src/db/queries/projects.rs | 43 ------------------- 3 files changed, 7 insertions(+), 70 deletions(-) create mode 100644 crates/collab/migrations/20241023201725_remove_dev_servers.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index c6bd87a8a5..c59091d66d 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -52,9 +52,7 @@ CREATE TABLE "projects" ( "host_user_id" INTEGER REFERENCES users (id), "host_connection_id" INTEGER, "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, - "unregistered" BOOLEAN NOT NULL DEFAULT FALSE, - "hosted_project_id" INTEGER REFERENCES hosted_projects (id), - "dev_server_project_id" INTEGER REFERENCES dev_server_projects(id) + "unregistered" BOOLEAN NOT NULL DEFAULT FALSE ); CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id"); @@ -399,30 +397,6 @@ CREATE TABLE rate_buckets ( ); CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); -CREATE TABLE hosted_projects ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - channel_id INTEGER NOT NULL REFERENCES channels(id), - name TEXT NOT NULL, - visibility TEXT NOT NULL, - deleted_at TIMESTAMP NULL -); -CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id); -CREATE UNIQUE 
INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL); - -CREATE TABLE dev_servers ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL REFERENCES users(id), - name TEXT NOT NULL, - ssh_connection_string TEXT, - hashed_token TEXT NOT NULL -); - -CREATE TABLE dev_server_projects ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id), - paths TEXT NOT NULL -); - CREATE TABLE IF NOT EXISTS billing_preferences ( id INTEGER PRIMARY KEY AUTOINCREMENT, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, diff --git a/crates/collab/migrations/20241023201725_remove_dev_servers.sql b/crates/collab/migrations/20241023201725_remove_dev_servers.sql new file mode 100644 index 0000000000..c5da673a29 --- /dev/null +++ b/crates/collab/migrations/20241023201725_remove_dev_servers.sql @@ -0,0 +1,6 @@ +ALTER TABLE projects DROP COLUMN dev_server_project_id; +ALTER TABLE projects DROP COLUMN hosted_project_id; + +DROP TABLE hosted_projects; +DROP TABLE dev_server_projects; +DROP TABLE dev_servers; diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 9ea42dd9bf..7ff8aa7a9f 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -750,49 +750,6 @@ impl Database { Ok((project, replica_id as ReplicaId)) } - pub async fn leave_hosted_project( - &self, - project_id: ProjectId, - connection: ConnectionId, - ) -> Result { - self.transaction(|tx| async move { - let result = project_collaborator::Entity::delete_many() - .filter( - Condition::all() - .add(project_collaborator::Column::ProjectId.eq(project_id)) - .add(project_collaborator::Column::ConnectionId.eq(connection.id as i32)) - .add( - project_collaborator::Column::ConnectionServerId - .eq(connection.owner_id as i32), - ), - ) - .exec(&*tx) - .await?; - if result.rows_affected == 0 { - return Err(anyhow!("not in the 
project"))?; - } - - let project = project::Entity::find_by_id(project_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no such project"))?; - let collaborators = project - .find_related(project_collaborator::Entity) - .all(&*tx) - .await?; - let connection_ids = collaborators - .into_iter() - .map(|collaborator| collaborator.connection()) - .collect(); - Ok(LeftProject { - id: project.id, - connection_ids, - should_unshare: false, - }) - }) - .await - } - /// Removes the given connection from the specified project. pub async fn leave_project( &self, From 60be47d115b0977a2ea71d1d1f1dd22c5c401cb7 Mon Sep 17 00:00:00 2001 From: Jen Stehlik Date: Wed, 30 Oct 2024 20:29:32 +0100 Subject: [PATCH 86/87] Update Gleam icon (#19978) Improves upon: https://github.com/zed-industries/zed/pull/19887 Implements the feedback by @PixelJanitor to make the icon follow the design guidelines. Release Notes: - Improved Gleam icon --- assets/icons/file_icons/gleam.svg | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/assets/icons/file_icons/gleam.svg b/assets/icons/file_icons/gleam.svg index 9036ec15dc..6a3dc2c96f 100644 --- a/assets/icons/file_icons/gleam.svg +++ b/assets/icons/file_icons/gleam.svg @@ -1,6 +1,7 @@ - - - - - + + + + + + From 7ce131aaf8e471e77e16cafb028fc7d1fbc5be9c Mon Sep 17 00:00:00 2001 From: Kyle Kelley Date: Wed, 30 Oct 2024 12:32:17 -0700 Subject: [PATCH 87/87] Trim whitespace from base64 encoded image data before decoding it (#19977) Closes #17956 Closes #16330 This fix is for both REPL (released) and notebook (unreleased) image Release Notes: - Fixed image support in REPL for certain versions of matplotlib that included preceding and/or trailing whitespace in the base64 image data --- crates/repl/src/outputs/image.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/repl/src/outputs/image.rs b/crates/repl/src/outputs/image.rs index 15881aa915..648f4aa82c 100644 --- a/crates/repl/src/outputs/image.rs +++ 
b/crates/repl/src/outputs/image.rs @@ -16,7 +16,7 @@ pub struct ImageView { impl ImageView { pub fn from(base64_encoded_data: &str) -> Result { - let bytes = BASE64_STANDARD.decode(base64_encoded_data)?; + let bytes = BASE64_STANDARD.decode(base64_encoded_data.trim())?; let format = image::guess_format(&bytes)?; let mut data = image::load_from_memory_with_format(&bytes, format)?.into_rgba8();