diff --git a/.cargo/config.toml b/.cargo/config.toml index a657ae61b9..043adf6b30 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -13,6 +13,12 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"] linker = "clang" rustflags = ["-C", "link-arg=-fuse-ld=mold"] +[target.aarch64-apple-darwin] +rustflags = ["-C", "link-args=-Objc -all_load"] + +[target.x86_64-apple-darwin] +rustflags = ["-C", "link-args=-Objc -all_load"] + # This cfg will reduce the size of `windows::core::Error` from 16 bytes to 4 bytes [target.'cfg(target_os = "windows")'] rustflags = ["--cfg", "windows_slim_errors"] diff --git a/.cloudflare/docs-proxy/src/worker.js b/.cloudflare/docs-proxy/src/worker.js index b29ddc00f1..f9f441883a 100644 --- a/.cloudflare/docs-proxy/src/worker.js +++ b/.cloudflare/docs-proxy/src/worker.js @@ -3,15 +3,6 @@ export default { const url = new URL(request.url); url.hostname = "docs-anw.pages.dev"; - // These pages were removed, but may still be served due to Cloudflare's - // [asset retention](https://developers.cloudflare.com/pages/configuration/serving-pages/#asset-retention). - if ( - url.pathname === "/docs/assistant/context-servers" || - url.pathname === "/docs/assistant/model-context-protocol" - ) { - return await fetch("https://zed.dev/404"); - } - let res = await fetch(url, request); if (res.status === 404) { diff --git a/.github/ISSUE_TEMPLATE/0_feature_request.yml b/.github/ISSUE_TEMPLATE/0_feature_request.yml index 7aa135df35..900df46e7a 100644 --- a/.github/ISSUE_TEMPLATE/0_feature_request.yml +++ b/.github/ISSUE_TEMPLATE/0_feature_request.yml @@ -18,8 +18,11 @@ body: - type: textarea id: environment attributes: - label: Environment - description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below. If you are unable to run the command, please include your Zed version and release channel, operating system and version, RAM amount, and architecture. 
+ label: Zed Version and System Specs + description: Zed version, release channel, architecture (x86_64 or aarch64), OS (macOS version / Linux distro and version) and RAM amount. + placeholder: | + + validations: required: true - type: textarea diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yml b/.github/ISSUE_TEMPLATE/1_bug_report.yml index 9d8fedb74a..3a26baa739 100644 --- a/.github/ISSUE_TEMPLATE/1_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/1_bug_report.yml @@ -20,14 +20,19 @@ body: - type: textarea id: environment attributes: - label: Environment - description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below. If you are unable to run the command, please include your Zed version and release channel, operating system and version, RAM amount, and architecture. + label: Zed Version and System Specs + description: Zed version, release channel, architecture (x86_64 or aarch64), OS (macOS version / Linux distro and version) and RAM amount. 
+ placeholder: | + + + + --]----<-- + cur_s > cur_e < + > < + new_s>>>>>>>>< + */ + ( + Ordering::Greater, + Ordering::Greater, + ) => { + if current_excerpt_range + .context + .end + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ) + .is_ge() + { + let expand_down = new_hunk + .buffer_range + .end + .to_point(&buffer_snapshot) + .row + .saturating_sub( + current_excerpt_range + .context + .end + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_down.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Down)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } else { + latest_excerpt_id = + *current_excerpt_id; + let _ = + current_excerpts.next(); + } + } + } + } + None => { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by( + |(probe, ..)| { + compare_paths( + ( + new_path.path.as_ref(), + true, + ), + (probe.path.as_ref(), true), + ) + }, + ) { + Ok(i) => hunks[i].2.extend( + new_changes.hunks.iter().map( + |hunk| { + hunk.buffer_range + .clone() + }, + ), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| { + hunk.buffer_range + .clone() + }) + .collect(), + ), + ), + } + continue 'new_hunks; + } + } + } + } + + for (excerpt_id, excerpt_range) in current_excerpts { + if !excerpts_with_new_changes.contains(&excerpt_id) + && !new_hunks_unchanged.iter().any(|hunk| { + excerpt_range + .context + .start + .cmp( + &hunk.buffer_range.end, + &buffer_snapshot, + ) + .is_le() + && excerpt_range + .context + .end + .cmp( + &hunk.buffer_range.start, + &buffer_snapshot, + ) + .is_ge() + }) + { + excerpts_to_remove.push(excerpt_id); + } + latest_excerpt_id = excerpt_id; + } + } + None => excerpts_to_remove.extend( + current_excerpts.map(|(excerpt_id, _)| excerpt_id), + ), + } + let _ = 
new_order_entries.next(); + break; + } + } + } + None => { + excerpts_to_remove + .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); + break; + } + } + } + latest_excerpt_id = last_current_excerpt_id.unwrap_or(latest_excerpt_id); + } + + for (path, project_entry_id) in new_order_entries { + if let Some(changes) = new_changes.get(project_entry_id) { + if !changes.hunks.is_empty() { + let hunks = new_excerpt_hunks.entry(latest_excerpt_id).or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths((path.path.as_ref(), true), (probe.path.as_ref(), true)) + }) { + Ok(i) => hunks[i] + .2 + .extend(changes.hunks.iter().map(|hunk| hunk.buffer_range.clone())), + Err(i) => hunks.insert( + i, + ( + path.clone(), + changes.buffer.clone(), + changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + } + } + + self.excerpts.update(cx, |multi_buffer, cx| { + for (mut after_excerpt_id, excerpts_to_add) in new_excerpt_hunks { + for (_, buffer, hunk_ranges) in excerpts_to_add { + let buffer_snapshot = buffer.read(cx).snapshot(); + let max_point = buffer_snapshot.max_point(); + let new_excerpts = multi_buffer.insert_excerpts_after( + after_excerpt_id, + buffer, + hunk_ranges.into_iter().map(|range| { + let mut extended_point_range = range.to_point(&buffer_snapshot); + extended_point_range.start.row = extended_point_range + .start + .row + .saturating_sub(DEFAULT_MULTIBUFFER_CONTEXT); + extended_point_range.end.row = (extended_point_range.end.row + + DEFAULT_MULTIBUFFER_CONTEXT) + .min(max_point.row); + ExcerptRange { + context: extended_point_range, + primary: None, + } + }), + cx, + ); + after_excerpt_id = new_excerpts.last().copied().unwrap_or(after_excerpt_id); + } + } + multi_buffer.remove_excerpts(excerpts_to_remove, cx); + for ((line_count, direction), excerpts) in excerpt_to_expand { + multi_buffer.expand_excerpts(excerpts, line_count, direction, cx); + } + }); + } else { + self.excerpts.update(cx, 
|multi_buffer, cx| { + for new_changes in new_entry_order + .iter() + .filter_map(|(_, entry_id)| new_changes.get(entry_id)) + { + multi_buffer.push_excerpts_with_context_lines( + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + } + }); + }; + + let mut new_changes = new_changes; + let mut new_entry_order = new_entry_order; + std::mem::swap( + self.buffer_changes.entry(worktree_id).or_default(), + &mut new_changes, + ); + std::mem::swap( + self.entry_order.entry(worktree_id).or_default(), + &mut new_entry_order, + ); + } +} + +impl EventEmitter for ProjectDiffEditor {} + +impl FocusableView for ProjectDiffEditor { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Item for ProjectDiffEditor { + type Event = EditorEvent; + + fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { + Editor::to_item_events(event, f) + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor.update(cx, |editor, cx| editor.deactivated(cx)); + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, cx)) + } + + fn tab_tooltip_text(&self, _: &AppContext) -> Option { + Some("Project Diff".into()) + } + + fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement { + if self.buffer_changes.is_empty() { + Label::new("No changes") + .color(if params.selected { + Color::Default + } else { + Color::Muted + }) + .into_any_element() + } else { + h_flex() + .gap_1() + .when(true, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child(Label::new(self.buffer_changes.len().to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .when(true, |then| { + then.child( + h_flex() + .gap_1() + 
.child(Icon::new(IconName::Indicator).color(Color::Warning)) + .child(Label::new(self.buffer_changes.len().to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .into_any_element() + } + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + Some("project diagnostics") + } + + fn for_each_project_item( + &self, + cx: &AppContext, + f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), + ) { + self.editor.for_each_project_item(cx, f) + } + + fn is_singleton(&self, _: &AppContext) -> bool { + false + } + + fn set_nav_history(&mut self, nav_history: ItemNavHistory, cx: &mut ViewContext) { + self.editor.update(cx, |editor, _| { + editor.set_nav_history(Some(nav_history)); + }); + } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(|cx| { + ProjectDiffEditor::new(self.project.clone(), self.workspace.clone(), cx) + })) + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).is_dirty(cx) + } + + fn has_conflict(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).has_conflict(cx) + } + + fn can_save(&self, _: &AppContext) -> bool { + true + } + + fn save( + &mut self, + format: bool, + project: Model, + cx: &mut ViewContext, + ) -> Task> { + self.editor.save(format, project, cx) + } + + fn save_as( + &mut self, + _: Model, + _: ProjectPath, + _: &mut ViewContext, + ) -> Task> { + unreachable!() + } + + fn reload( + &mut self, + project: Model, + cx: &mut ViewContext, + ) -> Task> { + self.editor.reload(project, cx) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a View, + _: &'a AppContext, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } + + fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation { + 
ToolbarItemLocation::PrimaryLeft + } + + fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option> { + self.editor.breadcrumbs(theme, cx) + } + + fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + } +} + +impl Render for ProjectDiffEditor { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let child = if self.buffer_changes.is_empty() { + div() + .bg(cx.theme().colors().editor_background) + .flex() + .items_center() + .justify_center() + .size_full() + .child(Label::new("No changes in the workspace")) + } else { + div().size_full().child(self.editor.clone()) + }; + + div() + .track_focus(&self.focus_handle) + .size_full() + .child(child) + } +} + +#[cfg(test)] +mod tests { + use gpui::{SemanticVersion, TestAppContext, VisualTestContext}; + use project::buffer_store::BufferChangeSet; + use serde_json::json; + use settings::SettingsStore; + use std::{ + ops::Deref as _, + path::{Path, PathBuf}, + }; + + use super::*; + + // TODO finish + // #[gpui::test] + // async fn randomized_tests(cx: &mut TestAppContext) { + // // Create a new project (how?? 
temp fs?), + // let fs = FakeFs::new(cx.executor()); + // let project = Project::test(fs, [], cx).await; + + // // create random files with random content + + // // Commit it into git somehow (technically can do with "real" fs in a temp dir) + // // + // // Apply randomized changes to the project: select a random file, random change and apply to buffers + // } + + #[gpui::test(iterations = 30)] + async fn simple_edit_test(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + init_test(cx); + + let fs = fs::FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + ".git": {}, + "file_a": "This is file_a", + "file_b": "This is file_b", + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/root")], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + + let file_a_editor = workspace + .update(cx, |workspace, cx| { + let file_a_editor = + workspace.open_abs_path(PathBuf::from("/root/file_a"), true, cx); + ProjectDiffEditor::deploy(workspace, &Deploy, cx); + file_a_editor + }) + .unwrap() + .await + .expect("did not open an item at all") + .downcast::() + .expect("did not open an editor for file_a"); + let project_diff_editor = workspace + .update(cx, |workspace, cx| { + workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()) + }) + .unwrap() + .expect("did not find a ProjectDiffEditor"); + project_diff_editor.update(cx, |project_diff_editor, cx| { + assert!( + project_diff_editor.editor.read(cx).text(cx).is_empty(), + "Should have no changes after opening the diff on no git changes" + ); + }); + + let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx)); + let change = "an edit after git add"; + file_a_editor + .update(cx, |file_a_editor, cx| { + file_a_editor.insert(change, cx); + file_a_editor.save(false, project.clone(), cx) + }) + .await + .expect("failed to save 
a file"); + file_a_editor.update(cx, |file_a_editor, cx| { + let change_set = cx.new_model(|cx| { + BufferChangeSet::new_with_base_text( + old_text.clone(), + file_a_editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .read(cx) + .text_snapshot(), + cx, + ) + }); + file_a_editor + .diff_map + .add_change_set(change_set.clone(), cx); + project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.set_change_set( + file_a_editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .read(cx) + .remote_id(), + change_set, + ); + }); + }); + }); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/.git"), + &[(Path::new("file_a"), GitFileStatus::Modified)], + ); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + + project_diff_editor.update(cx, |project_diff_editor, cx| { + assert_eq!( + // TODO assert it better: extract added text (based on the background changes) and deleted text (based on the deleted blocks added) + project_diff_editor.editor.read(cx).text(cx), + format!("{change}{old_text}"), + "Should have a new change shown in the beginning, and the old text shown as deleted text afterwards" + ); + }); + } + + fn init_test(cx: &mut gpui::TestAppContext) { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } + + cx.update(|cx| { + assets::Assets.load_test_fonts(cx); + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + release_channel::init(SemanticVersion::default(), cx); + client::init_settings(cx); + language::init(cx); + Project::init_settings(cx); + workspace::init_settings(cx); + crate::init(cx); + cx.set_staff(true); + }); + } +} diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 31be9e93a9..9d51eb7175 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -1,8 +1,9 @@ 
use crate::{ + editor_settings::MultiCursorModifier, hover_popover::{self, InlayHover}, scroll::ScrollAmount, - Anchor, Editor, EditorSnapshot, FindAllReferences, GoToDefinition, GoToTypeDefinition, - GotoDefinitionKind, InlayId, Navigated, PointForPosition, SelectPhase, + Anchor, Editor, EditorSettings, EditorSnapshot, FindAllReferences, GoToDefinition, + GoToTypeDefinition, GotoDefinitionKind, InlayId, Navigated, PointForPosition, SelectPhase, }; use gpui::{px, AppContext, AsyncWindowContext, Model, Modifiers, Task, ViewContext}; use language::{Bias, ToOffset}; @@ -12,6 +13,7 @@ use project::{ HoverBlock, HoverBlockKind, InlayHintLabelPartTooltip, InlayHintTooltip, LocationLink, Project, ResolveState, ResolvedPath, }; +use settings::Settings; use std::ops::Range; use theme::ActiveTheme as _; use util::{maybe, ResultExt, TryFutureExt as _}; @@ -117,7 +119,12 @@ impl Editor { modifiers: Modifiers, cx: &mut ViewContext, ) { - if !modifiers.secondary() || self.has_pending_selection() { + let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier; + let hovered_link_modifier = match multi_cursor_setting { + MultiCursorModifier::Alt => modifiers.secondary(), + MultiCursorModifier::CmdOrCtrl => modifiers.alt, + }; + if !hovered_link_modifier || self.has_pending_selection() { self.hide_hovered_link(cx); return; } @@ -137,7 +144,7 @@ impl Editor { snapshot, point_for_position, self, - modifiers.secondary(), + hovered_link_modifier, modifiers.shift, cx, ); @@ -259,7 +266,7 @@ pub fn update_inlay_link_and_hover_points( editor: &mut Editor, secondary_held: bool, shift_held: bool, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) { let hovered_offset = if point_for_position.column_overshoot_after_line_end == 0 { Some(snapshot.display_point_to_inlay_offset(point_for_position.exact_unclipped, Bias::Left)) @@ -687,6 +694,65 @@ pub(crate) fn find_url( None } +pub(crate) fn find_url_from_range( + buffer: &Model, + range: Range, + mut cx: 
AsyncWindowContext, +) -> Option { + const LIMIT: usize = 2048; + + let Ok(snapshot) = buffer.update(&mut cx, |buffer, _| buffer.snapshot()) else { + return None; + }; + + let start_offset = range.start.to_offset(&snapshot); + let end_offset = range.end.to_offset(&snapshot); + + let mut token_start = start_offset.min(end_offset); + let mut token_end = start_offset.max(end_offset); + + let range_len = token_end - token_start; + + if range_len >= LIMIT { + return None; + } + + // Skip leading whitespace + for ch in snapshot.chars_at(token_start).take(range_len) { + if !ch.is_whitespace() { + break; + } + token_start += ch.len_utf8(); + } + + // Skip trailing whitespace + for ch in snapshot.reversed_chars_at(token_end).take(range_len) { + if !ch.is_whitespace() { + break; + } + token_end -= ch.len_utf8(); + } + + if token_start >= token_end { + return None; + } + + let text = snapshot + .text_for_range(token_start..token_end) + .collect::(); + + let mut finder = LinkFinder::new(); + finder.kinds(&[LinkKind::Url]); + + if let Some(link) = finder.links(&text).next() { + if link.start() == 0 && link.end() == text.len() { + return Some(link.as_str().to_string()); + } + } + + None +} + pub(crate) async fn find_file( buffer: &Model, project: Option>, diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 006a42700b..439d15ec6a 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -3,7 +3,7 @@ use crate::{ hover_links::{InlayHighlight, RangeInEditor}, scroll::ScrollAmount, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot, - Hover, RangeToAnchorExt, + Hover, }; use gpui::{ div, px, AnyElement, AsyncWindowContext, FontWeight, Hsla, InteractiveElement, IntoElement, @@ -11,11 +11,11 @@ use gpui::{ StyleRefinement, Styled, Task, TextStyleRefinement, View, ViewContext, }; use itertools::Itertools; -use language::{Diagnostic, DiagnosticEntry, Language, 
LanguageRegistry}; +use language::{DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{Markdown, MarkdownStyle}; use multi_buffer::ToOffset; -use project::{HoverBlock, InlayHintLabelPart}; +use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart}; use settings::Settings; use std::rc::Rc; use std::{borrow::Cow, cell::RefCell}; @@ -23,7 +23,6 @@ use std::{ops::Range, sync::Arc, time::Duration}; use theme::ThemeSettings; use ui::{prelude::*, window_is_transparent, Scrollbar, ScrollbarState}; use util::TryFutureExt; -pub const HOVER_DELAY_MILLIS: u64 = 350; pub const HOVER_REQUEST_DELAY_MILLIS: u64 = 200; pub const MIN_POPOVER_CHARACTER_WIDTH: f32 = 20.; @@ -131,10 +130,12 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie hide_hover(editor, cx); } + let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay; + let task = cx.spawn(|this, mut cx| { async move { cx.background_executor() - .timer(Duration::from_millis(HOVER_DELAY_MILLIS)) + .timer(Duration::from_millis(hover_popover_delay)) .await; this.update(&mut cx, |this, _| { this.hover_state.diagnostic_popover = None; @@ -236,6 +237,8 @@ fn show_hover( } } + let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay; + let task = cx.spawn(|this, mut cx| { async move { // If we need to delay, delay a set amount initially before making the lsp request @@ -245,7 +248,7 @@ fn show_hover( // Construct delay task to wait for later let total_delay = Some( cx.background_executor() - .timer(Duration::from_millis(HOVER_DELAY_MILLIS)), + .timer(Duration::from_millis(hover_popover_delay)), ); cx.background_executor() @@ -260,64 +263,15 @@ fn show_hover( delay.await; } - // If there's a diagnostic, assign it on the hover state and notify - let mut local_diagnostic = snapshot + let local_diagnostic = snapshot .buffer_snapshot - .diagnostics_in_range::<_, usize>(anchor..anchor, false) + 
.diagnostics_in_range(anchor..anchor, false) // Find the entry with the most specific range - .min_by_key(|entry| entry.range.end - entry.range.start) - .map(|entry| DiagnosticEntry { - diagnostic: entry.diagnostic, - range: entry.range.to_anchors(&snapshot.buffer_snapshot), + .min_by_key(|entry| { + let range = entry.range.to_offset(&snapshot.buffer_snapshot); + range.end - range.start }); - // Pull the primary diagnostic out so we can jump to it if the popover is clicked - let primary_diagnostic = local_diagnostic.as_ref().and_then(|local_diagnostic| { - snapshot - .buffer_snapshot - .diagnostic_group::(local_diagnostic.diagnostic.group_id) - .find(|diagnostic| diagnostic.diagnostic.is_primary) - .map(|entry| DiagnosticEntry { - diagnostic: entry.diagnostic, - range: entry.range.to_anchors(&snapshot.buffer_snapshot), - }) - }); - if let Some(invisible) = snapshot - .buffer_snapshot - .chars_at(anchor) - .next() - .filter(|&c| is_invisible(c)) - { - let after = snapshot.buffer_snapshot.anchor_after( - anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(), - ); - local_diagnostic = Some(DiagnosticEntry { - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: format!("Unicode character U+{:02X}", invisible as u32), - ..Default::default() - }, - range: anchor..after, - }) - } else if let Some(invisible) = snapshot - .buffer_snapshot - .reversed_chars_at(anchor) - .next() - .filter(|&c| is_invisible(c)) - { - let before = snapshot.buffer_snapshot.anchor_before( - anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(), - ); - local_diagnostic = Some(DiagnosticEntry { - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: format!("Unicode character U+{:02X}", invisible as u32), - ..Default::default() - }, - range: before..anchor, - }) - } - let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic { let text = match local_diagnostic.diagnostic.source { Some(ref source) => { @@ -359,6 
+313,7 @@ fn show_hover( let mut base_text_style = cx.text_style(); base_text_style.refine(&TextStyleRefinement { font_family: Some(settings.ui_font.family.clone()), + font_fallbacks: settings.ui_font.fallbacks.clone(), font_size: Some(settings.ui_font_size.into()), color: Some(cx.theme().colors().editor_foreground), background_color: Some(gpui::transparent_black()), @@ -378,13 +333,12 @@ fn show_hover( }, ..Default::default() }; - Markdown::new_text(text, markdown_style.clone(), None, cx, None) + Markdown::new_text(text, markdown_style.clone(), None, None, cx) }) .ok(); Some(DiagnosticPopover { local_diagnostic, - primary_diagnostic, parsed_content, border_color, background_color, @@ -399,6 +353,31 @@ fn show_hover( this.hover_state.diagnostic_popover = diagnostic_popover; })?; + let invisible_char = if let Some(invisible) = snapshot + .buffer_snapshot + .chars_at(anchor) + .next() + .filter(|&c| is_invisible(c)) + { + let after = snapshot.buffer_snapshot.anchor_after( + anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(), + ); + Some((invisible, anchor..after)) + } else if let Some(invisible) = snapshot + .buffer_snapshot + .reversed_chars_at(anchor) + .next() + .filter(|&c| is_invisible(c)) + { + let before = snapshot.buffer_snapshot.anchor_before( + anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(), + ); + + Some((invisible, before..anchor)) + } else { + None + }; + let hovers_response = if let Some(hover_request) = hover_request { hover_request.await } else { @@ -406,8 +385,26 @@ fn show_hover( }; let snapshot = this.update(&mut cx, |this, cx| this.snapshot(cx))?; let mut hover_highlights = Vec::with_capacity(hovers_response.len()); - let mut info_popovers = Vec::with_capacity(hovers_response.len()); - let mut info_popover_tasks = Vec::with_capacity(hovers_response.len()); + let mut info_popovers = Vec::with_capacity( + hovers_response.len() + if invisible_char.is_some() { 1 } else { 0 }, + ); + + if let Some((invisible, range)) 
= invisible_char { + let blocks = vec![HoverBlock { + text: format!("Unicode character U+{:02X}", invisible as u32), + kind: HoverBlockKind::PlainText, + }]; + let parsed_content = parse_blocks(&blocks, &language_registry, None, &mut cx).await; + let scroll_handle = ScrollHandle::new(); + info_popovers.push(InfoPopover { + symbol_range: RangeInEditor::Text(range), + parsed_content, + scrollbar_state: ScrollbarState::new(scroll_handle.clone()), + scroll_handle, + keyboard_grace: Rc::new(RefCell::new(ignore_timeout)), + anchor: Some(anchor), + }) + } for hover_result in hovers_response { // Create symbol range of anchors for highlighting and filtering of future requests. @@ -420,12 +417,11 @@ fn show_hover( let end = snapshot .buffer_snapshot .anchor_in_excerpt(excerpt_id, range.end)?; - Some(start..end) }) .or_else(|| { let snapshot = &snapshot.buffer_snapshot; - let offset_range = snapshot.range_for_syntax_ancestor(anchor..anchor)?; + let offset_range = snapshot.syntax_ancestor(anchor..anchor)?.1; Some( snapshot.anchor_before(offset_range.start) ..snapshot.anchor_after(offset_range.end), @@ -438,21 +434,15 @@ fn show_hover( let parsed_content = parse_blocks(&blocks, &language_registry, language, &mut cx).await; let scroll_handle = ScrollHandle::new(); - info_popover_tasks.push(( - range.clone(), - InfoPopover { - symbol_range: RangeInEditor::Text(range), - parsed_content, - scrollbar_state: ScrollbarState::new(scroll_handle.clone()), - scroll_handle, - keyboard_grace: Rc::new(RefCell::new(ignore_timeout)), - anchor: Some(anchor), - }, - )); - } - for (highlight_range, info_popover) in info_popover_tasks { - hover_highlights.push(highlight_range); - info_popovers.push(info_popover); + hover_highlights.push(range.clone()); + info_popovers.push(InfoPopover { + symbol_range: RangeInEditor::Text(range), + parsed_content, + scrollbar_state: ScrollbarState::new(scroll_handle.clone()), + scroll_handle, + keyboard_grace: Rc::new(RefCell::new(ignore_timeout)), + anchor: 
Some(anchor), + }); } this.update(&mut cx, |editor, cx| { @@ -547,11 +537,14 @@ async fn parse_blocks( .new_view(|cx| { let settings = ThemeSettings::get_global(cx); let ui_font_family = settings.ui_font.family.clone(); + let ui_font_fallbacks = settings.ui_font.fallbacks.clone(); let buffer_font_family = settings.buffer_font.family.clone(); + let buffer_font_fallbacks = settings.buffer_font.fallbacks.clone(); let mut base_text_style = cx.text_style(); base_text_style.refine(&TextStyleRefinement { font_family: Some(ui_font_family.clone()), + font_fallbacks: ui_font_fallbacks, color: Some(cx.theme().colors().editor_foreground), ..Default::default() }); @@ -562,6 +555,7 @@ async fn parse_blocks( inline_code: TextStyleRefinement { background_color: Some(cx.theme().colors().background), font_family: Some(buffer_font_family), + font_fallbacks: buffer_font_fallbacks, ..Default::default() }, rule_color: cx.theme().colors().border, @@ -593,8 +587,8 @@ async fn parse_blocks( combined_text, markdown_style.clone(), Some(language_registry.clone()), - cx, fallback_language_name, + cx, ) }) .ok(); @@ -738,6 +732,7 @@ impl InfoPopover { cx.notify(); self.scroll_handle.set_offset(current); } + fn render_vertical_scrollbar(&self, cx: &mut ViewContext) -> Stateful
{ div() .occlude() @@ -775,7 +770,6 @@ impl InfoPopover { #[derive(Debug, Clone)] pub struct DiagnosticPopover { local_diagnostic: DiagnosticEntry, - primary_diagnostic: Option>, parsed_content: Option>, border_color: Option, background_color: Option, @@ -829,13 +823,8 @@ impl DiagnosticPopover { diagnostic_div.into_any_element() } - pub fn activation_info(&self) -> (usize, Anchor) { - let entry = self - .primary_diagnostic - .as_ref() - .unwrap_or(&self.local_diagnostic); - - (entry.diagnostic.group_id, entry.range.start) + pub fn group_id(&self) -> usize { + self.local_diagnostic.diagnostic.group_id } } @@ -851,6 +840,7 @@ mod tests { InlayId, PointForPosition, }; use collections::BTreeSet; + use gpui::AppContext; use indoc::indoc; use language::{language_settings::InlayHintSettings, Diagnostic, DiagnosticSet}; use lsp::LanguageServerId; @@ -860,6 +850,10 @@ mod tests { use std::sync::atomic::AtomicUsize; use text::Bias; + fn get_hover_popover_delay(cx: &gpui::TestAppContext) -> u64 { + cx.read(|cx: &AppContext| -> u64 { EditorSettings::get_global(cx).hover_popover_delay }) + } + impl InfoPopover { fn get_rendered_text(&self, cx: &gpui::AppContext) -> String { let mut rendered_text = String::new(); @@ -884,7 +878,6 @@ mod tests { cx: &mut gpui::TestAppContext, ) { init_test(cx, |_| {}); - const HOVER_DELAY_MILLIS: u64 = 350; let mut cx = EditorLspTestContext::new_rust( lsp::ServerCapabilities { @@ -958,7 +951,7 @@ mod tests { })) }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); requests.next().await; cx.editor(|editor, cx| { @@ -1037,7 +1030,7 @@ mod tests { hover_at(editor, Some(anchor), cx) }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); request.next().await; // verify that the information popover is no longer visible @@ 
-1091,7 +1084,7 @@ mod tests { })) }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); requests.next().await; cx.editor(|editor, cx| { @@ -1127,7 +1120,7 @@ mod tests { hover_at(editor, Some(anchor), cx) }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); request.next().await; cx.editor(|editor, _| { assert!(!editor.hover_state.visible()); @@ -1389,7 +1382,7 @@ mod tests { })) }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); cx.background_executor.run_until_parked(); cx.editor(|Editor { hover_state, .. }, _| { @@ -1677,7 +1670,7 @@ mod tests { ); }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); cx.background_executor.run_until_parked(); cx.update_editor(|editor, cx| { let hover_state = &editor.hover_state; @@ -1731,7 +1724,7 @@ mod tests { ); }); cx.background_executor - .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); cx.background_executor.run_until_parked(); cx.update_editor(|editor, cx| { let hover_state = &editor.hover_state; diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 27bb8ac557..85bd964699 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -1,12 +1,16 @@ -use collections::{hash_map, HashMap, HashSet}; +use collections::{HashMap, HashSet}; use git::diff::DiffHunkStatus; -use gpui::{Action, AnchorCorner, AppContext, CursorStyle, Hsla, Model, MouseButton, Task, View}; +use gpui::{ + Action, AppContext, Corner, CursorStyle, 
Hsla, Model, MouseButton, Subscription, Task, View, +}; use language::{Buffer, BufferId, Point}; use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, - MultiBufferSnapshot, ToPoint, + MultiBufferSnapshot, ToOffset, ToPoint, }; +use project::buffer_store::BufferChangeSet; use std::{ops::Range, sync::Arc}; +use sum_tree::TreeMap; use text::OffsetRangeExt; use ui::{ prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement, @@ -29,10 +33,11 @@ pub(super) struct HoveredHunk { pub diff_base_byte_range: Range, } -#[derive(Debug, Default)] -pub(super) struct ExpandedHunks { +#[derive(Default)] +pub(super) struct DiffMap { pub(crate) hunks: Vec, - diff_base: HashMap, + pub(crate) diff_bases: HashMap, + pub(crate) snapshot: DiffMapSnapshot, hunk_update_tasks: HashMap, Task<()>>, expand_all: bool, } @@ -46,10 +51,13 @@ pub(super) struct ExpandedHunk { pub folded: bool, } -#[derive(Debug)] -struct DiffBaseBuffer { - buffer: Model, - diff_base_version: usize, +#[derive(Clone, Debug, Default)] +pub(crate) struct DiffMapSnapshot(TreeMap); + +pub(crate) struct DiffBaseState { + pub(crate) change_set: Model, + pub(crate) last_version: Option, + _subscription: Subscription, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -66,7 +74,38 @@ pub enum DisplayDiffHunk { }, } -impl ExpandedHunks { +impl DiffMap { + pub fn snapshot(&self) -> DiffMapSnapshot { + self.snapshot.clone() + } + + pub fn add_change_set( + &mut self, + change_set: Model, + cx: &mut ViewContext, + ) { + let buffer_id = change_set.read(cx).buffer_id; + self.snapshot + .0 + .insert(buffer_id, change_set.read(cx).diff_to_buffer.clone()); + self.diff_bases.insert( + buffer_id, + DiffBaseState { + last_version: None, + _subscription: cx.observe(&change_set, move |editor, change_set, cx| { + editor + .diff_map + .snapshot + .0 + .insert(buffer_id, change_set.read(cx).diff_to_buffer.clone()); + Editor::sync_expanded_diff_hunks(&mut 
editor.diff_map, buffer_id, cx); + }), + change_set, + }, + ); + Editor::sync_expanded_diff_hunks(self, buffer_id, cx); + } + pub fn hunks(&self, include_folded: bool) -> impl Iterator { self.hunks .iter() @@ -74,9 +113,92 @@ impl ExpandedHunks { } } +impl DiffMapSnapshot { + pub fn is_empty(&self) -> bool { + self.0.values().all(|diff| diff.is_empty()) + } + + pub fn diff_hunks<'a>( + &'a self, + buffer_snapshot: &'a MultiBufferSnapshot, + ) -> impl Iterator + 'a { + self.diff_hunks_in_range(0..buffer_snapshot.len(), buffer_snapshot) + } + + pub fn diff_hunks_in_range<'a, T: ToOffset>( + &'a self, + range: Range, + buffer_snapshot: &'a MultiBufferSnapshot, + ) -> impl Iterator + 'a { + let range = range.start.to_offset(buffer_snapshot)..range.end.to_offset(buffer_snapshot); + buffer_snapshot + .excerpts_for_range(range.clone()) + .filter_map(move |excerpt| { + let buffer = excerpt.buffer(); + let buffer_id = buffer.remote_id(); + let diff = self.0.get(&buffer_id)?; + let buffer_range = excerpt.map_range_to_buffer(range.clone()); + let buffer_range = + buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end); + Some( + diff.hunks_intersecting_range(buffer_range, excerpt.buffer()) + .map(move |hunk| { + let start = + excerpt.map_point_from_buffer(Point::new(hunk.row_range.start, 0)); + let end = + excerpt.map_point_from_buffer(Point::new(hunk.row_range.end, 0)); + MultiBufferDiffHunk { + row_range: MultiBufferRow(start.row)..MultiBufferRow(end.row), + buffer_id, + buffer_range: hunk.buffer_range.clone(), + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + } + }), + ) + }) + .flatten() + } + + pub fn diff_hunks_in_range_rev<'a, T: ToOffset>( + &'a self, + range: Range, + buffer_snapshot: &'a MultiBufferSnapshot, + ) -> impl Iterator + 'a { + let range = range.start.to_offset(buffer_snapshot)..range.end.to_offset(buffer_snapshot); + buffer_snapshot + .excerpts_for_range_rev(range.clone()) + .filter_map(move |excerpt| { + let buffer = 
excerpt.buffer(); + let buffer_id = buffer.remote_id(); + let diff = self.0.get(&buffer_id)?; + let buffer_range = excerpt.map_range_to_buffer(range.clone()); + let buffer_range = + buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end); + Some( + diff.hunks_intersecting_range_rev(buffer_range, excerpt.buffer()) + .map(move |hunk| { + let start_row = excerpt + .map_point_from_buffer(Point::new(hunk.row_range.start, 0)) + .row; + let end_row = excerpt + .map_point_from_buffer(Point::new(hunk.row_range.end, 0)) + .row; + MultiBufferDiffHunk { + row_range: MultiBufferRow(start_row)..MultiBufferRow(end_row), + buffer_id, + buffer_range: hunk.buffer_range.clone(), + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + } + }), + ) + }) + .flatten() + } +} + impl Editor { pub fn set_expand_all_diff_hunks(&mut self) { - self.expanded_hunks.expand_all = true; + self.diff_map.expand_all = true; } pub(super) fn toggle_hovered_hunk( @@ -92,18 +214,15 @@ impl Editor { } pub fn toggle_hunk_diff(&mut self, _: &ToggleHunkDiff, cx: &mut ViewContext) { - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let selections = self.selections.disjoint_anchors(); - self.toggle_hunks_expanded( - hunks_for_selections(&multi_buffer_snapshot, &selections), - cx, - ); + let snapshot = self.snapshot(cx); + let selections = self.selections.all(cx); + self.toggle_hunks_expanded(hunks_for_selections(&snapshot, &selections), cx); } pub fn expand_all_hunk_diffs(&mut self, _: &ExpandAllHunkDiffs, cx: &mut ViewContext) { let snapshot = self.snapshot(cx); let display_rows_with_expanded_hunks = self - .expanded_hunks + .diff_map .hunks(false) .map(|hunk| &hunk.hunk_range) .map(|anchor_range| { @@ -119,10 +238,10 @@ impl Editor { ) }) .collect::>(); - let hunks = snapshot - .display_snapshot - .buffer_snapshot - .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + let hunks = self + .diff_map + .snapshot + 
.diff_hunks(&snapshot.display_snapshot.buffer_snapshot) .filter(|hunk| { let hunk_display_row_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) @@ -140,11 +259,11 @@ impl Editor { hunks_to_toggle: Vec, cx: &mut ViewContext, ) { - if self.expanded_hunks.expand_all { + if self.diff_map.expand_all { return; } - let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); + let previous_toggle_task = self.diff_map.hunk_update_tasks.remove(&None); let new_toggle_task = cx.spawn(move |editor, mut cx| async move { if let Some(task) = previous_toggle_task { task.await; @@ -154,11 +273,10 @@ impl Editor { .update(&mut cx, |editor, cx| { let snapshot = editor.snapshot(cx); let mut hunks_to_toggle = hunks_to_toggle.into_iter().fuse().peekable(); - let mut highlights_to_remove = - Vec::with_capacity(editor.expanded_hunks.hunks.len()); + let mut highlights_to_remove = Vec::with_capacity(editor.diff_map.hunks.len()); let mut blocks_to_remove = HashSet::default(); let mut hunks_to_expand = Vec::new(); - editor.expanded_hunks.hunks.retain(|expanded_hunk| { + editor.diff_map.hunks.retain(|expanded_hunk| { if expanded_hunk.folded { return true; } @@ -238,7 +356,7 @@ impl Editor { .ok(); }); - self.expanded_hunks + self.diff_map .hunk_update_tasks .insert(None, cx.background_executor().spawn(new_toggle_task)); } @@ -247,35 +365,39 @@ impl Editor { &mut self, diff_base_buffer: Option>, hunk: &HoveredHunk, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) -> Option<()> { let buffer = self.buffer.clone(); let multi_buffer_snapshot = buffer.read(cx).snapshot(cx); let hunk_range = hunk.multi_buffer_range.clone(); - let (diff_base_buffer, deleted_text_lines) = buffer.update(cx, |buffer, cx| { - let buffer = buffer.buffer(hunk_range.start.buffer_id?)?; - let diff_base_buffer = diff_base_buffer - .or_else(|| self.current_diff_base_buffer(&buffer, cx)) - .or_else(|| create_diff_base_buffer(&buffer, cx))?; - let 
deleted_text_lines = buffer.read(cx).diff_base().map(|diff_base| { - let diff_start_row = diff_base - .offset_to_point(hunk.diff_base_byte_range.start) - .row; - let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row; - diff_end_row - diff_start_row - })?; - Some((diff_base_buffer, deleted_text_lines)) + let buffer_id = hunk_range.start.buffer_id?; + let diff_base_buffer = diff_base_buffer.or_else(|| { + self.diff_map + .diff_bases + .get(&buffer_id)? + .change_set + .read(cx) + .base_text + .clone() })?; - let block_insert_index = match self.expanded_hunks.hunks.binary_search_by(|probe| { - probe - .hunk_range - .start - .cmp(&hunk_range.start, &multi_buffer_snapshot) - }) { - Ok(_already_present) => return None, - Err(ix) => ix, - }; + let diff_base = diff_base_buffer.read(cx); + let diff_start_row = diff_base + .offset_to_point(hunk.diff_base_byte_range.start) + .row; + let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row; + let deleted_text_lines = diff_end_row - diff_start_row; + + let block_insert_index = self + .diff_map + .hunks + .binary_search_by(|probe| { + probe + .hunk_range + .start + .cmp(&hunk_range.start, &multi_buffer_snapshot) + }) + .err()?; let blocks; match hunk.status { @@ -315,7 +437,7 @@ impl Editor { ); } }; - self.expanded_hunks.hunks.insert( + self.diff_map.hunks.insert( block_insert_index, ExpandedHunk { blocks, @@ -332,18 +454,21 @@ impl Editor { fn apply_diff_hunks_in_range( &mut self, range: Range, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) -> Option<()> { - let (buffer, range, _) = self - .buffer - .read(cx) - .range_to_buffer_ranges(range, cx) + let multi_buffer = self.buffer.read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let (excerpt, range) = multi_buffer_snapshot + .range_to_buffer_ranges(range) .into_iter() .next()?; - buffer.update(cx, |branch_buffer, cx| { - branch_buffer.merge_into_base(vec![range], cx); - }); + multi_buffer + 
.buffer(excerpt.buffer_id()) + .unwrap() + .update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(vec![range], cx); + }); if let Some(project) = self.project.clone() { self.save(true, project, cx).detach_and_log_err(cx); @@ -374,8 +499,8 @@ impl Editor { _: &ApplyDiffHunk, cx: &mut ViewContext, ) { - let snapshot = self.buffer.read(cx).snapshot(cx); - let hunks = hunks_for_selections(&snapshot, &self.selections.disjoint_anchors()); + let snapshot = self.snapshot(cx); + let hunks = hunks_for_selections(&snapshot, &self.selections.all(cx)); let mut ranges_by_buffer = HashMap::default(); self.transact(cx, |editor, cx| { for hunk in hunks { @@ -399,17 +524,23 @@ impl Editor { } } + fn has_multiple_hunks(&self, cx: &AppContext) -> bool { + let snapshot = self.buffer.read(cx).snapshot(cx); + let mut hunks = self.diff_map.snapshot.diff_hunks(&snapshot); + hunks.nth(1).is_some() + } + fn hunk_header_block( &self, hunk: &HoveredHunk, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) -> BlockProperties { let is_branch_buffer = self .buffer .read(cx) .point_to_buffer_offset(hunk.multi_buffer_range.start, cx) .map_or(false, |(buffer, _, _)| { - buffer.read(cx).diff_base_buffer().is_some() + buffer.read(cx).base_buffer().is_some() }); let border_color = cx.theme().colors().border_variant; @@ -428,6 +559,7 @@ impl Editor { render: Arc::new({ let editor = cx.view().clone(); let hunk = hunk.clone(); + let has_multiple_hunks = self.has_multiple_hunks(cx); move |cx| { let hunk_controls_menu_handle = @@ -471,6 +603,7 @@ impl Editor { IconButton::new("next-hunk", IconName::ArrowDown) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) + .disabled(!has_multiple_hunks) .tooltip({ let focus_handle = editor.focus_handle(cx); move |cx| { @@ -499,6 +632,7 @@ impl Editor { IconButton::new("prev-hunk", IconName::ArrowUp) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) + .disabled(!has_multiple_hunks) .tooltip({ let focus_handle = 
editor.focus_handle(cx); move |cx| { @@ -543,29 +677,9 @@ impl Editor { let editor = editor.clone(); let hunk = hunk.clone(); move |_event, cx| { - let multi_buffer = - editor.read(cx).buffer().clone(); - let multi_buffer_snapshot = - multi_buffer.read(cx).snapshot(cx); - let mut revert_changes = HashMap::default(); - if let Some(hunk) = - crate::hunk_diff::to_diff_hunk( - &hunk, - &multi_buffer_snapshot, - ) - { - Editor::prepare_revert_change( - &mut revert_changes, - &multi_buffer, - &hunk, - cx, - ); - } - if !revert_changes.is_empty() { - editor.update(cx, |editor, cx| { - editor.revert(revert_changes, cx) - }); - } + editor.update(cx, |editor, cx| { + editor.revert_hunk(hunk.clone(), cx); + }); } }), ) @@ -614,7 +728,7 @@ impl Editor { .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) - .selected( + .toggle_state( hunk_controls_menu_handle .is_deployed(), ) @@ -631,7 +745,7 @@ impl Editor { }, ), ) - .anchor(AnchorCorner::TopRight) + .anchor(Corner::TopRight) .with_handle(hunk_controls_menu_handle) .menu(move |cx| { let focus = focus.clone(); @@ -690,7 +804,7 @@ impl Editor { hunk: &HoveredHunk, diff_base_buffer: Model, deleted_text_height: u32, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) -> BlockProperties { let gutter_color = match hunk.status { DiffHunkStatus::Added => unreachable!(), @@ -753,14 +867,14 @@ impl Editor { } } - pub(super) fn clear_expanded_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) -> bool { - if self.expanded_hunks.expand_all { + pub(super) fn clear_expanded_diff_hunks(&mut self, cx: &mut ViewContext) -> bool { + if self.diff_map.expand_all { return false; } - self.expanded_hunks.hunk_update_tasks.clear(); + self.diff_map.hunk_update_tasks.clear(); self.clear_row_highlights::(); let to_remove = self - .expanded_hunks + .diff_map .hunks .drain(..) 
.flat_map(|expanded_hunk| expanded_hunk.blocks.into_iter()) @@ -774,48 +888,39 @@ impl Editor { } pub(super) fn sync_expanded_diff_hunks( - &mut self, - buffer: Model, - cx: &mut ViewContext<'_, Self>, + diff_map: &mut DiffMap, + buffer_id: BufferId, + cx: &mut ViewContext, ) { - let buffer_id = buffer.read(cx).remote_id(); - let buffer_diff_base_version = buffer.read(cx).diff_base_version(); - self.expanded_hunks - .hunk_update_tasks - .remove(&Some(buffer_id)); - let diff_base_buffer = self.current_diff_base_buffer(&buffer, cx); + let diff_base_state = diff_map.diff_bases.get_mut(&buffer_id); + let mut diff_base_buffer = None; + let mut diff_base_buffer_unchanged = true; + if let Some(diff_base_state) = diff_base_state { + diff_base_state.change_set.update(cx, |change_set, _| { + if diff_base_state.last_version != Some(change_set.base_text_version) { + diff_base_state.last_version = Some(change_set.base_text_version); + diff_base_buffer_unchanged = false; + } + diff_base_buffer = change_set.base_text.clone(); + }) + } + + diff_map.hunk_update_tasks.remove(&Some(buffer_id)); + let new_sync_task = cx.spawn(move |editor, mut cx| async move { - let diff_base_buffer_unchanged = diff_base_buffer.is_some(); - let Ok(diff_base_buffer) = - cx.update(|cx| diff_base_buffer.or_else(|| create_diff_base_buffer(&buffer, cx))) - else { - return; - }; editor .update(&mut cx, |editor, cx| { - if let Some(diff_base_buffer) = &diff_base_buffer { - editor.expanded_hunks.diff_base.insert( - buffer_id, - DiffBaseBuffer { - buffer: diff_base_buffer.clone(), - diff_base_version: buffer_diff_base_version, - }, - ); - } - let snapshot = editor.snapshot(cx); let mut recalculated_hunks = snapshot - .buffer_snapshot - .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + .diff_map + .diff_hunks(&snapshot.buffer_snapshot) .filter(|hunk| hunk.buffer_id == buffer_id) .fuse() .peekable(); - let mut highlights_to_remove = - Vec::with_capacity(editor.expanded_hunks.hunks.len()); + 
let mut highlights_to_remove = Vec::with_capacity(editor.diff_map.hunks.len()); let mut blocks_to_remove = HashSet::default(); - let mut hunks_to_reexpand = - Vec::with_capacity(editor.expanded_hunks.hunks.len()); - editor.expanded_hunks.hunks.retain_mut(|expanded_hunk| { + let mut hunks_to_reexpand = Vec::with_capacity(editor.diff_map.hunks.len()); + editor.diff_map.hunks.retain_mut(|expanded_hunk| { if expanded_hunk.hunk_range.start.buffer_id != Some(buffer_id) { return true; }; @@ -865,7 +970,7 @@ impl Editor { > hunk_display_range.end { recalculated_hunks.next(); - if editor.expanded_hunks.expand_all { + if editor.diff_map.expand_all { hunks_to_reexpand.push(HoveredHunk { status, multi_buffer_range, @@ -908,7 +1013,7 @@ impl Editor { retain }); - if editor.expanded_hunks.expand_all { + if editor.diff_map.expand_all { for hunk in recalculated_hunks { match diff_hunk_to_display(&hunk, &snapshot) { DisplayDiffHunk::Folded { .. } => {} @@ -926,6 +1031,8 @@ impl Editor { } } } + } else { + drop(recalculated_hunks); } editor.remove_highlighted_rows::(highlights_to_remove, cx); @@ -940,32 +1047,12 @@ impl Editor { .ok(); }); - self.expanded_hunks.hunk_update_tasks.insert( + diff_map.hunk_update_tasks.insert( Some(buffer_id), cx.background_executor().spawn(new_sync_task), ); } - fn current_diff_base_buffer( - &mut self, - buffer: &Model, - cx: &mut AppContext, - ) -> Option> { - buffer.update(cx, |buffer, _| { - match self.expanded_hunks.diff_base.entry(buffer.remote_id()) { - hash_map::Entry::Occupied(o) => { - if o.get().diff_base_version != buffer.diff_base_version() { - o.remove(); - None - } else { - Some(o.get().buffer.clone()) - } - } - hash_map::Entry::Vacant(_) => None, - } - }) - } - fn go_to_subsequent_hunk(&mut self, position: Anchor, cx: &mut ViewContext) { let snapshot = self.snapshot(cx); let position = position.to_point(&snapshot.buffer_snapshot); @@ -1012,7 +1099,7 @@ impl Editor { } } -fn to_diff_hunk( +pub(crate) fn to_diff_hunk( hovered_hunk: 
&HoveredHunk, multi_buffer_snapshot: &MultiBufferSnapshot, ) -> Option { @@ -1034,24 +1121,6 @@ fn to_diff_hunk( }) } -fn create_diff_base_buffer(buffer: &Model, cx: &mut AppContext) -> Option> { - buffer - .update(cx, |buffer, _| { - let language = buffer.language().cloned(); - let diff_base = buffer.diff_base()?.clone(); - Some((buffer.line_ending(), diff_base, language)) - }) - .map(|(line_ending, diff_base, language)| { - cx.new_model(|cx| { - let buffer = Buffer::local_normalized(diff_base, line_ending, cx); - match language { - Some(language) => buffer.with_language(language, cx), - None => buffer, - } - }) - }) -} - fn added_hunk_color(cx: &AppContext) -> Hsla { let mut created_color = cx.theme().status().git().created; created_color.fade_out(0.7); @@ -1068,7 +1137,7 @@ fn editor_with_deleted_text( diff_base_buffer: Model, deleted_color: Hsla, hunk: &HoveredHunk, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) -> (u32, View) { let parent_editor = cx.view().downgrade(); let editor = cx.new_view(|cx| { @@ -1089,6 +1158,11 @@ fn editor_with_deleted_text( editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx); editor.set_show_wrap_guides(false, cx); editor.set_show_gutter(false, cx); + editor.set_show_line_numbers(false, cx); + editor.set_show_scrollbars(false, cx); + editor.set_show_runnables(false, cx); + editor.set_show_git_diff_gutter(false, cx); + editor.set_show_code_actions(false, cx); editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); @@ -1100,7 +1174,7 @@ fn editor_with_deleted_text( false, cx, ); - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); // + editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); editor ._subscriptions .extend([cx.on_blur(&editor.focus_handle, |editor, cx| { @@ -1109,51 +1183,27 @@ fn editor_with_deleted_text( }); })]); - let original_multi_buffer_range = 
hunk.multi_buffer_range.clone(); - let diff_base_range = hunk.diff_base_byte_range.clone(); editor .register_action::({ + let hunk = hunk.clone(); let parent_editor = parent_editor.clone(); move |_, cx| { parent_editor - .update(cx, |editor, cx| { - let Some((buffer, original_text)) = - editor.buffer().update(cx, |buffer, cx| { - let (_, buffer, _) = buffer.excerpt_containing( - original_multi_buffer_range.start, - cx, - )?; - let original_text = - buffer.read(cx).diff_base()?.slice(diff_base_range.clone()); - Some((buffer, Arc::from(original_text.to_string()))) - }) - else { - return; - }; - buffer.update(cx, |buffer, cx| { - buffer.edit( - Some(( - original_multi_buffer_range.start.text_anchor - ..original_multi_buffer_range.end.text_anchor, - original_text, - )), - None, - cx, - ) - }); - }) + .update(cx, |editor, cx| editor.revert_hunk(hunk.clone(), cx)) .ok(); } }) .detach(); - let hunk = hunk.clone(); editor - .register_action::(move |_, cx| { - parent_editor - .update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }) - .ok(); + .register_action::({ + let hunk = hunk.clone(); + move |_, cx| { + parent_editor + .update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }) + .ok(); + } }) .detach(); editor @@ -1263,78 +1313,57 @@ mod tests { let project = Project::test(fs, [], cx).await; // buffer has two modified hunks with two rows each - let buffer_1 = project.update(cx, |project, cx| { - project.create_local_buffer( - " - 1.zero - 1.ONE - 1.TWO - 1.three - 1.FOUR - 1.FIVE - 1.six - " - .unindent() - .as_str(), - None, - cx, - ) - }); - buffer_1.update(cx, |buffer, cx| { - buffer.set_diff_base( - Some( - " - 1.zero - 1.one - 1.two - 1.three - 1.four - 1.five - 1.six - " - .unindent(), - ), - cx, - ); - }); + let diff_base_1 = " + 1.zero + 1.one + 1.two + 1.three + 1.four + 1.five + 1.six + " + .unindent(); + + let text_1 = " + 1.zero + 1.ONE + 1.TWO + 1.three + 1.FOUR + 1.FIVE + 1.six + " + .unindent(); // buffer has a deletion hunk 
and an insertion hunk - let buffer_2 = project.update(cx, |project, cx| { - project.create_local_buffer( - " - 2.zero - 2.one - 2.two - 2.three - 2.four - 2.five - 2.six - " - .unindent() - .as_str(), - None, - cx, - ) - }); - buffer_2.update(cx, |buffer, cx| { - buffer.set_diff_base( - Some( - " - 2.zero - 2.one - 2.one-and-a-half - 2.two - 2.three - 2.four - 2.six - " - .unindent(), - ), - cx, - ); - }); + let diff_base_2 = " + 2.zero + 2.one + 2.one-and-a-half + 2.two + 2.three + 2.four + 2.six + " + .unindent(); - cx.background_executor.run_until_parked(); + let text_2 = " + 2.zero + 2.one + 2.two + 2.three + 2.four + 2.five + 2.six + " + .unindent(); + + let buffer_1 = project.update(cx, |project, cx| { + project.create_local_buffer(text_1.as_str(), None, cx) + }); + let buffer_2 = project.update(cx, |project, cx| { + project.create_local_buffer(text_2.as_str(), None, cx) + }); let multibuffer = cx.new_model(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); @@ -1383,10 +1412,30 @@ mod tests { multibuffer }); - let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); + let editor = cx.add_window(|cx| Editor::for_multibuffer(multibuffer, None, false, cx)); + editor + .update(cx, |editor, cx| { + for (buffer, diff_base) in [ + (buffer_1.clone(), diff_base_1), + (buffer_2.clone(), diff_base_2), + ] { + let change_set = cx.new_model(|cx| { + BufferChangeSet::new_with_base_text( + diff_base.to_string(), + buffer.read(cx).text_snapshot(), + cx, + ) + }); + editor.diff_map.add_change_set(change_set, cx) + } + }) + .unwrap(); + cx.background_executor.run_until_parked(); + + let snapshot = editor.update(cx, |editor, cx| editor.snapshot(cx)).unwrap(); assert_eq!( - snapshot.text(), + snapshot.buffer_snapshot.text(), " 1.zero 1.ONE @@ -1429,7 +1478,8 @@ mod tests { assert_eq!( snapshot - .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) + .diff_map + .diff_hunks_in_range(Point::zero()..Point::new(12, 0), &snapshot.buffer_snapshot) 
.map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), &expected, @@ -1437,7 +1487,11 @@ mod tests { assert_eq!( snapshot - .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) + .diff_map + .diff_hunks_in_range_rev( + Point::zero()..Point::new(12, 0), + &snapshot.buffer_snapshot + ) .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), expected diff --git a/crates/editor/src/indent_guides.rs b/crates/editor/src/indent_guides.rs index 815825b606..1284ba8156 100644 --- a/crates/editor/src/indent_guides.rs +++ b/crates/editor/src/indent_guides.rs @@ -56,6 +56,7 @@ impl Editor { } Some(indent_guides_in_range( + self, visible_buffer_range, self.should_show_indent_guides() == Some(true), snapshot, @@ -152,6 +153,7 @@ impl Editor { } pub fn indent_guides_in_range( + editor: &Editor, visible_buffer_range: Range, ignore_disabled_for_language: bool, snapshot: &DisplaySnapshot, @@ -169,15 +171,19 @@ pub fn indent_guides_in_range( .indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx) .into_iter() .filter(|indent_guide| { + if editor.buffer_folded(indent_guide.buffer_id, cx) { + return false; + } + let start = MultiBufferRow(indent_guide.multibuffer_row_range.start.0.saturating_sub(1)); // Filter out indent guides that are inside a fold + // All indent guides that are starting "offscreen" have a start value of the first visible row minus one + // Therefore checking if a line is folded at first visible row minus one causes the other indent guides that are not related to the fold to disappear as well let is_folded = snapshot.is_line_folded(start); let line_indent = snapshot.line_indent_for_buffer_row(start); - let contained_in_fold = line_indent.len(indent_guide.tab_size) <= indent_guide.indent_level(); - !(is_folded && contained_in_fold) }) .collect() diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 877f02eefe..739fb98226 100644 --- 
a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -36,6 +36,7 @@ pub struct InlayHintCache { allowed_hint_kinds: HashSet>, version: usize, pub(super) enabled: bool, + enabled_in_settings: bool, update_tasks: HashMap, refresh_task: Option>, invalidate_debounce: Option, @@ -268,6 +269,7 @@ impl InlayHintCache { Self { allowed_hint_kinds: inlay_hint_settings.enabled_inlay_hint_kinds(), enabled: inlay_hint_settings.enabled, + enabled_in_settings: inlay_hint_settings.enabled, hints: HashMap::default(), update_tasks: HashMap::default(), refresh_task: None, @@ -288,10 +290,21 @@ impl InlayHintCache { visible_hints: Vec, cx: &mut ViewContext, ) -> ControlFlow> { + let old_enabled = self.enabled; + // If the setting for inlay hints has changed, update `enabled`. This condition avoids inlay + // hint visibility changes when other settings change (such as theme). + // + // Another option might be to store whether the user has manually toggled inlay hint + // visibility, and prefer this. This could lead to confusion as it means inlay hint + // visibility would not change when updating the setting if they were ever toggled. 
+ if new_hint_settings.enabled != self.enabled_in_settings { + self.enabled = new_hint_settings.enabled; + }; + self.enabled_in_settings = new_hint_settings.enabled; self.invalidate_debounce = debounce_value(new_hint_settings.edit_debounce_ms); self.append_debounce = debounce_value(new_hint_settings.scroll_debounce_ms); let new_allowed_hint_kinds = new_hint_settings.enabled_inlay_hint_kinds(); - match (self.enabled, new_hint_settings.enabled) { + match (old_enabled, self.enabled) { (false, false) => { self.allowed_hint_kinds = new_allowed_hint_kinds; ControlFlow::Break(None) @@ -314,7 +327,6 @@ impl InlayHintCache { } } (true, false) => { - self.enabled = new_hint_settings.enabled; self.allowed_hint_kinds = new_allowed_hint_kinds; if self.hints.is_empty() { ControlFlow::Break(None) @@ -327,7 +339,6 @@ impl InlayHintCache { } } (false, true) => { - self.enabled = new_hint_settings.enabled; self.allowed_hint_kinds = new_allowed_hint_kinds; ControlFlow::Continue(()) } @@ -579,7 +590,7 @@ impl InlayHintCache { buffer_id: BufferId, excerpt_id: ExcerptId, id: InlayId, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) { if let Some(excerpt_hints) = self.hints.get(&excerpt_id) { let mut guard = excerpt_hints.write(); @@ -640,7 +651,7 @@ fn spawn_new_update_tasks( excerpts_to_query: HashMap, Global, Range)>, invalidate: InvalidationStrategy, update_cache_version: usize, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) { for (excerpt_id, (excerpt_buffer, new_task_buffer_version, excerpt_visible_range)) in excerpts_to_query @@ -797,7 +808,7 @@ fn new_update_task( query: ExcerptQuery, query_ranges: QueryRanges, excerpt_buffer: Model, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) -> Task<()> { cx.spawn(move |editor, mut cx| async move { let visible_range_update_results = future::join_all( @@ -1129,7 +1140,7 @@ fn apply_hint_update( invalidate: bool, buffer_snapshot: BufferSnapshot, multi_buffer_snapshot: MultiBufferSnapshot, - cx: &mut 
ViewContext<'_, Editor>, + cx: &mut ViewContext, ) { let cached_excerpt_hints = editor .inlay_hint_cache @@ -1254,27 +1265,17 @@ fn apply_hint_update( #[cfg(test)] pub mod tests { - use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering}; - - use crate::{ - scroll::{scroll_amount::ScrollAmount, Autoscroll}, - ExcerptRange, - }; + use crate::editor_tests::update_test_language_settings; + use crate::{scroll::Autoscroll, test::editor_lsp_test_context::rust_lang, ExcerptRange}; use futures::StreamExt; use gpui::{Context, SemanticVersion, TestAppContext, WindowHandle}; - use itertools::Itertools; - use language::{ - language_settings::AllLanguageSettingsContent, Capability, FakeLspAdapter, Language, - LanguageConfig, LanguageMatcher, - }; + use language::{language_settings::AllLanguageSettingsContent, Capability, FakeLspAdapter}; use lsp::FakeLanguageServer; - use parking_lot::Mutex; use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; - use text::{Point, ToPoint}; - - use crate::editor_tests::update_test_language_settings; + use std::sync::atomic::{AtomicBool, AtomicU32, Ordering}; + use text::Point; use super::*; @@ -1293,10 +1294,9 @@ pub mod tests { }) }); - let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; - let lsp_request_count = Arc::new(AtomicU32::new(0)); - fake_server - .handle_request::(move |params, _| { + let (_, editor, fake_server) = prepare_test_objects(cx, |fake_server, file_with_hints| { + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server.handle_request::(move |params, _| { let task_lsp_request_count = Arc::clone(&lsp_request_count); async move { assert_eq!( @@ -1328,9 +1328,9 @@ pub mod tests { Ok(Some(new_hints)) } - }) - .next() - .await; + }); + }) + .await; cx.executor().run_until_parked(); let mut edits_made = 1; @@ -1426,10 +1426,9 @@ pub mod tests { }) }); - let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; - let lsp_request_count 
= Arc::new(AtomicU32::new(0)); - fake_server - .handle_request::(move |params, _| { + let (_, editor, fake_server) = prepare_test_objects(cx, |fake_server, file_with_hints| { + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server.handle_request::(move |params, _| { let task_lsp_request_count = Arc::clone(&lsp_request_count); async move { assert_eq!( @@ -1449,9 +1448,9 @@ pub mod tests { data: None, }])) } - }) - .next() - .await; + }); + }) + .await; cx.executor().run_until_parked(); let mut edits_made = 1; @@ -1532,235 +1531,237 @@ pub mod tests { .unwrap(); } - #[gpui::test] - async fn test_no_hint_updates_for_unrelated_language_files(cx: &mut gpui::TestAppContext) { - init_test(cx, |settings| { - settings.defaults.inlay_hints = Some(InlayHintSettings { - enabled: true, - edit_debounce_ms: 0, - scroll_debounce_ms: 0, - show_type_hints: true, - show_parameter_hints: true, - show_other_hints: true, - show_background: false, - }) - }); + // #[gpui::test] + // async fn test_no_hint_updates_for_unrelated_language_files(cx: &mut gpui::TestAppContext) { + // init_test(cx, |settings| { + // settings.defaults.inlay_hints = Some(InlayHintSettings { + // enabled: true, + // edit_debounce_ms: 0, + // scroll_debounce_ms: 0, + // show_type_hints: true, + // show_parameter_hints: true, + // show_other_hints: true, + // show_background: false, + // }) + // }); - let fs = FakeFs::new(cx.background_executor.clone()); - fs.insert_tree( - "/a", - json!({ - "main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out", - "other.md": "Test md file with some text", - }), - ) - .await; + // let fs = FakeFs::new(cx.background_executor.clone()); + // fs.insert_tree( + // "/a", + // json!({ + // "main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out", + // "other.md": "Test md file with some text", + // }), + // ) + // .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + // let project = 
Project::test(fs, ["/a".as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let mut rs_fake_servers = None; - let mut md_fake_servers = None; - for (name, path_suffix) in [("Rust", "rs"), ("Markdown", "md")] { - language_registry.add(Arc::new(Language::new( - LanguageConfig { - name: name.into(), - matcher: LanguageMatcher { - path_suffixes: vec![path_suffix.to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ))); - let fake_servers = language_registry.register_fake_lsp( - name, - FakeLspAdapter { - name, - capabilities: lsp::ServerCapabilities { - inlay_hint_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - }, - ); - match name { - "Rust" => rs_fake_servers = Some(fake_servers), - "Markdown" => md_fake_servers = Some(fake_servers), - _ => unreachable!(), - } - } + // let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + // let mut rs_fake_servers = None; + // let mut md_fake_servers = None; + // for (name, path_suffix) in [("Rust", "rs"), ("Markdown", "md")] { + // language_registry.add(Arc::new(Language::new( + // LanguageConfig { + // name: name.into(), + // matcher: LanguageMatcher { + // path_suffixes: vec![path_suffix.to_string()], + // ..Default::default() + // }, + // ..Default::default() + // }, + // Some(tree_sitter_rust::LANGUAGE.into()), + // ))); + // let fake_servers = language_registry.register_fake_lsp( + // name, + // FakeLspAdapter { + // name, + // capabilities: lsp::ServerCapabilities { + // inlay_hint_provider: Some(lsp::OneOf::Left(true)), + // ..Default::default() + // }, + // ..Default::default() + // }, + // ); + // match name { + // "Rust" => rs_fake_servers = Some(fake_servers), + // "Markdown" => md_fake_servers = Some(fake_servers), + // _ => unreachable!(), + // } + // } - let rs_buffer = project - .update(cx, |project, cx| { - 
project.open_local_buffer("/a/main.rs", cx) - }) - .await - .unwrap(); - cx.executor().run_until_parked(); - cx.executor().start_waiting(); - let rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap(); - let rs_editor = - cx.add_window(|cx| Editor::for_buffer(rs_buffer, Some(project.clone()), cx)); - let rs_lsp_request_count = Arc::new(AtomicU32::new(0)); - rs_fake_server - .handle_request::(move |params, _| { - let task_lsp_request_count = Arc::clone(&rs_lsp_request_count); - async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), - ); - let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, i), - label: lsp::InlayHintLabel::String(i.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - } - }) - .next() - .await; - cx.executor().run_until_parked(); - rs_editor - .update(cx, |editor, cx| { - let expected_hints = vec!["0".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should get its first hints when opening the editor" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!( - editor.inlay_hint_cache().version, - 1, - "Rust editor update the cache version after every cache/view change" - ); - }) - .unwrap(); + // let rs_buffer = project + // .update(cx, |project, cx| { + // project.open_local_buffer("/a/main.rs", cx) + // }) + // .await + // .unwrap(); + // let rs_editor = + // cx.add_window(|cx| Editor::for_buffer(rs_buffer, Some(project.clone()), cx)); - cx.executor().run_until_parked(); - let md_buffer = project - .update(cx, |project, cx| { - project.open_local_buffer("/a/other.md", cx) - }) - .await - .unwrap(); - cx.executor().run_until_parked(); - cx.executor().start_waiting(); - let md_fake_server = md_fake_servers.unwrap().next().await.unwrap(); - let md_editor = 
cx.add_window(|cx| Editor::for_buffer(md_buffer, Some(project), cx)); - let md_lsp_request_count = Arc::new(AtomicU32::new(0)); - md_fake_server - .handle_request::(move |params, _| { - let task_lsp_request_count = Arc::clone(&md_lsp_request_count); - async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/a/other.md").unwrap(), - ); - let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, i), - label: lsp::InlayHintLabel::String(i.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - } - }) - .next() - .await; - cx.executor().run_until_parked(); - md_editor - .update(cx, |editor, cx| { - let expected_hints = vec!["0".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Markdown editor should have a separate version, repeating Rust editor rules" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!(editor.inlay_hint_cache().version, 1); - }) - .unwrap(); + // cx.executor().run_until_parked(); + // cx.executor().start_waiting(); + // let rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap(); + // let rs_lsp_request_count = Arc::new(AtomicU32::new(0)); + // rs_fake_server + // .handle_request::(move |params, _| { + // let task_lsp_request_count = Arc::clone(&rs_lsp_request_count); + // async move { + // assert_eq!( + // params.text_document.uri, + // lsp::Url::from_file_path("/a/main.rs").unwrap(), + // ); + // let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + // Ok(Some(vec![lsp::InlayHint { + // position: lsp::Position::new(0, i), + // label: lsp::InlayHintLabel::String(i.to_string()), + // kind: None, + // text_edits: None, + // tooltip: None, + // padding_left: None, + // padding_right: None, + // data: None, + // }])) + // } + // }) + // .next() + // .await; + // 
cx.executor().run_until_parked(); + // rs_editor + // .update(cx, |editor, cx| { + // let expected_hints = vec!["0".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Should get its first hints when opening the editor" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!( + // editor.inlay_hint_cache().version, + // 1, + // "Rust editor update the cache version after every cache/view change" + // ); + // }) + // .unwrap(); - rs_editor - .update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| s.select_ranges([13..13])); - editor.handle_input("some rs change", cx); - }) - .unwrap(); - cx.executor().run_until_parked(); - rs_editor - .update(cx, |editor, cx| { - let expected_hints = vec!["1".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Rust inlay cache should change after the edit" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!( - editor.inlay_hint_cache().version, - 2, - "Every time hint cache changes, cache version should be incremented" - ); - }) - .unwrap(); - md_editor - .update(cx, |editor, cx| { - let expected_hints = vec!["0".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Markdown editor should not be affected by Rust editor changes" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!(editor.inlay_hint_cache().version, 1); - }) - .unwrap(); + // cx.executor().run_until_parked(); + // let md_buffer = project + // .update(cx, |project, cx| { + // project.open_local_buffer("/a/other.md", cx) + // }) + // .await + // .unwrap(); + // let md_editor = cx.add_window(|cx| Editor::for_buffer(md_buffer, Some(project), cx)); - md_editor - .update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| s.select_ranges([13..13])); - editor.handle_input("some md change", cx); - }) - .unwrap(); - cx.executor().run_until_parked(); - md_editor - .update(cx, 
|editor, cx| { - let expected_hints = vec!["1".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Rust editor should not be affected by Markdown editor changes" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!(editor.inlay_hint_cache().version, 2); - }) - .unwrap(); - rs_editor - .update(cx, |editor, cx| { - let expected_hints = vec!["1".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Markdown editor should also change independently" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!(editor.inlay_hint_cache().version, 2); - }) - .unwrap(); - } + // cx.executor().run_until_parked(); + // cx.executor().start_waiting(); + // let md_fake_server = md_fake_servers.unwrap().next().await.unwrap(); + // let md_lsp_request_count = Arc::new(AtomicU32::new(0)); + // md_fake_server + // .handle_request::(move |params, _| { + // let task_lsp_request_count = Arc::clone(&md_lsp_request_count); + // async move { + // assert_eq!( + // params.text_document.uri, + // lsp::Url::from_file_path("/a/other.md").unwrap(), + // ); + // let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + // Ok(Some(vec![lsp::InlayHint { + // position: lsp::Position::new(0, i), + // label: lsp::InlayHintLabel::String(i.to_string()), + // kind: None, + // text_edits: None, + // tooltip: None, + // padding_left: None, + // padding_right: None, + // data: None, + // }])) + // } + // }) + // .next() + // .await; + // cx.executor().run_until_parked(); + // md_editor + // .update(cx, |editor, cx| { + // let expected_hints = vec!["0".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Markdown editor should have a separate version, repeating Rust editor rules" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!(editor.inlay_hint_cache().version, 1); + // }) + // .unwrap(); + + // rs_editor + // 
.update(cx, |editor, cx| { + // editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + // editor.handle_input("some rs change", cx); + // }) + // .unwrap(); + // cx.executor().run_until_parked(); + // rs_editor + // .update(cx, |editor, cx| { + // let expected_hints = vec!["1".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Rust inlay cache should change after the edit" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!( + // editor.inlay_hint_cache().version, + // 2, + // "Every time hint cache changes, cache version should be incremented" + // ); + // }) + // .unwrap(); + // md_editor + // .update(cx, |editor, cx| { + // let expected_hints = vec!["0".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Markdown editor should not be affected by Rust editor changes" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!(editor.inlay_hint_cache().version, 1); + // }) + // .unwrap(); + + // md_editor + // .update(cx, |editor, cx| { + // editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + // editor.handle_input("some md change", cx); + // }) + // .unwrap(); + // cx.executor().run_until_parked(); + // md_editor + // .update(cx, |editor, cx| { + // let expected_hints = vec!["1".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Rust editor should not be affected by Markdown editor changes" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!(editor.inlay_hint_cache().version, 2); + // }) + // .unwrap(); + // rs_editor + // .update(cx, |editor, cx| { + // let expected_hints = vec!["1".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Markdown editor should also change independently" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // 
assert_eq!(editor.inlay_hint_cache().version, 2); + // }) + // .unwrap(); + // } #[gpui::test] async fn test_hint_setting_changes(cx: &mut gpui::TestAppContext) { @@ -1777,54 +1778,59 @@ pub mod tests { }) }); - let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; let lsp_request_count = Arc::new(AtomicU32::new(0)); - let another_lsp_request_count = Arc::clone(&lsp_request_count); - fake_server - .handle_request::(move |params, _| { - let task_lsp_request_count = Arc::clone(&another_lsp_request_count); - async move { - Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), - ); - Ok(Some(vec![ - lsp::InlayHint { - position: lsp::Position::new(0, 1), - label: lsp::InlayHintLabel::String("type hint".to_string()), - kind: Some(lsp::InlayHintKind::TYPE), - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }, - lsp::InlayHint { - position: lsp::Position::new(0, 2), - label: lsp::InlayHintLabel::String("parameter hint".to_string()), - kind: Some(lsp::InlayHintKind::PARAMETER), - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }, - lsp::InlayHint { - position: lsp::Position::new(0, 3), - label: lsp::InlayHintLabel::String("other hint".to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }, - ])) - } - }) - .next() - .await; + let (_, editor, fake_server) = prepare_test_objects(cx, { + let lsp_request_count = lsp_request_count.clone(); + move |fake_server, file_with_hints| { + let lsp_request_count = lsp_request_count.clone(); + fake_server.handle_request::( + move |params, _| { + lsp_request_count.fetch_add(1, Ordering::SeqCst); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + Ok(Some(vec![ + lsp::InlayHint { 
+ position: lsp::Position::new(0, 1), + label: lsp::InlayHintLabel::String("type hint".to_string()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(0, 2), + label: lsp::InlayHintLabel::String( + "parameter hint".to_string(), + ), + kind: Some(lsp::InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(0, 3), + label: lsp::InlayHintLabel::String("other hint".to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + ])) + } + }, + ); + } + }) + .await; cx.executor().run_until_parked(); let mut edits_made = 1; @@ -2126,33 +2132,36 @@ pub mod tests { }) }); - let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; - let fake_server = Arc::new(fake_server); let lsp_request_count = Arc::new(AtomicU32::new(0)); - let another_lsp_request_count = Arc::clone(&lsp_request_count); - fake_server - .handle_request::(move |params, _| { - let task_lsp_request_count = Arc::clone(&another_lsp_request_count); - async move { - let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst) + 1; - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), - ); - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, i), - label: lsp::InlayHintLabel::String(i.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - } - }) - .next() - .await; + let (_, editor, _) = prepare_test_objects(cx, { + let lsp_request_count = lsp_request_count.clone(); + move |fake_server, file_with_hints| { + let lsp_request_count = lsp_request_count.clone(); + fake_server.handle_request::( + move |params, _| { + let 
lsp_request_count = lsp_request_count.clone(); + async move { + let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1; + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + }) + .await; let mut expected_changes = Vec::new(); for change_after_opening in [ @@ -2191,10 +2200,6 @@ pub mod tests { "Should get hints from the last edit landed only" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!( - editor.inlay_hint_cache().version, 1, - "Only one update should be registered in the cache after all cancellations" - ); }).unwrap(); let mut edits = Vec::new(); @@ -2238,312 +2243,315 @@ pub mod tests { "Should get hints from the last edit landed only" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!( - editor.inlay_hint_cache().version, - 2, - "Should update the cache version once more, for the new change" - ); }) .unwrap(); } - #[gpui::test(iterations = 10)] - async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) { - init_test(cx, |settings| { - settings.defaults.inlay_hints = Some(InlayHintSettings { - enabled: true, - edit_debounce_ms: 0, - scroll_debounce_ms: 0, - show_type_hints: true, - show_parameter_hints: true, - show_other_hints: true, - show_background: false, - }) - }); + // #[gpui::test(iterations = 10)] + // async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) { + // init_test(cx, |settings| { + // settings.defaults.inlay_hints = Some(InlayHintSettings { + // enabled: true, + // edit_debounce_ms: 0, + // scroll_debounce_ms: 0, + // show_type_hints: true, + // show_parameter_hints: true, + // show_other_hints: true, + // show_background: false, + // 
}) + // }); - let fs = FakeFs::new(cx.background_executor.clone()); - fs.insert_tree( - "/a", - json!({ - "main.rs": format!("fn main() {{\n{}\n}}", "let i = 5;\n".repeat(500)), - "other.rs": "// Test file", - }), - ) - .await; + // let fs = FakeFs::new(cx.background_executor.clone()); + // fs.insert_tree( + // "/a", + // json!({ + // "main.rs": format!("fn main() {{\n{}\n}}", "let i = 5;\n".repeat(500)), + // "other.rs": "// Test file", + // }), + // ) + // .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + // let project = Project::test(fs, ["/a".as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(crate::editor_tests::rust_lang()); - let mut fake_servers = language_registry.register_fake_lsp( - "Rust", - FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - inlay_hint_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - }, - ); + // let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + // language_registry.add(rust_lang()); - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) - }) - .await - .unwrap(); - cx.executor().run_until_parked(); - cx.executor().start_waiting(); - let fake_server = fake_servers.next().await.unwrap(); - let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); - let lsp_request_ranges = Arc::new(Mutex::new(Vec::new())); - let lsp_request_count = Arc::new(AtomicUsize::new(0)); - let closure_lsp_request_ranges = Arc::clone(&lsp_request_ranges); - let closure_lsp_request_count = Arc::clone(&lsp_request_count); - fake_server - .handle_request::(move |params, _| { - let task_lsp_request_ranges = Arc::clone(&closure_lsp_request_ranges); - let task_lsp_request_count = Arc::clone(&closure_lsp_request_count); - async move { - assert_eq!( - params.text_document.uri, - 
lsp::Url::from_file_path("/a/main.rs").unwrap(), - ); + // let lsp_request_ranges = Arc::new(Mutex::new(Vec::new())); + // let lsp_request_count = Arc::new(AtomicUsize::new(0)); + // let mut fake_servers = language_registry.register_fake_lsp( + // "Rust", + // FakeLspAdapter { + // capabilities: lsp::ServerCapabilities { + // inlay_hint_provider: Some(lsp::OneOf::Left(true)), + // ..Default::default() + // }, + // initializer: Some(Box::new({ + // let lsp_request_ranges = lsp_request_ranges.clone(); + // let lsp_request_count = lsp_request_count.clone(); + // move |fake_server| { + // let closure_lsp_request_ranges = Arc::clone(&lsp_request_ranges); + // let closure_lsp_request_count = Arc::clone(&lsp_request_count); + // fake_server.handle_request::( + // move |params, _| { + // let task_lsp_request_ranges = + // Arc::clone(&closure_lsp_request_ranges); + // let task_lsp_request_count = Arc::clone(&closure_lsp_request_count); + // async move { + // assert_eq!( + // params.text_document.uri, + // lsp::Url::from_file_path("/a/main.rs").unwrap(), + // ); - task_lsp_request_ranges.lock().push(params.range); - let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::Release) + 1; - Ok(Some(vec![lsp::InlayHint { - position: params.range.end, - label: lsp::InlayHintLabel::String(i.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - } - }) - .next() - .await; + // task_lsp_request_ranges.lock().push(params.range); + // let i = Arc::clone(&task_lsp_request_count) + // .fetch_add(1, Ordering::Release) + // + 1; + // Ok(Some(vec![lsp::InlayHint { + // position: params.range.end, + // label: lsp::InlayHintLabel::String(i.to_string()), + // kind: None, + // text_edits: None, + // tooltip: None, + // padding_left: None, + // padding_right: None, + // data: None, + // }])) + // } + // }, + // ); + // } + // })), + // ..Default::default() + // }, + // ); - fn editor_visible_range( - editor: 
&WindowHandle, - cx: &mut gpui::TestAppContext, - ) -> Range { - let ranges = editor - .update(cx, |editor, cx| { - editor.excerpts_for_inlay_hints_query(None, cx) - }) - .unwrap(); - assert_eq!( - ranges.len(), - 1, - "Single buffer should produce a single excerpt with visible range" - ); - let (_, (excerpt_buffer, _, excerpt_visible_range)) = - ranges.into_iter().next().unwrap(); - excerpt_buffer.update(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - let start = buffer - .anchor_before(excerpt_visible_range.start) - .to_point(&snapshot); - let end = buffer - .anchor_after(excerpt_visible_range.end) - .to_point(&snapshot); - start..end - }) - } + // let buffer = project + // .update(cx, |project, cx| { + // project.open_local_buffer("/a/main.rs", cx) + // }) + // .await + // .unwrap(); + // let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); - // in large buffers, requests are made for more than visible range of a buffer. - // invisible parts are queried later, to avoid excessive requests on quick typing. - // wait the timeout needed to get all requests. 
- cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); - cx.executor().run_until_parked(); - let initial_visible_range = editor_visible_range(&editor, cx); - let lsp_initial_visible_range = lsp::Range::new( - lsp::Position::new( - initial_visible_range.start.row, - initial_visible_range.start.column, - ), - lsp::Position::new( - initial_visible_range.end.row, - initial_visible_range.end.column, - ), - ); - let expected_initial_query_range_end = - lsp::Position::new(initial_visible_range.end.row * 2, 2); - let mut expected_invisible_query_start = lsp_initial_visible_range.end; - expected_invisible_query_start.character += 1; - editor.update(cx, |editor, cx| { - let ranges = lsp_request_ranges.lock().drain(..).collect::>(); - assert_eq!(ranges.len(), 2, - "When scroll is at the edge of a big document, its visible part and the same range further should be queried in order, but got: {ranges:?}"); - let visible_query_range = &ranges[0]; - assert_eq!(visible_query_range.start, lsp_initial_visible_range.start); - assert_eq!(visible_query_range.end, lsp_initial_visible_range.end); - let invisible_query_range = &ranges[1]; + // cx.executor().run_until_parked(); - assert_eq!(invisible_query_range.start, expected_invisible_query_start, "Should initially query visible edge of the document"); - assert_eq!(invisible_query_range.end, expected_initial_query_range_end, "Should initially query visible edge of the document"); + // fn editor_visible_range( + // editor: &WindowHandle, + // cx: &mut gpui::TestAppContext, + // ) -> Range { + // let ranges = editor + // .update(cx, |editor, cx| { + // editor.excerpts_for_inlay_hints_query(None, cx) + // }) + // .unwrap(); + // assert_eq!( + // ranges.len(), + // 1, + // "Single buffer should produce a single excerpt with visible range" + // ); + // let (_, (excerpt_buffer, _, excerpt_visible_range)) = + // ranges.into_iter().next().unwrap(); + // excerpt_buffer.update(cx, |buffer, 
_| { + // let snapshot = buffer.snapshot(); + // let start = buffer + // .anchor_before(excerpt_visible_range.start) + // .to_point(&snapshot); + // let end = buffer + // .anchor_after(excerpt_visible_range.end) + // .to_point(&snapshot); + // start..end + // }) + // } - let requests_count = lsp_request_count.load(Ordering::Acquire); - assert_eq!(requests_count, 2, "Visible + invisible request"); - let expected_hints = vec!["1".to_string(), "2".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should have hints from both LSP requests made for a big file" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx), "Should display only hints from the visible range"); - assert_eq!( - editor.inlay_hint_cache().version, requests_count, - "LSP queries should've bumped the cache version" - ); - }).unwrap(); + // // in large buffers, requests are made for more than visible range of a buffer. + // // invisible parts are queried later, to avoid excessive requests on quick typing. + // // wait the timeout needed to get all requests. 
+ // cx.executor().advance_clock(Duration::from_millis( + // INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + // )); + // cx.executor().run_until_parked(); + // let initial_visible_range = editor_visible_range(&editor, cx); + // let lsp_initial_visible_range = lsp::Range::new( + // lsp::Position::new( + // initial_visible_range.start.row, + // initial_visible_range.start.column, + // ), + // lsp::Position::new( + // initial_visible_range.end.row, + // initial_visible_range.end.column, + // ), + // ); + // let expected_initial_query_range_end = + // lsp::Position::new(initial_visible_range.end.row * 2, 2); + // let mut expected_invisible_query_start = lsp_initial_visible_range.end; + // expected_invisible_query_start.character += 1; + // editor.update(cx, |editor, cx| { + // let ranges = lsp_request_ranges.lock().drain(..).collect::>(); + // assert_eq!(ranges.len(), 2, + // "When scroll is at the edge of a big document, its visible part and the same range further should be queried in order, but got: {ranges:?}"); + // let visible_query_range = &ranges[0]; + // assert_eq!(visible_query_range.start, lsp_initial_visible_range.start); + // assert_eq!(visible_query_range.end, lsp_initial_visible_range.end); + // let invisible_query_range = &ranges[1]; - editor - .update(cx, |editor, cx| { - editor.scroll_screen(&ScrollAmount::Page(1.0), cx); - }) - .unwrap(); - cx.executor().run_until_parked(); - editor - .update(cx, |editor, cx| { - editor.scroll_screen(&ScrollAmount::Page(1.0), cx); - }) - .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); - cx.executor().run_until_parked(); - let visible_range_after_scrolls = editor_visible_range(&editor, cx); - let visible_line_count = editor - .update(cx, |editor, _| editor.visible_line_count().unwrap()) - .unwrap(); - let selection_in_cached_range = editor - .update(cx, |editor, cx| { - let ranges = lsp_request_ranges - .lock() - .drain(..) 
- .sorted_by_key(|r| r.start) - .collect::>(); - assert_eq!( - ranges.len(), - 2, - "Should query 2 ranges after both scrolls, but got: {ranges:?}" - ); - let first_scroll = &ranges[0]; - let second_scroll = &ranges[1]; - assert_eq!( - first_scroll.end, second_scroll.start, - "Should query 2 adjacent ranges after the scrolls, but got: {ranges:?}" - ); - assert_eq!( - first_scroll.start, expected_initial_query_range_end, - "First scroll should start the query right after the end of the original scroll", - ); - assert_eq!( - second_scroll.end, - lsp::Position::new( - visible_range_after_scrolls.end.row - + visible_line_count.ceil() as u32, - 1, - ), - "Second scroll should query one more screen down after the end of the visible range" - ); + // assert_eq!(invisible_query_range.start, expected_invisible_query_start, "Should initially query visible edge of the document"); + // assert_eq!(invisible_query_range.end, expected_initial_query_range_end, "Should initially query visible edge of the document"); - let lsp_requests = lsp_request_count.load(Ordering::Acquire); - assert_eq!(lsp_requests, 4, "Should query for hints after every scroll"); - let expected_hints = vec![ - "1".to_string(), - "2".to_string(), - "3".to_string(), - "4".to_string(), - ]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should have hints from the new LSP response after the edit" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!( - editor.inlay_hint_cache().version, - lsp_requests, - "Should update the cache for every LSP response with hints added" - ); + // let requests_count = lsp_request_count.load(Ordering::Acquire); + // assert_eq!(requests_count, 2, "Visible + invisible request"); + // let expected_hints = vec!["1".to_string(), "2".to_string()]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Should have hints from both LSP requests made for a big file" + // ); + // assert_eq!(expected_hints, 
visible_hint_labels(editor, cx), "Should display only hints from the visible range"); + // assert_eq!( + // editor.inlay_hint_cache().version, requests_count, + // "LSP queries should've bumped the cache version" + // ); + // }).unwrap(); - let mut selection_in_cached_range = visible_range_after_scrolls.end; - selection_in_cached_range.row -= visible_line_count.ceil() as u32; - selection_in_cached_range - }) - .unwrap(); + // editor + // .update(cx, |editor, cx| { + // editor.scroll_screen(&ScrollAmount::Page(1.0), cx); + // }) + // .unwrap(); + // cx.executor().run_until_parked(); + // editor + // .update(cx, |editor, cx| { + // editor.scroll_screen(&ScrollAmount::Page(1.0), cx); + // }) + // .unwrap(); + // cx.executor().advance_clock(Duration::from_millis( + // INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + // )); + // cx.executor().run_until_parked(); + // let visible_range_after_scrolls = editor_visible_range(&editor, cx); + // let visible_line_count = editor + // .update(cx, |editor, _| editor.visible_line_count().unwrap()) + // .unwrap(); + // let selection_in_cached_range = editor + // .update(cx, |editor, cx| { + // let ranges = lsp_request_ranges + // .lock() + // .drain(..) 
+ // .sorted_by_key(|r| r.start) + // .collect::>(); + // assert_eq!( + // ranges.len(), + // 2, + // "Should query 2 ranges after both scrolls, but got: {ranges:?}" + // ); + // let first_scroll = &ranges[0]; + // let second_scroll = &ranges[1]; + // assert_eq!( + // first_scroll.end, second_scroll.start, + // "Should query 2 adjacent ranges after the scrolls, but got: {ranges:?}" + // ); + // assert_eq!( + // first_scroll.start, expected_initial_query_range_end, + // "First scroll should start the query right after the end of the original scroll", + // ); + // assert_eq!( + // second_scroll.end, + // lsp::Position::new( + // visible_range_after_scrolls.end.row + // + visible_line_count.ceil() as u32, + // 1, + // ), + // "Second scroll should query one more screen down after the end of the visible range" + // ); - editor - .update(cx, |editor, cx| { - editor.change_selections(Some(Autoscroll::center()), cx, |s| { - s.select_ranges([selection_in_cached_range..selection_in_cached_range]) - }); - }) - .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); - cx.executor().run_until_parked(); - editor.update(cx, |_, _| { - let ranges = lsp_request_ranges - .lock() - .drain(..) 
- .sorted_by_key(|r| r.start) - .collect::>(); - assert!(ranges.is_empty(), "No new ranges or LSP queries should be made after returning to the selection with cached hints"); - assert_eq!(lsp_request_count.load(Ordering::Acquire), 4); - }).unwrap(); + // let lsp_requests = lsp_request_count.load(Ordering::Acquire); + // assert_eq!(lsp_requests, 4, "Should query for hints after every scroll"); + // let expected_hints = vec![ + // "1".to_string(), + // "2".to_string(), + // "3".to_string(), + // "4".to_string(), + // ]; + // assert_eq!( + // expected_hints, + // cached_hint_labels(editor), + // "Should have hints from the new LSP response after the edit" + // ); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!( + // editor.inlay_hint_cache().version, + // lsp_requests, + // "Should update the cache for every LSP response with hints added" + // ); - editor - .update(cx, |editor, cx| { - editor.handle_input("++++more text++++", cx); - }) - .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); - cx.executor().run_until_parked(); - editor.update(cx, |editor, cx| { - let mut ranges = lsp_request_ranges.lock().drain(..).collect::>(); - ranges.sort_by_key(|r| r.start); + // let mut selection_in_cached_range = visible_range_after_scrolls.end; + // selection_in_cached_range.row -= visible_line_count.ceil() as u32; + // selection_in_cached_range + // }) + // .unwrap(); - assert_eq!(ranges.len(), 3, - "On edit, should scroll to selection and query a range around it: visible + same range above and below. 
Instead, got query ranges {ranges:?}"); - let above_query_range = &ranges[0]; - let visible_query_range = &ranges[1]; - let below_query_range = &ranges[2]; - assert!(above_query_range.end.character < visible_query_range.start.character || above_query_range.end.line + 1 == visible_query_range.start.line, - "Above range {above_query_range:?} should be before visible range {visible_query_range:?}"); - assert!(visible_query_range.end.character < below_query_range.start.character || visible_query_range.end.line + 1 == below_query_range.start.line, - "Visible range {visible_query_range:?} should be before below range {below_query_range:?}"); - assert!(above_query_range.start.line < selection_in_cached_range.row, - "Hints should be queried with the selected range after the query range start"); - assert!(below_query_range.end.line > selection_in_cached_range.row, - "Hints should be queried with the selected range before the query range end"); - assert!(above_query_range.start.line <= selection_in_cached_range.row - (visible_line_count * 3.0 / 2.0) as u32, - "Hints query range should contain one more screen before"); - assert!(below_query_range.end.line >= selection_in_cached_range.row + (visible_line_count * 3.0 / 2.0) as u32, - "Hints query range should contain one more screen after"); + // editor + // .update(cx, |editor, cx| { + // editor.change_selections(Some(Autoscroll::center()), cx, |s| { + // s.select_ranges([selection_in_cached_range..selection_in_cached_range]) + // }); + // }) + // .unwrap(); + // cx.executor().advance_clock(Duration::from_millis( + // INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + // )); + // cx.executor().run_until_parked(); + // editor.update(cx, |_, _| { + // let ranges = lsp_request_ranges + // .lock() + // .drain(..) 
+ // .sorted_by_key(|r| r.start) + // .collect::>(); + // assert!(ranges.is_empty(), "No new ranges or LSP queries should be made after returning to the selection with cached hints"); + // assert_eq!(lsp_request_count.load(Ordering::Acquire), 4); + // }).unwrap(); - let lsp_requests = lsp_request_count.load(Ordering::Acquire); - assert_eq!(lsp_requests, 7, "There should be a visible range and two ranges above and below it queried"); - let expected_hints = vec!["5".to_string(), "6".to_string(), "7".to_string()]; - assert_eq!(expected_hints, cached_hint_labels(editor), - "Should have hints from the new LSP response after the edit"); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - assert_eq!(editor.inlay_hint_cache().version, lsp_requests, "Should update the cache for every LSP response with hints added"); - }).unwrap(); - } + // editor + // .update(cx, |editor, cx| { + // editor.handle_input("++++more text++++", cx); + // }) + // .unwrap(); + // cx.executor().advance_clock(Duration::from_millis( + // INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + // )); + // cx.executor().run_until_parked(); + // editor.update(cx, |editor, cx| { + // let mut ranges = lsp_request_ranges.lock().drain(..).collect::>(); + // ranges.sort_by_key(|r| r.start); - #[gpui::test(iterations = 30)] + // assert_eq!(ranges.len(), 3, + // "On edit, should scroll to selection and query a range around it: visible + same range above and below. 
Instead, got query ranges {ranges:?}"); + // let above_query_range = &ranges[0]; + // let visible_query_range = &ranges[1]; + // let below_query_range = &ranges[2]; + // assert!(above_query_range.end.character < visible_query_range.start.character || above_query_range.end.line + 1 == visible_query_range.start.line, + // "Above range {above_query_range:?} should be before visible range {visible_query_range:?}"); + // assert!(visible_query_range.end.character < below_query_range.start.character || visible_query_range.end.line + 1 == below_query_range.start.line, + // "Visible range {visible_query_range:?} should be before below range {below_query_range:?}"); + // assert!(above_query_range.start.line < selection_in_cached_range.row, + // "Hints should be queried with the selected range after the query range start"); + // assert!(below_query_range.end.line > selection_in_cached_range.row, + // "Hints should be queried with the selected range before the query range end"); + // assert!(above_query_range.start.line <= selection_in_cached_range.row - (visible_line_count * 3.0 / 2.0) as u32, + // "Hints query range should contain one more screen before"); + // assert!(below_query_range.end.line >= selection_in_cached_range.row + (visible_line_count * 3.0 / 2.0) as u32, + // "Hints query range should contain one more screen after"); + + // let lsp_requests = lsp_request_count.load(Ordering::Acquire); + // assert_eq!(lsp_requests, 7, "There should be a visible range and two ranges above and below it queried"); + // let expected_hints = vec!["5".to_string(), "6".to_string(), "7".to_string()]; + // assert_eq!(expected_hints, cached_hint_labels(editor), + // "Should have hints from the new LSP response after the edit"); + // assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + // assert_eq!(editor.inlay_hint_cache().version, lsp_requests, "Should update the cache for every LSP response with hints added"); + // }).unwrap(); + // } + + #[gpui::test] async fn 
test_multiple_excerpts_large_multibuffer(cx: &mut gpui::TestAppContext) { init_test(cx, |settings| { settings.defaults.inlay_hints = Some(InlayHintSettings { @@ -2570,7 +2578,7 @@ pub mod tests { let project = Project::test(fs, ["/a".as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let language = crate::editor_tests::rust_lang(); + let language = rust_lang(); language_registry.add(language); let mut fake_servers = language_registry.register_fake_lsp( "Rust", @@ -2583,19 +2591,15 @@ pub mod tests { }, ); - let worktree_id = project.update(cx, |project, cx| { - project.worktrees(cx).next().unwrap().read(cx).id() - }); - - let buffer_1 = project + let (buffer_1, _handle1) = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_local_buffer_with_lsp("/a/main.rs", cx) }) .await .unwrap(); - let buffer_2 = project + let (buffer_2, _handle2) = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "other.rs"), cx) + project.open_local_buffer_with_lsp("/a/other.rs", cx) }) .await .unwrap(); @@ -2922,7 +2926,7 @@ pub mod tests { let project = Project::test(fs, ["/a".as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(crate::editor_tests::rust_lang()); + language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp( "Rust", FakeLspAdapter { @@ -2934,19 +2938,15 @@ pub mod tests { }, ); - let worktree_id = project.update(cx, |project, cx| { - project.worktrees(cx).next().unwrap().read(cx).id() - }); - - let buffer_1 = project + let (buffer_1, _handle) = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_local_buffer_with_lsp("/a/main.rs", cx) }) .await .unwrap(); - let buffer_2 = project + let (buffer_2, _handle2) = project .update(cx, |project, cx| { - 
project.open_buffer((worktree_id, "other.rs"), cx) + project.open_local_buffer_with_lsp("/a/other.rs", cx) }) .await .unwrap(); @@ -3045,7 +3045,6 @@ pub mod tests { .next() .await; cx.executor().run_until_parked(); - editor .update(cx, |editor, cx| { assert_eq!( @@ -3153,14 +3152,39 @@ pub mod tests { let project = Project::test(fs, ["/a".as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(crate::editor_tests::rust_lang()); - let mut fake_servers = language_registry.register_fake_lsp( + language_registry.add(rust_lang()); + language_registry.register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { inlay_hint_provider: Some(lsp::OneOf::Left(true)), ..Default::default() }, + initializer: Some(Box::new(move |fake_server| { + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server.handle_request::( + move |params, _| { + let i = lsp_request_count.fetch_add(1, Ordering::Release) + 1; + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + let query_start = params.range.start; + Ok(Some(vec![lsp::InlayHint { + position: query_start, + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + })), ..Default::default() }, ); @@ -3171,36 +3195,10 @@ pub mod tests { }) .await .unwrap(); + let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); + cx.executor().run_until_parked(); cx.executor().start_waiting(); - let fake_server = fake_servers.next().await.unwrap(); - let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); - let lsp_request_count = Arc::new(AtomicU32::new(0)); - let closure_lsp_request_count = Arc::clone(&lsp_request_count); - fake_server - .handle_request::(move |params, _| { - let task_lsp_request_count = 
Arc::clone(&closure_lsp_request_count); - async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), - ); - let query_start = params.range.start; - let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::Release) + 1; - Ok(Some(vec![lsp::InlayHint { - position: query_start, - label: lsp::InlayHintLabel::String(i.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - } - }) - .next() - .await; cx.executor().run_until_parked(); editor @@ -3235,26 +3233,17 @@ pub mod tests { }) }); - let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; - - editor - .update(cx, |editor, cx| { - editor.toggle_inlay_hints(&crate::ToggleInlayHints, cx) - }) - .unwrap(); - cx.executor().start_waiting(); - let lsp_request_count = Arc::new(AtomicU32::new(0)); - let closure_lsp_request_count = Arc::clone(&lsp_request_count); - fake_server - .handle_request::(move |params, _| { - let task_lsp_request_count = Arc::clone(&closure_lsp_request_count); + let (_, editor, _fake_server) = prepare_test_objects(cx, |fake_server, file_with_hints| { + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server.handle_request::(move |params, _| { + let lsp_request_count = lsp_request_count.clone(); async move { assert_eq!( params.text_document.uri, lsp::Url::from_file_path(file_with_hints).unwrap(), ); - let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst) + 1; + let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1; Ok(Some(vec![lsp::InlayHint { position: lsp::Position::new(0, i), label: lsp::InlayHintLabel::String(i.to_string()), @@ -3266,9 +3255,17 @@ pub mod tests { data: None, }])) } + }); + }) + .await; + + editor + .update(cx, |editor, cx| { + editor.toggle_inlay_hints(&crate::ToggleInlayHints, cx) }) - .next() - .await; + .unwrap(); + cx.executor().start_waiting(); + cx.executor().run_until_parked(); 
editor .update(cx, |editor, cx| { @@ -3382,6 +3379,7 @@ pub mod tests { async fn prepare_test_objects( cx: &mut TestAppContext, + initialize: impl 'static + Send + Fn(&mut FakeLanguageServer, &'static str) + Send + Sync, ) -> (&'static str, WindowHandle, FakeLanguageServer) { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( @@ -3394,9 +3392,10 @@ pub mod tests { .await; let project = Project::test(fs, ["/a".as_ref()], cx).await; + let file_path = "/a/main.rs"; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(crate::editor_tests::rust_lang()); + language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp( "Rust", FakeLspAdapter { @@ -3404,6 +3403,7 @@ pub mod tests { inlay_hint_provider: Some(lsp::OneOf::Left(true)), ..Default::default() }, + initializer: Some(Box::new(move |server| initialize(server, file_path))), ..Default::default() }, ); @@ -3414,9 +3414,6 @@ pub mod tests { }) .await .unwrap(); - cx.executor().run_until_parked(); - cx.executor().start_waiting(); - let fake_server = fake_servers.next().await.unwrap(); let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); editor @@ -3427,7 +3424,12 @@ pub mod tests { }) .unwrap(); - ("/a/main.rs", editor, fake_server) + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + cx.executor().finish_waiting(); + + (file_path, editor, fake_server) } pub fn cached_hint_labels(editor: &Editor) -> Vec { @@ -3443,7 +3445,7 @@ pub mod tests { labels } - pub fn visible_hint_labels(editor: &Editor, cx: &ViewContext<'_, Editor>) -> Vec { + pub fn visible_hint_labels(editor: &Editor, cx: &ViewContext) -> Vec { let mut hints = editor .visible_inlay_hints(cx) .into_iter() diff --git a/crates/editor/src/inline_completion_tests.rs b/crates/editor/src/inline_completion_tests.rs new file mode 100644 index 
0000000000..d7c44dc95e --- /dev/null +++ b/crates/editor/src/inline_completion_tests.rs @@ -0,0 +1,424 @@ +use gpui::{prelude::*, Model}; +use indoc::indoc; +use inline_completion::InlineCompletionProvider; +use language::{Language, LanguageConfig}; +use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint}; +use std::{num::NonZeroU32, ops::Range, sync::Arc}; +use text::{Point, ToOffset}; + +use crate::{ + editor_tests::init_test, test::editor_test_context::EditorTestContext, InlineCompletion, +}; + +#[gpui::test] +async fn test_inline_completion_insert(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new_model(|_| FakeInlineCompletionProvider::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + cx.set_state("let absolute_zero_celsius = ˇ;"); + + propose_edits(&provider, vec![(28..28, "-273.15")], &mut cx); + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + + assert_editor_active_edit_completion(&mut cx, |_, edits| { + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].1.as_str(), "-273.15"); + }); + + accept_completion(&mut cx); + + cx.assert_editor_state("let absolute_zero_celsius = -273.15ˇ;") +} + +#[gpui::test] +async fn test_inline_completion_modification(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new_model(|_| FakeInlineCompletionProvider::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + cx.set_state("let pi = ˇ\"foo\";"); + + propose_edits(&provider, vec![(9..14, "3.14159")], &mut cx); + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + + assert_editor_active_edit_completion(&mut cx, |_, edits| { + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].1.as_str(), "3.14159"); + }); + + accept_completion(&mut cx); + + cx.assert_editor_state("let pi = 3.14159ˇ;") +} + +#[gpui::test] +async fn 
test_inline_completion_jump_button(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new_model(|_| FakeInlineCompletionProvider::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + + // Cursor is 2+ lines above the proposed edit + cx.set_state(indoc! {" + line 0 + line ˇ1 + line 2 + line 3 + line + "}); + + propose_edits( + &provider, + vec![(Point::new(4, 3)..Point::new(4, 3), " 4")], + &mut cx, + ); + + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + assert_editor_active_move_completion(&mut cx, |snapshot, move_target| { + assert_eq!(move_target.to_point(&snapshot), Point::new(4, 3)); + }); + + // When accepting, cursor is moved to the proposed location + accept_completion(&mut cx); + cx.assert_editor_state(indoc! {" + line 0 + line 1 + line 2 + line 3 + linˇe + "}); + + // Cursor is 2+ lines below the proposed edit + cx.set_state(indoc! {" + line 0 + line + line 2 + line 3 + line ˇ4 + "}); + + propose_edits( + &provider, + vec![(Point::new(1, 3)..Point::new(1, 3), " 1")], + &mut cx, + ); + + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + assert_editor_active_move_completion(&mut cx, |snapshot, move_target| { + assert_eq!(move_target.to_point(&snapshot), Point::new(1, 3)); + }); + + // When accepting, cursor is moved to the proposed location + accept_completion(&mut cx); + cx.assert_editor_state(indoc! 
{" + line 0 + linˇe + line 2 + line 3 + line 4 + "}); +} + +#[gpui::test] +async fn test_indentation(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(4) + }); + + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_indents_query(r#"(_ "(" ")" @end) @indent"#) + .unwrap(), + ); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + let provider = cx.new_model(|_| FakeInlineCompletionProvider::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + + cx.set_state(indoc! {" + const a: A = ( + ˇ + ); + "}); + + propose_edits( + &provider, + vec![(Point::new(1, 0)..Point::new(1, 0), " const function()")], + &mut cx, + ); + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + + assert_editor_active_edit_completion(&mut cx, |_, edits| { + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].1.as_str(), " const function()"); + }); + + // When the cursor is before the suggested indentation level, accepting a + // completion should just indent. + accept_completion(&mut cx); + cx.assert_editor_state(indoc! {" + const a: A = ( + ˇ + ); + "}); +} + +#[gpui::test] +async fn test_inline_completion_invalidation_range(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new_model(|_| FakeInlineCompletionProvider::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + + // Cursor is 3+ lines above the proposed edit + cx.set_state(indoc! 
{" + line 0 + line ˇ1 + line 2 + line 3 + line 4 + line + "}); + let edit_location = Point::new(5, 3); + + propose_edits( + &provider, + vec![(edit_location..edit_location, " 5")], + &mut cx, + ); + + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + assert_editor_active_move_completion(&mut cx, |snapshot, move_target| { + assert_eq!(move_target.to_point(&snapshot), edit_location); + }); + + // If we move *towards* the completion, it stays active + cx.set_selections_state(indoc! {" + line 0 + line 1 + line ˇ2 + line 3 + line 4 + line + "}); + assert_editor_active_move_completion(&mut cx, |snapshot, move_target| { + assert_eq!(move_target.to_point(&snapshot), edit_location); + }); + + // If we move *away* from the completion, it is discarded + cx.set_selections_state(indoc! {" + line ˇ0 + line 1 + line 2 + line 3 + line 4 + line + "}); + cx.editor(|editor, _| { + assert!(editor.active_inline_completion.is_none()); + }); + + // Cursor is 3+ lines below the proposed edit + cx.set_state(indoc! {" + line + line 1 + line 2 + line 3 + line ˇ4 + line 5 + "}); + let edit_location = Point::new(0, 3); + + propose_edits( + &provider, + vec![(edit_location..edit_location, " 0")], + &mut cx, + ); + + cx.update_editor(|editor, cx| editor.update_visible_inline_completion(cx)); + assert_editor_active_move_completion(&mut cx, |snapshot, move_target| { + assert_eq!(move_target.to_point(&snapshot), edit_location); + }); + + // If we move *towards* the completion, it stays active + cx.set_selections_state(indoc! {" + line + line 1 + line 2 + line ˇ3 + line 4 + line 5 + "}); + assert_editor_active_move_completion(&mut cx, |snapshot, move_target| { + assert_eq!(move_target.to_point(&snapshot), edit_location); + }); + + // If we move *away* from the completion, it is discarded + cx.set_selections_state(indoc! 
{" + line + line 1 + line 2 + line 3 + line 4 + line ˇ5 + "}); + cx.editor(|editor, _| { + assert!(editor.active_inline_completion.is_none()); + }); +} + +fn assert_editor_active_edit_completion( + cx: &mut EditorTestContext, + assert: impl FnOnce(MultiBufferSnapshot, &Vec<(Range, String)>), +) { + cx.editor(|editor, cx| { + let completion_state = editor + .active_inline_completion + .as_ref() + .expect("editor has no active completion"); + + if let InlineCompletion::Edit(edits) = &completion_state.completion { + assert(editor.buffer().read(cx).snapshot(cx), edits); + } else { + panic!("expected edit completion"); + } + }) +} + +fn assert_editor_active_move_completion( + cx: &mut EditorTestContext, + assert: impl FnOnce(MultiBufferSnapshot, Anchor), +) { + cx.editor(|editor, cx| { + let completion_state = editor + .active_inline_completion + .as_ref() + .expect("editor has no active completion"); + + if let InlineCompletion::Move(anchor) = &completion_state.completion { + assert(editor.buffer().read(cx).snapshot(cx), *anchor); + } else { + panic!("expected move completion"); + } + }) +} + +fn accept_completion(cx: &mut EditorTestContext) { + cx.update_editor(|editor, cx| { + editor.accept_inline_completion(&crate::AcceptInlineCompletion, cx) + }) +} + +fn propose_edits( + provider: &Model, + edits: Vec<(Range, &str)>, + cx: &mut EditorTestContext, +) { + let snapshot = cx.buffer_snapshot(); + let edits = edits.into_iter().map(|(range, text)| { + let range = snapshot.anchor_after(range.start)..snapshot.anchor_before(range.end); + (range, text.into()) + }); + + cx.update(|cx| { + provider.update(cx, |provider, _| { + provider.set_inline_completion(Some(inline_completion::InlineCompletion { + edits: edits.collect(), + })) + }) + }); +} + +fn assign_editor_completion_provider( + provider: Model, + cx: &mut EditorTestContext, +) { + cx.update_editor(|editor, cx| { + editor.set_inline_completion_provider(Some(provider), cx); + }) +} + +#[derive(Default, Clone)] +struct 
FakeInlineCompletionProvider { + completion: Option, +} + +impl FakeInlineCompletionProvider { + pub fn set_inline_completion( + &mut self, + completion: Option, + ) { + self.completion = completion; + } +} + +impl InlineCompletionProvider for FakeInlineCompletionProvider { + fn name() -> &'static str { + "fake-completion-provider" + } + + fn display_name() -> &'static str { + "Fake Completion Provider" + } + + fn show_completions_in_menu() -> bool { + false + } + + fn show_completions_in_normal_mode() -> bool { + false + } + + fn is_enabled( + &self, + _buffer: &gpui::Model, + _cursor_position: language::Anchor, + _cx: &gpui::AppContext, + ) -> bool { + true + } + + fn is_refreshing(&self) -> bool { + false + } + + fn refresh( + &mut self, + _buffer: gpui::Model, + _cursor_position: language::Anchor, + _debounce: bool, + _cx: &mut gpui::ModelContext, + ) { + } + + fn cycle( + &mut self, + _buffer: gpui::Model, + _cursor_position: language::Anchor, + _direction: inline_completion::Direction, + _cx: &mut gpui::ModelContext, + ) { + } + + fn accept(&mut self, _cx: &mut gpui::ModelContext) {} + + fn discard(&mut self, _cx: &mut gpui::ModelContext) {} + + fn suggest<'a>( + &mut self, + _buffer: &gpui::Model, + _cursor_position: language::Anchor, + _cx: &mut gpui::ModelContext, + ) -> Option { + self.completion.clone() + } +} diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index fb26a5654b..620fcd5ec4 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -16,13 +16,14 @@ use gpui::{ VisualContext, WeakView, WindowContext, }; use language::{ - proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, CharKind, Point, SelectionGoal, + proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, CharKind, DiskState, Point, + SelectionGoal, }; use lsp::DiagnosticSeverity; use multi_buffer::AnchorRangeExt; use project::{ - lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Item as _, - 
Project, ProjectPath, + lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Project, + ProjectItem as _, ProjectPath, }; use rpc::proto::{self, update_view, PeerId}; use settings::Settings; @@ -46,7 +47,7 @@ use workspace::item::{BreadcrumbText, FollowEvent}; use workspace::{ item::{FollowableItem, Item, ItemEvent, ProjectItem}, searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle}, - ItemId, ItemNavHistory, Pane, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, + ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, }; pub const MAX_TAB_TITLE_LEN: usize = 24; @@ -614,9 +615,20 @@ impl Item for Editor { .read(cx) .as_singleton() .and_then(|buffer| buffer.read(cx).project_path(cx)) - .and_then(|path| self.project.as_ref()?.read(cx).entry_for_path(&path, cx)) - .map(|entry| { - entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected) + .and_then(|path| { + let project = self.project.as_ref()?.read(cx); + let entry = project.entry_for_path(&path, cx)?; + let git_status = project + .worktree_for_id(path.worktree_id, cx)? 
+ .read(cx) + .snapshot() + .status_for_file(path.path); + + Some(entry_git_aware_label_color( + git_status, + entry.is_ignored, + params.selected, + )) }) .unwrap_or_else(|| entry_label_color(params.selected)) } else { @@ -641,7 +653,7 @@ impl Item for Editor { .read(cx) .as_singleton() .and_then(|buffer| buffer.read(cx).file()) - .map_or(false, |file| file.is_deleted() && file.is_created()); + .map_or(false, |file| file.disk_state() == DiskState::Deleted); h_flex() .gap_2() @@ -664,7 +676,7 @@ impl Item for Editor { fn for_each_project_item( &self, cx: &AppContext, - f: &mut dyn FnMut(EntityId, &dyn project::Item), + f: &mut dyn FnMut(EntityId, &dyn project::ProjectItem), ) { self.buffer .read(cx) @@ -732,11 +744,11 @@ impl Item for Editor { project: Model, cx: &mut ViewContext, ) -> Task> { - self.report_editor_event("save", None, cx); + self.report_editor_event("Editor Saved", None, cx); let buffers = self.buffer().clone().read(cx).all_buffers(); let buffers = buffers .into_iter() - .map(|handle| handle.read(cx).diff_base_buffer().unwrap_or(handle.clone())) + .map(|handle| handle.read(cx).base_buffer().unwrap_or(handle.clone())) .collect::>(); cx.spawn(|this, mut cx| async move { if format { @@ -804,7 +816,7 @@ impl Item for Editor { .path .extension() .map(|a| a.to_string_lossy().to_string()); - self.report_editor_event("save", file_extension, cx); + self.report_editor_event("Editor Saved", file_extension, cx); project.update(cx, |project, cx| project.save_buffer_as(buffer, path, cx)) } @@ -840,7 +852,7 @@ impl Item for Editor { self.pixel_position_of_newest_cursor } - fn breadcrumb_location(&self) -> ToolbarItemLocation { + fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation { if self.show_breadcrumbs { ToolbarItemLocation::PrimaryLeft } else { @@ -953,7 +965,7 @@ impl SerializableItem for Editor { workspace: WeakView, workspace_id: workspace::WorkspaceId, item_id: ItemId, - cx: &mut ViewContext, + cx: &mut WindowContext, ) -> Task>> { let 
serialized_editor = match DB .get_serialized_editor(item_id, workspace_id) @@ -988,7 +1000,7 @@ impl SerializableItem for Editor { contents: Some(contents), language, .. - } => cx.spawn(|pane, mut cx| { + } => cx.spawn(|mut cx| { let project = project.clone(); async move { let language = if let Some(language_name) = language { @@ -1018,7 +1030,7 @@ impl SerializableItem for Editor { buffer.set_text(contents, cx); })?; - pane.update(&mut cx, |_, cx| { + cx.update(|cx| { cx.new_view(|cx| { let mut editor = Editor::for_buffer(buffer, Some(project), cx); @@ -1045,7 +1057,7 @@ impl SerializableItem for Editor { match project_item { Some(project_item) => { - cx.spawn(|pane, mut cx| async move { + cx.spawn(|mut cx| async move { let (_, project_item) = project_item.await?; let buffer = project_item.downcast::().map_err(|_| { anyhow!("Project item at stored path was not a buffer") @@ -1072,7 +1084,7 @@ impl SerializableItem for Editor { })?; } - pane.update(&mut cx, |_, cx| { + cx.update(|cx| { cx.new_view(|cx| { let mut editor = Editor::for_buffer(buffer, Some(project), cx); @@ -1086,7 +1098,7 @@ impl SerializableItem for Editor { let open_by_abs_path = workspace.update(cx, |workspace, cx| { workspace.open_abs_path(abs_path.clone(), false, cx) }); - cx.spawn(|_, mut cx| async move { + cx.spawn(|mut cx| async move { let editor = open_by_abs_path?.await?.downcast::().with_context(|| format!("Failed to downcast to Editor after opening abs path {abs_path:?}"))?; editor.update(&mut cx, |editor, cx| { editor.read_scroll_position_from_db(item_id, workspace_id, cx); @@ -1558,10 +1570,10 @@ pub fn entry_git_aware_label_color( Color::Ignored } else { match git_status { - Some(GitFileStatus::Added) => Color::Created, + Some(GitFileStatus::Added) | Some(GitFileStatus::Untracked) => Color::Created, Some(GitFileStatus::Modified) => Color::Modified, Some(GitFileStatus::Conflict) => Color::Conflict, - None => entry_label_color(selected), + Some(GitFileStatus::Deleted) | None => 
entry_label_color(selected), } } } @@ -1617,15 +1629,14 @@ fn path_for_file<'a>( #[cfg(test)] mod tests { use crate::editor_tests::init_test; + use fs::Fs; use super::*; + use fs::MTime; use gpui::{AppContext, VisualTestContext}; use language::{LanguageMatcher, TestFile}; use project::FakeFs; - use std::{ - path::{Path, PathBuf}, - time::SystemTime, - }; + use std::path::{Path, PathBuf}; #[gpui::test] fn test_path_for_file(cx: &mut AppContext) { @@ -1678,9 +1689,7 @@ mod tests { async fn test_deserialize(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); - let now = SystemTime::now(); let fs = FakeFs::new(cx.executor()); - fs.set_next_mtime(now); fs.insert_file("/file.rs", Default::default()).await; // Test case 1: Deserialize with path and contents @@ -1689,12 +1698,18 @@ mod tests { let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap(); let item_id = 1234 as ItemId; + let mtime = fs + .metadata(Path::new("/file.rs")) + .await + .unwrap() + .unwrap() + .mtime; let serialized_editor = SerializedEditor { abs_path: Some(PathBuf::from("/file.rs")), contents: Some("fn main() {}".to_string()), language: Some("Rust".to_string()), - mtime: Some(now), + mtime: Some(mtime), }; DB.save_serialized_editor(item_id, workspace_id, serialized_editor.clone()) @@ -1791,9 +1806,7 @@ mod tests { let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap(); let item_id = 9345 as ItemId; - let old_mtime = now - .checked_sub(std::time::Duration::from_secs(60 * 60 * 24)) - .unwrap(); + let old_mtime = MTime::from_seconds_and_nanos(0, 50); let serialized_editor = SerializedEditor { abs_path: Some(PathBuf::from("/file.rs")), contents: Some("fn main() {}".to_string()), diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index 15026b0728..2937e75943 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -1,6 +1,8 @@ +use 
std::collections::hash_map::Entry; use std::sync::Arc; use crate::Editor; +use collections::HashMap; use gpui::{Model, WindowContext}; use language::Buffer; use language::Language; @@ -20,6 +22,7 @@ where return None; }; let multibuffer = editor.buffer().read(cx); + let mut language_servers_for = HashMap::default(); editor .selections .disjoint_anchors() @@ -28,27 +31,36 @@ where .filter_map(|selection| Some((selection.start.buffer_id?, selection.start))) .filter_map(|(buffer_id, trigger_anchor)| { let buffer = multibuffer.buffer(buffer_id)?; + let server_id = *match language_servers_for.entry(buffer_id) { + Entry::Occupied(occupied_entry) => occupied_entry.into_mut(), + Entry::Vacant(vacant_entry) => { + let language_server_id = project + .read(cx) + .language_servers_for_local_buffer(buffer.read(cx), cx) + .find_map(|(adapter, server)| { + if adapter.name.0.as_ref() == language_server_name { + Some(server.server_id()) + } else { + None + } + }); + vacant_entry.insert(language_server_id) + } + } + .as_ref()?; + + Some((buffer, trigger_anchor, server_id)) + }) + .find_map(|(buffer, trigger_anchor, server_id)| { let language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?; if !filter_language(&language) { return None; } - Some((trigger_anchor, language, buffer)) - }) - .find_map(|(trigger_anchor, language, buffer)| { - project - .read(cx) - .language_servers_for_buffer(buffer.read(cx), cx) - .find_map(|(adapter, server)| { - if adapter.name.0.as_ref() == language_server_name { - Some(( - trigger_anchor, - Arc::clone(&language), - server.server_id(), - buffer.clone(), - )) - } else { - None - } - }) + Some(( + trigger_anchor, + Arc::clone(&language), + server_id, + buffer.clone(), + )) }) } diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 9abf4d990c..6861d424ec 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -20,8 +20,7 @@ pub enum MenuPosition { /// 
Disappears when the position is no longer visible. PinnedToEditor { source: multi_buffer::Anchor, - offset_x: Pixels, - offset_y: Pixels, + offset: Point, }, } @@ -48,36 +47,22 @@ impl MouseContextMenu { context_menu: View, cx: &mut ViewContext, ) -> Option { - let context_menu_focus = context_menu.focus_handle(cx); - cx.focus(&context_menu_focus); - - let _subscription = cx.subscribe( - &context_menu, - move |editor, _, _event: &DismissEvent, cx| { - editor.mouse_context_menu.take(); - if context_menu_focus.contains_focused(cx) { - editor.focus(cx); - } - }, - ); - let editor_snapshot = editor.snapshot(cx); - let source_point = editor.to_pixel_point(source, &editor_snapshot, cx)?; - let offset = position - source_point; - - Some(Self { - position: MenuPosition::PinnedToEditor { - source, - offset_x: offset.x, - offset_y: offset.y, - }, - context_menu, - _subscription, - }) + let content_origin = editor.last_bounds?.origin + + Point { + x: editor.gutter_dimensions.width, + y: Pixels(0.0), + }; + let source_position = editor.to_pixel_point(source, &editor_snapshot, cx)?; + let menu_position = MenuPosition::PinnedToEditor { + source, + offset: position - (source_position + content_origin), + }; + return Some(MouseContextMenu::new(menu_position, context_menu, cx)); } - pub(crate) fn pinned_to_screen( - position: Point, + pub(crate) fn new( + position: MenuPosition, context_menu: View, cx: &mut ViewContext, ) -> Self { @@ -95,7 +80,7 @@ impl MouseContextMenu { ); Self { - position: MenuPosition::PinnedToScreen(position), + position, context_menu, _subscription, } @@ -119,7 +104,7 @@ fn display_ranges<'a>( pub fn deploy_context_menu( editor: &mut Editor, - position: Point, + position: Option>, point: DisplayPoint, cx: &mut ViewContext, ) { @@ -213,8 +198,18 @@ pub fn deploy_context_menu( }) }; - editor.mouse_context_menu = - MouseContextMenu::pinned_to_editor(editor, source_anchor, position, context_menu, cx); + editor.mouse_context_menu = match position { + 
Some(position) => { + MouseContextMenu::pinned_to_editor(editor, source_anchor, position, context_menu, cx) + } + None => { + let menu_position = MenuPosition::PinnedToEditor { + source: source_anchor, + offset: editor.character_size(cx), + }; + Some(MouseContextMenu::new(menu_position, context_menu, cx)) + } + }; cx.notify(); } @@ -248,7 +243,9 @@ mod tests { } "}); cx.editor(|editor, _app| assert!(editor.mouse_context_menu.is_none())); - cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx)); + cx.update_editor(|editor, cx| { + deploy_context_menu(editor, Some(Default::default()), point, cx) + }); cx.assert_editor_state(indoc! {" fn test() { diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 19ba147e16..4c6762af63 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -2,8 +2,8 @@ //! in editor given a given motion (e.g. it handles converting a "move left" command into coordinates in editor). It is exposed mostly for use by vim crate. use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint}; -use crate::{scroll::ScrollAnchor, CharKind, DisplayRow, EditorStyle, RowExt, ToOffset, ToPoint}; -use gpui::{px, Pixels, WindowTextSystem}; +use crate::{scroll::ScrollAnchor, CharKind, DisplayRow, EditorStyle, ToOffset, ToPoint}; +use gpui::{Pixels, WindowTextSystem}; use language::Point; use multi_buffer::{MultiBufferRow, MultiBufferSnapshot}; use serde::Deserialize; @@ -120,7 +120,7 @@ pub(crate) fn up_by_rows( preserve_column_at_start: bool, text_layout_details: &TextLayoutDetails, ) -> (DisplayPoint, SelectionGoal) { - let mut goal_x = match goal { + let goal_x = match goal { SelectionGoal::HorizontalPosition(x) => x.into(), SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(), SelectionGoal::HorizontalRange { end, .. 
} => end.into(), @@ -138,7 +138,6 @@ pub(crate) fn up_by_rows( return (start, goal); } else { point = DisplayPoint::new(DisplayRow(0), 0); - goal_x = px(0.); } let mut clipped_point = map.clip_point(point, Bias::Left); @@ -159,7 +158,7 @@ pub(crate) fn down_by_rows( preserve_column_at_end: bool, text_layout_details: &TextLayoutDetails, ) -> (DisplayPoint, SelectionGoal) { - let mut goal_x = match goal { + let goal_x = match goal { SelectionGoal::HorizontalPosition(x) => x.into(), SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(), SelectionGoal::HorizontalRange { end, .. } => end.into(), @@ -174,7 +173,6 @@ pub(crate) fn down_by_rows( return (start, goal); } else { point = map.max_point(); - goal_x = map.x_for_display_point(point, text_layout_details) } let mut clipped_point = map.clip_point(point, Bias::Right); @@ -384,12 +382,12 @@ pub fn end_of_paragraph( mut count: usize, ) -> DisplayPoint { let point = display_point.to_point(map); - if point.row == map.max_buffer_row().0 { + if point.row == map.buffer_snapshot.max_row().0 { return map.max_point(); } let mut found_non_blank_line = false; - for row in point.row..map.max_buffer_row().next_row().0 { + for row in point.row..=map.buffer_snapshot.max_row().0 { let blank = map.buffer_snapshot.is_line_blank(MultiBufferRow(row)); if found_non_blank_line && blank { if count <= 1 { @@ -490,6 +488,101 @@ pub fn find_boundary_point( map.clip_point(offset.to_display_point(map), Bias::Right) } +pub fn find_preceding_boundary_trail( + map: &DisplaySnapshot, + head: DisplayPoint, + mut is_boundary: impl FnMut(char, char) -> bool, +) -> (Option, DisplayPoint) { + let mut offset = head.to_offset(map, Bias::Left); + let mut trail_offset = None; + + let mut prev_ch = map.buffer_snapshot.chars_at(offset).next(); + let mut forward = map.buffer_snapshot.reversed_chars_at(offset).peekable(); + + // Skip newlines + while let Some(&ch) = forward.peek() { + if ch == '\n' { + prev_ch = forward.next(); + offset -= ch.len_utf8(); + 
trail_offset = Some(offset); + } else { + break; + } + } + + // Find the boundary + let start_offset = offset; + for ch in forward { + if let Some(prev_ch) = prev_ch { + if is_boundary(prev_ch, ch) { + if start_offset == offset { + trail_offset = Some(offset); + } else { + break; + } + } + } + offset -= ch.len_utf8(); + prev_ch = Some(ch); + } + + let trail = trail_offset + .map(|trail_offset: usize| map.clip_point(trail_offset.to_display_point(map), Bias::Left)); + + ( + trail, + map.clip_point(offset.to_display_point(map), Bias::Left), + ) +} + +/// Finds the location of a boundary +pub fn find_boundary_trail( + map: &DisplaySnapshot, + head: DisplayPoint, + mut is_boundary: impl FnMut(char, char) -> bool, +) -> (Option, DisplayPoint) { + let mut offset = head.to_offset(map, Bias::Right); + let mut trail_offset = None; + + let mut prev_ch = map.buffer_snapshot.reversed_chars_at(offset).next(); + let mut forward = map.buffer_snapshot.chars_at(offset).peekable(); + + // Skip newlines + while let Some(&ch) = forward.peek() { + if ch == '\n' { + prev_ch = forward.next(); + offset += ch.len_utf8(); + trail_offset = Some(offset); + } else { + break; + } + } + + // Find the boundary + let start_offset = offset; + for ch in forward { + if let Some(prev_ch) = prev_ch { + if is_boundary(prev_ch, ch) { + if start_offset == offset { + trail_offset = Some(offset); + } else { + break; + } + } + } + offset += ch.len_utf8(); + prev_ch = Some(ch); + } + + let trail = trail_offset + .map(|trail_offset: usize| map.clip_point(trail_offset.to_display_point(map), Bias::Right)); + + ( + trail, + map.clip_point(offset.to_display_point(map), Bias::Right), + ) +} + pub fn find_boundary( map: &DisplaySnapshot, from: DisplayPoint, @@ -610,7 +703,7 @@ mod tests { test::{editor_test_context::EditorTestContext, marked_display_snapshot}, Buffer, DisplayMap, DisplayRow, ExcerptRange, FoldPlaceholder, InlayId, MultiBuffer, }; - use gpui::{font, Context as _}; + use gpui::{font, px, Context as _}; 
use language::Capability; use project::Project; use settings::SettingsStore; @@ -748,12 +841,12 @@ mod tests { .flat_map(|offset| { [ Inlay { - id: InlayId::Suggestion(post_inc(&mut id)), + id: InlayId::InlineCompletion(post_inc(&mut id)), position: buffer_snapshot.anchor_at(offset, Bias::Left), text: "test".into(), }, Inlay { - id: InlayId::Suggestion(post_inc(&mut id)), + id: InlayId::InlineCompletion(post_inc(&mut id)), position: buffer_snapshot.anchor_at(offset, Bias::Right), text: "test".into(), }, @@ -977,7 +1070,7 @@ mod tests { ), ( DisplayPoint::new(DisplayRow(2), 0), - SelectionGoal::HorizontalPosition(0.0) + SelectionGoal::HorizontalPosition(col_2_x.0), ), ); assert_eq!( @@ -990,7 +1083,7 @@ mod tests { ), ( DisplayPoint::new(DisplayRow(2), 0), - SelectionGoal::HorizontalPosition(0.0) + SelectionGoal::HorizontalPosition(0.0), ), ); @@ -1059,7 +1152,7 @@ mod tests { let max_point_x = snapshot .x_for_display_point(DisplayPoint::new(DisplayRow(7), 2), &text_layout_details); - // Can't move down off the end + // Can't move down off the end, and attempting to do so leaves the selection goal unchanged assert_eq!( down( &snapshot, @@ -1070,7 +1163,7 @@ mod tests { ), ( DisplayPoint::new(DisplayRow(7), 2), - SelectionGoal::HorizontalPosition(max_point_x.0) + SelectionGoal::HorizontalPosition(0.0) ), ); assert_eq!( diff --git a/crates/editor/src/persistence.rs b/crates/editor/src/persistence.rs index a52fb60543..06e2ea1f9b 100644 --- a/crates/editor/src/persistence.rs +++ b/crates/editor/src/persistence.rs @@ -1,8 +1,8 @@ use anyhow::Result; use db::sqlez::bindable::{Bind, Column, StaticColumnCount}; use db::sqlez::statement::Statement; +use fs::MTime; use std::path::PathBuf; -use std::time::{Duration, SystemTime, UNIX_EPOCH}; use db::sqlez_macros::sql; use db::{define_connection, query}; @@ -14,7 +14,7 @@ pub(crate) struct SerializedEditor { pub(crate) abs_path: Option, pub(crate) contents: Option, pub(crate) language: Option, - pub(crate) mtime: Option, + 
pub(crate) mtime: Option, } impl StaticColumnCount for SerializedEditor { @@ -29,16 +29,13 @@ impl Bind for SerializedEditor { let start_index = statement.bind(&self.contents, start_index)?; let start_index = statement.bind(&self.language, start_index)?; - let mtime = self.mtime.and_then(|mtime| { - mtime - .duration_since(UNIX_EPOCH) - .ok() - .map(|duration| (duration.as_secs() as i64, duration.subsec_nanos() as i32)) - }); - let start_index = match mtime { + let start_index = match self + .mtime + .and_then(|mtime| mtime.to_seconds_and_nanos_for_persistence()) + { Some((seconds, nanos)) => { - let start_index = statement.bind(&seconds, start_index)?; - statement.bind(&nanos, start_index)? + let start_index = statement.bind(&(seconds as i64), start_index)?; + statement.bind(&(nanos as i32), start_index)? } None => { let start_index = statement.bind::>(&None, start_index)?; @@ -64,7 +61,7 @@ impl Column for SerializedEditor { let mtime = mtime_seconds .zip(mtime_nanos) - .map(|(seconds, nanos)| UNIX_EPOCH + Duration::new(seconds as u64, nanos as u32)); + .map(|(seconds, nanos)| MTime::from_seconds_and_nanos(seconds as u64, nanos as u32)); let editor = Self { abs_path, @@ -280,12 +277,11 @@ mod tests { assert_eq!(have, serialized_editor); // Storing and retrieving mtime - let now = SystemTime::now(); let serialized_editor = SerializedEditor { abs_path: None, contents: None, language: None, - mtime: Some(now), + mtime: Some(MTime::from_seconds_and_nanos(100, 42)), }; DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone()) diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index ac97fe18da..923dcc24b9 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -4,7 +4,7 @@ use futures::{channel::mpsc, future::join_all}; use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View}; use language::{Buffer, 
BufferEvent, Capability}; use multi_buffer::{ExcerptRange, MultiBuffer}; -use project::Project; +use project::{buffer_store::BufferChangeSet, Project}; use smol::stream::StreamExt; use std::{any::TypeId, ops::Range, rc::Rc, time::Duration}; use text::ToOffset; @@ -75,7 +75,7 @@ impl ProposedChangesEditor { title: title.into(), buffer_entries: Vec::new(), recalculate_diffs_tx, - _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { + _recalculate_diffs_task: cx.spawn(|this, mut cx| async move { let mut buffers_to_diff = HashSet::default(); while let Some(mut recalculate_diff) = recalculate_diffs_rx.next().await { buffers_to_diff.insert(recalculate_diff.buffer); @@ -96,12 +96,37 @@ impl ProposedChangesEditor { } } - join_all(buffers_to_diff.drain().filter_map(|buffer| { - buffer - .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) - .ok()? - })) - .await; + let recalculate_diff_futures = this + .update(&mut cx, |this, cx| { + buffers_to_diff + .drain() + .filter_map(|buffer| { + let buffer = buffer.read(cx); + let base_buffer = buffer.base_buffer()?; + let buffer = buffer.text_snapshot(); + let change_set = this.editor.update(cx, |editor, _| { + Some( + editor + .diff_map + .diff_bases + .get(&buffer.remote_id())? 
+ .change_set + .clone(), + ) + })?; + Some(change_set.update(cx, |change_set, cx| { + change_set.set_base_text( + base_buffer.read(cx).text(), + buffer, + cx, + ) + })) + }) + .collect::>() + }) + .ok()?; + + join_all(recalculate_diff_futures).await; } None }), @@ -154,6 +179,7 @@ impl ProposedChangesEditor { }); let mut buffer_entries = Vec::new(); + let mut new_change_sets = Vec::new(); for location in locations { let branch_buffer; if let Some(ix) = self @@ -166,6 +192,15 @@ impl ProposedChangesEditor { buffer_entries.push(entry); } else { branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx)); + new_change_sets.push(cx.new_model(|cx| { + let mut change_set = BufferChangeSet::new(branch_buffer.read(cx)); + let _ = change_set.set_base_text( + location.buffer.read(cx).text(), + branch_buffer.read(cx).text_snapshot(), + cx, + ); + change_set + })); buffer_entries.push(BufferEntry { branch: branch_buffer.clone(), base: location.buffer.clone(), @@ -187,7 +222,10 @@ impl ProposedChangesEditor { self.buffer_entries = buffer_entries; self.editor.update(cx, |editor, cx| { - editor.change_selections(None, cx, |selections| selections.refresh()) + editor.change_selections(None, cx, |selections| selections.refresh()); + for change_set in new_change_sets { + editor.diff_map.add_change_set(change_set, cx) + } }); } @@ -217,14 +255,14 @@ impl ProposedChangesEditor { }) .ok(); } - BufferEvent::DiffBaseChanged => { - self.recalculate_diffs_tx - .unbounded_send(RecalculateDiff { - buffer, - debounce: false, - }) - .ok(); - } + // BufferEvent::DiffBaseChanged => { + // self.recalculate_diffs_tx + // .unbounded_send(RecalculateDiff { + // buffer, + // debounce: false, + // }) + // .ok(); + // } _ => (), } } @@ -250,7 +288,7 @@ impl EventEmitter for ProposedChangesEditor {} impl Item for ProposedChangesEditor { type Event = EditorEvent; - fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + fn tab_icon(&self, _cx: &WindowContext) -> Option { 
Some(Icon::new(IconName::Diff)) } @@ -373,7 +411,7 @@ impl BranchBufferSemanticsProvider { positions: &[text::Anchor], cx: &AppContext, ) -> Option> { - let base_buffer = buffer.read(cx).diff_base_buffer()?; + let base_buffer = buffer.read(cx).base_buffer()?; let version = base_buffer.read(cx).version(); if positions .iter() diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index ba14f91ed2..8885f6062e 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -33,7 +33,7 @@ pub fn apply_related_actions(editor: &View, cx: &mut WindowContext) { pub fn expand_macro_recursively( editor: &mut Editor, _: &ExpandMacroRecursively, - cx: &mut ViewContext<'_, Editor>, + cx: &mut ViewContext, ) { if editor.selections.count() == 0 { return; @@ -98,7 +98,7 @@ pub fn expand_macro_recursively( .detach_and_log_err(cx); } -pub fn open_docs(editor: &mut Editor, _: &OpenDocs, cx: &mut ViewContext<'_, Editor>) { +pub fn open_docs(editor: &mut Editor, _: &OpenDocs, cx: &mut ViewContext) { if editor.selections.count() == 0 { return; } diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index 97e0db5b37..1fae8099d4 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -2,7 +2,7 @@ mod actions; pub(crate) mod autoscroll; pub(crate) mod scroll_amount; -use crate::editor_settings::ScrollBeyondLastLine; +use crate::editor_settings::{ScrollBeyondLastLine, ScrollbarAxes}; use crate::{ display_map::{DisplaySnapshot, ToDisplayPoint}, hover_popover::hide_hover, @@ -11,7 +11,10 @@ use crate::{ InlayHintRefreshReason, MultiBufferSnapshot, RowExt, ToPoint, }; pub use autoscroll::{Autoscroll, AutoscrollStrategy}; -use gpui::{point, px, AppContext, Entity, Global, Pixels, Task, ViewContext, WindowContext}; +use core::fmt::Debug; +use gpui::{ + point, px, Along, AppContext, Axis, Entity, Global, Pixels, Task, ViewContext, WindowContext, +}; use language::{Bias, Point}; pub use 
scroll_amount::ScrollAmount; use settings::Settings; @@ -60,10 +63,53 @@ impl ScrollAnchor { } } -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum Axis { - Vertical, - Horizontal, +#[derive(Debug, Clone)] +pub struct AxisPair { + pub vertical: T, + pub horizontal: T, +} + +pub fn axis_pair(horizontal: T, vertical: T) -> AxisPair { + AxisPair { + vertical, + horizontal, + } +} + +impl AxisPair { + pub fn as_xy(&self) -> (&T, &T) { + (&self.horizontal, &self.vertical) + } +} + +impl Along for AxisPair { + type Unit = T; + + fn along(&self, axis: gpui::Axis) -> Self::Unit { + match axis { + gpui::Axis::Horizontal => self.horizontal.clone(), + gpui::Axis::Vertical => self.vertical.clone(), + } + } + + fn apply_along(&self, axis: gpui::Axis, f: impl FnOnce(Self::Unit) -> Self::Unit) -> Self { + match axis { + gpui::Axis::Horizontal => Self { + horizontal: f(self.horizontal.clone()), + vertical: self.vertical.clone(), + }, + gpui::Axis::Vertical => Self { + horizontal: self.horizontal.clone(), + vertical: f(self.vertical.clone()), + }, + } + } +} + +impl From for AxisPair { + fn from(value: ScrollbarAxes) -> Self { + axis_pair(value.horizontal, value.vertical) + } } #[derive(Clone, Copy, Debug)] @@ -136,7 +182,7 @@ pub struct ScrollManager { last_autoscroll: Option<(gpui::Point, f32, f32, AutoscrollStrategy)>, show_scrollbars: bool, hide_scrollbar_task: Option>, - dragging_scrollbar: bool, + dragging_scrollbar: AxisPair, visible_line_count: Option, forbid_vertical_scroll: bool, } @@ -150,7 +196,7 @@ impl ScrollManager { autoscroll_request: None, show_scrollbars: true, hide_scrollbar_task: None, - dragging_scrollbar: false, + dragging_scrollbar: axis_pair(false, false), last_autoscroll: None, visible_line_count: None, forbid_vertical_scroll: false, @@ -311,15 +357,18 @@ impl ScrollManager { self.autoscroll_request.map(|(autoscroll, _)| autoscroll) } - pub fn is_dragging_scrollbar(&self) -> bool { - self.dragging_scrollbar + pub fn is_dragging_scrollbar(&self, axis: 
Axis) -> bool { + self.dragging_scrollbar.along(axis) } - pub fn set_is_dragging_scrollbar(&mut self, dragging: bool, cx: &mut ViewContext) { - if dragging != self.dragging_scrollbar { - self.dragging_scrollbar = dragging; - cx.notify(); - } + pub fn set_is_dragging_scrollbar( + &mut self, + axis: Axis, + dragging: bool, + cx: &mut ViewContext, + ) { + self.dragging_scrollbar = self.dragging_scrollbar.apply_along(axis, |_| dragging); + cx.notify(); } pub fn clamp_scroll_left(&mut self, max: f32) -> bool { diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index eb49ac6219..df6f4c67cd 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -4,7 +4,7 @@ use crate::{ ScrollAnchor, ScrollCursorBottom, ScrollCursorCenter, ScrollCursorCenterTopBottom, ScrollCursorTop, SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT, }; -use gpui::{Point, ViewContext}; +use gpui::{AsyncWindowContext, Point, ViewContext}; impl Editor { pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext) { @@ -75,7 +75,7 @@ impl Editor { self.next_scroll_position = self.next_scroll_position.next(); self._scroll_cursor_center_top_bottom_task = - cx.spawn(|editor, mut cx: gpui::AsyncWindowContext| async move { + cx.spawn(|editor, mut cx: AsyncWindowContext| async move { cx.background_executor() .timer(SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT) .await; diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 1edfc6a4fb..b79f9d44ff 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -391,7 +391,7 @@ impl SelectionsCollection { } } - pub(crate) fn change_with( + pub fn change_with( &mut self, cx: &mut AppContext, change: impl FnOnce(&mut MutableSelectionsCollection) -> R, @@ -764,7 +764,7 @@ impl<'a> MutableSelectionsCollection<'a> { pub fn replace_cursors_with( &mut self, - mut find_replacement_cursors: impl 
FnMut(&DisplaySnapshot) -> Vec, + find_replacement_cursors: impl FnOnce(&DisplaySnapshot) -> Vec, ) { let display_map = self.display_map(); let new_selections = find_replacement_cursors(&display_map) diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs index 51945a1780..4e2cccea02 100644 --- a/crates/editor/src/tasks.rs +++ b/crates/editor/src/tasks.rs @@ -8,7 +8,7 @@ use workspace::Workspace; fn task_context_with_editor( editor: &mut Editor, - cx: &mut WindowContext<'_>, + cx: &mut WindowContext, ) -> AsyncTask> { let Some(project) = editor.project.clone() else { return AsyncTask::ready(None); @@ -74,7 +74,7 @@ fn task_context_with_editor( }) } -pub fn task_context(workspace: &Workspace, cx: &mut WindowContext<'_>) -> AsyncTask { +pub fn task_context(workspace: &Workspace, cx: &mut WindowContext) -> AsyncTask { let Some(editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index 0384ed065b..3831ca963f 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -31,6 +31,47 @@ pub struct EditorLspTestContext { pub buffer_lsp_url: lsp::Url, } +pub(crate) fn rust_lang() -> Arc { + let language = Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()], + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_queries(LanguageQueries { + indents: Some(Cow::from(indoc! 
{r#" + [ + ((where_clause) _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) + (let_chain) + (await_expression) + ] @indent + + (_ "[" "]" @end) @indent + (_ "<" ">" @end) @indent + (_ "{" "}" @end) @indent + (_ "(" ")" @end) @indent"#})), + brackets: Some(Cow::from(indoc! {r#" + ("(" @open ")" @close) + ("[" @open "]" @close) + ("{" @open "}" @close) + ("<" @open ">" @close) + ("\"" @open "\"" @close) + (closure_parameters "|" @open "|" @close)"#})), + ..Default::default() + }) + .expect("Could not parse queries"); + Arc::new(language) +} impl EditorLspTestContext { pub async fn new( language: Language, @@ -72,7 +113,15 @@ impl EditorLspTestContext { app_state .fs .as_fake() - .insert_tree(root, json!({ "dir": { file_name.clone(): "" }})) + .insert_tree( + root, + json!({ + ".git": {}, + "dir": { + file_name.clone(): "" + } + }), + ) .await; let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); @@ -119,46 +168,7 @@ impl EditorLspTestContext { capabilities: lsp::ServerCapabilities, cx: &mut gpui::TestAppContext, ) -> EditorLspTestContext { - let language = Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()], - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_queries(LanguageQueries { - indents: Some(Cow::from(indoc! {r#" - [ - ((where_clause) _ @end) - (field_expression) - (call_expression) - (assignment_expression) - (let_declaration) - (let_chain) - (await_expression) - ] @indent - - (_ "[" "]" @end) @indent - (_ "<" ">" @end) @indent - (_ "{" "}" @end) @indent - (_ "(" ")" @end) @indent"#})), - brackets: Some(Cow::from(indoc! 
{r#" - ("(" @open ")" @close) - ("[" @open "]" @close) - ("{" @open "}" @close) - ("<" @open ">" @close) - ("\"" @open "\"" @close) - (closure_parameters "|" @open "|" @close)"#})), - ..Default::default() - }) - .expect("Could not parse queries"); - - Self::new(language, capabilities, cx).await + Self::new(Arc::into_inner(rust_lang()).unwrap(), capabilities, cx).await } pub async fn new_typescript( @@ -247,7 +257,8 @@ impl EditorLspTestContext { Self::new(language, Default::default(), cx).await } - // Constructs lsp range using a marked string with '[', ']' range delimiters + /// Constructs lsp range using a marked string with '[', ']' range delimiters + #[track_caller] pub fn lsp_range(&mut self, marked_text: &str) -> lsp::Range { let ranges = self.ranges(marked_text); self.to_lsp_range(ranges[0].clone()) diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index de5065d265..1cbd238e7d 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -6,8 +6,8 @@ use collections::BTreeMap; use futures::Future; use git::diff::DiffHunkStatus; use gpui::{ - AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext, - VisualTestContext, WindowHandle, + prelude::*, AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, + ViewContext, VisualTestContext, WindowHandle, }; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; @@ -23,8 +23,6 @@ use std::{ Arc, }, }; - -use ui::Context; use util::{ assert_set_eq, test::{generate_marked_text, marked_text_ranges}, @@ -42,16 +40,16 @@ pub struct EditorTestContext { impl EditorTestContext { pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext { let fs = FakeFs::new(cx.executor()); - // fs.insert_file("/file", "".to_owned()).await; let root = Self::root_path(); fs.insert_tree( root, serde_json::json!({ + ".git": {}, "file": "", }), ) 
.await; - let project = Project::test(fs, [root], cx).await; + let project = Project::test(fs.clone(), [root], cx).await; let buffer = project .update(cx, |project, cx| { project.open_local_buffer(root.join("file"), cx) @@ -65,6 +63,8 @@ impl EditorTestContext { editor }); let editor_view = editor.root_view(cx).unwrap(); + + cx.run_until_parked(); Self { cx: VisualTestContext::from_window(*editor.deref(), cx), window: editor.into(), @@ -230,6 +230,7 @@ impl EditorTestContext { self.cx.background_executor.run_until_parked(); } + #[track_caller] pub fn ranges(&mut self, marked_text: &str) -> Vec> { let (unmarked_text, ranges) = marked_text_ranges(marked_text, false); assert_eq!(self.buffer_text(), unmarked_text); @@ -276,8 +277,16 @@ impl EditorTestContext { snapshot.anchor_before(ranges[0].start)..snapshot.anchor_after(ranges[0].end) } - pub fn set_diff_base(&mut self, diff_base: Option<&str>) { - self.update_buffer(|buffer, cx| buffer.set_diff_base(diff_base.map(ToOwned::to_owned), cx)); + pub fn set_diff_base(&mut self, diff_base: &str) { + self.cx.run_until_parked(); + let fs = self + .update_editor(|editor, cx| editor.project.as_ref().unwrap().read(cx).fs().as_fake()); + let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); + fs.set_index_for_repo( + &Self::root_path().join(".git"), + &[(path.as_ref(), diff_base.to_string())], + ); + self.cx.run_until_parked(); } /// Change the editor's text and selections using a string containing @@ -319,10 +328,12 @@ impl EditorTestContext { state_context } + /// Assert about the text of the editor, the selections, and the expanded + /// diff hunks. + /// + /// Diff hunks are indicated by lines starting with `+` and `-`. #[track_caller] - pub fn assert_diff_hunks(&mut self, expected_diff: String) { - // Normalize the expected diff. If it has no diff markers, then insert blank markers - // before each line. Strip any whitespace-only lines. 
+ pub fn assert_state_with_diff(&mut self, expected_diff: String) { let has_diff_markers = expected_diff .lines() .any(|line| line.starts_with("+") || line.starts_with("-")); @@ -340,11 +351,14 @@ impl EditorTestContext { }) .join("\n"); + let actual_selections = self.editor_selections(); + let actual_marked_text = + generate_marked_text(&self.buffer_text(), &actual_selections, true); + // Read the actual diff from the editor's row highlights and block // decorations. let actual_diff = self.editor.update(&mut self.cx, |editor, cx| { let snapshot = editor.snapshot(cx); - let text = editor.text(cx); let insertions = editor .highlighted_rows::() .map(|(range, _)| { @@ -354,7 +368,7 @@ impl EditorTestContext { }) .collect::>(); let deletions = editor - .expanded_hunks + .diff_map .hunks .iter() .filter_map(|hunk| { @@ -371,10 +385,20 @@ impl EditorTestContext { .read(cx) .excerpt_containing(hunk.hunk_range.start, cx) .expect("no excerpt for expanded buffer's hunk start"); - let deleted_text = buffer - .read(cx) - .diff_base() + let buffer_id = buffer.read(cx).remote_id(); + let change_set = &editor + .diff_map + .diff_bases + .get(&buffer_id) .expect("should have a diff base for expanded hunk") + .change_set; + let deleted_text = change_set + .read(cx) + .base_text + .as_ref() + .expect("no base text for expanded hunk") + .read(cx) + .as_rope() .slice(hunk.diff_base_byte_range.clone()) .to_string(); if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status { @@ -384,7 +408,7 @@ impl EditorTestContext { } }) .collect::>(); - format_diff(text, deletions, insertions) + format_diff(actual_marked_text, deletions, insertions) }); pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state"); diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index 3057edcd1a..744094aeaf 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -30,9 +30,10 @@ languages.workspace = true node_runtime.workspace = true 
open_ai.workspace = true project.workspace = true +reqwest_client.workspace = true semantic_index.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true smol.workspace = true -reqwest_client.workspace = true +util.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 2db13ff392..67b73a835b 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -27,7 +27,7 @@ use std::time::Duration; use std::{ fs, path::Path, - process::{exit, Command, Stdio}, + process::{exit, Stdio}, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, Arc, @@ -667,7 +667,7 @@ async fn fetch_eval_repo( return; } if !repo_dir.join(".git").exists() { - let init_output = Command::new("git") + let init_output = util::command::new_std_command("git") .current_dir(&repo_dir) .args(&["init"]) .output() @@ -682,13 +682,13 @@ async fn fetch_eval_repo( } } let url = format!("https://github.com/{}.git", repo); - Command::new("git") + util::command::new_std_command("git") .current_dir(&repo_dir) .args(&["remote", "add", "-f", "origin", &url]) .stdin(Stdio::null()) .output() .unwrap(); - let fetch_output = Command::new("git") + let fetch_output = util::command::new_std_command("git") .current_dir(&repo_dir) .args(&["fetch", "--depth", "1", "origin", &sha]) .stdin(Stdio::null()) @@ -703,7 +703,7 @@ async fn fetch_eval_repo( ); return; } - let checkout_output = Command::new("git") + let checkout_output = util::command::new_std_command("git") .current_dir(&repo_dir) .args(&["checkout", &sha]) .output() diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index b4d23fd709..b92771d09d 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -24,10 +24,12 @@ http_client.workspace = true language.workspace = true log.workspace = true lsp.workspace = true +parking_lot.workspace = true semantic_version.workspace = true serde.workspace = true serde_json.workspace = true toml.workspace = true 
+util.workspace = true wasm-encoder.workspace = true wasmparser.workspace = true wit-component.workspace = true diff --git a/crates/extension/src/extension.rs b/crates/extension/src/extension.rs index a3c275c537..2eb067ca40 100644 --- a/crates/extension/src/extension.rs +++ b/crates/extension/src/extension.rs @@ -1,4 +1,5 @@ pub mod extension_builder; +mod extension_host_proxy; mod extension_manifest; mod types; @@ -9,13 +10,19 @@ use ::lsp::LanguageServerName; use anyhow::{anyhow, bail, Context as _, Result}; use async_trait::async_trait; use fs::normalize_path; -use gpui::Task; +use gpui::{AppContext, Task}; use language::LanguageName; use semantic_version::SemanticVersion; +pub use crate::extension_host_proxy::*; pub use crate::extension_manifest::*; pub use crate::types::*; +/// Initializes the `extension` crate. +pub fn init(cx: &mut AppContext) { + ExtensionHostProxy::default_global(cx); +} + #[async_trait] pub trait WorktreeDelegate: Send + Sync + 'static { fn id(&self) -> u64; @@ -25,6 +32,10 @@ pub trait WorktreeDelegate: Send + Sync + 'static { async fn shell_env(&self) -> Vec<(String, String)>; } +pub trait ProjectDelegate: Send + Sync + 'static { + fn worktree_ids(&self) -> Vec; +} + pub trait KeyValueStoreDelegate: Send + Sync + 'static { fn insert(&self, key: String, docs: String) -> Task>; } @@ -87,6 +98,12 @@ pub trait Extension: Send + Sync + 'static { worktree: Option>, ) -> Result; + async fn context_server_command( + &self, + context_server_id: Arc, + project: Arc, + ) -> Result; + async fn suggest_docs_packages(&self, provider: Arc) -> Result>; async fn index_docs( diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 24daada6c8..8e944901f4 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -11,7 +11,7 @@ use serde::Deserialize; use std::{ env, fs, mem, path::{Path, PathBuf}, - process::{Command, Stdio}, + process::Stdio, sync::Arc, }; use 
wasm_encoder::{ComponentSectionId, Encode as _, RawSection, Section as _}; @@ -119,12 +119,9 @@ impl ExtensionBuilder { "compiling Rust crate for extension {}", extension_dir.display() ); - let output = Command::new("cargo") - .args([ - "build", - "--target", - RUST_TARGET, - ]) + + let output = util::command::new_std_command("cargo") + .args(["build", "--target", RUST_TARGET]) .args(options.release.then_some("--release")) .arg("--target-dir") .arg(extension_dir.join("target")) @@ -222,7 +219,7 @@ impl ExtensionBuilder { let scanner_path = src_path.join("scanner.c"); log::info!("compiling {grammar_name} parser"); - let clang_output = Command::new(&clang_path) + let clang_output = util::command::new_std_command(&clang_path) .args(["-fPIC", "-shared", "-Os"]) .arg(format!("-Wl,--export=tree_sitter_{grammar_name}")) .arg("-o") @@ -249,7 +246,7 @@ impl ExtensionBuilder { let git_dir = directory.join(".git"); if directory.exists() { - let remotes_output = Command::new("git") + let remotes_output = util::command::new_std_command("git") .arg("--git-dir") .arg(&git_dir) .args(["remote", "-v"]) @@ -272,7 +269,7 @@ impl ExtensionBuilder { fs::create_dir_all(directory).with_context(|| { format!("failed to create grammar directory {}", directory.display(),) })?; - let init_output = Command::new("git") + let init_output = util::command::new_std_command("git") .arg("init") .current_dir(directory) .output()?; @@ -283,7 +280,7 @@ impl ExtensionBuilder { ); } - let remote_add_output = Command::new("git") + let remote_add_output = util::command::new_std_command("git") .arg("--git-dir") .arg(&git_dir) .args(["remote", "add", "origin", url]) @@ -297,14 +294,14 @@ impl ExtensionBuilder { } } - let fetch_output = Command::new("git") + let fetch_output = util::command::new_std_command("git") .arg("--git-dir") .arg(&git_dir) .args(["fetch", "--depth", "1", "origin", rev]) .output() .context("failed to execute `git fetch`")?; - let checkout_output = Command::new("git") + let 
checkout_output = util::command::new_std_command("git") .arg("--git-dir") .arg(&git_dir) .args(["checkout", rev]) @@ -331,7 +328,7 @@ impl ExtensionBuilder { } fn install_rust_wasm_target_if_needed(&self) -> Result<()> { - let rustc_output = Command::new("rustc") + let rustc_output = util::command::new_std_command("rustc") .arg("--print") .arg("sysroot") .output() @@ -348,7 +345,7 @@ impl ExtensionBuilder { return Ok(()); } - let output = Command::new("rustup") + let output = util::command::new_std_command("rustup") .args(["target", "add", RUST_TARGET]) .stderr(Stdio::piped()) .stdout(Stdio::inherit()) diff --git a/crates/extension/src/extension_host_proxy.rs b/crates/extension/src/extension_host_proxy.rs new file mode 100644 index 0000000000..3fa35597a8 --- /dev/null +++ b/crates/extension/src/extension_host_proxy.rs @@ -0,0 +1,326 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::Result; +use fs::Fs; +use gpui::{AppContext, Global, ReadGlobal, SharedString, Task}; +use language::{LanguageMatcher, LanguageName, LanguageServerBinaryStatus, LoadedLanguage}; +use lsp::LanguageServerName; +use parking_lot::RwLock; + +use crate::{Extension, SlashCommand}; + +#[derive(Default)] +struct GlobalExtensionHostProxy(Arc); + +impl Global for GlobalExtensionHostProxy {} + +/// A proxy for interacting with the extension host. +/// +/// This object implements each of the individual proxy types so that their +/// methods can be called directly on it. +#[derive(Default)] +pub struct ExtensionHostProxy { + theme_proxy: RwLock>>, + grammar_proxy: RwLock>>, + language_proxy: RwLock>>, + language_server_proxy: RwLock>>, + snippet_proxy: RwLock>>, + slash_command_proxy: RwLock>>, + context_server_proxy: RwLock>>, + indexed_docs_provider_proxy: RwLock>>, +} + +impl ExtensionHostProxy { + /// Returns the global [`ExtensionHostProxy`]. 
+ pub fn global(cx: &AppContext) -> Arc { + GlobalExtensionHostProxy::global(cx).0.clone() + } + + /// Returns the global [`ExtensionHostProxy`]. + /// + /// Inserts a default [`ExtensionHostProxy`] if one does not yet exist. + pub fn default_global(cx: &mut AppContext) -> Arc { + cx.default_global::().0.clone() + } + + pub fn new() -> Self { + Self { + theme_proxy: RwLock::default(), + grammar_proxy: RwLock::default(), + language_proxy: RwLock::default(), + language_server_proxy: RwLock::default(), + snippet_proxy: RwLock::default(), + slash_command_proxy: RwLock::default(), + context_server_proxy: RwLock::default(), + indexed_docs_provider_proxy: RwLock::default(), + } + } + + pub fn register_theme_proxy(&self, proxy: impl ExtensionThemeProxy) { + self.theme_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_grammar_proxy(&self, proxy: impl ExtensionGrammarProxy) { + self.grammar_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_language_proxy(&self, proxy: impl ExtensionLanguageProxy) { + self.language_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_language_server_proxy(&self, proxy: impl ExtensionLanguageServerProxy) { + self.language_server_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_snippet_proxy(&self, proxy: impl ExtensionSnippetProxy) { + self.snippet_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_slash_command_proxy(&self, proxy: impl ExtensionSlashCommandProxy) { + self.slash_command_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_context_server_proxy(&self, proxy: impl ExtensionContextServerProxy) { + self.context_server_proxy.write().replace(Arc::new(proxy)); + } + + pub fn register_indexed_docs_provider_proxy( + &self, + proxy: impl ExtensionIndexedDocsProviderProxy, + ) { + self.indexed_docs_provider_proxy + .write() + .replace(Arc::new(proxy)); + } +} + +pub trait ExtensionThemeProxy: Send + Sync + 'static { + fn list_theme_names(&self, theme_path: 
PathBuf, fs: Arc) -> Task>>; + + fn remove_user_themes(&self, themes: Vec); + + fn load_user_theme(&self, theme_path: PathBuf, fs: Arc) -> Task>; + + fn reload_current_theme(&self, cx: &mut AppContext); +} + +impl ExtensionThemeProxy for ExtensionHostProxy { + fn list_theme_names(&self, theme_path: PathBuf, fs: Arc) -> Task>> { + let Some(proxy) = self.theme_proxy.read().clone() else { + return Task::ready(Ok(Vec::new())); + }; + + proxy.list_theme_names(theme_path, fs) + } + + fn remove_user_themes(&self, themes: Vec) { + let Some(proxy) = self.theme_proxy.read().clone() else { + return; + }; + + proxy.remove_user_themes(themes) + } + + fn load_user_theme(&self, theme_path: PathBuf, fs: Arc) -> Task> { + let Some(proxy) = self.theme_proxy.read().clone() else { + return Task::ready(Ok(())); + }; + + proxy.load_user_theme(theme_path, fs) + } + + fn reload_current_theme(&self, cx: &mut AppContext) { + let Some(proxy) = self.theme_proxy.read().clone() else { + return; + }; + + proxy.reload_current_theme(cx) + } +} + +pub trait ExtensionGrammarProxy: Send + Sync + 'static { + fn register_grammars(&self, grammars: Vec<(Arc, PathBuf)>); +} + +impl ExtensionGrammarProxy for ExtensionHostProxy { + fn register_grammars(&self, grammars: Vec<(Arc, PathBuf)>) { + let Some(proxy) = self.grammar_proxy.read().clone() else { + return; + }; + + proxy.register_grammars(grammars) + } +} + +pub trait ExtensionLanguageProxy: Send + Sync + 'static { + fn register_language( + &self, + language: LanguageName, + grammar: Option>, + matcher: LanguageMatcher, + hidden: bool, + load: Arc Result + Send + Sync + 'static>, + ); + + fn remove_languages( + &self, + languages_to_remove: &[LanguageName], + grammars_to_remove: &[Arc], + ); +} + +impl ExtensionLanguageProxy for ExtensionHostProxy { + fn register_language( + &self, + language: LanguageName, + grammar: Option>, + matcher: LanguageMatcher, + hidden: bool, + load: Arc Result + Send + Sync + 'static>, + ) { + let Some(proxy) = 
self.language_proxy.read().clone() else { + return; + }; + + proxy.register_language(language, grammar, matcher, hidden, load) + } + + fn remove_languages( + &self, + languages_to_remove: &[LanguageName], + grammars_to_remove: &[Arc], + ) { + let Some(proxy) = self.language_proxy.read().clone() else { + return; + }; + + proxy.remove_languages(languages_to_remove, grammars_to_remove) + } +} + +pub trait ExtensionLanguageServerProxy: Send + Sync + 'static { + fn register_language_server( + &self, + extension: Arc, + language_server_id: LanguageServerName, + language: LanguageName, + ); + + fn remove_language_server( + &self, + language: &LanguageName, + language_server_id: &LanguageServerName, + ); + + fn update_language_server_status( + &self, + language_server_id: LanguageServerName, + status: LanguageServerBinaryStatus, + ); +} + +impl ExtensionLanguageServerProxy for ExtensionHostProxy { + fn register_language_server( + &self, + extension: Arc, + language_server_id: LanguageServerName, + language: LanguageName, + ) { + let Some(proxy) = self.language_server_proxy.read().clone() else { + return; + }; + + proxy.register_language_server(extension, language_server_id, language) + } + + fn remove_language_server( + &self, + language: &LanguageName, + language_server_id: &LanguageServerName, + ) { + let Some(proxy) = self.language_server_proxy.read().clone() else { + return; + }; + + proxy.remove_language_server(language, language_server_id) + } + + fn update_language_server_status( + &self, + language_server_id: LanguageServerName, + status: LanguageServerBinaryStatus, + ) { + let Some(proxy) = self.language_server_proxy.read().clone() else { + return; + }; + + proxy.update_language_server_status(language_server_id, status) + } +} + +pub trait ExtensionSnippetProxy: Send + Sync + 'static { + fn register_snippet(&self, path: &PathBuf, snippet_contents: &str) -> Result<()>; +} + +impl ExtensionSnippetProxy for ExtensionHostProxy { + fn register_snippet(&self, path: 
&PathBuf, snippet_contents: &str) -> Result<()> { + let Some(proxy) = self.snippet_proxy.read().clone() else { + return Ok(()); + }; + + proxy.register_snippet(path, snippet_contents) + } +} + +pub trait ExtensionSlashCommandProxy: Send + Sync + 'static { + fn register_slash_command(&self, extension: Arc, command: SlashCommand); +} + +impl ExtensionSlashCommandProxy for ExtensionHostProxy { + fn register_slash_command(&self, extension: Arc, command: SlashCommand) { + let Some(proxy) = self.slash_command_proxy.read().clone() else { + return; + }; + + proxy.register_slash_command(extension, command) + } +} + +pub trait ExtensionContextServerProxy: Send + Sync + 'static { + fn register_context_server( + &self, + extension: Arc, + server_id: Arc, + cx: &mut AppContext, + ); +} + +impl ExtensionContextServerProxy for ExtensionHostProxy { + fn register_context_server( + &self, + extension: Arc, + server_id: Arc, + cx: &mut AppContext, + ) { + let Some(proxy) = self.context_server_proxy.read().clone() else { + return; + }; + + proxy.register_context_server(extension, server_id, cx) + } +} + +pub trait ExtensionIndexedDocsProviderProxy: Send + Sync + 'static { + fn register_indexed_docs_provider(&self, extension: Arc, provider_id: Arc); +} + +impl ExtensionIndexedDocsProviderProxy for ExtensionHostProxy { + fn register_indexed_docs_provider(&self, extension: Arc, provider_id: Arc) { + let Some(proxy) = self.indexed_docs_provider_proxy.read().clone() else { + return; + }; + + proxy.register_indexed_docs_provider(extension, provider_id) + } +} diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index 53fe935c1b..367d312d99 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -175,7 +175,7 @@ impl ExtensionManifest { .await .with_context(|| format!("failed to load {extension_name} extension.toml"))?; toml::from_str(&manifest_content) - .with_context(|| format!("invalid 
extension.json for extension {extension_name}")) + .with_context(|| format!("invalid extension.toml for extension {extension_name}")) } } } diff --git a/crates/extension/src/types.rs b/crates/extension/src/types.rs index f4c37b5daf..f04d31300f 100644 --- a/crates/extension/src/types.rs +++ b/crates/extension/src/types.rs @@ -10,6 +10,7 @@ pub use slash_command::*; pub type EnvVars = Vec<(String, String)>; /// A command. +#[derive(Debug)] pub struct Command { /// The command to execute. pub command: String, diff --git a/crates/extension_api/wit/since_v0.2.0/github.wit b/crates/extension_api/wit/since_v0.2.0/github.wit index bb138f5d31..21cd5d4805 100644 --- a/crates/extension_api/wit/since_v0.2.0/github.wit +++ b/crates/extension_api/wit/since_v0.2.0/github.wit @@ -24,6 +24,8 @@ interface github { } /// Returns the latest release for the given GitHub repository. + /// + /// Takes repo as a string in the form "<owner-name>/<repo-name>", for example: "zed-industries/zed". latest-github-release: func(repo: string, options: github-release-options) -> result; /// Returns the GitHub release with the specified tag name for the given GitHub repository. 
diff --git a/crates/extension_host/Cargo.toml b/crates/extension_host/Cargo.toml index 856466e1a1..b196733596 100644 --- a/crates/extension_host/Cargo.toml +++ b/crates/extension_host/Cargo.toml @@ -22,7 +22,7 @@ async-tar.workspace = true async-trait.workspace = true client.workspace = true collections.workspace = true -context_servers.workspace = true +context_server_settings.workspace = true extension.workspace = true fs.workspace = true futures.workspace = true @@ -34,6 +34,7 @@ lsp.workspace = true node_runtime.workspace = true paths.workspace = true project.workspace = true +remote.workspace = true release_channel.workspace = true schemars.workspace = true semantic_version.workspace = true @@ -42,6 +43,8 @@ serde_json.workspace = true serde_json_lenient.workspace = true settings.workspace = true task.workspace = true +telemetry.workspace = true +tempfile.workspace = true toml.workspace = true url.workspace = true util.workspace = true @@ -55,7 +58,9 @@ env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } +language_extension.workspace = true parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } reqwest_client.workspace = true theme = { workspace = true, features = ["test-support"] } +theme_extension.workspace = true diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 1adea4e0fb..afe78e49a0 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -1,19 +1,22 @@ -pub mod extension_lsp_adapter; pub mod extension_settings; +pub mod headless_host; pub mod wasm_host; #[cfg(test)] mod extension_store_test; -use crate::extension_lsp_adapter::ExtensionLspAdapter; use anyhow::{anyhow, bail, Context as _, Result}; use async_compression::futures::bufread::GzipDecoder; use 
async_tar::Archive; -use client::{telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse}; -use collections::{btree_map, BTreeMap, HashSet}; +use client::{proto, telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse}; +use collections::{btree_map, BTreeMap, HashMap, HashSet}; use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; -use extension::Extension; pub use extension::ExtensionManifest; +use extension::{ + ExtensionContextServerProxy, ExtensionGrammarProxy, ExtensionHostProxy, + ExtensionIndexedDocsProviderProxy, ExtensionLanguageProxy, ExtensionLanguageServerProxy, + ExtensionSlashCommandProxy, ExtensionSnippetProxy, ExtensionThemeProxy, +}; use fs::{Fs, RemoveOptions}; use futures::{ channel::{ @@ -24,18 +27,18 @@ use futures::{ select_biased, AsyncReadExt as _, Future, FutureExt as _, StreamExt as _, }; use gpui::{ - actions, AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, - SharedString, Task, WeakModel, + actions, AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Task, + WeakModel, }; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use language::{ - LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LoadedLanguage, + LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LoadedLanguage, Rope, QUERY_FILENAME_PREFIXES, }; -use lsp::LanguageServerName; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; use release_channel::ReleaseChannel; +use remote::SshRemoteClient; use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; use settings::Settings; @@ -94,76 +97,8 @@ pub fn is_version_compatible( true } -pub trait ExtensionRegistrationHooks: Send + Sync + 'static { - fn remove_user_themes(&self, _themes: Vec) {} - - fn load_user_theme(&self, _theme_path: PathBuf, _fs: Arc) -> Task> { - Task::ready(Ok(())) - } - - fn list_theme_names( - &self, - _theme_path: PathBuf, 
- _fs: Arc, - ) -> Task>> { - Task::ready(Ok(Vec::new())) - } - - fn reload_current_theme(&self, _cx: &mut AppContext) {} - - fn register_language( - &self, - _language: LanguageName, - _grammar: Option>, - _matcher: language::LanguageMatcher, - _load: Arc Result + 'static + Send + Sync>, - ) { - } - - fn register_lsp_adapter(&self, _language: LanguageName, _adapter: ExtensionLspAdapter) {} - - fn remove_lsp_adapter(&self, _language: &LanguageName, _server_name: &LanguageServerName) {} - - fn register_wasm_grammars(&self, _grammars: Vec<(Arc, PathBuf)>) {} - - fn remove_languages( - &self, - _languages_to_remove: &[LanguageName], - _grammars_to_remove: &[Arc], - ) { - } - - fn register_slash_command( - &self, - _extension: Arc, - _command: extension::SlashCommand, - ) { - } - - fn register_context_server( - &self, - _id: Arc, - _extension: WasmExtension, - _cx: &mut AppContext, - ) { - } - - fn register_docs_provider(&self, _extension: Arc, _provider_id: Arc) {} - - fn register_snippets(&self, _path: &PathBuf, _snippet_contents: &str) -> Result<()> { - Ok(()) - } - - fn update_lsp_status( - &self, - _server_name: lsp::LanguageServerName, - _status: language::LanguageServerBinaryStatus, - ) { - } -} - pub struct ExtensionStore { - pub registration_hooks: Arc, + pub proxy: Arc, pub builder: Arc, pub extension_index: ExtensionIndex, pub fs: Arc, @@ -178,6 +113,8 @@ pub struct ExtensionStore { pub wasm_host: Arc, pub wasm_extensions: Vec<(Arc, WasmExtension)>, pub tasks: Vec>, + pub ssh_clients: HashMap>, + pub ssh_registered_tx: UnboundedSender<()>, } #[derive(Clone, Copy)] @@ -225,13 +162,14 @@ pub struct ExtensionIndexLanguageEntry { pub extension: Arc, pub path: PathBuf, pub matcher: LanguageMatcher, + pub hidden: bool, pub grammar: Option>, } actions!(zed, [ReloadExtensions]); pub fn init( - registration_hooks: Arc, + extension_host_proxy: Arc, fs: Arc, client: Arc, node_runtime: NodeRuntime, @@ -243,7 +181,7 @@ pub fn init( ExtensionStore::new( 
paths::extensions_dir().clone(), None, - registration_hooks, + extension_host_proxy, fs, client.http_client().clone(), client.http_client().clone(), @@ -275,7 +213,7 @@ impl ExtensionStore { pub fn new( extensions_dir: PathBuf, build_dir: Option, - extension_api: Arc, + extension_host_proxy: Arc, fs: Arc, http_client: Arc, builder_client: Arc, @@ -289,8 +227,9 @@ impl ExtensionStore { let index_path = extensions_dir.join("index.json"); let (reload_tx, mut reload_rx) = unbounded(); + let (connection_registered_tx, mut connection_registered_rx) = unbounded(); let mut this = Self { - registration_hooks: extension_api.clone(), + proxy: extension_host_proxy.clone(), extension_index: Default::default(), installed_dir, index_path, @@ -302,7 +241,7 @@ impl ExtensionStore { fs.clone(), http_client.clone(), node_runtime, - extension_api, + extension_host_proxy, work_dir, cx, ), @@ -312,6 +251,9 @@ impl ExtensionStore { telemetry, reload_tx, tasks: Vec::new(), + + ssh_clients: HashMap::default(), + ssh_registered_tx: connection_registered_tx, }; // The extensions store maintains an index file, which contains a complete @@ -337,7 +279,10 @@ impl ExtensionStore { if let (Ok(Some(index_metadata)), Ok(Some(extensions_metadata))) = (index_metadata, extensions_metadata) { - if index_metadata.mtime > extensions_metadata.mtime { + if index_metadata + .mtime + .bad_is_greater_than(extensions_metadata.mtime) + { extension_index_needs_rebuild = false; } } @@ -386,6 +331,14 @@ impl ExtensionStore { .await; index_changed = false; } + + Self::update_ssh_clients(&this, &mut cx).await?; + } + _ = connection_registered_rx.next() => { + debounce_timer = cx + .background_executor() + .timer(RELOAD_DEBOUNCE_DURATION) + .fuse(); } extension_id = reload_rx.next() => { let Some(extension_id) = extension_id else { break; }; @@ -1048,14 +1001,13 @@ impl ExtensionStore { extensions_to_unload.len() - reload_count ); - if let Some(telemetry) = &self.telemetry { - for extension_id in &extensions_to_load 
{ - if let Some(extension) = new_index.extensions.get(extension_id) { - telemetry.report_extension_event( - extension_id.clone(), - extension.manifest.version.clone(), - ); - } + for extension_id in &extensions_to_load { + if let Some(extension) = new_index.extensions.get(extension_id) { + telemetry::event!( + "Extension Loaded", + extension_id, + version = extension.manifest.version + ); } } @@ -1089,16 +1041,16 @@ impl ExtensionStore { grammars_to_remove.extend(extension.manifest.grammars.keys().cloned()); for (language_server_name, config) in extension.manifest.language_servers.iter() { for language in config.languages() { - self.registration_hooks - .remove_lsp_adapter(&language, language_server_name); + self.proxy + .remove_language_server(&language, language_server_name); } } } self.wasm_extensions .retain(|(extension, _)| !extensions_to_unload.contains(&extension.id)); - self.registration_hooks.remove_user_themes(themes_to_remove); - self.registration_hooks + self.proxy.remove_user_themes(themes_to_remove); + self.proxy .remove_languages(&languages_to_remove, &grammars_to_remove); let languages_to_add = new_index @@ -1133,8 +1085,7 @@ impl ExtensionStore { })); } - self.registration_hooks - .register_wasm_grammars(grammars_to_add); + self.proxy.register_grammars(grammars_to_add); for (language_name, language) in languages_to_add { let mut language_path = self.installed_dir.clone(); @@ -1142,10 +1093,11 @@ impl ExtensionStore { Path::new(language.extension.as_ref()), language.path.as_path(), ]); - self.registration_hooks.register_language( + self.proxy.register_language( language_name.clone(), language.grammar.clone(), language.matcher.clone(), + language.hidden, Arc::new(move || { let config = std::fs::read_to_string(language_path.join("config.toml"))?; let config: LanguageConfig = ::toml::from_str(&config)?; @@ -1172,7 +1124,7 @@ impl ExtensionStore { let fs = self.fs.clone(); let wasm_host = self.wasm_host.clone(); let root_dir = 
self.installed_dir.clone(); - let api = self.registration_hooks.clone(); + let proxy = self.proxy.clone(); let extension_entries = extensions_to_load .iter() .filter_map(|name| new_index.extensions.get(name).cloned()) @@ -1188,13 +1140,17 @@ impl ExtensionStore { let fs = fs.clone(); async move { for theme_path in themes_to_add.into_iter() { - api.load_user_theme(theme_path, fs.clone()).await.log_err(); + proxy + .load_user_theme(theme_path, fs.clone()) + .await + .log_err(); } for snippets_path in &snippets_to_add { if let Some(snippets_contents) = fs.load(snippets_path).await.log_err() { - api.register_snippets(snippets_path, &snippets_contents) + proxy + .register_snippet(snippets_path, &snippets_contents) .log_err(); } } @@ -1235,19 +1191,16 @@ impl ExtensionStore { for (language_server_id, language_server_config) in &manifest.language_servers { for language in language_server_config.languages() { - this.registration_hooks.register_lsp_adapter( + this.proxy.register_language_server( + extension.clone(), + language_server_id.clone(), language.clone(), - ExtensionLspAdapter { - extension: extension.clone(), - language_server_id: language_server_id.clone(), - language_name: language.clone(), - }, ); } } for (slash_command_name, slash_command) in &manifest.slash_commands { - this.registration_hooks.register_slash_command( + this.proxy.register_slash_command( extension.clone(), extension::SlashCommand { name: slash_command_name.to_string(), @@ -1262,21 +1215,18 @@ impl ExtensionStore { } for (id, _context_server_entry) in &manifest.context_servers { - this.registration_hooks.register_context_server( - id.clone(), - wasm_extension.clone(), - cx, - ); + this.proxy + .register_context_server(extension.clone(), id.clone(), cx); } for (provider_id, _provider) in &manifest.indexed_docs_providers { - this.registration_hooks - .register_docs_provider(extension.clone(), provider_id.clone()); + this.proxy + .register_indexed_docs_provider(extension.clone(), 
provider_id.clone()); } } this.wasm_extensions.extend(wasm_extensions); - this.registration_hooks.reload_current_theme(cx); + this.proxy.reload_current_theme(cx); }) .ok(); }) @@ -1287,7 +1237,7 @@ impl ExtensionStore { let work_dir = self.wasm_host.work_dir.clone(); let extensions_dir = self.installed_dir.clone(); let index_path = self.index_path.clone(); - let extension_api = self.registration_hooks.clone(); + let proxy = self.proxy.clone(); cx.background_executor().spawn(async move { let start_time = Instant::now(); let mut index = ExtensionIndex::default(); @@ -1313,7 +1263,7 @@ impl ExtensionStore { fs.clone(), extension_dir, &mut index, - extension_api.clone(), + proxy.clone(), ) .await .log_err(); @@ -1336,7 +1286,7 @@ impl ExtensionStore { fs: Arc, extension_dir: PathBuf, index: &mut ExtensionIndex, - extension_api: Arc, + proxy: Arc, ) -> Result<()> { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_dir).await?; let extension_id = extension_manifest.id.clone(); @@ -1375,6 +1325,7 @@ impl ExtensionStore { extension: extension_id.clone(), path: relative_path, matcher: config.matcher, + hidden: config.hidden, grammar: config.grammar, }, ); @@ -1388,7 +1339,7 @@ impl ExtensionStore { continue; }; - let Some(theme_families) = extension_api + let Some(theme_families) = proxy .list_theme_names(theme_path.clone(), fs.clone()) .await .log_err() @@ -1431,6 +1382,170 @@ impl ExtensionStore { Ok(()) } + + fn prepare_remote_extension( + &mut self, + extension_id: Arc, + is_dev: bool, + tmp_dir: PathBuf, + cx: &mut ModelContext, + ) -> Task> { + let src_dir = self.extensions_dir().join(extension_id.as_ref()); + let Some(loaded_extension) = self.extension_index.extensions.get(&extension_id).cloned() + else { + return Task::ready(Err(anyhow!("extension no longer installed"))); + }; + let fs = self.fs.clone(); + cx.background_executor().spawn(async move { + const EXTENSION_TOML: &str = "extension.toml"; + const EXTENSION_WASM: &str = 
"extension.wasm"; + const CONFIG_TOML: &str = "config.toml"; + + if is_dev { + let manifest_toml = toml::to_string(&loaded_extension.manifest)?; + fs.save( + &tmp_dir.join(EXTENSION_TOML), + &Rope::from(manifest_toml), + language::LineEnding::Unix, + ) + .await?; + } else { + fs.copy_file( + &src_dir.join(EXTENSION_TOML), + &tmp_dir.join(EXTENSION_TOML), + fs::CopyOptions::default(), + ) + .await? + } + + if fs.is_file(&src_dir.join(EXTENSION_WASM)).await { + fs.copy_file( + &src_dir.join(EXTENSION_WASM), + &tmp_dir.join(EXTENSION_WASM), + fs::CopyOptions::default(), + ) + .await? + } + + for language_path in loaded_extension.manifest.languages.iter() { + if fs + .is_file(&src_dir.join(language_path).join(CONFIG_TOML)) + .await + { + fs.create_dir(&tmp_dir.join(language_path)).await?; + fs.copy_file( + &src_dir.join(language_path).join(CONFIG_TOML), + &tmp_dir.join(language_path).join(CONFIG_TOML), + fs::CopyOptions::default(), + ) + .await? + } + } + + Ok(()) + }) + } + + async fn sync_extensions_over_ssh( + this: &WeakModel, + client: WeakModel, + cx: &mut AsyncAppContext, + ) -> Result<()> { + let extensions = this.update(cx, |this, _cx| { + this.extension_index + .extensions + .iter() + .filter_map(|(id, entry)| { + if entry.manifest.language_servers.is_empty() { + return None; + } + Some(proto::Extension { + id: id.to_string(), + version: entry.manifest.version.to_string(), + dev: entry.dev, + }) + }) + .collect() + })?; + + let response = client + .update(cx, |client, _cx| { + client + .proto_client() + .request(proto::SyncExtensions { extensions }) + })? + .await?; + + for missing_extension in response.missing_extensions.into_iter() { + let tmp_dir = tempfile::tempdir()?; + this.update(cx, |this, cx| { + this.prepare_remote_extension( + missing_extension.id.clone().into(), + missing_extension.dev, + tmp_dir.path().to_owned(), + cx, + ) + })? 
+ .await?; + let dest_dir = PathBuf::from(&response.tmp_dir).join(missing_extension.clone().id); + log::info!("Uploading extension {}", missing_extension.clone().id); + + client + .update(cx, |client, cx| { + client.upload_directory(tmp_dir.path().to_owned(), dest_dir.clone(), cx) + })? + .await?; + + log::info!( + "Finished uploading extension {}", + missing_extension.clone().id + ); + + client + .update(cx, |client, _cx| { + client.proto_client().request(proto::InstallExtension { + tmp_dir: dest_dir.to_string_lossy().to_string(), + extension: Some(missing_extension), + }) + })? + .await?; + } + + anyhow::Ok(()) + } + + pub async fn update_ssh_clients( + this: &WeakModel, + cx: &mut AsyncAppContext, + ) -> Result<()> { + let clients = this.update(cx, |this, _cx| { + this.ssh_clients.retain(|_k, v| v.upgrade().is_some()); + this.ssh_clients.values().cloned().collect::>() + })?; + + for client in clients { + Self::sync_extensions_over_ssh(&this, client, cx) + .await + .log_err(); + } + + anyhow::Ok(()) + } + + pub fn register_ssh_client( + &mut self, + client: Model, + cx: &mut ModelContext, + ) { + let connection_options = client.read(cx).connection_options(); + if self.ssh_clients.contains_key(&connection_options.ssh_url()) { + return; + } + + self.ssh_clients + .insert(connection_options.ssh_url(), client.downgrade()); + self.ssh_registered_tx.unbounded_send(()).ok(); + } } fn load_plugin_queries(root_path: &Path) -> LanguageQueries { diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index 23004e9d7f..724988fadd 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -1,17 +1,16 @@ -use crate::extension_lsp_adapter::ExtensionLspAdapter; use crate::{ Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry, ExtensionIndexThemeEntry, ExtensionManifest, ExtensionSettings, ExtensionStore, GrammarManifestEntry, 
SchemaVersion, RELOAD_DEBOUNCE_DURATION, }; -use anyhow::Result; use async_compression::futures::bufread::GzipEncoder; use collections::BTreeMap; +use extension::ExtensionHostProxy; use fs::{FakeFs, Fs, RealFs}; use futures::{io::BufReader, AsyncReadExt, StreamExt}; -use gpui::{BackgroundExecutor, Context, SemanticVersion, SharedString, Task, TestAppContext}; +use gpui::{Context, SemanticVersion, TestAppContext}; use http_client::{FakeHttpClient, Response}; -use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LoadedLanguage}; +use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus}; use lsp::LanguageServerName; use node_runtime::NodeRuntime; use parking_lot::Mutex; @@ -28,84 +27,6 @@ use std::{ use theme::ThemeRegistry; use util::test::temp_tree; -use crate::ExtensionRegistrationHooks; - -struct TestExtensionRegistrationHooks { - executor: BackgroundExecutor, - language_registry: Arc, - theme_registry: Arc, -} - -impl ExtensionRegistrationHooks for TestExtensionRegistrationHooks { - fn list_theme_names(&self, path: PathBuf, fs: Arc) -> Task>> { - self.executor.spawn(async move { - let themes = theme::read_user_theme(&path, fs).await?; - Ok(themes.themes.into_iter().map(|theme| theme.name).collect()) - }) - } - - fn load_user_theme(&self, theme_path: PathBuf, fs: Arc) -> Task> { - let theme_registry = self.theme_registry.clone(); - self.executor - .spawn(async move { theme_registry.load_user_theme(&theme_path, fs).await }) - } - - fn remove_user_themes(&self, themes: Vec) { - self.theme_registry.remove_user_themes(&themes); - } - - fn register_language( - &self, - language: language::LanguageName, - grammar: Option>, - matcher: language::LanguageMatcher, - load: Arc Result + 'static + Send + Sync>, - ) { - self.language_registry - .register_language(language, grammar, matcher, load) - } - - fn remove_languages( - &self, - languages_to_remove: &[language::LanguageName], - grammars_to_remove: &[Arc], - ) { - 
self.language_registry - .remove_languages(&languages_to_remove, &grammars_to_remove); - } - - fn register_wasm_grammars(&self, grammars: Vec<(Arc, PathBuf)>) { - self.language_registry.register_wasm_grammars(grammars) - } - - fn register_lsp_adapter( - &self, - language_name: language::LanguageName, - adapter: ExtensionLspAdapter, - ) { - self.language_registry - .register_lsp_adapter(language_name, Arc::new(adapter)); - } - - fn update_lsp_status( - &self, - server_name: lsp::LanguageServerName, - status: LanguageServerBinaryStatus, - ) { - self.language_registry - .update_lsp_status(server_name, status); - } - - fn remove_lsp_adapter( - &self, - language_name: &language::LanguageName, - server_name: &lsp::LanguageServerName, - ) { - self.language_registry - .remove_lsp_adapter(language_name, server_name); - } -} - #[cfg(test)] #[ctor::ctor] fn init_logger() { @@ -282,6 +203,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { extension: "zed-ruby".into(), path: "languages/erb".into(), grammar: Some("embedded_template".into()), + hidden: false, matcher: LanguageMatcher { path_suffixes: vec!["erb".into()], first_line_pattern: None, @@ -294,6 +216,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { extension: "zed-ruby".into(), path: "languages/ruby".into(), grammar: Some("ruby".into()), + hidden: false, matcher: LanguageMatcher { path_suffixes: vec!["rb".into()], first_line_pattern: None, @@ -337,20 +260,18 @@ async fn test_extension_store(cx: &mut TestAppContext) { .collect(), }; - let language_registry = Arc::new(LanguageRegistry::test(cx.executor())); + let proxy = Arc::new(ExtensionHostProxy::new()); let theme_registry = Arc::new(ThemeRegistry::new(Box::new(()))); - let registration_hooks = Arc::new(TestExtensionRegistrationHooks { - executor: cx.executor(), - language_registry: language_registry.clone(), - theme_registry: theme_registry.clone(), - }); + theme_extension::init(proxy.clone(), theme_registry.clone(), cx.executor()); + let 
language_registry = Arc::new(LanguageRegistry::test(cx.executor())); + language_extension::init(proxy.clone(), language_registry.clone()); let node_runtime = NodeRuntime::unavailable(); let store = cx.new_model(|cx| { ExtensionStore::new( PathBuf::from("/the-extension-dir"), None, - registration_hooks.clone(), + proxy.clone(), fs.clone(), http_client.clone(), http_client.clone(), @@ -475,7 +396,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { ExtensionStore::new( PathBuf::from("/the-extension-dir"), None, - registration_hooks, + proxy, fs.clone(), http_client.clone(), http_client.clone(), @@ -558,13 +479,11 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let proxy = Arc::new(ExtensionHostProxy::new()); let theme_registry = Arc::new(ThemeRegistry::new(Box::new(()))); - let registration_hooks = Arc::new(TestExtensionRegistrationHooks { - executor: cx.executor(), - language_registry: language_registry.clone(), - theme_registry: theme_registry.clone(), - }); + theme_extension::init(proxy.clone(), theme_registry.clone(), cx.executor()); + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + language_extension::init(proxy.clone(), language_registry.clone()); let node_runtime = NodeRuntime::unavailable(); let mut status_updates = language_registry.language_server_binary_statuses(); @@ -658,7 +577,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { ExtensionStore::new( extensions_dir.clone(), Some(cache_dir), - registration_hooks, + proxy, fs.clone(), extension_client.clone(), builder_client, @@ -704,9 +623,9 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { None, ); - let buffer = project + let (buffer, _handle) = project .update(cx, |project, cx| { - 
project.open_local_buffer(project_dir.join("test.gleam"), cx) + project.open_local_buffer_with_lsp(project_dir.join("test.gleam"), cx) }) .await .unwrap(); diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs new file mode 100644 index 0000000000..687f05db47 --- /dev/null +++ b/crates/extension_host/src/headless_host.rs @@ -0,0 +1,320 @@ +use std::{path::PathBuf, sync::Arc}; + +use anyhow::{anyhow, Context as _, Result}; +use client::{proto, TypedEnvelope}; +use collections::{HashMap, HashSet}; +use extension::{ + Extension, ExtensionHostProxy, ExtensionLanguageProxy, ExtensionLanguageServerProxy, + ExtensionManifest, +}; +use fs::{Fs, RemoveOptions, RenameOptions}; +use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, Task, WeakModel}; +use http_client::HttpClient; +use language::{LanguageConfig, LanguageName, LanguageQueries, LoadedLanguage}; +use lsp::LanguageServerName; +use node_runtime::NodeRuntime; + +use crate::wasm_host::{WasmExtension, WasmHost}; + +#[derive(Clone, Debug)] +pub struct ExtensionVersion { + pub id: String, + pub version: String, + pub dev: bool, +} + +pub struct HeadlessExtensionStore { + pub fs: Arc, + pub extension_dir: PathBuf, + pub proxy: Arc, + pub wasm_host: Arc, + pub loaded_extensions: HashMap, Arc>, + pub loaded_languages: HashMap, Vec>, + pub loaded_language_servers: HashMap, Vec<(LanguageServerName, LanguageName)>>, +} + +impl HeadlessExtensionStore { + pub fn new( + fs: Arc, + http_client: Arc, + extension_dir: PathBuf, + extension_host_proxy: Arc, + node_runtime: NodeRuntime, + cx: &mut AppContext, + ) -> Model { + cx.new_model(|cx| Self { + fs: fs.clone(), + wasm_host: WasmHost::new( + fs.clone(), + http_client.clone(), + node_runtime, + extension_host_proxy.clone(), + extension_dir.join("work"), + cx, + ), + extension_dir, + proxy: extension_host_proxy, + loaded_extensions: Default::default(), + loaded_languages: Default::default(), + loaded_language_servers: 
Default::default(), + }) + } + + pub fn sync_extensions( + &mut self, + extensions: Vec, + cx: &ModelContext, + ) -> Task>> { + let on_client = HashSet::from_iter(extensions.iter().map(|e| e.id.as_str())); + let to_remove: Vec> = self + .loaded_extensions + .keys() + .filter(|id| !on_client.contains(id.as_ref())) + .cloned() + .collect(); + let to_load: Vec = extensions + .into_iter() + .filter(|e| { + if e.dev { + return true; + } + !self + .loaded_extensions + .get(e.id.as_str()) + .is_some_and(|loaded| loaded.as_ref() == e.version.as_str()) + }) + .collect(); + + cx.spawn(|this, mut cx| async move { + let mut missing = Vec::new(); + + for extension_id in to_remove { + log::info!("removing extension: {}", extension_id); + this.update(&mut cx, |this, cx| { + this.uninstall_extension(&extension_id, cx) + })? + .await?; + } + + for extension in to_load { + if let Err(e) = Self::load_extension(this.clone(), extension.clone(), &mut cx).await + { + log::info!("failed to load extension: {}, {:?}", extension.id, e); + missing.push(extension) + } else if extension.dev { + missing.push(extension) + } + } + + Ok(missing) + }) + } + + pub async fn load_extension( + this: WeakModel, + extension: ExtensionVersion, + cx: &mut AsyncAppContext, + ) -> Result<()> { + let (fs, wasm_host, extension_dir) = this.update(cx, |this, _cx| { + this.loaded_extensions.insert( + extension.id.clone().into(), + extension.version.clone().into(), + ); + ( + this.fs.clone(), + this.wasm_host.clone(), + this.extension_dir.join(&extension.id), + ) + })?; + + let manifest = Arc::new(ExtensionManifest::load(fs.clone(), &extension_dir).await?); + + debug_assert!(!manifest.languages.is_empty() || !manifest.language_servers.is_empty()); + + if manifest.version.as_ref() != extension.version.as_str() { + anyhow::bail!( + "mismatched versions: ({}) != ({})", + manifest.version, + extension.version + ) + } + + for language_path in &manifest.languages { + let language_path = extension_dir.join(language_path); 
+ let config = fs.load(&language_path.join("config.toml")).await?; + let mut config = ::toml::from_str::(&config)?; + + this.update(cx, |this, _cx| { + this.loaded_languages + .entry(manifest.id.clone()) + .or_default() + .push(config.name.clone()); + + config.grammar = None; + + this.proxy.register_language( + config.name.clone(), + None, + config.matcher.clone(), + config.hidden, + Arc::new(move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: LanguageQueries::default(), + context_provider: None, + toolchain_provider: None, + }) + }), + ); + })?; + } + + if manifest.language_servers.is_empty() { + return Ok(()); + } + + let wasm_extension: Arc = + Arc::new(WasmExtension::load(extension_dir, &manifest, wasm_host.clone(), &cx).await?); + + for (language_server_id, language_server_config) in &manifest.language_servers { + for language in language_server_config.languages() { + this.update(cx, |this, _cx| { + this.loaded_language_servers + .entry(manifest.id.clone()) + .or_default() + .push((language_server_id.clone(), language.clone())); + this.proxy.register_language_server( + wasm_extension.clone(), + language_server_id.clone(), + language.clone(), + ); + })?; + } + } + + Ok(()) + } + + fn uninstall_extension( + &mut self, + extension_id: &Arc, + cx: &mut ModelContext, + ) -> Task> { + self.loaded_extensions.remove(extension_id); + + let languages_to_remove = self + .loaded_languages + .remove(extension_id) + .unwrap_or_default(); + self.proxy.remove_languages(&languages_to_remove, &[]); + + for (language_server_name, language) in self + .loaded_language_servers + .remove(extension_id) + .unwrap_or_default() + { + self.proxy + .remove_language_server(&language, &language_server_name); + } + + let path = self.extension_dir.join(&extension_id.to_string()); + let fs = self.fs.clone(); + cx.spawn(|_, _| async move { + fs.remove_dir( + &path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + }) + } + + pub fn 
install_extension( + &mut self, + extension: ExtensionVersion, + tmp_path: PathBuf, + cx: &mut ModelContext, + ) -> Task> { + let path = self.extension_dir.join(&extension.id); + let fs = self.fs.clone(); + + cx.spawn(|this, mut cx| async move { + if fs.is_dir(&path).await { + this.update(&mut cx, |this, cx| { + this.uninstall_extension(&extension.id.clone().into(), cx) + })? + .await?; + } + + fs.rename(&tmp_path, &path, RenameOptions::default()) + .await?; + + Self::load_extension(this, extension, &mut cx).await + }) + } + + pub async fn handle_sync_extensions( + extension_store: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let requested_extensions = + envelope + .payload + .extensions + .into_iter() + .map(|p| ExtensionVersion { + id: p.id, + version: p.version, + dev: p.dev, + }); + let missing_extensions = extension_store + .update(&mut cx, |extension_store, cx| { + extension_store.sync_extensions(requested_extensions.collect(), cx) + })? + .await?; + + Ok(proto::SyncExtensionsResponse { + missing_extensions: missing_extensions + .into_iter() + .map(|e| proto::Extension { + id: e.id, + version: e.version, + dev: e.dev, + }) + .collect(), + tmp_dir: paths::remote_extensions_uploads_dir() + .to_string_lossy() + .to_string(), + }) + } + + pub async fn handle_install_extension( + extensions: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let extension = envelope + .payload + .extension + .with_context(|| anyhow!("Invalid InstallExtension request"))?; + + extensions + .update(&mut cx, |extensions, cx| { + extensions.install_extension( + ExtensionVersion { + id: extension.id, + version: extension.version, + dev: extension.dev, + }, + PathBuf::from(envelope.payload.tmp_dir), + cx, + ) + })? 
+ .await?; + + Ok(proto::Ack {}) + } +} diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index 54699ac0a1..766ca8c0bb 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -1,11 +1,11 @@ pub mod wit; -use crate::{ExtensionManifest, ExtensionRegistrationHooks}; +use crate::ExtensionManifest; use anyhow::{anyhow, bail, Context as _, Result}; use async_trait::async_trait; use extension::{ - CodeLabel, Command, Completion, KeyValueStoreDelegate, SlashCommand, - SlashCommandArgumentCompletion, SlashCommandOutput, Symbol, WorktreeDelegate, + CodeLabel, Command, Completion, ExtensionHostProxy, KeyValueStoreDelegate, ProjectDelegate, + SlashCommand, SlashCommandArgumentCompletion, SlashCommandOutput, Symbol, WorktreeDelegate, }; use fs::{normalize_path, Fs}; use futures::future::LocalBoxFuture; @@ -34,14 +34,13 @@ use wasmtime::{ }; use wasmtime_wasi::{self as wasi, WasiView}; use wit::Extension; -pub use wit::ExtensionProject; pub struct WasmHost { engine: Engine, release_channel: ReleaseChannel, http_client: Arc, node_runtime: NodeRuntime, - pub registration_hooks: Arc, + pub(crate) proxy: Arc, fs: Arc, pub work_dir: PathBuf, _main_thread_message_task: Task<()>, @@ -238,6 +237,25 @@ impl extension::Extension for WasmExtension { .await } + async fn context_server_command( + &self, + context_server_id: Arc, + project: Arc, + ) -> Result { + self.call(|extension, store| { + async move { + let project_resource = store.data_mut().table().push(project)?; + let command = extension + .call_context_server_command(store, context_server_id.clone(), project_resource) + .await? 
+ .map_err(|err| anyhow!("{err}"))?; + anyhow::Ok(command.into()) + } + .boxed() + }) + .await + } + async fn suggest_docs_packages(&self, provider: Arc) -> Result> { self.call(|extension, store| { async move { @@ -312,7 +330,7 @@ impl WasmHost { fs: Arc, http_client: Arc, node_runtime: NodeRuntime, - registration_hooks: Arc, + proxy: Arc, work_dir: PathBuf, cx: &mut AppContext, ) -> Arc { @@ -328,7 +346,7 @@ impl WasmHost { work_dir, http_client, node_runtime, - registration_hooks, + proxy, release_channel: ReleaseChannel::global(cx), _main_thread_message_task: task, main_thread_message_tx: tx, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs index a4d31d60c0..1ce43ca203 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs @@ -3,7 +3,7 @@ use crate::wasm_host::wit::since_v0_0_4; use crate::wasm_host::WasmState; use anyhow::Result; use async_trait::async_trait; -use extension::WorktreeDelegate; +use extension::{ExtensionLanguageServerProxy, WorktreeDelegate}; use language::LanguageServerBinaryStatus; use semantic_version::SemanticVersion; use std::sync::{Arc, OnceLock}; @@ -149,8 +149,9 @@ impl ExtensionImports for WasmState { }; self.host - .registration_hooks - .update_lsp_status(lsp::LanguageServerName(server_name.into()), status); + .proxy + .update_language_server_status(lsp::LanguageServerName(server_name.into()), status); + Ok(()) } diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index cf5caa77f0..058a86ab36 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -5,7 +5,7 @@ use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; use async_trait::async_trait; -use 
extension::{KeyValueStoreDelegate, WorktreeDelegate}; +use extension::{ExtensionLanguageServerProxy, KeyValueStoreDelegate, WorktreeDelegate}; use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use language::LanguageName; @@ -495,8 +495,9 @@ impl ExtensionImports for WasmState { }; self.host - .registration_hooks - .update_lsp_status(::lsp::LanguageServerName(server_name.into()), status); + .proxy + .update_language_server_status(::lsp::LanguageServerName(server_name.into()), status); + Ok(()) } diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs index 6dbd76ee76..f9bebf0c01 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs @@ -7,8 +7,10 @@ use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; use async_trait::async_trait; -use context_servers::manager::ContextServerSettings; -use extension::{KeyValueStoreDelegate, WorktreeDelegate}; +use context_server_settings::ContextServerSettings; +use extension::{ + ExtensionLanguageServerProxy, KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate, +}; use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use language::{language_settings::AllLanguageSettings, LanguageName, LanguageServerBinaryStatus}; @@ -44,13 +46,10 @@ mod settings { } pub type ExtensionWorktree = Arc; +pub type ExtensionProject = Arc; pub type ExtensionKeyValueStore = Arc; pub type ExtensionHttpResponseStream = Arc>>; -pub struct ExtensionProject { - pub worktree_ids: Vec, -} - pub fn linker() -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) @@ -273,7 +272,7 @@ impl HostProject for WasmState { project: Resource, ) -> wasmtime::Result> { let project = self.table.get(&project)?; - 
Ok(project.worktree_ids.clone()) + Ok(project.worktree_ids()) } async fn drop(&mut self, _project: Resource) -> Result<()> { @@ -685,8 +684,9 @@ impl ExtensionImports for WasmState { }; self.host - .registration_hooks - .update_lsp_status(::lsp::LanguageServerName(server_name.into()), status); + .proxy + .update_language_server_status(::lsp::LanguageServerName(server_name.into()), status); + Ok(()) } diff --git a/crates/extensions_ui/Cargo.toml b/crates/extensions_ui/Cargo.toml index 9709aa7a2b..be263b286e 100644 --- a/crates/extensions_ui/Cargo.toml +++ b/crates/extensions_ui/Cargo.toml @@ -13,21 +13,15 @@ path = "src/extensions_ui.rs" [dependencies] anyhow.workspace = true -assistant_slash_command.workspace = true client.workspace = true collections.workspace = true -context_servers.workspace = true db.workspace = true editor.workspace = true -extension.workspace = true extension_host.workspace = true fs.workspace = true fuzzy.workspace = true gpui.workspace = true -indexed_docs.workspace = true language.workspace = true -log.workspace = true -lsp.workspace = true num-format.workspace = true picker.workspace = true project.workspace = true @@ -36,14 +30,13 @@ semantic_version.workspace = true serde.workspace = true settings.workspace = true smallvec.workspace = true -snippet_provider.workspace = true +telemetry.workspace = true theme.workspace = true -theme_selector.workspace = true ui.workspace = true util.workspace = true -vim.workspace = true -wasmtime-wasi.workspace = true +vim_mode_setting.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/extensions_ui/src/components/extension_card.rs b/crates/extensions_ui/src/components/extension_card.rs index 2dc472f801..fa3d84b47b 100644 --- a/crates/extensions_ui/src/components/extension_card.rs +++ b/crates/extensions_ui/src/components/extension_card.rs @@ -48,7 +48,8 @@ impl RenderOnce for 
ExtensionCard { .absolute() .top_0() .left_0() - .occlude() + .block_mouse_down() + .cursor_default() .size_full() .items_center() .justify_center() diff --git a/crates/extensions_ui/src/components/feature_upsell.rs b/crates/extensions_ui/src/components/feature_upsell.rs index 883174db41..6eed0439df 100644 --- a/crates/extensions_ui/src/components/feature_upsell.rs +++ b/crates/extensions_ui/src/components/feature_upsell.rs @@ -1,6 +1,3 @@ -use std::sync::Arc; - -use client::telemetry::Telemetry; use gpui::{AnyElement, Div, StyleRefinement}; use smallvec::SmallVec; use ui::{prelude::*, ButtonLike}; @@ -8,17 +5,15 @@ use ui::{prelude::*, ButtonLike}; #[derive(IntoElement)] pub struct FeatureUpsell { base: Div, - telemetry: Arc, text: SharedString, docs_url: Option, children: SmallVec<[AnyElement; 2]>, } impl FeatureUpsell { - pub fn new(telemetry: Arc, text: impl Into) -> Self { + pub fn new(text: impl Into) -> Self { Self { base: h_flex(), - telemetry, text: text.into(), docs_url: None, children: SmallVec::new(), @@ -67,12 +62,13 @@ impl RenderOnce for FeatureUpsell { .child(Icon::new(IconName::ArrowUpRight)), ) .on_click({ - let telemetry = self.telemetry.clone(); let docs_url = docs_url.clone(); move |_event, cx| { - telemetry.report_app_event(format!( - "feature upsell: viewed docs ({docs_url})" - )); + telemetry::event!( + "Documentation Viewed", + source = "Feature Upsell", + url = docs_url, + ); cx.open_url(&docs_url) } }), diff --git a/crates/extensions_ui/src/extension_registration_hooks.rs b/crates/extensions_ui/src/extension_registration_hooks.rs deleted file mode 100644 index f8cd9a3429..0000000000 --- a/crates/extensions_ui/src/extension_registration_hooks.rs +++ /dev/null @@ -1,212 +0,0 @@ -use std::{path::PathBuf, sync::Arc}; - -use anyhow::{anyhow, Result}; -use assistant_slash_command::{ExtensionSlashCommand, SlashCommandRegistry}; -use context_servers::manager::ServerCommand; -use context_servers::ContextServerFactoryRegistry; -use 
db::smol::future::FutureExt as _; -use extension::Extension; -use extension_host::wasm_host::ExtensionProject; -use extension_host::{extension_lsp_adapter::ExtensionLspAdapter, wasm_host}; -use fs::Fs; -use gpui::{AppContext, BackgroundExecutor, Model, Task}; -use indexed_docs::{ExtensionIndexedDocsProvider, IndexedDocsRegistry, ProviderId}; -use language::{LanguageRegistry, LanguageServerBinaryStatus, LoadedLanguage}; -use snippet_provider::SnippetRegistry; -use theme::{ThemeRegistry, ThemeSettings}; -use ui::SharedString; -use wasmtime_wasi::WasiView as _; - -pub struct ConcreteExtensionRegistrationHooks { - slash_command_registry: Arc, - theme_registry: Arc, - indexed_docs_registry: Arc, - snippet_registry: Arc, - language_registry: Arc, - context_server_factory_registry: Model, - executor: BackgroundExecutor, -} - -impl ConcreteExtensionRegistrationHooks { - pub fn new( - theme_registry: Arc, - slash_command_registry: Arc, - indexed_docs_registry: Arc, - snippet_registry: Arc, - language_registry: Arc, - context_server_factory_registry: Model, - cx: &AppContext, - ) -> Arc { - Arc::new(Self { - theme_registry, - slash_command_registry, - indexed_docs_registry, - snippet_registry, - language_registry, - context_server_factory_registry, - executor: cx.background_executor().clone(), - }) - } -} - -impl extension_host::ExtensionRegistrationHooks for ConcreteExtensionRegistrationHooks { - fn remove_user_themes(&self, themes: Vec) { - self.theme_registry.remove_user_themes(&themes); - } - - fn load_user_theme(&self, theme_path: PathBuf, fs: Arc) -> Task> { - let theme_registry = self.theme_registry.clone(); - self.executor - .spawn(async move { theme_registry.load_user_theme(&theme_path, fs).await }) - } - - fn register_slash_command( - &self, - extension: Arc, - command: extension::SlashCommand, - ) { - self.slash_command_registry - .register_command(ExtensionSlashCommand::new(extension, command), false) - } - - fn register_context_server( - &self, - id: Arc, - 
extension: wasm_host::WasmExtension, - cx: &mut AppContext, - ) { - self.context_server_factory_registry - .update(cx, |registry, _| { - registry.register_server_factory( - id.clone(), - Arc::new({ - move |project, cx| { - log::info!( - "loading command for context server {id} from extension {}", - extension.manifest.id - ); - - let id = id.clone(); - let extension = extension.clone(); - cx.spawn(|mut cx| async move { - let extension_project = - project.update(&mut cx, |project, cx| ExtensionProject { - worktree_ids: project - .visible_worktrees(cx) - .map(|worktree| worktree.read(cx).id().to_proto()) - .collect(), - })?; - - let command = extension - .call({ - let id = id.clone(); - |extension, store| { - async move { - let project = store - .data_mut() - .table() - .push(extension_project)?; - let command = extension - .call_context_server_command( - store, - id.clone(), - project, - ) - .await? - .map_err(|e| anyhow!("{}", e))?; - anyhow::Ok(command) - } - .boxed() - } - }) - .await?; - - log::info!("loaded command for context server {id}: {command:?}"); - - Ok(ServerCommand { - path: command.command, - args: command.args, - env: Some(command.env.into_iter().collect()), - }) - }) - } - }), - ) - }); - } - - fn register_docs_provider(&self, extension: Arc, provider_id: Arc) { - self.indexed_docs_registry - .register_provider(Box::new(ExtensionIndexedDocsProvider::new( - extension, - ProviderId(provider_id), - ))); - } - - fn register_snippets(&self, path: &PathBuf, snippet_contents: &str) -> Result<()> { - self.snippet_registry - .register_snippets(path, snippet_contents) - } - - fn update_lsp_status( - &self, - server_name: lsp::LanguageServerName, - status: LanguageServerBinaryStatus, - ) { - self.language_registry - .update_lsp_status(server_name, status); - } - - fn register_lsp_adapter( - &self, - language_name: language::LanguageName, - adapter: ExtensionLspAdapter, - ) { - self.language_registry - .register_lsp_adapter(language_name, Arc::new(adapter)); - 
} - - fn remove_lsp_adapter( - &self, - language_name: &language::LanguageName, - server_name: &lsp::LanguageServerName, - ) { - self.language_registry - .remove_lsp_adapter(language_name, server_name); - } - - fn remove_languages( - &self, - languages_to_remove: &[language::LanguageName], - grammars_to_remove: &[Arc], - ) { - self.language_registry - .remove_languages(&languages_to_remove, &grammars_to_remove); - } - - fn register_wasm_grammars(&self, grammars: Vec<(Arc, PathBuf)>) { - self.language_registry.register_wasm_grammars(grammars) - } - - fn register_language( - &self, - language: language::LanguageName, - grammar: Option>, - matcher: language::LanguageMatcher, - load: Arc Result + 'static + Send + Sync>, - ) { - self.language_registry - .register_language(language, grammar, matcher, load) - } - - fn reload_current_theme(&self, cx: &mut AppContext) { - ThemeSettings::reload_current_theme(cx) - } - - fn list_theme_names(&self, path: PathBuf, fs: Arc) -> Task>> { - self.executor.spawn(async move { - let themes = theme::read_user_theme(&path, fs).await?; - Ok(themes.themes.into_iter().map(|theme| theme.name).collect()) - }) - } -} diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs index dd1aae9cd6..2d4f865819 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -19,6 +19,7 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[ ("clojure", &["bb", "clj", "cljc", "cljs", "edn"]), ("neocmake", &["CMakeLists.txt", "cmake"]), ("csharp", &["cs"]), + ("cython", &["pyx", "pxd", "pxi"]), ("dart", &["dart"]), ("dockerfile", &["Dockerfile"]), ("elisp", &["el"]), @@ -174,7 +175,7 @@ pub(crate) fn suggest(buffer: Model, cx: &mut ViewContext) { "Do you want to install the recommended '{}' extension for '{}' files?", extension_id, file_name_or_extension )) - .with_click_message("Yes") + .with_click_message("Yes, install extension") .on_click({ let 
extension_id = extension_id.clone(); move |cx| { @@ -185,7 +186,7 @@ pub(crate) fn suggest(buffer: Model, cx: &mut ViewContext) { }); } }) - .with_secondary_click_message("No") + .with_secondary_click_message("No, don't install it") .on_secondary_click(move |cx| { let key = language_extension_key(&extension_id); db::write_and_log(cx, move || { diff --git a/crates/extensions_ui/src/extension_version_selector.rs b/crates/extensions_ui/src/extension_version_selector.rs index 1041e9524f..bb7ee179a4 100644 --- a/crates/extensions_ui/src/extension_version_selector.rs +++ b/crates/extensions_ui/src/extension_version_selector.rs @@ -113,13 +113,7 @@ impl PickerDelegate for ExtensionVersionSelectorDelegate { .iter() .enumerate() .map(|(id, extension)| { - let text = format!("v{}", extension.manifest.version); - - StringMatchCandidate { - id, - char_bag: text.as_str().into(), - string: text, - } + StringMatchCandidate::new(id, &format!("v{}", extension.manifest.version)) }) .collect::>(); @@ -210,7 +204,7 @@ impl PickerDelegate for ExtensionVersionSelectorDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .disabled(disabled) .child( HighlightedLabel::new( diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index c2ef9cf9e6..0ed79c250b 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -1,25 +1,21 @@ mod components; -mod extension_registration_hooks; mod extension_suggest; mod extension_version_selector; -pub use extension_registration_hooks::ConcreteExtensionRegistrationHooks; - use std::ops::DerefMut; use std::sync::OnceLock; use std::time::Duration; use std::{ops::Range, sync::Arc}; -use client::telemetry::Telemetry; use client::ExtensionMetadata; use collections::{BTreeMap, BTreeSet}; use editor::{Editor, EditorElement, EditorStyle}; use extension_host::{ExtensionManifest, ExtensionOperation, 
ExtensionStore}; use fuzzy::{match_strings, StringMatchCandidate}; use gpui::{ - actions, uniform_list, AppContext, EventEmitter, Flatten, FocusableView, InteractiveElement, - KeyContext, ParentElement, Render, Styled, Task, TextStyle, UniformListScrollHandle, View, - ViewContext, VisualContext, WeakView, WindowContext, + actions, uniform_list, Action, AppContext, ClipboardItem, EventEmitter, Flatten, FocusableView, + InteractiveElement, KeyContext, ParentElement, Render, Styled, Task, TextStyle, + UniformListScrollHandle, View, ViewContext, VisualContext, WeakView, WindowContext, }; use num_format::{Locale, ToFormattedString}; use project::DirectoryLister; @@ -27,7 +23,7 @@ use release_channel::ReleaseChannel; use settings::Settings; use theme::ThemeSettings; use ui::{prelude::*, CheckboxWithLabel, ContextMenu, PopoverMenu, ToggleButton, Tooltip}; -use vim::VimModeSetting; +use vim_mode_setting::VimModeSetting; use workspace::{ item::{Item, ItemEvent}, Workspace, WorkspaceId, @@ -38,12 +34,12 @@ use crate::extension_version_selector::{ ExtensionVersionSelector, ExtensionVersionSelectorDelegate, }; -actions!(zed, [Extensions, InstallDevExtension]); +actions!(zed, [InstallDevExtension]); pub fn init(cx: &mut AppContext) { cx.observe_new_views(move |workspace: &mut Workspace, cx| { workspace - .register_action(move |workspace, _: &Extensions, cx| { + .register_action(move |workspace, _: &zed_actions::Extensions, cx| { let existing = workspace .active_pane() .read(cx) @@ -185,7 +181,6 @@ fn keywords_by_feature() -> &'static BTreeMap> { pub struct ExtensionsPage { workspace: WeakView, list: UniformListScrollHandle, - telemetry: Arc, is_fetching_extensions: bool, filter: ExtensionFilter, remote_extension_entries: Vec, @@ -224,7 +219,6 @@ impl ExtensionsPage { let mut this = Self { workspace: workspace.weak_handle(), list: UniformListScrollHandle::new(), - telemetry: workspace.client().telemetry().clone(), is_fetching_extensions: false, filter: ExtensionFilter::All, 
dev_extension_entries: Vec::new(), @@ -254,14 +248,13 @@ impl ExtensionsPage { .collect::>(); if !themes.is_empty() { workspace - .update(cx, |workspace, cx| { - theme_selector::toggle( - workspace, - &theme_selector::Toggle { + .update(cx, |_workspace, cx| { + cx.dispatch_action( + zed_actions::theme_selector::Toggle { themes_filter: Some(themes), - }, - cx, - ) + } + .boxed_clone(), + ); }) .ok(); } @@ -332,11 +325,7 @@ impl ExtensionsPage { let match_candidates = dev_extensions .iter() .enumerate() - .map(|(ix, manifest)| StringMatchCandidate { - id: ix, - string: manifest.name.clone(), - char_bag: manifest.name.as_str().into(), - }) + .map(|(ix, manifest)| StringMatchCandidate::new(ix, &manifest.name)) .collect::>(); let matches = match_strings( @@ -457,18 +446,17 @@ impl ExtensionsPage { .gap_2() .justify_between() .child( - div().overflow_x_hidden().text_ellipsis().child( - Label::new(format!( - "{}: {}", - if extension.authors.len() > 1 { - "Authors" - } else { - "Author" - }, - extension.authors.join(", ") - )) - .size(LabelSize::Small), - ), + Label::new(format!( + "{}: {}", + if extension.authors.len() > 1 { + "Authors" + } else { + "Author" + }, + extension.authors.join(", ") + )) + .size(LabelSize::Small) + .text_ellipsis(), ) .child(Label::new("<>").size(LabelSize::Small)), ) @@ -477,11 +465,10 @@ impl ExtensionsPage { .gap_2() .justify_between() .children(extension.description.as_ref().map(|description| { - div().overflow_x_hidden().text_ellipsis().child( - Label::new(description.clone()) - .size(LabelSize::Small) - .color(Color::Default), - ) + Label::new(description.clone()) + .size(LabelSize::Small) + .color(Color::Default) + .text_ellipsis() })) .children(repository_url.map(|repository_url| { IconButton::new( @@ -558,18 +545,17 @@ impl ExtensionsPage { .gap_2() .justify_between() .child( - div().overflow_x_hidden().text_ellipsis().child( - Label::new(format!( - "{}: {}", - if extension.manifest.authors.len() > 1 { - "Authors" - } else { - "Author" 
- }, - extension.manifest.authors.join(", ") - )) - .size(LabelSize::Small), - ), + Label::new(format!( + "{}: {}", + if extension.manifest.authors.len() > 1 { + "Authors" + } else { + "Author" + }, + extension.manifest.authors.join(", ") + )) + .size(LabelSize::Small) + .text_ellipsis(), ) .child( Label::new(format!( @@ -584,11 +570,10 @@ impl ExtensionsPage { .gap_2() .justify_between() .children(extension.manifest.description.as_ref().map(|description| { - div().overflow_x_hidden().text_ellipsis().child( - Label::new(description.clone()) - .size(LabelSize::Small) - .color(Color::Default), - ) + Label::new(description.clone()) + .size(LabelSize::Small) + .color(Color::Default) + .text_ellipsis() })) .child( h_flex() @@ -641,13 +626,21 @@ impl ExtensionsPage { cx: &mut WindowContext, ) -> View { let context_menu = ContextMenu::build(cx, |context_menu, cx| { - context_menu.entry( - "Install Another Version...", - None, - cx.handler_for(this, move |this, cx| { - this.show_extension_version_list(extension_id.clone(), cx) - }), - ) + context_menu + .entry( + "Install Another Version...", + None, + cx.handler_for(this, { + let extension_id = extension_id.clone(); + move |this, cx| this.show_extension_version_list(extension_id.clone(), cx) + }), + ) + .entry("Copy Extension ID", None, { + let extension_id = extension_id.clone(); + move |cx| { + cx.write_to_clipboard(ClipboardItem::new_string(extension_id.to_string())); + } + }) }); context_menu @@ -708,18 +701,15 @@ impl ExtensionsPage { match status.clone() { ExtensionStatus::NotInstalled => ( - Button::new(SharedString::from(extension.id.clone()), "Install").on_click( - cx.listener({ - let extension_id = extension.id.clone(); - move |this, _, cx| { - this.telemetry - .report_app_event("extensions: install extension".to_string()); - ExtensionStore::global(cx).update(cx, |store, cx| { - store.install_latest_extension(extension_id.clone(), cx) - }); - } - }), - ), + Button::new(SharedString::from(extension.id.clone()), 
"Install").on_click({ + let extension_id = extension.id.clone(); + move |_, cx| { + telemetry::event!("Extension Installed"); + ExtensionStore::global(cx).update(cx, |store, cx| { + store.install_latest_extension(extension_id.clone(), cx) + }); + } + }), None, ), ExtensionStatus::Installing => ( @@ -733,18 +723,15 @@ impl ExtensionsPage { ), ), ExtensionStatus::Installed(installed_version) => ( - Button::new(SharedString::from(extension.id.clone()), "Uninstall").on_click( - cx.listener({ - let extension_id = extension.id.clone(); - move |this, _, cx| { - this.telemetry - .report_app_event("extensions: uninstall extension".to_string()); - ExtensionStore::global(cx).update(cx, |store, cx| { - store.uninstall_extension(extension_id.clone(), cx) - }); - } - }), - ), + Button::new(SharedString::from(extension.id.clone()), "Uninstall").on_click({ + let extension_id = extension.id.clone(); + move |_, cx| { + telemetry::event!("Extension Uninstalled", extension_id); + ExtensionStore::global(cx).update(cx, |store, cx| { + store.uninstall_extension(extension_id.clone(), cx) + }); + } + }), if installed_version == extension.manifest.version { None } else { @@ -764,13 +751,11 @@ impl ExtensionsPage { }) }) .disabled(!is_compatible) - .on_click(cx.listener({ + .on_click({ let extension_id = extension.id.clone(); let version = extension.manifest.version.clone(); - move |this, _, cx| { - this.telemetry.report_app_event( - "extensions: install extension".to_string(), - ); + move |_, cx| { + telemetry::event!("Extension Installed", extension_id, version); ExtensionStore::global(cx).update(cx, |store, cx| { store .upgrade_extension( @@ -781,7 +766,7 @@ impl ExtensionsPage { .detach_and_log_err(cx) }); } - })), + }), ) }, ), @@ -858,7 +843,7 @@ impl ExtensionsPage { } } - fn fetch_extensions_debounced(&mut self, cx: &mut ViewContext<'_, ExtensionsPage>) { + fn fetch_extensions_debounced(&mut self, cx: &mut ViewContext) { self.extension_fetch_task = Some(cx.spawn(|this, mut cx| async 
move { let search = this .update(&mut cx, |this, cx| this.search_query(cx)) @@ -929,7 +914,7 @@ impl ExtensionsPage { fn update_settings( &mut self, - selection: &Selection, + selection: &ToggleState, cx: &mut ViewContext, callback: impl 'static + Send + Fn(&mut T::FileContent, bool), ) { @@ -938,8 +923,8 @@ impl ExtensionsPage { let selection = *selection; settings::update_settings_file::(fs, cx, move |settings, _| { let value = match selection { - Selection::Unselected => false, - Selection::Selected => true, + ToggleState::Unselected => false, + ToggleState::Selected => true, _ => return, }; @@ -976,31 +961,27 @@ impl ExtensionsPage { let upsells_count = self.upsells.len(); v_flex().children(self.upsells.iter().enumerate().map(|(ix, feature)| { - let telemetry = self.telemetry.clone(); let upsell = match feature { Feature::Git => FeatureUpsell::new( - telemetry, "Zed comes with basic Git support. More Git features are coming in the future.", ) .docs_url("https://zed.dev/docs/git"), Feature::OpenIn => FeatureUpsell::new( - telemetry, "Zed supports linking to a source line on GitHub and others.", ) .docs_url("https://zed.dev/docs/git#git-integrations"), - Feature::Vim => FeatureUpsell::new(telemetry, "Vim support is built-in to Zed!") + Feature::Vim => FeatureUpsell::new("Vim support is built-in to Zed!") .docs_url("https://zed.dev/docs/vim") .child(CheckboxWithLabel::new( "enable-vim", Label::new("Enable vim mode"), if VimModeSetting::get_global(cx).0 { - ui::Selection::Selected + ui::ToggleState::Selected } else { - ui::Selection::Unselected + ui::ToggleState::Unselected }, cx.listener(move |this, selection, cx| { - this.telemetry - .report_app_event("feature upsell: toggle vim".to_string()); + telemetry::event!("Vim Mode Toggled", source = "Feature Upsell"); this.update_settings::( selection, cx, @@ -1008,36 +989,22 @@ impl ExtensionsPage { ); }), )), - Feature::LanguageBash => { - FeatureUpsell::new(telemetry, "Shell support is built-in to Zed!") - 
.docs_url("https://zed.dev/docs/languages/bash") - } - Feature::LanguageC => { - FeatureUpsell::new(telemetry, "C support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/c") - } - Feature::LanguageCpp => { - FeatureUpsell::new(telemetry, "C++ support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/cpp") - } - Feature::LanguageGo => { - FeatureUpsell::new(telemetry, "Go support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/go") - } - Feature::LanguagePython => { - FeatureUpsell::new(telemetry, "Python support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/python") - } - Feature::LanguageReact => { - FeatureUpsell::new(telemetry, "React support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/typescript") - } - Feature::LanguageRust => { - FeatureUpsell::new(telemetry, "Rust support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/rust") - } + Feature::LanguageBash => FeatureUpsell::new("Shell support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/bash"), + Feature::LanguageC => FeatureUpsell::new("C support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/c"), + Feature::LanguageCpp => FeatureUpsell::new("C++ support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/cpp"), + Feature::LanguageGo => FeatureUpsell::new("Go support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/go"), + Feature::LanguagePython => FeatureUpsell::new("Python support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/python"), + Feature::LanguageReact => FeatureUpsell::new("React support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/typescript"), + Feature::LanguageRust => FeatureUpsell::new("Rust support is built-in to Zed!") + .docs_url("https://zed.dev/docs/languages/rust"), Feature::LanguageTypescript => { - FeatureUpsell::new(telemetry, "Typescript support is built-in to 
Zed!") + FeatureUpsell::new("Typescript support is built-in to Zed!") .docs_url("https://zed.dev/docs/languages/typescript") } }; @@ -1086,7 +1053,7 @@ impl Render for ExtensionsPage { ToggleButton::new("filter-all", "All") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(self.filter == ExtensionFilter::All) + .toggle_state(self.filter == ExtensionFilter::All) .on_click(cx.listener(|this, _event, cx| { this.filter = ExtensionFilter::All; this.filter_extension_entries(cx); @@ -1100,7 +1067,7 @@ impl Render for ExtensionsPage { ToggleButton::new("filter-installed", "Installed") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(self.filter == ExtensionFilter::Installed) + .toggle_state(self.filter == ExtensionFilter::Installed) .on_click(cx.listener(|this, _event, cx| { this.filter = ExtensionFilter::Installed; this.filter_extension_entries(cx); @@ -1114,7 +1081,9 @@ impl Render for ExtensionsPage { ToggleButton::new("filter-not-installed", "Not Installed") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(self.filter == ExtensionFilter::NotInstalled) + .toggle_state( + self.filter == ExtensionFilter::NotInstalled, + ) .on_click(cx.listener(|this, _event, cx| { this.filter = ExtensionFilter::NotInstalled; this.filter_extension_entries(cx); diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index 286acdfc98..b32e56aa5e 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -39,6 +39,36 @@ pub trait FeatureFlag { } } +pub struct Assistant2FeatureFlag; + +impl FeatureFlag for Assistant2FeatureFlag { + const NAME: &'static str = "assistant2"; + + fn enabled_for_staff() -> bool { + false + } +} + +pub struct ToolUseFeatureFlag; + +impl FeatureFlag for ToolUseFeatureFlag { + const NAME: &'static str = "assistant-tool-use"; + + fn enabled_for_staff() -> bool { + false + } +} + +pub struct ZetaFeatureFlag; +impl FeatureFlag for 
ZetaFeatureFlag { + const NAME: &'static str = "zeta"; +} + +pub struct GitUiFeatureFlag; +impl FeatureFlag for GitUiFeatureFlag { + const NAME: &'static str = "git-ui"; +} + pub struct Remoting {} impl FeatureFlag for Remoting { const NAME: &'static str = "remoting"; diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 0447858ca5..605b572c6c 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -22,8 +22,8 @@ db.workspace = true editor.workspace = true futures.workspace = true gpui.workspace = true -human_bytes = "0.4.1" http_client.workspace = true +human_bytes = "0.4.1" language.workspace = true log.workspace = true menu.workspace = true @@ -39,6 +39,7 @@ ui.workspace = true urlencoding = "2.1.2" util.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/feedback/src/feedback.rs b/crates/feedback/src/feedback.rs index 671dea8689..eaa87b8501 100644 --- a/crates/feedback/src/feedback.rs +++ b/crates/feedback/src/feedback.rs @@ -5,8 +5,6 @@ use workspace::Workspace; pub mod feedback_modal; -actions!(feedback, [GiveFeedback, SubmitFeedback]); - mod system_specs; actions!( @@ -23,13 +21,26 @@ const fn zed_repo_url() -> &'static str { "https://github.com/zed-industries/zed" } -const fn request_feature_url() -> &'static str { - "https://github.com/zed-industries/zed/issues/new?assignees=&labels=admin+read%2Ctriage%2Cenhancement&projects=&template=0_feature_request.yml" +fn request_feature_url(specs: &SystemSpecs) -> String { + format!( + concat!( + "https://github.com/zed-industries/zed/issues/new", + "?labels=admin+read%2Ctriage%2Cenhancement", + "&template=0_feature_request.yml", + "&environment={}" + ), + urlencoding::encode(&specs.to_string()) + ) } fn file_bug_report_url(specs: &SystemSpecs) -> String { format!( - 
"https://github.com/zed-industries/zed/issues/new?assignees=&labels=admin+read%2Ctriage%2Cbug&projects=&template=1_bug_report.yml&environment={}", + concat!( + "https://github.com/zed-industries/zed/issues/new", + "?labels=admin+read%2Ctriage%2Cbug", + "&template=1_bug_report.yml", + "&environment={}" + ), urlencoding::encode(&specs.to_string()) ) } @@ -59,7 +70,15 @@ pub fn init(cx: &mut AppContext) { .detach(); }) .register_action(|_, _: &RequestFeature, cx| { - cx.open_url(request_feature_url()); + let specs = SystemSpecs::new(cx); + cx.spawn(|_, mut cx| async move { + let specs = specs.await; + cx.update(|cx| { + cx.open_url(&request_feature_url(&specs)); + }) + .log_err(); + }) + .detach(); }) .register_action(move |_, _: &FileBugReport, cx| { let specs = SystemSpecs::new(cx); diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index 5270492aee..15b3bc5789 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -1,4 +1,8 @@ -use std::{ops::RangeInclusive, sync::Arc, time::Duration}; +use std::{ + ops::RangeInclusive, + sync::{Arc, LazyLock}, + time::Duration, +}; use anyhow::{anyhow, bail}; use bitflags::bitflags; @@ -18,8 +22,9 @@ use serde_derive::Serialize; use ui::{prelude::*, Button, ButtonStyle, IconPosition, Tooltip}; use util::ResultExt; use workspace::{DismissDecision, ModalView, Workspace}; +use zed_actions::feedback::GiveFeedback; -use crate::{system_specs::SystemSpecs, GiveFeedback, OpenZedRepo}; +use crate::{system_specs::SystemSpecs, OpenZedRepo}; // For UI testing purposes const SEND_SUCCESS_IN_DEV_MODE: bool = true; @@ -33,7 +38,8 @@ const DEV_MODE: bool = true; const DEV_MODE: bool = false; const DATABASE_KEY_NAME: &str = "email_address"; -const EMAIL_REGEX: &str = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"; +static EMAIL_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b").unwrap()); const 
FEEDBACK_CHAR_LIMIT: RangeInclusive = 10..=5000; const FEEDBACK_SUBMISSION_ERROR_TEXT: &str = "Feedback failed to submit, see error log for details."; @@ -319,7 +325,7 @@ impl FeedbackModal { let mut invalid_state_flags = InvalidStateFlags::empty(); let valid_email_address = match self.email_address_editor.read(cx).text_option(cx) { - Some(email_address) => Regex::new(EMAIL_REGEX).unwrap().is_match(&email_address), + Some(email_address) => EMAIL_REGEX.is_match(&email_address), None => true, }; diff --git a/crates/feedback/src/system_specs.rs b/crates/feedback/src/system_specs.rs index a570180187..c367fe609f 100644 --- a/crates/feedback/src/system_specs.rs +++ b/crates/feedback/src/system_specs.rs @@ -1,11 +1,10 @@ use client::telemetry; -use gpui::Task; +use gpui::{Task, WindowContext}; use human_bytes::human_bytes; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use serde::Serialize; use std::{env, fmt::Display}; use sysinfo::{MemoryRefreshKind, RefreshKind, System}; -use ui::WindowContext; #[derive(Clone, Debug, Serialize)] pub struct SystemSpecs { diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index f9c058de23..c5c620b4f4 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod file_finder_tests; -mod file_finder_settings; +pub mod file_finder_settings; mod new_path_prompt; mod open_path_prompt; @@ -10,7 +10,7 @@ pub use open_path_prompt::OpenPathDelegate; use collections::HashMap; use editor::{scroll::Autoscroll, Bias, Editor}; -use file_finder_settings::FileFinderSettings; +use file_finder_settings::{FileFinderSettings, FileFinderWidth}; use file_icons::FileIcons; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ @@ -42,7 +42,7 @@ use workspace::{ Workspace, }; -actions!(file_finder, [SelectPrev, OpenMenu]); +actions!(file_finder, [SelectPrev, ToggleMenu]); impl ModalView for FileFinder { fn on_before_dismiss(&mut 
self, cx: &mut ViewContext) -> workspace::DismissDecision { @@ -189,10 +189,12 @@ impl FileFinder { cx.dispatch_action(Box::new(menu::SelectPrev)); } - fn handle_open_menu(&mut self, _: &OpenMenu, cx: &mut ViewContext) { + fn handle_toggle_menu(&mut self, _: &ToggleMenu, cx: &mut ViewContext) { self.picker.update(cx, |picker, cx| { let menu_handle = &picker.delegate.popover_menu_handle; - if !menu_handle.is_deployed() { + if menu_handle.is_deployed() { + menu_handle.hide(cx); + } else { menu_handle.show(cx); } }); @@ -244,6 +246,22 @@ impl FileFinder { } }) } + + pub fn modal_max_width( + width_setting: Option, + cx: &mut ViewContext, + ) -> Pixels { + let window_width = cx.viewport_size().width; + let small_width = Pixels(545.); + + match width_setting { + None | Some(FileFinderWidth::Small) => small_width, + Some(FileFinderWidth::Full) => window_width, + Some(FileFinderWidth::XLarge) => (window_width - Pixels(512.)).max(small_width), + Some(FileFinderWidth::Large) => (window_width - Pixels(768.)).max(small_width), + Some(FileFinderWidth::Medium) => (window_width - Pixels(1024.)).max(small_width), + } + } } impl EventEmitter for FileFinder {} @@ -258,16 +276,15 @@ impl Render for FileFinder { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let key_context = self.picker.read(cx).delegate.key_context(cx); - let window_max_width: Pixels = cx.viewport_size().width; - let modal_choice = FileFinderSettings::get_global(cx).modal_width; - let width = modal_choice.calc_width(window_max_width); + let file_finder_settings = FileFinderSettings::get_global(cx); + let modal_max_width = Self::modal_max_width(file_finder_settings.modal_max_width, cx); v_flex() .key_context(key_context) - .w(width) + .w(modal_max_width) .on_modifiers_changed(cx.listener(Self::handle_modifiers_changed)) .on_action(cx.listener(Self::handle_select_prev)) - .on_action(cx.listener(Self::handle_open_menu)) + .on_action(cx.listener(Self::handle_toggle_menu)) 
.on_action(cx.listener(Self::go_to_file_split_left)) .on_action(cx.listener(Self::go_to_file_split_right)) .on_action(cx.listener(Self::go_to_file_split_up)) @@ -631,7 +648,7 @@ impl FileFinderDelegate { cx.subscribe(project, |file_finder, _, event, cx| { match event { project::Event::WorktreeUpdatedEntries(_, _) - | project::Event::WorktreeAdded + | project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => file_finder .picker .update(cx, |picker, cx| picker.refresh(cx)), @@ -867,7 +884,7 @@ impl FileFinderDelegate { fn lookup_absolute_path( &self, query: FileSearchQuery, - cx: &mut ViewContext<'_, Picker>, + cx: &mut ViewContext>, ) -> Task<()> { cx.spawn(|picker, mut cx| async move { let Some(project) = picker @@ -1211,7 +1228,7 @@ impl PickerDelegate for FileFinderDelegate { .start_slot::(file_icon) .end_slot::(history_icon) .inset(true) - .selected(selected) + .toggle_state(selected) .child( h_flex() .gap_2() @@ -1227,6 +1244,7 @@ impl PickerDelegate for FileFinderDelegate { } fn render_footer(&self, cx: &mut ViewContext>) -> Option { + let context = self.focus_handle.clone(); Some( h_flex() .w_full() @@ -1243,24 +1261,24 @@ impl PickerDelegate for FileFinderDelegate { .child( PopoverMenu::new("menu-popover") .with_handle(self.popover_menu_handle.clone()) - .attach(gpui::AnchorCorner::TopRight) - .anchor(gpui::AnchorCorner::BottomRight) + .attach(gpui::Corner::TopRight) + .anchor(gpui::Corner::BottomRight) .trigger( Button::new("actions-trigger", "Split Options") .selected_label_color(Color::Accent) - .key_binding(KeyBinding::for_action_in( - &OpenMenu, - &self.focus_handle, - cx, - )), + .key_binding(KeyBinding::for_action_in(&ToggleMenu, &context, cx)), ) .menu({ move |cx| { - Some(ContextMenu::build(cx, move |menu, _| { - menu.action("Split Left", pane::SplitLeft.boxed_clone()) - .action("Split Right", pane::SplitRight.boxed_clone()) - .action("Split Up", pane::SplitUp.boxed_clone()) - .action("Split Down", pane::SplitDown.boxed_clone()) + 
Some(ContextMenu::build(cx, { + let context = context.clone(); + move |menu, _| { + menu.context(context) + .action("Split Left", pane::SplitLeft.boxed_clone()) + .action("Split Right", pane::SplitRight.boxed_clone()) + .action("Split Up", pane::SplitUp.boxed_clone()) + .action("Split Down", pane::SplitDown.boxed_clone()) + } })) } }), diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs index 4379c8f543..0512021d87 100644 --- a/crates/file_finder/src/file_finder_settings.rs +++ b/crates/file_finder/src/file_finder_settings.rs @@ -2,13 +2,11 @@ use anyhow::Result; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -use std::cmp; -use ui::Pixels; #[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct FileFinderSettings { pub file_icons: bool, - pub modal_width: FileFinderWidth, + pub modal_max_width: Option, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] @@ -17,10 +15,10 @@ pub struct FileFinderSettingsContent { /// /// Default: true pub file_icons: Option, - /// The width of the file finder modal. + /// Determines how much space the file finder can take up in relation to the available window width. 
/// - /// Default: "medium" - pub modal_width: Option, + /// Default: small + pub modal_max_width: Option, } impl Settings for FileFinderSettings { @@ -36,40 +34,10 @@ impl Settings for FileFinderSettings { #[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "lowercase")] pub enum FileFinderWidth { - Small, #[default] + Small, Medium, Large, XLarge, Full, } - -impl FileFinderWidth { - const MIN_MODAL_WIDTH_PX: f32 = 384.; - - pub fn padding_px(&self) -> Pixels { - let padding_val = match self { - FileFinderWidth::Small => 1280., - FileFinderWidth::Medium => 1024., - FileFinderWidth::Large => 768., - FileFinderWidth::XLarge => 512., - FileFinderWidth::Full => 0., - }; - - Pixels(padding_val) - } - - pub fn calc_width(&self, window_width: Pixels) -> Pixels { - if self == &FileFinderWidth::Full { - return window_width; - } - - let min_modal_width_px = Pixels(FileFinderWidth::MIN_MODAL_WIDTH_PX); - - let padding_px = self.padding_px(); - let width_val = window_width - padding_px; - let finder_width = cmp::max(min_modal_width_px, width_val); - - finder_width - } -} diff --git a/crates/file_finder/src/new_path_prompt.rs b/crates/file_finder/src/new_path_prompt.rs index d4492857b4..7c36b67c36 100644 --- a/crates/file_finder/src/new_path_prompt.rs +++ b/crates/file_finder/src/new_path_prompt.rs @@ -71,8 +71,16 @@ impl Match { fn project_path(&self, project: &Project, cx: &WindowContext) -> Option { let worktree_id = if let Some(path_match) = &self.path_match { WorktreeId::from_usize(path_match.worktree_id) + } else if let Some(worktree) = project.visible_worktrees(cx).find(|worktree| { + worktree + .read(cx) + .root_entry() + .is_some_and(|entry| entry.is_dir()) + }) { + worktree.read(cx).id() } else { - project.worktrees(cx).next()?.read(cx).id() + // todo(): we should find_or_create a workspace. 
+ return None; }; let path = PathBuf::from(self.relative_path()); @@ -406,7 +414,7 @@ impl PickerDelegate for NewPathDelegate { ListItem::new(ix) .spacing(ListItemSpacing::Sparse) .inset(true) - .selected(selected) + .toggle_state(selected) .child(LabelLike::new().child(m.styled_text(self.project.read(cx), cx))), ) } diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index be1e91b482..cd4b69f613 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -131,7 +131,7 @@ impl PickerDelegate for OpenPathDelegate { .iter() .enumerate() .map(|(ix, path)| { - StringMatchCandidate::new(ix, path.to_string_lossy().into()) + StringMatchCandidate::new(ix, &path.to_string_lossy()) }) .collect::>(); @@ -283,7 +283,7 @@ impl PickerDelegate for OpenPathDelegate { ListItem::new(ix) .spacing(ListItemSpacing::Sparse) .inset(true) - .selected(selected) + .toggle_state(selected) .child(LabelLike::new().child(candidate.string.clone())), ) } diff --git a/crates/file_icons/src/file_icons.rs b/crates/file_icons/src/file_icons.rs index 056a39a658..a0b39fb763 100644 --- a/crates/file_icons/src/file_icons.rs +++ b/crates/file_icons/src/file_icons.rs @@ -51,8 +51,8 @@ impl FileIcons { pub fn get_icon(path: &Path, cx: &AppContext) -> Option { let this = cx.try_global::()?; - // FIXME: Associate a type with the languages and have the file's language - // override these associations + // TODO: Associate a type with the languages and have the file's language + // override these associations maybe!({ let suffix = path.icon_stem_or_suffix()?; diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index a9dbb751b6..f31f378ea2 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -21,9 +21,11 @@ git.workspace = true git2.workspace = true gpui.workspace = true libc.workspace = true +log.workspace = true parking_lot.workspace = true paths.workspace = true rope.workspace = true 
+proto.workspace = true serde.workspace = true serde_json.workspace = true smol.workspace = true @@ -45,9 +47,12 @@ windows.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] ashpd.workspace = true +which.workspace = true +shlex.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } [features] test-support = ["gpui/test-support", "git/test-support"] + diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 268a9d3f32..a88114ed4b 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -1,8 +1,8 @@ #[cfg(target_os = "macos")] mod mac_watcher; -#[cfg(any(target_os = "linux", target_os = "freebsd"))] -pub mod linux_watcher; +#[cfg(not(target_os = "macos"))] +pub mod fs_watcher; use anyhow::{anyhow, Result}; use git::GitHostingProviderRegistry; @@ -10,7 +10,8 @@ use git::GitHostingProviderRegistry; #[cfg(any(target_os = "linux", target_os = "freebsd"))] use ashpd::desktop::trash; #[cfg(any(target_os = "linux", target_os = "freebsd"))] -use std::fs::File; +use smol::process::Command; + #[cfg(unix)] use std::os::fd::AsFd; #[cfg(unix)] @@ -27,13 +28,14 @@ use futures::{future::BoxFuture, AsyncRead, Stream, StreamExt}; use git::repository::{GitRepository, RealGitRepository}; use gpui::{AppContext, Global, ReadGlobal}; use rope::Rope; +use serde::{Deserialize, Serialize}; use smol::io::AsyncWriteExt; use std::{ io::{self, Write}, path::{Component, Path, PathBuf}, pin::Pin, sync::Arc, - time::{Duration, SystemTime}, + time::{Duration, SystemTime, UNIX_EPOCH}, }; use tempfile::{NamedTempFile, TempDir}; use text::LineEnding; @@ -131,7 +133,7 @@ pub trait Fs: Send + Sync { async fn is_case_sensitive(&self) -> Result; #[cfg(any(test, feature = "test-support"))] - fn as_fake(&self) -> &FakeFs { + fn as_fake(&self) -> Arc { panic!("called as_fake on a real fs"); } } @@ -179,13 +181,62 @@ pub struct RemoveOptions { #[derive(Copy, Clone, Debug)] pub struct Metadata { pub inode: u64, - pub 
mtime: SystemTime, + pub mtime: MTime, pub is_symlink: bool, pub is_dir: bool, pub len: u64, pub is_fifo: bool, } +/// Filesystem modification time. The purpose of this newtype is to discourage use of operations +/// that do not make sense for mtimes. In particular, it is not always valid to compare mtimes using +/// `<` or `>`, as there are many things that can cause the mtime of a file to be earlier than it +/// was. See ["mtime comparison considered harmful" - apenwarr](https://apenwarr.ca/log/20181113). +/// +/// Do not derive Ord, PartialOrd, or arithmetic operation traits. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)] +#[serde(transparent)] +pub struct MTime(SystemTime); + +impl MTime { + /// Conversion intended for persistence and testing. + pub fn from_seconds_and_nanos(secs: u64, nanos: u32) -> Self { + MTime(UNIX_EPOCH + Duration::new(secs, nanos)) + } + + /// Conversion intended for persistence. + pub fn to_seconds_and_nanos_for_persistence(self) -> Option<(u64, u32)> { + self.0 + .duration_since(UNIX_EPOCH) + .ok() + .map(|duration| (duration.as_secs(), duration.subsec_nanos())) + } + + /// Returns the value wrapped by this `MTime`, for presentation to the user. The name including + /// "_for_user" is to discourage misuse - this method should not be used when making decisions + /// about file dirtiness. + pub fn timestamp_for_user(self) -> SystemTime { + self.0 + } + + /// Temporary method to split out the behavior changes from introduction of this newtype. 
+ pub fn bad_is_greater_than(self, other: MTime) -> bool { + self.0 > other.0 + } +} + +impl From for MTime { + fn from(timestamp: proto::Timestamp) -> Self { + MTime(timestamp.into()) + } +} + +impl From for proto::Timestamp { + fn from(mtime: MTime) -> Self { + mtime.0.into() + } +} + #[derive(Default)] pub struct RealFs { git_hosting_provider_registry: Arc, @@ -393,7 +444,13 @@ impl Fs for RealFs { #[cfg(any(target_os = "linux", target_os = "freebsd"))] async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { - let file = File::open(path)?; + if let Ok(Some(metadata)) = self.metadata(path).await { + if metadata.is_symlink { + // TODO: trash_file does not support trashing symlinks yet - https://github.com/bilelmoussaoui/ashpd/issues/255 + return self.remove_file(path, RemoveOptions::default()).await; + } + } + let file = smol::fs::File::open(path).await?; match trash::trash_file(&file.as_fd()).await { Ok(_) => Ok(()), Err(err) => Err(anyhow::Error::new(err)), @@ -402,18 +459,16 @@ impl Fs for RealFs { #[cfg(target_os = "windows")] async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { + use util::paths::SanitizedPath; use windows::{ core::HSTRING, Storage::{StorageDeleteOption, StorageFile}, }; // todo(windows) // When new version of `windows-rs` release, make this operation `async` - let path = path.canonicalize()?.to_string_lossy().to_string(); - let path_str = path.trim_start_matches("\\\\?\\"); - if path_str.is_empty() { - anyhow::bail!("File path is empty!"); - } - let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_str))?.get()?; + let path = SanitizedPath::from(path.canonicalize()?); + let path_string = path.to_string(); + let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_string))?.get()?; file.DeleteAsync(StorageDeleteOption::Default)?.get()?; Ok(()) } @@ -430,19 +485,17 @@ impl Fs for RealFs { #[cfg(target_os = "windows")] async fn trash_dir(&self, path: &Path, _options: 
RemoveOptions) -> Result<()> { + use util::paths::SanitizedPath; use windows::{ core::HSTRING, Storage::{StorageDeleteOption, StorageFolder}, }; - let path = path.canonicalize()?.to_string_lossy().to_string(); - let path_str = path.trim_start_matches("\\\\?\\"); - if path_str.is_empty() { - anyhow::bail!("Folder path is empty!"); - } // todo(windows) // When new version of `windows-rs` release, make this operation `async` - let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_str))?.get()?; + let path = SanitizedPath::from(path.canonicalize()?); + let path_string = path.to_string(); + let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_string))?.get()?; folder.DeleteAsync(StorageDeleteOption::Default)?.get()?; Ok(()) } @@ -468,24 +521,7 @@ impl Fs for RealFs { async fn atomic_write(&self, path: PathBuf, data: String) -> Result<()> { smol::unblock(move || { - let mut tmp_file = if cfg!(any(target_os = "linux", target_os = "freebsd")) { - // Use the directory of the destination as temp dir to avoid - // invalid cross-device link error, and XDG_CACHE_DIR for fallback. - // See https://github.com/zed-industries/zed/pull/8437 for more details. - NamedTempFile::new_in(path.parent().unwrap_or(paths::temp_dir())) - } else if cfg!(target_os = "windows") { - // If temp dir is set to a different drive than the destination, - // we receive error: - // - // failed to persist temporary file: - // The system cannot move the file to a different disk drive. (os error 17) - // - // So we use the directory of the destination as a temp dir to avoid it. 
- // https://github.com/zed-industries/zed/issues/16571 - NamedTempFile::new_in(path.parent().unwrap_or(paths::temp_dir())) - } else { - NamedTempFile::new() - }?; + let mut tmp_file = create_temp_file(&path)?; tmp_file.write_all(data.as_bytes())?; tmp_file.persist(path)?; Ok::<(), anyhow::Error>(()) @@ -500,13 +536,43 @@ impl Fs for RealFs { if let Some(path) = path.parent() { self.create_dir(path).await?; } - let file = smol::fs::File::create(path).await?; - let mut writer = smol::io::BufWriter::with_capacity(buffer_size, file); - for chunk in chunks(text, line_ending) { - writer.write_all(chunk.as_bytes()).await?; + match smol::fs::File::create(path).await { + Ok(file) => { + let mut writer = smol::io::BufWriter::with_capacity(buffer_size, file); + for chunk in chunks(text, line_ending) { + writer.write_all(chunk.as_bytes()).await?; + } + writer.flush().await?; + Ok(()) + } + Err(e) if e.kind() == std::io::ErrorKind::PermissionDenied => { + if cfg!(any(target_os = "linux", target_os = "freebsd")) { + let target_path = path.to_path_buf(); + let temp_file = smol::unblock(move || create_temp_file(&target_path)).await?; + + let temp_path = temp_file.into_temp_path(); + let temp_path_for_write = temp_path.to_path_buf(); + + let async_file = smol::fs::OpenOptions::new() + .write(true) + .open(&temp_path) + .await?; + + let mut writer = smol::io::BufWriter::with_capacity(buffer_size, async_file); + + for chunk in chunks(text, line_ending) { + writer.write_all(chunk.as_bytes()).await?; + } + writer.flush().await?; + + write_to_file_as_root(temp_path_for_write, path.to_path_buf()).await + } else { + // Todo: Implement for Mac and Windows + Err(e.into()) + } + } + Err(e) => Err(e.into()), } - writer.flush().await?; - Ok(()) } async fn canonicalize(&self, path: &Path) -> Result { @@ -558,7 +624,7 @@ impl Fs for RealFs { Ok(Some(Metadata { inode, - mtime: metadata.modified().unwrap(), + mtime: MTime(metadata.modified().unwrap()), len: metadata.len(), is_symlink, is_dir: 
metadata.file_type().is_dir(), @@ -634,7 +700,7 @@ impl Fs for RealFs { ) } - #[cfg(any(target_os = "linux", target_os = "freebsd"))] + #[cfg(not(target_os = "macos"))] async fn watch( &self, path: &Path, @@ -644,19 +710,32 @@ impl Fs for RealFs { Arc, ) { use parking_lot::Mutex; + use util::paths::SanitizedPath; let (tx, rx) = smol::channel::unbounded(); let pending_paths: Arc>> = Default::default(); - let watcher = Arc::new(linux_watcher::LinuxWatcher::new(tx, pending_paths.clone())); + let watcher = Arc::new(fs_watcher::FsWatcher::new(tx, pending_paths.clone())); - watcher.add(&path).ok(); // Ignore "file doesn't exist error" and rely on parent watcher. - if let Some(parent) = path.parent() { - // watch the parent dir so we can tell when settings.json is created - watcher.add(parent).log_err(); + if watcher.add(path).is_err() { + // If the path doesn't exist yet (e.g. settings.json), watch the parent dir to learn when it's created. + if let Some(parent) = path.parent() { + if let Err(e) = watcher.add(parent) { + log::warn!("Failed to watch: {e}"); + } + } } // Check if path is a symlink and follow the target parent - if let Some(target) = self.read_link(&path).await.ok() { + if let Some(mut target) = self.read_link(&path).await.ok() { + // Check if symlink target is relative path, if so make it absolute + if target.is_relative() { + if let Some(parent) = path.parent() { + target = parent.join(target); + if let Ok(canonical) = self.canonicalize(&target).await { + target = SanitizedPath::from(canonical).as_path().to_path_buf(); + } + } + } watcher.add(&target).ok(); if let Some(parent) = target.parent() { watcher.add(parent).log_err(); @@ -680,58 +759,11 @@ impl Fs for RealFs { ) } - #[cfg(target_os = "windows")] - async fn watch( - &self, - path: &Path, - _latency: Duration, - ) -> ( - Pin>>>, - Arc, - ) { - use notify::{EventKind, Watcher}; - - let (tx, rx) = smol::channel::unbounded(); - - let mut file_watcher = notify::recommended_watcher({ - let tx = 
tx.clone(); - move |event: Result| { - if let Some(event) = event.log_err() { - let kind = match event.kind { - EventKind::Create(_) => Some(PathEventKind::Created), - EventKind::Modify(_) => Some(PathEventKind::Changed), - EventKind::Remove(_) => Some(PathEventKind::Removed), - _ => None, - }; - - tx.try_send( - event - .paths - .into_iter() - .map(|path| PathEvent { path, kind }) - .collect::>(), - ) - .ok(); - } - } - }) - .expect("Could not start file watcher"); - - file_watcher - .watch(path, notify::RecursiveMode::Recursive) - .log_err(); - - ( - Box::pin(rx.chain(futures::stream::once(async move { - drop(file_watcher); - vec![] - }))), - Arc::new(RealWatcher {}), - ) - } - fn open_repo(&self, dotgit_path: &Path) -> Option> { - let repo = git2::Repository::open(dotgit_path).log_err()?; + // with libgit2, we can open git repo from an existing work dir + // https://libgit2.org/docs/reference/main/repository/git_repository_open.html + let workdir_root = dotgit_path.parent()?; + let repo = git2::Repository::open(workdir_root).log_err()?; Some(Arc::new(RealGitRepository::new( repo, self.git_binary_path.clone(), @@ -794,6 +826,7 @@ impl Watcher for RealWatcher { #[cfg(any(test, feature = "test-support"))] pub struct FakeFs { + this: std::sync::Weak, // Use an unfair lock to ensure tests are deterministic. 
state: Mutex, executor: gpui::BackgroundExecutor, @@ -818,13 +851,13 @@ struct FakeFsState { enum FakeFsEntry { File { inode: u64, - mtime: SystemTime, + mtime: MTime, len: u64, content: Vec, }, Dir { inode: u64, - mtime: SystemTime, + mtime: MTime, len: u64, entries: BTreeMap>>, git_repo_state: Option>>, @@ -836,6 +869,18 @@ enum FakeFsEntry { #[cfg(any(test, feature = "test-support"))] impl FakeFsState { + fn get_and_increment_mtime(&mut self) -> MTime { + let mtime = self.next_mtime; + self.next_mtime += FakeFs::SYSTEMTIME_INTERVAL; + MTime(mtime) + } + + fn get_and_increment_inode(&mut self) -> u64 { + let inode = self.next_inode; + self.next_inode += 1; + inode + } + fn read_path(&self, target: &Path) -> Result>> { Ok(self .try_read_path(target, true) @@ -959,23 +1004,24 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> = impl FakeFs { /// We need to use something large enough for Windows and Unix to consider this a new file. /// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior - const SYSTEMTIME_INTERVAL: u64 = 100; + const SYSTEMTIME_INTERVAL: Duration = Duration::from_nanos(100); pub fn new(executor: gpui::BackgroundExecutor) -> Arc { let (tx, mut rx) = smol::channel::bounded::(10); - let this = Arc::new(Self { + let this = Arc::new_cyclic(|this| Self { + this: this.clone(), executor: executor.clone(), state: Mutex::new(FakeFsState { root: Arc::new(Mutex::new(FakeFsEntry::Dir { inode: 0, - mtime: SystemTime::UNIX_EPOCH, + mtime: MTime(UNIX_EPOCH), len: 0, entries: Default::default(), git_repo_state: None, })), git_event_tx: tx, - next_mtime: SystemTime::UNIX_EPOCH, + next_mtime: UNIX_EPOCH + Self::SYSTEMTIME_INTERVAL, next_inode: 1, event_txs: Default::default(), buffered_events: Vec::new(), @@ -1007,13 +1053,16 @@ impl FakeFs { state.next_mtime = next_mtime; } + pub fn get_and_increment_mtime(&self) -> MTime { + let mut state = self.state.lock(); + state.get_and_increment_mtime() + } + pub async fn 
touch_path(&self, path: impl AsRef) { let mut state = self.state.lock(); let path = path.as_ref(); - let new_mtime = state.next_mtime; - let new_inode = state.next_inode; - state.next_inode += 1; - state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); + let new_mtime = state.get_and_increment_mtime(); + let new_inode = state.get_and_increment_inode(); state .write_path(path, move |entry| { match entry { @@ -1062,19 +1111,14 @@ impl FakeFs { fn write_file_internal(&self, path: impl AsRef, content: Vec) -> Result<()> { let mut state = self.state.lock(); - let path = path.as_ref(); - let inode = state.next_inode; - let mtime = state.next_mtime; - state.next_inode += 1; - state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let file = Arc::new(Mutex::new(FakeFsEntry::File { - inode, - mtime, + inode: state.get_and_increment_inode(), + mtime: state.get_and_increment_mtime(), len: content.len() as u64, content, })); let mut kind = None; - state.write_path(path, { + state.write_path(path.as_ref(), { let kind = &mut kind; move |entry| { match entry { @@ -1090,7 +1134,7 @@ impl FakeFs { Ok(()) } })?; - state.emit_event([(path, kind)]); + state.emit_event([(path.as_ref(), kind)]); Ok(()) } @@ -1383,16 +1427,6 @@ impl FakeFsEntry { } } - fn set_file_content(&mut self, path: &Path, new_content: Vec) -> Result<()> { - if let Self::File { content, mtime, .. 
} = self { - *mtime = SystemTime::now(); - *content = new_content; - Ok(()) - } else { - Err(anyhow!("not a file: {}", path.display())) - } - } - fn dir_entries( &mut self, path: &Path, @@ -1428,7 +1462,8 @@ struct FakeHandle { #[cfg(any(test, feature = "test-support"))] impl FileHandle for FakeHandle { fn current_path(&self, fs: &Arc) -> Result { - let state = fs.as_fake().state.lock(); + let fs = fs.as_fake(); + let state = fs.state.lock(); let Some(target) = state.moves.get(&self.inode) else { anyhow::bail!("fake fd not moved") }; @@ -1456,10 +1491,8 @@ impl Fs for FakeFs { } let mut state = self.state.lock(); - let inode = state.next_inode; - let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); - state.next_inode += 1; + let inode = state.get_and_increment_inode(); + let mtime = state.get_and_increment_mtime(); state.write_path(&cur_path, |entry| { entry.or_insert_with(|| { created_dirs.push((cur_path.clone(), Some(PathEventKind::Created))); @@ -1482,10 +1515,8 @@ impl Fs for FakeFs { async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> { self.simulate_random_delay().await; let mut state = self.state.lock(); - let inode = state.next_inode; - let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); - state.next_inode += 1; + let inode = state.get_and_increment_inode(); + let mtime = state.get_and_increment_mtime(); let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, @@ -1625,13 +1656,12 @@ impl Fs for FakeFs { let source = normalize_path(source); let target = normalize_path(target); let mut state = self.state.lock(); - let mtime = state.next_mtime; - let inode = util::post_inc(&mut state.next_inode); - state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); + let mtime = state.get_and_increment_mtime(); + let inode = state.get_and_increment_inode(); let source_entry = state.read_path(&source)?; let content = 
source_entry.lock().file_content(&source)?.clone(); let mut kind = Some(PathEventKind::Created); - let entry = state.write_path(&target, |e| match e { + state.write_path(&target, |e| match e { btree_map::Entry::Occupied(e) => { if options.overwrite { kind = Some(PathEventKind::Changed); @@ -1647,14 +1677,11 @@ impl Fs for FakeFs { inode, mtime, len: content.len() as u64, - content: Vec::new(), + content, }))) .clone(), )), })?; - if let Some(entry) = entry { - entry.lock().set_file_content(&target, content)?; - } state.emit_event([(target, kind)]); Ok(()) } @@ -1932,8 +1959,8 @@ impl Fs for FakeFs { } #[cfg(any(test, feature = "test-support"))] - fn as_fake(&self) -> &FakeFs { - self + fn as_fake(&self) -> Arc { + self.this.upgrade().unwrap() } } @@ -1952,6 +1979,84 @@ fn chunks(rope: &Rope, line_ending: LineEnding) -> impl Iterator { }) } +fn create_temp_file(path: &Path) -> Result { + let temp_file = if cfg!(any(target_os = "linux", target_os = "freebsd")) { + // Use the directory of the destination as temp dir to avoid + // invalid cross-device link error, and XDG_CACHE_DIR for fallback. + // See https://github.com/zed-industries/zed/pull/8437 for more details. + NamedTempFile::new_in(path.parent().unwrap_or(paths::temp_dir()))? + } else if cfg!(target_os = "windows") { + // If temp dir is set to a different drive than the destination, + // we receive error: + // + // failed to persist temporary file: + // The system cannot move the file to a different disk drive. (os error 17) + // + // So we use the directory of the destination as a temp dir to avoid it. + // https://github.com/zed-industries/zed/issues/16571 + NamedTempFile::new_in(path.parent().unwrap_or(paths::temp_dir()))? + } else { + NamedTempFile::new()? 
+ }; + + Ok(temp_file) +} + +#[cfg(target_os = "macos")] +async fn write_to_file_as_root(_temp_file_path: PathBuf, _target_file_path: PathBuf) -> Result<()> { + unimplemented!("write_to_file_as_root is not implemented") +} + +#[cfg(target_os = "windows")] +async fn write_to_file_as_root(_temp_file_path: PathBuf, _target_file_path: PathBuf) -> Result<()> { + unimplemented!("write_to_file_as_root is not implemented") +} + +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +async fn write_to_file_as_root(temp_file_path: PathBuf, target_file_path: PathBuf) -> Result<()> { + use shlex::try_quote; + use std::os::unix::fs::PermissionsExt; + use which::which; + + let pkexec_path = smol::unblock(|| which("pkexec")) + .await + .map_err(|_| anyhow::anyhow!("pkexec not found in PATH"))?; + + let script_file = smol::unblock(move || { + let script_file = tempfile::Builder::new() + .prefix("write-to-file-as-root-") + .tempfile_in(paths::temp_dir())?; + + writeln!( + script_file.as_file(), + "#!/usr/bin/env sh\nset -eu\ncat \"{}\" > \"{}\"", + try_quote(&temp_file_path.to_string_lossy())?, + try_quote(&target_file_path.to_string_lossy())? 
+ )?; + + let mut perms = script_file.as_file().metadata()?.permissions(); + perms.set_mode(0o700); // rwx------ + script_file.as_file().set_permissions(perms)?; + + Result::<_>::Ok(script_file) + }) + .await?; + + let script_path = script_file.into_temp_path(); + + let output = Command::new(&pkexec_path) + .arg("--disable-internal-agent") + .arg(&script_path) + .output() + .await?; + + if !output.status.success() { + return Err(anyhow::anyhow!("Failed to write to file as root")); + } + + Ok(()) +} + pub fn normalize_path(path: &Path) -> PathBuf { let mut components = path.components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { diff --git a/crates/fs/src/linux_watcher.rs b/crates/fs/src/fs_watcher.rs similarity index 83% rename from crates/fs/src/linux_watcher.rs rename to crates/fs/src/fs_watcher.rs index cb2ac4826f..2a6c309dc9 100644 --- a/crates/fs/src/linux_watcher.rs +++ b/crates/fs/src/fs_watcher.rs @@ -5,12 +5,12 @@ use util::ResultExt; use crate::{PathEvent, PathEventKind, Watcher}; -pub struct LinuxWatcher { +pub struct FsWatcher { tx: smol::channel::Sender<()>, pending_path_events: Arc>>, } -impl LinuxWatcher { +impl FsWatcher { pub fn new( tx: smol::channel::Sender<()>, pending_path_events: Arc>>, @@ -22,7 +22,7 @@ impl LinuxWatcher { } } -impl Watcher for LinuxWatcher { +impl Watcher for FsWatcher { fn add(&self, path: &std::path::Path) -> gpui::Result<()> { let root_path = path.to_path_buf(); @@ -69,7 +69,7 @@ impl Watcher for LinuxWatcher { })?; global(|g| { - g.inotify + g.watcher .lock() .watch(path, notify::RecursiveMode::NonRecursive) })??; @@ -79,16 +79,18 @@ impl Watcher for LinuxWatcher { fn remove(&self, path: &std::path::Path) -> gpui::Result<()> { use notify::Watcher; - Ok(global(|w| w.inotify.lock().unwatch(path))??) + Ok(global(|w| w.watcher.lock().unwatch(path))??) 
} } pub struct GlobalWatcher { - // two mutexes because calling inotify.add triggers an inotify.event, which needs watchers. + // two mutexes because calling watcher.add triggers an watcher.event, which needs watchers. #[cfg(target_os = "linux")] - pub(super) inotify: Mutex, + pub(super) watcher: Mutex, #[cfg(target_os = "freebsd")] - pub(super) inotify: Mutex, + pub(super) watcher: Mutex, + #[cfg(target_os = "windows")] + pub(super) watcher: Mutex, pub(super) watchers: Mutex>>, } @@ -98,7 +100,8 @@ impl GlobalWatcher { } } -static INOTIFY_INSTANCE: OnceLock> = OnceLock::new(); +static FS_WATCHER_INSTANCE: OnceLock> = + OnceLock::new(); fn handle_event(event: Result) { let Some(event) = event.log_err() else { return }; @@ -111,9 +114,9 @@ fn handle_event(event: Result) { } pub fn global(f: impl FnOnce(&GlobalWatcher) -> T) -> anyhow::Result { - let result = INOTIFY_INSTANCE.get_or_init(|| { + let result = FS_WATCHER_INSTANCE.get_or_init(|| { notify::recommended_watcher(handle_event).map(|file_watcher| GlobalWatcher { - inotify: Mutex::new(file_watcher), + watcher: Mutex::new(file_watcher), watchers: Default::default(), }) }); diff --git a/crates/fuzzy/Cargo.toml b/crates/fuzzy/Cargo.toml index e3a016c98b..6d115fb1b5 100644 --- a/crates/fuzzy/Cargo.toml +++ b/crates/fuzzy/Cargo.toml @@ -15,3 +15,4 @@ doctest = false [dependencies] gpui.workspace = true util.workspace = true +log.workspace = true diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index ae56b84f1e..1b039c16f5 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -14,7 +14,6 @@ pub struct Matcher<'a> { lowercase_query: &'a [char], query_char_bag: CharBag, smart_case: bool, - max_results: usize, min_score: f64, match_positions: Vec, last_positions: Vec, @@ -22,11 +21,6 @@ pub struct Matcher<'a> { best_position_matrix: Vec, } -pub trait Match: Ord { - fn score(&self) -> f64; - fn set_positions(&mut self, positions: Vec); -} - pub trait MatchCandidate { fn 
has_chars(&self, bag: CharBag) -> bool; fn to_string(&self) -> Cow<'_, str>; @@ -38,7 +32,6 @@ impl<'a> Matcher<'a> { lowercase_query: &'a [char], query_char_bag: CharBag, smart_case: bool, - max_results: usize, ) -> Self { Self { query, @@ -50,10 +43,11 @@ impl<'a> Matcher<'a> { score_matrix: Vec::new(), best_position_matrix: Vec::new(), smart_case, - max_results, } } + /// Filter and score fuzzy match candidates. Results are returned unsorted, in the same order as + /// the input candidates. pub fn match_candidates( &mut self, prefix: &[char], @@ -63,8 +57,7 @@ impl<'a> Matcher<'a> { cancel_flag: &AtomicBool, build_match: F, ) where - R: Match, - F: Fn(&C, f64) -> R, + F: Fn(&C, f64, &Vec) -> R, { let mut candidate_chars = Vec::new(); let mut lowercase_candidate_chars = Vec::new(); @@ -103,20 +96,7 @@ impl<'a> Matcher<'a> { ); if score > 0.0 { - let mut mat = build_match(&candidate, score); - if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) { - if results.len() < self.max_results { - mat.set_positions(self.match_positions.clone()); - results.insert(i, mat); - } else if i < results.len() { - results.pop(); - mat.set_positions(self.match_positions.clone()); - results.insert(i, mat); - } - if results.len() == self.max_results { - self.min_score = results.last().unwrap().score(); - } - } + results.push(build_match(&candidate, score, &self.match_positions)); } } } @@ -325,18 +305,18 @@ mod tests { #[test] fn test_get_last_positions() { let mut query: &[char] = &['d', 'c']; - let mut matcher = Matcher::new(query, query, query.into(), false, 10); + let mut matcher = Matcher::new(query, query, query.into(), false); let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']); assert!(!result); query = &['c', 'd']; - let mut matcher = Matcher::new(query, query, query.into(), false, 10); + let mut matcher = Matcher::new(query, query, query.into(), false); let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']); 
assert!(result); assert_eq!(matcher.last_positions, vec![2, 4]); query = &['z', '/', 'z', 'f']; - let mut matcher = Matcher::new(query, query, query.into(), false, 10); + let mut matcher = Matcher::new(query, query, query.into(), false); let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']); assert!(result); assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]); @@ -451,7 +431,7 @@ mod tests { }); } - let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100); + let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case); let cancel_flag = AtomicBool::new(false); let mut results = Vec::new(); @@ -462,16 +442,17 @@ mod tests { path_entries.into_iter(), &mut results, &cancel_flag, - |candidate, score| PathMatch { + |candidate, score, positions| PathMatch { score, worktree_id: 0, - positions: Vec::new(), + positions: positions.clone(), path: Arc::from(candidate.path), path_prefix: "".into(), distance_to_relative_ancestor: usize::MAX, is_dir: false, }, ); + results.sort_by(|a, b| b.cmp(a)); results .into_iter() diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index 2b4eec98ef..bc3e399dc2 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -3,11 +3,14 @@ use std::{ borrow::Cow, cmp::{self, Ordering}, path::Path, - sync::{atomic::AtomicBool, Arc}, + sync::{ + atomic::{self, AtomicBool}, + Arc, + }, }; use crate::{ - matcher::{Match, MatchCandidate, Matcher}, + matcher::{MatchCandidate, Matcher}, CharBag, }; @@ -42,16 +45,6 @@ pub trait PathMatchCandidateSet<'a>: Send + Sync { fn candidates(&'a self, start: usize) -> Self::Candidates; } -impl Match for PathMatch { - fn score(&self) -> f64 { - self.score - } - - fn set_positions(&mut self, positions: Vec) { - self.positions = positions; - } -} - impl<'a> MatchCandidate for PathMatchCandidate<'a> { fn has_chars(&self, bag: CharBag) -> bool { self.char_bag.is_superset(bag) @@ -102,13 +95,7 @@ pub fn 
match_fixed_path_set( let query = query.chars().collect::>(); let query_char_bag = CharBag::from(&lowercase_query[..]); - let mut matcher = Matcher::new( - &query, - &lowercase_query, - query_char_bag, - smart_case, - max_results, - ); + let mut matcher = Matcher::new(&query, &lowercase_query, query_char_bag, smart_case); let mut results = Vec::new(); matcher.match_candidates( @@ -117,16 +104,17 @@ pub fn match_fixed_path_set( candidates.into_iter(), &mut results, &AtomicBool::new(false), - |candidate, score| PathMatch { + |candidate, score, positions| PathMatch { score, worktree_id, - positions: Vec::new(), + positions: positions.clone(), is_dir: candidate.is_dir, path: Arc::from(candidate.path), path_prefix: Arc::default(), distance_to_relative_ancestor: usize::MAX, }, ); + util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| b.cmp(a)); results } @@ -164,16 +152,15 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( scope.spawn(async move { let segment_start = segment_idx * segment_size; let segment_end = segment_start + segment_size; - let mut matcher = Matcher::new( - query, - lowercase_query, - query_char_bag, - smart_case, - max_results, - ); + let mut matcher = + Matcher::new(query, lowercase_query, query_char_bag, smart_case); let mut tree_start = 0; for candidate_set in candidate_sets { + if cancel_flag.load(atomic::Ordering::Relaxed) { + break; + } + let tree_end = tree_start + candidate_set.len(); if tree_start < segment_end && segment_start < tree_end { @@ -193,10 +180,10 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( candidates, results, cancel_flag, - |candidate, score| PathMatch { + |candidate, score, positions| PathMatch { score, worktree_id, - positions: Vec::new(), + positions: positions.clone(), path: Arc::from(candidate.path), is_dir: candidate.is_dir, path_prefix: candidate_set.prefix(), @@ -222,14 +209,12 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( }) .await; - 
let mut results = Vec::new(); - for segment_result in segment_results { - if results.is_empty() { - results = segment_result; - } else { - util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a)); - } + if cancel_flag.load(atomic::Ordering::Relaxed) { + return Vec::new(); } + + let mut results = segment_results.concat(); + util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| b.cmp(a)); results } diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs index e1f6de37a5..458278739a 100644 --- a/crates/fuzzy/src/strings.rs +++ b/crates/fuzzy/src/strings.rs @@ -1,5 +1,5 @@ use crate::{ - matcher::{Match, MatchCandidate, Matcher}, + matcher::{MatchCandidate, Matcher}, CharBag, }; use gpui::BackgroundExecutor; @@ -8,7 +8,7 @@ use std::{ cmp::{self, Ordering}, iter, ops::Range, - sync::atomic::AtomicBool, + sync::atomic::{self, AtomicBool}, }; #[derive(Clone, Debug)] @@ -18,22 +18,12 @@ pub struct StringMatchCandidate { pub char_bag: CharBag, } -impl Match for StringMatch { - fn score(&self) -> f64 { - self.score - } - - fn set_positions(&mut self, positions: Vec) { - self.positions = positions; - } -} - impl StringMatchCandidate { - pub fn new(id: usize, string: String) -> Self { + pub fn new(id: usize, string: &str) -> Self { Self { id, - char_bag: CharBag::from(string.as_str()), - string, + string: string.into(), + char_bag: string.into(), } } } @@ -61,10 +51,24 @@ impl StringMatch { let mut positions = self.positions.iter().peekable(); iter::from_fn(move || { if let Some(start) = positions.next().copied() { - let mut end = start + self.char_len_at_index(start); + let Some(char_len) = self.char_len_at_index(start) else { + log::error!( + "Invariant violation: Index {start} out of range or not on a utf-8 boundary in string {:?}", + self.string + ); + return None; + }; + let mut end = start + char_len; while let Some(next_start) = positions.peek() { if end == **next_start { - end += self.char_len_at_index(end); + 
let Some(char_len) = self.char_len_at_index(end) else { + log::error!( + "Invariant violation: Index {end} out of range or not on a utf-8 boundary in string {:?}", + self.string + ); + return None; + }; + end += char_len; positions.next(); } else { break; @@ -77,8 +81,12 @@ impl StringMatch { }) } - fn char_len_at_index(&self, ix: usize) -> usize { - self.string[ix..].chars().next().unwrap().len_utf8() + /// Gets the byte length of the utf-8 character at a byte offset. If the index is out of range + /// or not on a utf-8 boundary then None is returned. + fn char_len_at_index(&self, ix: usize) -> Option { + self.string + .get(ix..) + .and_then(|slice| slice.chars().next().map(|char| char.len_utf8())) } } @@ -149,13 +157,8 @@ pub async fn match_strings( scope.spawn(async move { let segment_start = cmp::min(segment_idx * segment_size, candidates.len()); let segment_end = cmp::min(segment_start + segment_size, candidates.len()); - let mut matcher = Matcher::new( - query, - lowercase_query, - query_char_bag, - smart_case, - max_results, - ); + let mut matcher = + Matcher::new(query, lowercase_query, query_char_bag, smart_case); matcher.match_candidates( &[], @@ -163,10 +166,10 @@ pub async fn match_strings( candidates[segment_start..segment_end].iter(), results, cancel_flag, - |candidate, score| StringMatch { + |candidate, score, positions| StringMatch { candidate_id: candidate.id, score, - positions: Vec::new(), + positions: positions.clone(), string: candidate.string.to_string(), }, ); @@ -175,13 +178,11 @@ pub async fn match_strings( }) .await; - let mut results = Vec::new(); - for segment_result in segment_results { - if results.is_empty() { - results = segment_result; - } else { - util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a)); - } + if cancel_flag.load(atomic::Ordering::Relaxed) { + return Vec::new(); } + + let mut results = segment_results.concat(); + util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| 
b.cmp(a)); results } diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 06a46b3b76..d31538353e 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -14,7 +14,6 @@ path = "src/git.rs" [dependencies] anyhow.workspace = true async-trait.workspace = true -clock.workspace = true collections.workspace = true derive_more.workspace = true git2.workspace = true @@ -22,6 +21,7 @@ gpui.workspace = true http_client.workspace = true log.workspace = true parking_lot.workspace = true +regex.workspace = true rope.workspace = true serde.workspace = true smol.workspace = true @@ -31,10 +31,6 @@ time.workspace = true url.workspace = true util.workspace = true -[target.'cfg(target_os = "windows")'.dependencies] -windows.workspace = true - - [dev-dependencies] unindent.workspace = true serde_json.workspace = true diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 030309df96..8f87a8ca54 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -4,7 +4,7 @@ use anyhow::{anyhow, Context, Result}; use collections::{HashMap, HashSet}; use serde::{Deserialize, Serialize}; use std::io::Write; -use std::process::{Command, Stdio}; +use std::process::Stdio; use std::sync::Arc; use std::{ops::Range, path::Path}; use text::Rope; @@ -80,9 +80,7 @@ fn run_git_blame( path: &Path, contents: &Rope, ) -> Result { - let mut child = Command::new(git_binary); - - child + let child = util::command::new_std_command(git_binary) .current_dir(working_directory) .arg("blame") .arg("--incremental") @@ -91,15 +89,7 @@ fn run_git_blame( .arg(path.as_os_str()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - - #[cfg(windows)] - { - use std::os::windows::process::CommandExt; - child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - - let child = child + .stderr(Stdio::piped()) .spawn() .map_err(|e| anyhow!("Failed to start git blame process: {}", e))?; diff --git a/crates/git/src/commit.rs 
b/crates/git/src/commit.rs index bdac6ff287..f32ad226af 100644 --- a/crates/git/src/commit.rs +++ b/crates/git/src/commit.rs @@ -2,10 +2,6 @@ use crate::Oid; use anyhow::{anyhow, Result}; use collections::HashMap; use std::path::Path; -use std::process::Command; - -#[cfg(windows)] -use std::os::windows::process::CommandExt; pub fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result> { if shas.is_empty() { @@ -14,19 +10,12 @@ pub fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result, tree: SumTree, } impl BufferDiff { pub fn new(buffer: &BufferSnapshot) -> BufferDiff { BufferDiff { - last_buffer_version: None, tree: SumTree::new(buffer), } } + pub async fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self { + let mut tree = SumTree::new(buffer); + + let buffer_text = buffer.as_rope().to_string(); + let patch = Self::diff(diff_base, &buffer_text); + + if let Some(patch) = patch { + let mut divergence = 0; + for hunk_index in 0..patch.num_hunks() { + let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); + tree.push(hunk, buffer); + } + } + + Self { tree } + } + pub fn is_empty(&self) -> bool { self.tree.is_empty() } - #[cfg(any(test, feature = "test-support"))] pub fn hunks_in_row_range<'a>( &'a self, range: Range, @@ -169,27 +183,11 @@ impl BufferDiff { #[cfg(test)] fn clear(&mut self, buffer: &text::BufferSnapshot) { - self.last_buffer_version = Some(buffer.version().clone()); self.tree = SumTree::new(buffer); } pub async fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { - let mut tree = SumTree::new(buffer); - - let diff_base_text = diff_base.to_string(); - let buffer_text = buffer.as_rope().to_string(); - let patch = Self::diff(&diff_base_text, &buffer_text); - - if let Some(patch) = patch { - let mut divergence = 0; - for hunk_index in 0..patch.num_hunks() { - let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); - tree.push(hunk, buffer); - } - } - - 
self.tree = tree; - self.last_buffer_version = Some(buffer.version().clone()); + *self = Self::build(&diff_base.to_string(), buffer).await; } #[cfg(test)] diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index cf07b74ac5..c608c23cf3 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -16,6 +16,7 @@ use std::sync::LazyLock; pub use crate::hosting_provider::*; pub use crate::remote::*; pub use git2 as libgit; +pub use repository::WORK_DIRECTORY_REPO_PATH; pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies")); diff --git a/crates/git/src/remote.rs b/crates/git/src/remote.rs index 430836fcf3..e9814afc51 100644 --- a/crates/git/src/remote.rs +++ b/crates/git/src/remote.rs @@ -1,17 +1,23 @@ +use std::sync::LazyLock; + use derive_more::Deref; +use regex::Regex; use url::Url; /// The URL to a Git remote. #[derive(Debug, PartialEq, Eq, Clone, Deref)] pub struct RemoteUrl(Url); +static USERNAME_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"^[0-9a-zA-Z\-_]+@").expect("Failed to create USERNAME_REGEX")); + impl std::str::FromStr for RemoteUrl { type Err = url::ParseError; fn from_str(input: &str) -> Result { - if input.starts_with("git@") { + if USERNAME_REGEX.is_match(input) { // Rewrite remote URLs like `git@github.com:user/repo.git` to `ssh://git@github.com/user/repo.git` - let ssh_url = input.replacen(':', "/", 1).replace("git@", "ssh://git@"); + let ssh_url = format!("ssh://{}", input.replacen(':', "/", 1)); Ok(RemoteUrl(Url::parse(&ssh_url)?)) } else { Ok(RemoteUrl(Url::parse(input)?)) @@ -40,6 +46,12 @@ mod tests { "github.com", "/octocat/zed.git", ), + ( + "org-000000@github.com:octocat/zed.git", + "ssh", + "github.com", + "/octocat/zed.git", + ), ( "ssh://git@github.com/octocat/zed.git", "ssh", diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 78f6ece508..bb890150e5 100644 --- 
a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -7,6 +7,8 @@ use gpui::SharedString; use parking_lot::Mutex; use rope::Rope; use serde::{Deserialize, Serialize}; +use std::borrow::Borrow; +use std::sync::LazyLock; use std::{ cmp::Ordering, path::{Component, Path, PathBuf}, @@ -37,7 +39,8 @@ pub trait GitRepository: Send + Sync { /// Returns the SHA of the current HEAD. fn head_sha(&self) -> Option; - fn status(&self, path_prefixes: &[PathBuf]) -> Result; + /// Returns the list of git statuses, sorted by path + fn status(&self, path_prefixes: &[RepoPath]) -> Result; fn branches(&self) -> Result>; fn change_branch(&self, _: &str) -> Result<()>; @@ -46,7 +49,8 @@ pub trait GitRepository: Send + Sync { fn blame(&self, path: &Path, content: Rope) -> Result; - fn path(&self) -> PathBuf; + /// Returns the path to the repository, typically the `.git` folder. + fn dot_git_dir(&self) -> PathBuf; } impl std::fmt::Debug for dyn GitRepository { @@ -85,7 +89,7 @@ impl GitRepository for RealGitRepository { } } - fn path(&self) -> PathBuf { + fn dot_git_dir(&self) -> PathBuf { let repo = self.repository.lock(); repo.path().into() } @@ -131,7 +135,7 @@ impl GitRepository for RealGitRepository { Some(self.repository.lock().head().ok()?.target()?.to_string()) } - fn status(&self, path_prefixes: &[PathBuf]) -> Result { + fn status(&self, path_prefixes: &[RepoPath]) -> Result { let working_directory = self .repository .lock() @@ -233,7 +237,7 @@ pub struct FakeGitRepository { #[derive(Debug, Clone)] pub struct FakeGitRepositoryState { - pub path: PathBuf, + pub dot_git_dir: PathBuf, pub event_emitter: smol::channel::Sender, pub index_contents: HashMap, pub blames: HashMap, @@ -249,9 +253,9 @@ impl FakeGitRepository { } impl FakeGitRepositoryState { - pub fn new(path: PathBuf, event_emitter: smol::channel::Sender) -> Self { + pub fn new(dot_git_dir: PathBuf, event_emitter: smol::channel::Sender) -> Self { FakeGitRepositoryState { - path, + dot_git_dir, 
event_emitter, index_contents: Default::default(), blames: Default::default(), @@ -283,13 +287,14 @@ impl GitRepository for FakeGitRepository { None } - fn path(&self) -> PathBuf { + fn dot_git_dir(&self) -> PathBuf { let state = self.state.lock(); - state.path.clone() + state.dot_git_dir.clone() } - fn status(&self, path_prefixes: &[PathBuf]) -> Result { + fn status(&self, path_prefixes: &[RepoPath]) -> Result { let state = self.state.lock(); + let mut entries = state .worktree_statuses .iter() @@ -305,6 +310,7 @@ impl GitRepository for FakeGitRepository { }) .collect::>(); entries.sort_unstable_by(|a, b| a.0.cmp(&b.0)); + Ok(GitStatus { entries: entries.into(), }) @@ -334,7 +340,7 @@ impl GitRepository for FakeGitRepository { state.current_branch_name = Some(name.to_owned()); state .event_emitter - .try_send(state.path.clone()) + .try_send(state.dot_git_dir.clone()) .expect("Dropped repo change event"); Ok(()) } @@ -344,7 +350,7 @@ impl GitRepository for FakeGitRepository { state.branches.insert(name.to_owned()); state .event_emitter - .try_send(state.path.clone()) + .try_send(state.dot_git_dir.clone()) .expect("Dropped repo change event"); Ok(()) } @@ -393,6 +399,8 @@ pub enum GitFileStatus { Added, Modified, Conflict, + Deleted, + Untracked, } impl GitFileStatus { @@ -420,20 +428,34 @@ impl GitFileStatus { } } +pub static WORK_DIRECTORY_REPO_PATH: LazyLock = + LazyLock::new(|| RepoPath(Path::new("").into())); + #[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)] -pub struct RepoPath(pub PathBuf); +pub struct RepoPath(pub Arc); impl RepoPath { pub fn new(path: PathBuf) -> Self { debug_assert!(path.is_relative(), "Repo paths must be relative"); - RepoPath(path) + RepoPath(path.into()) + } + + pub fn from_str(path: &str) -> Self { + let path = Path::new(path); + debug_assert!(path.is_relative(), "Repo paths must be relative"); + + RepoPath(path.into()) + } + + pub fn to_proto(&self) -> String { + self.0.to_string_lossy().to_string() } } impl From<&Path> 
for RepoPath { fn from(value: &Path) -> Self { - RepoPath::new(value.to_path_buf()) + RepoPath::new(value.into()) } } @@ -443,9 +465,15 @@ impl From for RepoPath { } } +impl From<&str> for RepoPath { + fn from(value: &str) -> Self { + Self::from_str(value) + } +} + impl Default for RepoPath { fn default() -> Self { - RepoPath(PathBuf::new()) + RepoPath(Path::new("").into()) } } @@ -456,13 +484,19 @@ impl AsRef for RepoPath { } impl std::ops::Deref for RepoPath { - type Target = PathBuf; + type Target = Path; fn deref(&self) -> &Self::Target { &self.0 } } +impl Borrow for RepoPath { + fn borrow(&self) -> &Path { + self.0.as_ref() + } +} + #[derive(Debug)] pub struct RepoPathDescendants<'a>(pub &'a Path); diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index 6eb98ecefe..0d62cfaae9 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -1,10 +1,6 @@ use crate::repository::{GitFileStatus, RepoPath}; use anyhow::{anyhow, Result}; -use std::{ - path::{Path, PathBuf}, - process::{Command, Stdio}, - sync::Arc, -}; +use std::{path::Path, process::Stdio, sync::Arc}; #[derive(Clone)] pub struct GitStatus { @@ -15,11 +11,9 @@ impl GitStatus { pub(crate) fn new( git_binary: &Path, working_directory: &Path, - path_prefixes: &[PathBuf], + path_prefixes: &[RepoPath], ) -> Result { - let mut child = Command::new(git_binary); - - child + let child = util::command::new_std_command(git_binary) .current_dir(working_directory) .args([ "--no-optional-locks", @@ -29,7 +23,7 @@ impl GitStatus { "-z", ]) .args(path_prefixes.iter().map(|path_prefix| { - if *path_prefix == Path::new("") { + if path_prefix.0.as_ref() == Path::new("") { Path::new(".") } else { path_prefix @@ -37,15 +31,7 @@ impl GitStatus { })) .stdin(Stdio::null()) .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - - #[cfg(windows)] - { - use std::os::windows::process::CommandExt; - child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - - let child = child + 
.stderr(Stdio::piped()) .spawn() .map_err(|e| anyhow!("Failed to start git status process: {}", e))?; @@ -65,10 +51,12 @@ impl GitStatus { let (status, path) = entry.split_at(3); let status = status.trim(); Some(( - RepoPath(PathBuf::from(path)), + RepoPath(Path::new(path).into()), match status { - "A" | "??" => GitFileStatus::Added, + "A" => GitFileStatus::Added, "M" => GitFileStatus::Modified, + "D" => GitFileStatus::Deleted, + "??" => GitFileStatus::Untracked, _ => return None, }, )) @@ -85,7 +73,7 @@ impl GitStatus { pub fn get(&self, path: &Path) -> Option { self.entries - .binary_search_by(|(repo_path, _)| repo_path.0.as_path().cmp(path)) + .binary_search_by(|(repo_path, _)| repo_path.0.as_ref().cmp(path)) .ok() .map(|index| self.entries[index].1) } diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index cbd1cc73a8..6026c6ed20 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -1,5 +1,5 @@ use std::str::FromStr; -use std::sync::{Arc, OnceLock}; +use std::sync::{Arc, LazyLock}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; @@ -15,9 +15,9 @@ use git::{ }; fn pull_request_number_regex() -> &'static Regex { - static PULL_REQUEST_NUMBER_REGEX: OnceLock = OnceLock::new(); - - PULL_REQUEST_NUMBER_REGEX.get_or_init(|| Regex::new(r"\(#(\d+)\)$").unwrap()) + static PULL_REQUEST_NUMBER_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\(#(\d+)\)$").unwrap()); + &PULL_REQUEST_NUMBER_REGEX } #[derive(Debug, Deserialize)] diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml new file mode 100644 index 0000000000..120ca92857 --- /dev/null +++ b/crates/git_ui/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "git_ui" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +name = "git_ui" +path = "src/git_ui.rs" + +[dependencies] 
+anyhow.workspace = true +collections.workspace = true +db.workspace = true +editor.workspace = true +git.workspace = true +gpui.workspace = true +language.workspace = true +menu.workspace = true +project.workspace = true +schemars.workspace = true +serde.workspace = true +serde_derive.workspace = true +serde_json.workspace = true +settings.workspace = true +theme.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true +worktree.workspace = true + +[target.'cfg(windows)'.dependencies] +windows.workspace = true + +[features] +default = [] diff --git a/crates/git_ui/LICENSE-GPL b/crates/git_ui/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/git_ui/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/git_ui/TODO.md b/crates/git_ui/TODO.md new file mode 100644 index 0000000000..efbdcf494c --- /dev/null +++ b/crates/git_ui/TODO.md @@ -0,0 +1,45 @@ +### General + +- [x] Disable staging and committing actions for read-only projects + +### List + +- [x] Add uniform list +- [x] Git status item +- [ ] Directory item +- [x] Scrollbar +- [ ] Add indent size setting +- [ ] Add tree settings + +### List Items + +- [x] Checkbox for staging +- [x] Git status icon +- [ ] Context menu + - [ ] Discard Changes + - --- + - [ ] Ignore + - [ ] Ignore directory + - --- + - [ ] Copy path + - [ ] Copy relative path + - --- + - [ ] Reveal in Finder + +### Commit Editor + +- [ ] Add commit editor +- [ ] Add commit message placeholder & add commit message to store +- [ ] Add a way to get the current collaborators & automatically add them to the commit message as co-authors +- [ ] Add action to clear commit message +- [x] Swap commit button between "Commit" and "Commit All" based on modifier key + +### Component Updates + +- [ ] ChangedLineCount (new) + - takes `lines_added: usize, lines_removed: usize`, returns a added/removed badge +- [x] GitStatusIcon (new) +- [ ] Checkbox + - 
update checkbox design +- [ ] ScrollIndicator + - shows a gradient overlay when more content is available to be scrolled diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs new file mode 100644 index 0000000000..e57145f988 --- /dev/null +++ b/crates/git_ui/src/git_panel.rs @@ -0,0 +1,1397 @@ +use crate::{ + git_status_icon, settings::GitPanelSettings, CommitAllChanges, CommitStagedChanges, GitState, + RevertAll, StageAll, UnstageAll, +}; +use anyhow::{Context as _, Result}; +use db::kvp::KEY_VALUE_STORE; +use editor::Editor; +use git::{ + diff::DiffHunk, + repository::{GitFileStatus, RepoPath}, +}; +use gpui::*; +use language::Buffer; +use menu::{SelectNext, SelectPrev}; +use project::{EntryKind, Fs, Project, ProjectEntryId, WorktreeId}; +use serde::{Deserialize, Serialize}; +use settings::Settings as _; +use std::{ + cell::OnceCell, + collections::HashSet, + ffi::OsStr, + ops::{Deref, Range}, + path::PathBuf, + rc::Rc, + sync::Arc, + time::Duration, + usize, +}; +use theme::ThemeSettings; +use ui::{ + prelude::*, Checkbox, Divider, DividerColor, ElevationIndex, Scrollbar, ScrollbarState, Tooltip, +}; +use util::{ResultExt, TryFutureExt}; +use workspace::{ + dock::{DockPosition, Panel, PanelEvent}, + Workspace, +}; +use worktree::StatusEntry; + +actions!(git_panel, [ToggleFocus, OpenEntryMenu]); + +const GIT_PANEL_KEY: &str = "GitPanel"; + +const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views( + |workspace: &mut Workspace, _cx: &mut ViewContext| { + workspace.register_action(|workspace, _: &ToggleFocus, cx| { + workspace.toggle_panel_focus::(cx); + }); + }, + ) + .detach(); +} + +#[derive(Debug)] +pub enum Event { + Focus, +} + +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub enum ViewMode { + #[default] + List, + Tree, +} + +pub struct GitStatusEntry {} + +#[derive(Debug, PartialEq, Eq, Clone)] +struct EntryDetails { + filename: String, + display_name: String, + 
path: RepoPath, + kind: EntryKind, + depth: usize, + is_expanded: bool, + status: Option, + hunks: Rc>>, + index: usize, +} + +#[derive(Serialize, Deserialize)] +struct SerializedGitPanel { + width: Option, +} + +pub struct GitPanel { + // workspace: WeakView, + current_modifiers: Modifiers, + focus_handle: FocusHandle, + fs: Arc, + hide_scrollbar_task: Option>, + pending_serialization: Task>, + project: Model, + scroll_handle: UniformListScrollHandle, + scrollbar_state: ScrollbarState, + selected_item: Option, + view_mode: ViewMode, + show_scrollbar: bool, + // TODO Reintroduce expanded directories, once we're deriving directories from paths + // expanded_dir_ids: HashMap>, + git_state: Model, + commit_editor: View, + // The entries that are currently shown in the panel, aka + // not hidden by folding or such + visible_entries: Vec, + width: Option, + // git_diff_editor: Option>, + // git_diff_editor_updates: Task<()>, + reveal_in_editor: Task<()>, +} + +#[derive(Debug, Clone)] +struct WorktreeEntries { + worktree_id: WorktreeId, + // TODO support multiple repositories per worktree + // work_directory: worktree::WorkDirectory, + visible_entries: Vec, + paths: Rc>>, +} + +#[derive(Debug, Clone)] +struct GitPanelEntry { + entry: worktree::StatusEntry, + hunks: Rc>>, +} + +impl Deref for GitPanelEntry { + type Target = worktree::StatusEntry; + + fn deref(&self) -> &Self::Target { + &self.entry + } +} + +impl WorktreeEntries { + fn paths(&self) -> &HashSet { + self.paths.get_or_init(|| { + self.visible_entries + .iter() + .map(|e| (e.entry.repo_path.clone())) + .collect() + }) + } +} + +impl GitPanel { + pub fn load( + workspace: WeakView, + cx: AsyncWindowContext, + ) -> Task>> { + cx.spawn(|mut cx| async move { workspace.update(&mut cx, Self::new) }) + } + + pub fn new(workspace: &mut Workspace, cx: &mut ViewContext) -> View { + let git_state = GitState::get_global(cx); + + let fs = workspace.app_state().fs.clone(); + // let weak_workspace = workspace.weak_handle(); 
+ let project = workspace.project().clone(); + let language_registry = workspace.app_state().languages.clone(); + + let git_panel = cx.new_view(|cx: &mut ViewContext| { + let focus_handle = cx.focus_handle(); + cx.on_focus(&focus_handle, Self::focus_in).detach(); + cx.on_focus_out(&focus_handle, |this, _, cx| { + this.hide_scrollbar(cx); + }) + .detach(); + cx.subscribe(&project, |this, _, event, cx| match event { + project::Event::WorktreeRemoved(_id) => { + // this.expanded_dir_ids.remove(id); + this.update_visible_entries(None, None, cx); + cx.notify(); + } + project::Event::WorktreeOrderChanged => { + this.update_visible_entries(None, None, cx); + cx.notify(); + } + project::Event::WorktreeUpdatedEntries(id, _) + | project::Event::WorktreeAdded(id) + | project::Event::WorktreeUpdatedGitRepositories(id) => { + this.update_visible_entries(Some(*id), None, cx); + cx.notify(); + } + project::Event::Closed => { + // this.git_diff_editor_updates = Task::ready(()); + this.reveal_in_editor = Task::ready(()); + // this.expanded_dir_ids.clear(); + this.visible_entries.clear(); + // this.git_diff_editor = None; + } + _ => {} + }) + .detach(); + + let state = git_state.read(cx); + let current_commit_message = state.commit_message.clone(); + + let commit_editor = cx.new_view(|cx| { + let theme = ThemeSettings::get_global(cx); + + let mut text_style = cx.text_style(); + let refinement = TextStyleRefinement { + font_family: Some(theme.buffer_font.family.clone()), + font_features: Some(FontFeatures::disable_ligatures()), + font_size: Some(px(12.).into()), + color: Some(cx.theme().colors().editor_foreground), + background_color: Some(gpui::transparent_black()), + ..Default::default() + }; + + text_style.refine(&refinement); + + let mut commit_editor = Editor::auto_height(10, cx); + if let Some(message) = current_commit_message { + commit_editor.set_text(message, cx); + } else { + commit_editor.set_text("", cx); + } + // commit_editor.set_soft_wrap_mode(SoftWrap::EditorWidth, 
cx); + commit_editor.set_use_autoclose(false); + commit_editor.set_show_gutter(false, cx); + commit_editor.set_show_wrap_guides(false, cx); + commit_editor.set_show_indent_guides(false, cx); + commit_editor.set_text_style_refinement(refinement); + commit_editor.set_placeholder_text("Enter commit message", cx); + commit_editor + }); + + let buffer = commit_editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .expect("commit editor must be singleton"); + + cx.subscribe(&buffer, Self::on_buffer_event).detach(); + + let markdown = language_registry.language_for_name("Markdown"); + cx.spawn(|_, mut cx| async move { + let markdown = markdown.await.context("failed to load Markdown language")?; + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(Some(markdown), cx) + }) + }) + .detach_and_log_err(cx); + + let scroll_handle = UniformListScrollHandle::new(); + + let mut git_panel = Self { + // workspace: weak_workspace, + focus_handle: cx.focus_handle(), + fs, + pending_serialization: Task::ready(None), + visible_entries: Vec::new(), + current_modifiers: cx.modifiers(), + // expanded_dir_ids: Default::default(), + width: Some(px(360.)), + scrollbar_state: ScrollbarState::new(scroll_handle.clone()).parent_view(cx.view()), + scroll_handle, + selected_item: None, + view_mode: ViewMode::default(), + show_scrollbar: !Self::should_autohide_scrollbar(cx), + hide_scrollbar_task: None, + // git_diff_editor: Some(diff_display_editor(cx)), + // git_diff_editor_updates: Task::ready(()), + commit_editor, + git_state, + reveal_in_editor: Task::ready(()), + project, + }; + git_panel.update_visible_entries(None, None, cx); + git_panel + }); + + git_panel + } + + fn serialize(&mut self, cx: &mut ViewContext) { + let width = self.width; + self.pending_serialization = cx.background_executor().spawn( + async move { + KEY_VALUE_STORE + .write_kvp( + GIT_PANEL_KEY.into(), + serde_json::to_string(&SerializedGitPanel { width })?, + ) + .await?; + anyhow::Ok(()) + } + .log_err(), 
+ ); + } + + fn dispatch_context(&self) -> KeyContext { + let mut dispatch_context = KeyContext::new_with_defaults(); + dispatch_context.add("GitPanel"); + dispatch_context.add("menu"); + + dispatch_context + } + + fn focus_in(&mut self, cx: &mut ViewContext) { + if !self.focus_handle.contains_focused(cx) { + cx.emit(Event::Focus); + } + } + + fn should_show_scrollbar(_cx: &AppContext) -> bool { + // TODO: plug into settings + true + } + + fn should_autohide_scrollbar(_cx: &AppContext) -> bool { + // TODO: plug into settings + true + } + + fn hide_scrollbar(&mut self, cx: &mut ViewContext) { + const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1); + if !Self::should_autohide_scrollbar(cx) { + return; + } + self.hide_scrollbar_task = Some(cx.spawn(|panel, mut cx| async move { + cx.background_executor() + .timer(SCROLLBAR_SHOW_INTERVAL) + .await; + panel + .update(&mut cx, |panel, cx| { + panel.show_scrollbar = false; + cx.notify(); + }) + .log_err(); + })) + } + + fn handle_modifiers_changed( + &mut self, + event: &ModifiersChangedEvent, + cx: &mut ViewContext, + ) { + self.current_modifiers = event.modifiers; + cx.notify(); + } + + fn calculate_depth_and_difference( + entry: &StatusEntry, + visible_worktree_entries: &HashSet, + ) -> (usize, usize) { + let (depth, difference) = entry + .repo_path + .ancestors() + .skip(1) // Skip the entry itself + .find_map(|ancestor| { + if let Some(parent_entry) = visible_worktree_entries.get(ancestor) { + let entry_path_components_count = entry.repo_path.components().count(); + let parent_path_components_count = parent_entry.components().count(); + let difference = entry_path_components_count - parent_path_components_count; + let depth = parent_entry + .ancestors() + .skip(1) + .filter(|ancestor| visible_worktree_entries.contains(*ancestor)) + .count(); + Some((depth + 1, difference)) + } else { + None + } + }) + .unwrap_or((0, 0)); + + (depth, difference) + } + + fn select_next(&mut self, _: &SelectNext, cx: &mut 
ViewContext) { + let item_count = self + .visible_entries + .iter() + .map(|worktree_entries| worktree_entries.visible_entries.len()) + .sum::(); + if item_count == 0 { + return; + } + let selection = match self.selected_item { + Some(i) => { + if i < item_count - 1 { + self.selected_item = Some(i + 1); + i + 1 + } else { + self.selected_item = Some(0); + 0 + } + } + None => { + self.selected_item = Some(0); + 0 + } + }; + self.scroll_handle + .scroll_to_item(selection, ScrollStrategy::Center); + + let mut hunks = None; + self.for_each_visible_entry(selection..selection + 1, cx, |_, entry, _| { + hunks = Some(entry.hunks.clone()); + }); + if let Some(hunks) = hunks { + self.reveal_entry_in_git_editor(hunks, false, Some(UPDATE_DEBOUNCE), cx); + } + + cx.notify(); + } + + fn select_prev(&mut self, _: &SelectPrev, cx: &mut ViewContext) { + let item_count = self + .visible_entries + .iter() + .map(|worktree_entries| worktree_entries.visible_entries.len()) + .sum::(); + if item_count == 0 { + return; + } + let selection = match self.selected_item { + Some(i) => { + if i > 0 { + self.selected_item = Some(i - 1); + i - 1 + } else { + self.selected_item = Some(item_count - 1); + item_count - 1 + } + } + None => { + self.selected_item = Some(0); + 0 + } + }; + self.scroll_handle + .scroll_to_item(selection, ScrollStrategy::Center); + + let mut hunks = None; + self.for_each_visible_entry(selection..selection + 1, cx, |_, entry, _| { + hunks = Some(entry.hunks.clone()); + }); + if let Some(hunks) = hunks { + self.reveal_entry_in_git_editor(hunks, false, Some(UPDATE_DEBOUNCE), cx); + } + + cx.notify(); + } +} + +impl GitPanel { + fn stage_all(&mut self, _: &StageAll, _cx: &mut ViewContext) { + // TODO: Implement stage all + println!("Stage all triggered"); + } + + fn unstage_all(&mut self, _: &UnstageAll, _cx: &mut ViewContext) { + // TODO: Implement unstage all + println!("Unstage all triggered"); + } + + fn discard_all(&mut self, _: &RevertAll, _cx: &mut ViewContext) { + // 
TODO: Implement discard all + println!("Discard all triggered"); + } + + fn clear_message(&mut self, cx: &mut ViewContext) { + let git_state = self.git_state.clone(); + git_state.update(cx, |state, _cx| state.clear_message()); + self.commit_editor + .update(cx, |editor, cx| editor.set_text("", cx)); + } + + /// Commit all staged changes + fn commit_staged_changes(&mut self, _: &CommitStagedChanges, cx: &mut ViewContext) { + self.clear_message(cx); + + // TODO: Implement commit all staged + println!("Commit staged changes triggered"); + } + + /// Commit all changes, regardless of whether they are staged or not + fn commit_all_changes(&mut self, _: &CommitAllChanges, cx: &mut ViewContext) { + self.clear_message(cx); + + // TODO: Implement commit all changes + println!("Commit all changes triggered"); + } + + fn all_staged(&self) -> bool { + // TODO: Implement all_staged + true + } + + fn no_entries(&self) -> bool { + self.visible_entries.is_empty() + } + + fn entry_count(&self) -> usize { + self.visible_entries + .iter() + .map(|worktree_entries| worktree_entries.visible_entries.len()) + .sum() + } + + fn for_each_visible_entry( + &self, + range: Range, + cx: &mut ViewContext, + mut callback: impl FnMut(usize, EntryDetails, &mut ViewContext), + ) { + let mut ix = 0; + for worktree_entries in &self.visible_entries { + if ix >= range.end { + return; + } + + if ix + worktree_entries.visible_entries.len() <= range.start { + ix += worktree_entries.visible_entries.len(); + continue; + } + + let end_ix = range.end.min(ix + worktree_entries.visible_entries.len()); + // let entry_range = range.start.saturating_sub(ix)..end_ix - ix; + if let Some(worktree) = self + .project + .read(cx) + .worktree_for_id(worktree_entries.worktree_id, cx) + { + let snapshot = worktree.read(cx).snapshot(); + let root_name = OsStr::new(snapshot.root_name()); + // let expanded_entry_ids = self + // .expanded_dir_ids + // .get(&snapshot.id()) + // .map(Vec::as_slice) + // .unwrap_or(&[]); + + let 
entry_range = range.start.saturating_sub(ix)..end_ix - ix; + let entries = worktree_entries.paths(); + + let index_start = entry_range.start; + for (i, entry) in worktree_entries.visible_entries[entry_range] + .iter() + .enumerate() + { + let index = index_start + i; + let status = entry.status; + let is_expanded = true; //expanded_entry_ids.binary_search(&entry.id).is_ok(); + + let (depth, difference) = Self::calculate_depth_and_difference(entry, entries); + + let filename = match difference { + diff if diff > 1 => entry + .repo_path + .iter() + .skip(entry.repo_path.components().count() - diff) + .collect::() + .to_str() + .unwrap_or_default() + .to_string(), + _ => entry + .repo_path + .file_name() + .map(|name| name.to_string_lossy().into_owned()) + .unwrap_or_else(|| root_name.to_string_lossy().to_string()), + }; + + let details = EntryDetails { + filename, + display_name: entry.repo_path.to_string_lossy().into_owned(), + // TODO get it from StatusEntry? + kind: EntryKind::File, + is_expanded, + path: entry.repo_path.clone(), + status: Some(status), + hunks: entry.hunks.clone(), + depth, + index, + }; + callback(ix, details, cx); + } + } + ix = end_ix; + } + } + + // TODO: Update expanded directory state + // TODO: Updates happen in the main loop, could be long for large workspaces + #[track_caller] + fn update_visible_entries( + &mut self, + for_worktree: Option, + _new_selected_entry: Option<(WorktreeId, ProjectEntryId)>, + cx: &mut ViewContext, + ) { + let project = self.project.read(cx); + let mut old_entries_removed = false; + let mut after_update = Vec::new(); + self.visible_entries + .retain(|worktree_entries| match for_worktree { + Some(for_worktree) => { + if worktree_entries.worktree_id == for_worktree { + old_entries_removed = true; + false + } else if old_entries_removed { + after_update.push(worktree_entries.clone()); + false + } else { + true + } + } + None => false, + }); + for worktree in project.visible_worktrees(cx) { + let snapshot = 
worktree.read(cx).snapshot(); + let worktree_id = snapshot.id(); + + if for_worktree.is_some() && for_worktree != Some(worktree_id) { + continue; + } + + let mut visible_worktree_entries = Vec::new(); + // Only use the first repository for now + let repositories = snapshot.repositories().take(1); + // let mut work_directory = None; + for repository in repositories { + visible_worktree_entries.extend(repository.status()); + // work_directory = Some(worktree::WorkDirectory::clone(repository)); + } + + // TODO use the GitTraversal + // let mut visible_worktree_entries = snapshot + // .entries(false, 0) + // .filter(|entry| !entry.is_external) + // .filter(|entry| entry.git_status.is_some()) + // .cloned() + // .collect::>(); + // snapshot.propagate_git_statuses(&mut visible_worktree_entries); + // project::sort_worktree_entries(&mut visible_worktree_entries); + + if !visible_worktree_entries.is_empty() { + self.visible_entries.push(WorktreeEntries { + worktree_id, + // work_directory: work_directory.unwrap(), + visible_entries: visible_worktree_entries + .into_iter() + .map(|entry| GitPanelEntry { + entry, + hunks: Rc::default(), + }) + .collect(), + paths: Rc::default(), + }); + } + } + self.visible_entries.extend(after_update); + + // TODO re-implement this + // if let Some((worktree_id, entry_id)) = new_selected_entry { + // self.selected_item = self.visible_entries.iter().enumerate().find_map( + // |(worktree_index, worktree_entries)| { + // if worktree_entries.worktree_id == worktree_id { + // worktree_entries + // .visible_entries + // .iter() + // .position(|entry| entry.id == entry_id) + // .map(|entry_index| { + // worktree_index * worktree_entries.visible_entries.len() + // + entry_index + // }) + // } else { + // None + // } + // }, + // ); + // } + + // let project = self.project.downgrade(); + // self.git_diff_editor_updates = cx.spawn(|git_panel, mut cx| async move { + // cx.background_executor() + // .timer(UPDATE_DEBOUNCE) + // .await; + // let 
Some(project_buffers) = git_panel + // .update(&mut cx, |git_panel, cx| { + // futures::future::join_all(git_panel.visible_entries.iter_mut().flat_map( + // |worktree_entries| { + // worktree_entries + // .visible_entries + // .iter() + // .filter_map(|entry| { + // let git_status = entry.status; + // let entry_hunks = entry.hunks.clone(); + // let (entry_path, unstaged_changes_task) = + // project.update(cx, |project, cx| { + // let entry_path = ProjectPath { + // worktree_id: worktree_entries.worktree_id, + // path: worktree_entries.work_directory.unrelativize(&entry.repo_path)?, + // }; + // let open_task = + // project.open_path(entry_path.clone(), cx); + // let unstaged_changes_task = + // cx.spawn(|project, mut cx| async move { + // let (_, opened_model) = open_task + // .await + // .context("opening buffer")?; + // let buffer = opened_model + // .downcast::() + // .map_err(|_| { + // anyhow::anyhow!( + // "accessing buffer for entry" + // ) + // })?; + // // TODO added files have noop changes and those are not expanded properly in the multi buffer + // let unstaged_changes = project + // .update(&mut cx, |project, cx| { + // project.open_unstaged_changes( + // buffer.clone(), + // cx, + // ) + // })? 
+ // .await + // .context("opening unstaged changes")?; + + // let hunks = cx.update(|cx| { + // entry_hunks + // .get_or_init(|| { + // match git_status { + // GitFileStatus::Added => { + // let buffer_snapshot = buffer.read(cx).snapshot(); + // let entire_buffer_range = + // buffer_snapshot.anchor_after(0) + // ..buffer_snapshot + // .anchor_before( + // buffer_snapshot.len(), + // ); + // let entire_buffer_point_range = + // entire_buffer_range + // .clone() + // .to_point(&buffer_snapshot); + + // vec![DiffHunk { + // row_range: entire_buffer_point_range + // .start + // .row + // ..entire_buffer_point_range + // .end + // .row, + // buffer_range: entire_buffer_range, + // diff_base_byte_range: 0..0, + // }] + // } + // GitFileStatus::Modified => { + // let buffer_snapshot = + // buffer.read(cx).snapshot(); + // unstaged_changes.read(cx) + // .diff_to_buffer + // .hunks_in_row_range( + // 0..BufferRow::MAX, + // &buffer_snapshot, + // ) + // .collect() + // } + // // TODO support these + // GitFileStatus::Conflict | GitFileStatus::Deleted | GitFileStatus::Untracked => Vec::new(), + // } + // }).clone() + // })?; + + // anyhow::Ok((buffer, unstaged_changes, hunks)) + // }); + // Some((entry_path, unstaged_changes_task)) + // }).ok()??; + // Some((entry_path, unstaged_changes_task)) + // }) + // .map(|(entry_path, open_task)| async move { + // (entry_path, open_task.await) + // }) + // .collect::>() + // }, + // )) + // }) + // .ok() + // else { + // return; + // }; + + // let project_buffers = project_buffers.await; + // if project_buffers.is_empty() { + // return; + // } + // let mut change_sets = Vec::with_capacity(project_buffers.len()); + // if let Some(buffer_update_task) = git_panel + // .update(&mut cx, |git_panel, cx| { + // let editor = git_panel.git_diff_editor.clone()?; + // let multi_buffer = editor.read(cx).buffer().clone(); + // let mut buffers_with_ranges = Vec::with_capacity(project_buffers.len()); + // for (buffer_path, open_result) in 
project_buffers { + // if let Some((buffer, unstaged_changes, diff_hunks)) = open_result + // .with_context(|| format!("opening buffer {buffer_path:?}")) + // .log_err() + // { + // change_sets.push(unstaged_changes); + // buffers_with_ranges.push(( + // buffer, + // diff_hunks + // .into_iter() + // .map(|hunk| hunk.buffer_range) + // .collect(), + // )); + // } + // } + + // Some(multi_buffer.update(cx, |multi_buffer, cx| { + // multi_buffer.clear(cx); + // multi_buffer.push_multiple_excerpts_with_context_lines( + // buffers_with_ranges, + // DEFAULT_MULTIBUFFER_CONTEXT, + // cx, + // ) + // })) + // }) + // .ok().flatten() + // { + // buffer_update_task.await; + // git_panel + // .update(&mut cx, |git_panel, cx| { + // if let Some(diff_editor) = git_panel.git_diff_editor.as_ref() { + // diff_editor.update(cx, |editor, cx| { + // for change_set in change_sets { + // editor.add_change_set(change_set, cx); + // } + // }); + // } + // }) + // .ok(); + // } + // }); + + cx.notify(); + } + + fn on_buffer_event( + &mut self, + _buffer: Model, + event: &language::BufferEvent, + cx: &mut ViewContext, + ) { + if let language::BufferEvent::Reparsed | language::BufferEvent::Edited = event { + let commit_message = self.commit_editor.update(cx, |editor, cx| editor.text(cx)); + + self.git_state.update(cx, |state, _cx| { + state.commit_message = Some(commit_message.into()); + }); + + cx.notify(); + } + } +} + +impl GitPanel { + pub fn panel_button( + &self, + id: impl Into, + label: impl Into, + ) -> Button { + let id = id.into().clone(); + let label = label.into().clone(); + + Button::new(id, label) + .label_size(LabelSize::Small) + .layer(ElevationIndex::ElevatedSurface) + .size(ButtonSize::Compact) + .style(ButtonStyle::Filled) + } + + pub fn render_divider(&self, _cx: &mut ViewContext) -> impl IntoElement { + h_flex() + .items_center() + .h(px(8.)) + .child(Divider::horizontal_dashed().color(DividerColor::Border)) + } + + pub fn render_panel_header(&self, cx: &mut 
ViewContext) -> impl IntoElement { + let focus_handle = self.focus_handle(cx).clone(); + + let changes_string = match self.entry_count() { + 0 => "No changes".to_string(), + 1 => "1 change".to_string(), + n => format!("{} changes", n), + }; + + h_flex() + .h(px(32.)) + .items_center() + .px_3() + .bg(ElevationIndex::Surface.bg(cx)) + .child( + h_flex() + .gap_2() + .child(Checkbox::new("all-changes", true.into()).disabled(true)) + .child(div().text_buffer(cx).text_ui_sm(cx).child(changes_string)), + ) + .child(div().flex_grow()) + .child( + h_flex() + .gap_2() + .child( + IconButton::new("discard-changes", IconName::Undo) + .tooltip(move |cx| { + let focus_handle = focus_handle.clone(); + + Tooltip::for_action_in( + "Discard all changes", + &RevertAll, + &focus_handle, + cx, + ) + }) + .icon_size(IconSize::Small) + .disabled(true), + ) + .child(if self.all_staged() { + self.panel_button("unstage-all", "Unstage All").on_click( + cx.listener(move |_, _, cx| cx.dispatch_action(Box::new(RevertAll))), + ) + } else { + self.panel_button("stage-all", "Stage All").on_click( + cx.listener(move |_, _, cx| cx.dispatch_action(Box::new(StageAll))), + ) + }), + ) + } + + pub fn render_commit_editor(&self, cx: &ViewContext) -> impl IntoElement { + let editor = self.commit_editor.clone(); + let editor_focus_handle = editor.read(cx).focus_handle(cx).clone(); + + let focus_handle_1 = self.focus_handle(cx).clone(); + let focus_handle_2 = self.focus_handle(cx).clone(); + + let commit_staged_button = self + .panel_button("commit-staged-changes", "Commit") + .tooltip(move |cx| { + let focus_handle = focus_handle_1.clone(); + Tooltip::for_action_in( + "Commit all staged changes", + &CommitStagedChanges, + &focus_handle, + cx, + ) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.commit_staged_changes(&CommitStagedChanges, cx) + })); + + let commit_all_button = self + .panel_button("commit-all-changes", "Commit All") + .tooltip(move |cx| { + let focus_handle = 
focus_handle_2.clone(); + Tooltip::for_action_in( + "Commit all changes, including unstaged changes", + &CommitAllChanges, + &focus_handle, + cx, + ) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.commit_all_changes(&CommitAllChanges, cx) + })); + + div().w_full().h(px(140.)).px_2().pt_1().pb_2().child( + v_flex() + .id("commit-editor-container") + .relative() + .h_full() + .py_2p5() + .px_3() + .bg(cx.theme().colors().editor_background) + .on_click(cx.listener(move |_, _: &ClickEvent, cx| cx.focus(&editor_focus_handle))) + .child(self.commit_editor.clone()) + .child( + h_flex() + .absolute() + .bottom_2p5() + .right_3() + .child(div().gap_1().flex_grow()) + .child(if self.current_modifiers.alt { + commit_all_button + } else { + commit_staged_button + }), + ), + ) + } + + fn render_empty_state(&self, cx: &ViewContext) -> impl IntoElement { + h_flex() + .h_full() + .flex_1() + .justify_center() + .items_center() + .child( + v_flex() + .gap_3() + .child("No changes to commit") + .text_ui_sm(cx) + .mx_auto() + .text_color(Color::Placeholder.color(cx)), + ) + } + + fn render_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) + || !(self.show_scrollbar || self.scrollbar_state.is_dragging()) + { + return None; + } + Some( + div() + .occlude() + .id("project-panel-vertical-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + cx.listener(|this, _, cx| { + if !this.scrollbar_state.is_dragging() + && !this.focus_handle.contains_focused(cx) + { + this.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .h_full() + .absolute() + .right_1() + .top_1() + .bottom_1() + .w(px(12.)) + .cursor_default() + .children(Scrollbar::vertical( + // 
percentage as f32..end_offset as f32, + self.scrollbar_state.clone(), + )), + ) + } + + fn render_entries(&self, cx: &mut ViewContext) -> impl IntoElement { + let item_count = self + .visible_entries + .iter() + .map(|worktree_entries| worktree_entries.visible_entries.len()) + .sum(); + let selected_entry = self.selected_item; + h_flex() + .size_full() + .overflow_hidden() + .child( + uniform_list(cx.view().clone(), "entries", item_count, { + move |git_panel, range, cx| { + let mut items = Vec::with_capacity(range.end - range.start); + git_panel.for_each_visible_entry(range, cx, |id, details, cx| { + items.push(git_panel.render_entry( + id, + Some(details.index) == selected_entry, + details, + cx, + )); + }); + items + } + }) + .size_full() + .with_sizing_behavior(ListSizingBehavior::Infer) + .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) + // .with_width_from_item(self.max_width_item_index) + .track_scroll(self.scroll_handle.clone()), + ) + .children(self.render_scrollbar(cx)) + } + + fn render_entry( + &self, + ix: usize, + selected: bool, + details: EntryDetails, + cx: &ViewContext, + ) -> impl IntoElement { + let view_mode = self.view_mode.clone(); + let checkbox_id = ElementId::Name(format!("checkbox_{}", ix).into()); + let is_staged = ToggleState::Selected; + let handle = cx.view().downgrade(); + + // TODO: At this point, an entry should really have a status. + // Is this fixed with the new git status stuff? 
+ let status = details.status.unwrap_or(GitFileStatus::Untracked); + + let end_slot = h_flex() + .invisible() + .when(selected, |this| this.visible()) + .when(!selected, |this| { + this.group_hover("git-panel-entry", |this| this.visible()) + }) + .gap_1() + .items_center() + .child( + IconButton::new("more", IconName::EllipsisVertical) + .icon_color(Color::Placeholder) + .icon_size(IconSize::Small), + ); + + let mut entry = h_flex() + .id(("git-panel-entry", ix)) + .group("git-panel-entry") + .h(px(28.)) + .w_full() + .pr(px(4.)) + .items_center() + .gap_2() + .font_buffer(cx) + .text_ui_sm(cx) + .when(!selected, |this| { + this.hover(|this| this.bg(cx.theme().colors().ghost_element_hover)) + }); + + if view_mode == ViewMode::Tree { + entry = entry.pl(px(12. + 12. * details.depth as f32)) + } else { + entry = entry.pl(px(12.)) + } + + if selected { + entry = entry.bg(cx.theme().status().info_background); + } + + entry = entry + .child(Checkbox::new(checkbox_id, is_staged)) + .child(git_status_icon(status)) + .child( + h_flex() + .gap_1p5() + .when(status == GitFileStatus::Deleted, |this| { + this.text_color(cx.theme().colors().text_disabled) + .line_through() + }) + .child(details.display_name.clone()), + ) + .child(div().flex_1()) + .child(end_slot) + // TODO: Only fire this if the entry is not currently revealed, otherwise the ui flashes + .on_click(move |e, cx| { + handle + .update(cx, |git_panel, cx| { + git_panel.selected_item = Some(details.index); + let change_focus = e.down.click_count > 1; + git_panel.reveal_entry_in_git_editor( + details.hunks.clone(), + change_focus, + None, + cx, + ); + }) + .ok(); + }); + + entry + } + + fn reveal_entry_in_git_editor( + &mut self, + _hunks: Rc>>, + _change_focus: bool, + _debounce: Option, + _cx: &mut ViewContext, + ) { + // let workspace = self.workspace.clone(); + // let Some(diff_editor) = self.git_diff_editor.clone() else { + // return; + // }; + // self.reveal_in_editor = cx.spawn(|_, mut cx| async move { + // if 
let Some(debounce) = debounce { + // cx.background_executor().timer(debounce).await; + // } + + // let Some(editor) = workspace + // .update(&mut cx, |workspace, cx| { + // let git_diff_editor = workspace + // .items_of_type::(cx) + // .find(|editor| &diff_editor == editor); + // match git_diff_editor { + // Some(existing_editor) => { + // workspace.activate_item(&existing_editor, true, change_focus, cx); + // existing_editor + // } + // None => { + // workspace.active_pane().update(cx, |pane, cx| { + // pane.add_item( + // ` diff_editor.boxed_clone(), + // true, + // change_focus, + // None, + // cx, + // ) + // }); + // diff_editor.clone() + // } + // } + // }) + // .ok() + // else { + // return; + // }; + + // if let Some(first_hunk) = hunks.get().and_then(|hunks| hunks.first()) { + // let hunk_buffer_range = &first_hunk.buffer_range; + // if let Some(buffer_id) = hunk_buffer_range + // .start + // .buffer_id + // .or_else(|| first_hunk.buffer_range.end.buffer_id) + // { + // editor + // .update(&mut cx, |editor, cx| { + // let multi_buffer = editor.buffer().read(cx); + // let buffer = multi_buffer.buffer(buffer_id)?; + // let buffer_snapshot = buffer.read(cx).snapshot(); + // let (excerpt_id, _) = multi_buffer + // .excerpts_for_buffer(&buffer, cx) + // .into_iter() + // .find(|(_, excerpt)| { + // hunk_buffer_range + // .start + // .cmp(&excerpt.context.start, &buffer_snapshot) + // .is_ge() + // && hunk_buffer_range + // .end + // .cmp(&excerpt.context.end, &buffer_snapshot) + // .is_le() + // })?; + // let multi_buffer_hunk_start = multi_buffer + // .snapshot(cx) + // .anchor_in_excerpt(excerpt_id, hunk_buffer_range.start)?; + // editor.change_selections( + // Some(Autoscroll::Strategy(AutoscrollStrategy::Center)), + // cx, + // |s| { + // s.select_ranges(Some( + // multi_buffer_hunk_start..multi_buffer_hunk_start, + // )) + // }, + // ); + // cx.notify(); + // Some(()) + // }) + // .ok() + // .flatten(); + // } + // } + // }); + } +} + +impl Render for 
GitPanel { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let project = self.project.read(cx); + + v_flex() + .id("git_panel") + .key_context(self.dispatch_context()) + .track_focus(&self.focus_handle) + .on_modifiers_changed(cx.listener(Self::handle_modifiers_changed)) + .when(!project.is_read_only(cx), |this| { + this.on_action(cx.listener(|this, &StageAll, cx| this.stage_all(&StageAll, cx))) + .on_action( + cx.listener(|this, &UnstageAll, cx| this.unstage_all(&UnstageAll, cx)), + ) + .on_action(cx.listener(|this, &RevertAll, cx| this.discard_all(&RevertAll, cx))) + .on_action(cx.listener(|this, &CommitStagedChanges, cx| { + this.commit_staged_changes(&CommitStagedChanges, cx) + })) + .on_action(cx.listener(|this, &CommitAllChanges, cx| { + this.commit_all_changes(&CommitAllChanges, cx) + })) + }) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_prev)) + .on_hover(cx.listener(|this, hovered, cx| { + if *hovered { + this.show_scrollbar = true; + this.hide_scrollbar_task.take(); + cx.notify(); + } else if !this.focus_handle.contains_focused(cx) { + this.hide_scrollbar(cx); + } + })) + .size_full() + .overflow_hidden() + .font_buffer(cx) + .py_1() + .bg(ElevationIndex::Surface.bg(cx)) + .child(self.render_panel_header(cx)) + .child(self.render_divider(cx)) + .child(if !self.no_entries() { + self.render_entries(cx).into_any_element() + } else { + self.render_empty_state(cx).into_any_element() + }) + .child(self.render_divider(cx)) + .child(self.render_commit_editor(cx)) + } +} + +impl FocusableView for GitPanel { + fn focus_handle(&self, _: &AppContext) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} + +impl EventEmitter for GitPanel {} + +impl EventEmitter for GitPanel {} + +impl Panel for GitPanel { + fn persistent_name() -> &'static str { + "GitPanel" + } + + fn position(&self, cx: &WindowContext) -> DockPosition { + GitPanelSettings::get_global(cx).dock + } + + fn position_is_valid(&self, position: 
DockPosition) -> bool { + matches!(position, DockPosition::Left | DockPosition::Right) + } + + fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext) { + settings::update_settings_file::( + self.fs.clone(), + cx, + move |settings, _| settings.dock = Some(position), + ); + } + + fn size(&self, cx: &WindowContext) -> Pixels { + self.width + .unwrap_or_else(|| GitPanelSettings::get_global(cx).default_width) + } + + fn set_size(&mut self, size: Option, cx: &mut ViewContext) { + self.width = size; + self.serialize(cx); + cx.notify(); + } + + fn icon(&self, cx: &WindowContext) -> Option { + Some(ui::IconName::GitBranch).filter(|_| GitPanelSettings::get_global(cx).button) + } + + fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { + Some("Git Panel") + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } + + fn activation_priority(&self) -> u32 { + 2 + } +} + +// fn diff_display_editor(cx: &mut WindowContext) -> View { +// cx.new_view(|cx| { +// let multi_buffer = cx.new_model(|_| { +// MultiBuffer::new(language::Capability::ReadWrite).with_title("Project diff".to_string()) +// }); +// let mut editor = Editor::for_multibuffer(multi_buffer, None, true, cx); +// editor.set_expand_all_diff_hunks(); +// editor +// }) +// } diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs new file mode 100644 index 0000000000..89a47d884c --- /dev/null +++ b/crates/git_ui/src/git_ui.rs @@ -0,0 +1,89 @@ +use ::settings::Settings; +use git::repository::GitFileStatus; +use gpui::{actions, AppContext, Context, Global, Hsla, Model}; +use settings::GitPanelSettings; +use ui::{Color, Icon, IconName, IntoElement, SharedString}; + +pub mod git_panel; +mod settings; + +actions!( + git_ui, + [ + StageAll, + UnstageAll, + RevertAll, + CommitStagedChanges, + CommitAllChanges, + ClearMessage + ] +); + +pub fn init(cx: &mut AppContext) { + GitPanelSettings::register(cx); + let git_state = cx.new_model(|_cx| GitState::new()); + 
cx.set_global(GlobalGitState(git_state)); +} + +struct GlobalGitState(Model); + +impl Global for GlobalGitState {} + +pub struct GitState { + commit_message: Option, +} + +impl GitState { + pub fn new() -> Self { + GitState { + commit_message: None, + } + } + + pub fn set_message(&mut self, message: Option) { + self.commit_message = message; + } + + pub fn clear_message(&mut self) { + self.commit_message = None; + } + + pub fn get_global(cx: &mut AppContext) -> Model { + cx.global::().0.clone() + } +} + +const ADDED_COLOR: Hsla = Hsla { + h: 142. / 360., + s: 0.68, + l: 0.45, + a: 1.0, +}; +const MODIFIED_COLOR: Hsla = Hsla { + h: 48. / 360., + s: 0.76, + l: 0.47, + a: 1.0, +}; +const REMOVED_COLOR: Hsla = Hsla { + h: 355. / 360., + s: 0.65, + l: 0.65, + a: 1.0, +}; + +// TODO: Add updated status colors to theme +pub fn git_status_icon(status: GitFileStatus) -> impl IntoElement { + match status { + GitFileStatus::Added | GitFileStatus::Untracked => { + Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR)) + } + GitFileStatus::Modified => { + Icon::new(IconName::SquareDot).color(Color::Custom(MODIFIED_COLOR)) + } + GitFileStatus::Conflict => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)), + GitFileStatus::Deleted => { + Icon::new(IconName::SquareMinus).color(Color::Custom(REMOVED_COLOR)) + } + } +} diff --git a/crates/git_ui/src/settings.rs b/crates/git_ui/src/settings.rs new file mode 100644 index 0000000000..fc2f4d51af --- /dev/null +++ b/crates/git_ui/src/settings.rs @@ -0,0 +1,41 @@ +use gpui::Pixels; +use schemars::JsonSchema; +use serde_derive::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; +use workspace::dock::DockPosition; + +#[derive(Deserialize, Debug)] +pub struct GitPanelSettings { + pub button: bool, + pub dock: DockPosition, + pub default_width: Pixels, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct PanelSettingsContent { + /// Whether to show the panel button 
in the status bar. + /// + /// Default: true + pub button: Option, + /// Where to dock the panel. + /// + /// Default: left + pub dock: Option, + /// Default width of the panel in pixels. + /// + /// Default: 360 + pub default_width: Option, +} + +impl Settings for GitPanelSettings { + const KEY: Option<&'static str> = Some("git_panel"); + + type FileContent = PanelSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 3931cac284..65ef247b91 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -1,5 +1,5 @@ use editor::{Editor, ToPoint}; -use gpui::{AppContext, Subscription, Task, View, WeakView}; +use gpui::{AppContext, FocusHandle, FocusableView, Subscription, Task, View, WeakView}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -22,6 +22,7 @@ pub(crate) struct SelectionStats { pub struct CursorPosition { position: Option, selected_count: SelectionStats, + context: Option, workspace: WeakView, update_position: Task<()>, _observe_active_editor: Option, @@ -31,6 +32,7 @@ impl CursorPosition { pub fn new(workspace: &Workspace) -> Self { Self { position: None, + context: None, selected_count: Default::default(), workspace: workspace.weak_handle(), update_position: Task::ready(()), @@ -46,40 +48,61 @@ impl CursorPosition { ) { let editor = editor.downgrade(); self.update_position = cx.spawn(|cursor_position, mut cx| async move { - if let Some(debounce) = debounce { - cx.background_executor().timer(debounce).await; + let is_singleton = editor + .update(&mut cx, |editor, cx| { + editor.buffer().read(cx).is_singleton() + }) + .ok() + .unwrap_or(true); + + if !is_singleton { + if let Some(debounce) = debounce { + cx.background_executor().timer(debounce).await; + } } editor 
.update(&mut cx, |editor, cx| { - let buffer = editor.buffer().read(cx).snapshot(cx); cursor_position.update(cx, |cursor_position, cx| { cursor_position.selected_count = SelectionStats::default(); cursor_position.selected_count.selections = editor.selections.count(); - let mut last_selection = None::>; - for selection in editor.selections.all::(cx) { - cursor_position.selected_count.characters += buffer - .text_for_range(selection.start..selection.end) - .map(|t| t.chars().count()) - .sum::(); - if last_selection - .as_ref() - .map_or(true, |last_selection| selection.id > last_selection.id) - { - last_selection = Some(selection); + match editor.mode() { + editor::EditorMode::AutoHeight { .. } + | editor::EditorMode::SingleLine { .. } => { + cursor_position.position = None; + cursor_position.context = None; } - } - for selection in editor.selections.all::(cx) { - if selection.end != selection.start { - cursor_position.selected_count.lines += - (selection.end.row - selection.start.row) as usize; - if selection.end.column != 0 { - cursor_position.selected_count.lines += 1; + editor::EditorMode::Full => { + let mut last_selection = None::>; + let buffer = editor.buffer().read(cx).snapshot(cx); + if buffer.excerpts().count() > 0 { + for selection in editor.selections.all::(cx) { + cursor_position.selected_count.characters += buffer + .text_for_range(selection.start..selection.end) + .map(|t| t.chars().count()) + .sum::(); + if last_selection.as_ref().map_or(true, |last_selection| { + selection.id > last_selection.id + }) { + last_selection = Some(selection); + } + } + for selection in editor.selections.all::(cx) { + if selection.end != selection.start { + cursor_position.selected_count.lines += + (selection.end.row - selection.start.row) as usize; + if selection.end.column != 0 { + cursor_position.selected_count.lines += 1; + } + } + } } + cursor_position.position = + last_selection.map(|s| s.head().to_point(&buffer)); + cursor_position.context = 
Some(editor.focus_handle(cx)); } } - cursor_position.position = - last_selection.map(|s| s.head().to_point(&buffer)); + cx.notify(); }) }) @@ -148,6 +171,8 @@ impl Render for CursorPosition { ); self.write_position(&mut text, cx); + let context = self.context.clone(); + el.child( Button::new("go-to-line-column", text) .label_size(LabelSize::Small) @@ -164,12 +189,18 @@ impl Render for CursorPosition { }); } })) - .tooltip(|cx| { - Tooltip::for_action( + .tooltip(move |cx| match context.as_ref() { + Some(context) => Tooltip::for_action_in( + "Go to Line/Column", + &editor::actions::ToggleGoToLine, + context, + cx, + ), + None => Tooltip::for_action( "Go to Line/Column", &editor::actions::ToggleGoToLine, cx, - ) + ), }), ) }) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index c848d28eaa..df673ef823 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -9,7 +9,7 @@ use gpui::{ use settings::Settings; use text::{Bias, Point}; use theme::ActiveTheme; -use ui::{h_flex, prelude::*, v_flex, Label}; +use ui::prelude::*; use util::paths::FILE_ROW_COLUMN_DELIMITER; use workspace::ModalView; @@ -73,7 +73,7 @@ impl GoToLine { let last_line = editor.buffer().read(cx).snapshot(cx).max_point().row; let scroll_position = active_editor.update(cx, |editor, cx| editor.scroll_position(cx)); - let current_text = format!("line {} of {} (column {})", line, last_line + 1, column); + let current_text = format!("{} of {} (column {})", line, last_line + 1, column); Self { line_editor, @@ -186,36 +186,27 @@ impl Render for GoToLine { } } - div() + v_flex() + .w(rems(24.)) .elevation_2(cx) .key_context("GoToLine") .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::confirm)) - .w_96() .child( - v_flex() - .px_1() - .pt_0p5() - .gap_px() - .child( - v_flex() - .py_0p5() - .px_1() - .child(div().px_1().py_0p5().child(self.line_editor.clone())), - ) - .child( - div() - .h_px() - .w_full() - 
.bg(cx.theme().colors().element_background), - ) - .child( - h_flex() - .justify_between() - .px_2() - .py_1() - .child(Label::new(help_text).color(Color::Muted)), - ), + div() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .px_2() + .py_1() + .child(self.line_editor.clone()), + ) + .child( + h_flex() + .px_2() + .py_1() + .gap_1() + .child(Label::new("Current Line:").color(Color::Muted)) + .child(Label::new(help_text).color(Color::Muted)), ) } } diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index bb32cb48c0..b40c5714b8 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -1,6 +1,6 @@ mod supported_countries; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, bail, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; @@ -15,6 +15,20 @@ pub async fn stream_generate_content( api_key: &str, mut request: GenerateContentRequest, ) -> Result>> { + if request.contents.is_empty() { + bail!("Request must contain at least one content item"); + } + + if let Some(user_content) = request + .contents + .iter() + .find(|content| content.role == Role::User) + { + if user_content.parts.is_empty() { + bail!("User content must contain at least one part"); + } + } + let uri = format!( "{api_url}/v1beta/models/{model}:streamGenerateContent?alt=sse&key={api_key}", model = request.model @@ -140,7 +154,7 @@ pub struct Content { pub role: Role, } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, PartialEq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub enum Role { User, @@ -291,6 +305,8 @@ pub enum Model { Gemini15Pro, #[serde(rename = "gemini-1.5-flash")] Gemini15Flash, + #[serde(rename = "gemini-2.0-flash-exp")] + Gemini20Flash, #[serde(rename = "custom")] Custom { name: String, @@ -305,6 +321,7 @@ 
impl Model { match self { Model::Gemini15Pro => "gemini-1.5-pro", Model::Gemini15Flash => "gemini-1.5-flash", + Model::Gemini20Flash => "gemini-2.0-flash-exp", Model::Custom { name, .. } => name, } } @@ -313,6 +330,7 @@ impl Model { match self { Model::Gemini15Pro => "Gemini 1.5 Pro", Model::Gemini15Flash => "Gemini 1.5 Flash", + Model::Gemini20Flash => "Gemini 2.0 Flash", Self::Custom { name, display_name, .. } => display_name.as_ref().unwrap_or(name), @@ -323,6 +341,7 @@ impl Model { match self { Model::Gemini15Pro => 2_000_000, Model::Gemini15Flash => 1_000_000, + Model::Gemini20Flash => 1_000_000, Model::Custom { max_tokens, .. } => *max_tokens, } } diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 347e5502ca..952fed02fe 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -22,7 +22,7 @@ test-support = [ "x11", ] runtime_shaders = [] -macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck"] +macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck", "objc2", "objc2-metal"] wayland = [ "blade-graphics", "blade-macros", @@ -119,7 +119,7 @@ http_client = { workspace = true, features = ["test-support"] } unicode-segmentation.workspace = true [build-dependencies] -embed-resource = "2.4" +embed-resource = "3.0" [target.'cfg(target_os = "macos")'.build-dependencies] bindgen = "0.70.0" @@ -132,12 +132,15 @@ core-foundation.workspace = true core-foundation-sys = "0.8" core-graphics = "0.23" core-text = "20.1" -font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "40391b7", optional = true} +font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "40391b7", optional = true } foreign-types = "0.5" log.workspace = true media.workspace = true -metal = "0.29" objc = "0.2" +objc2 = { version = "0.5", optional = true } +objc2-metal = { version = "0.2", optional = true } +#TODO: replace with "objc2" +metal.workspace = true [target.'cfg(any(target_os = "linux", target_os = 
"freebsd", target_os = "macos"))'.dependencies] pathfinder_geometry = "0.5" @@ -145,7 +148,7 @@ pathfinder_geometry = "0.5" [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] # Always used flume = "0.11" -oo7 = "0.3.0" +oo7 = { git = "https://github.com/zed-industries/oo7", branch = "avoid-crypto-panic" } # Used in both windowing options ashpd = { workspace = true, optional = true } @@ -181,7 +184,7 @@ wayland-protocols-plasma = { version = "0.2.0", features = [ # X11 as-raw-xcb-connection = { version = "1", optional = true } -x11rb = { version = "0.13.0", features = [ +x11rb = { version = "0.13.1", features = [ "allow-unsafe-code", "xkb", "randr", @@ -198,7 +201,7 @@ xim = { git = "https://github.com/XDeme1/xim-rs", rev = "d50d461764c2213655cd9cf "x11rb-xcb", "x11rb-client", ], optional = true } -x11-clipboard = { version = "0.9.2", optional = true } +x11-clipboard = { version = "0.9.3", optional = true } [target.'cfg(windows)'.dependencies] blade-util.workspace = true diff --git a/crates/gpui/README.md b/crates/gpui/README.md index 3ca0dcf7ca..6c0a5b607c 100644 --- a/crates/gpui/README.md +++ b/crates/gpui/README.md @@ -61,4 +61,4 @@ In addition to the systems above, GPUI provides a range of smaller services that - The `[gpui::test]` macro provides a convenient way to write tests for your GPUI applications. Tests also have their own kind of context, a `TestAppContext` which provides ways of simulating common platform input. See `app::test_context` and `test` modules for more details. -Currently, the best way to learn about these APIs is to read the Zed source code, ask us about it at a fireside hack, or drop a question in the [Zed Discord](https://discord.gg/zed-community). We're working on improving the documentation, creating more examples, and will be publishing more guides to GPUI on our [blog](https://zed.dev/blog). 
+Currently, the best way to learn about these APIs is to read the Zed source code, ask us about it at a fireside hack, or drop a question in the [Zed Discord](https://zed.dev/community-links). We're working on improving the documentation, creating more examples, and will be publishing more guides to GPUI on our [blog](https://zed.dev/blog). diff --git a/crates/gpui/build.rs b/crates/gpui/build.rs index 5a015106c7..045372b73c 100644 --- a/crates/gpui/build.rs +++ b/crates/gpui/build.rs @@ -18,7 +18,9 @@ fn main() { let rc_file = std::path::Path::new("resources/windows/gpui.rc"); println!("cargo:rerun-if-changed={}", manifest.display()); println!("cargo:rerun-if-changed={}", rc_file.display()); - embed_resource::compile(rc_file, embed_resource::NONE); + embed_resource::compile(rc_file, embed_resource::NONE) + .manifest_required() + .unwrap(); } _ => (), }; @@ -48,6 +50,7 @@ mod macos { fn generate_dispatch_bindings() { println!("cargo:rustc-link-lib=framework=System"); + println!("cargo:rustc-link-lib=framework=ScreenCaptureKit"); println!("cargo:rerun-if-changed=src/platform/mac/dispatch.h"); let bindings = bindgen::Builder::default() diff --git a/crates/gpui/examples/animation.rs b/crates/gpui/examples/animation.rs index 80e4a4456a..c6f810ce17 100644 --- a/crates/gpui/examples/animation.rs +++ b/crates/gpui/examples/animation.rs @@ -1,6 +1,11 @@ use std::time::Duration; -use gpui::*; +use anyhow::Result; +use gpui::{ + black, bounce, div, ease_in_out, percentage, prelude::*, px, rgb, size, svg, Animation, + AnimationExt as _, App, AppContext, AssetSource, Bounds, SharedString, Transformation, + ViewContext, WindowBounds, WindowOptions, +}; struct Assets {} @@ -37,7 +42,7 @@ impl Render for AnimationExample { div() .flex() .bg(rgb(0x2e7d32)) - .size(Length::Definite(Pixels(300.0).into())) + .size(px(300.0)) .justify_center() .items_center() .shadow_lg() diff --git a/crates/gpui/examples/gradient.rs b/crates/gpui/examples/gradient.rs new file mode 100644 index 
0000000000..dcdfaa5257 --- /dev/null +++ b/crates/gpui/examples/gradient.rs @@ -0,0 +1,254 @@ +use gpui::{ + canvas, div, linear_color_stop, linear_gradient, point, prelude::*, px, size, App, AppContext, + Bounds, ColorSpace, Half, Render, ViewContext, WindowOptions, +}; + +struct GradientViewer { + color_space: ColorSpace, +} + +impl GradientViewer { + fn new() -> Self { + Self { + color_space: ColorSpace::default(), + } + } +} + +impl Render for GradientViewer { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let color_space = self.color_space; + + div() + .font_family(".SystemUIFont") + .bg(gpui::white()) + .size_full() + .p_4() + .flex() + .flex_col() + .gap_3() + .child( + div() + .flex() + .gap_2() + .justify_between() + .items_center() + .child("Gradient Examples") + .child( + div().flex().gap_2().items_center().child( + div() + .id("method") + .flex() + .px_3() + .py_1() + .text_sm() + .bg(gpui::black()) + .text_color(gpui::white()) + .child(format!("{}", color_space)) + .active(|this| this.opacity(0.8)) + .on_click(cx.listener(move |this, _, cx| { + this.color_space = match this.color_space { + ColorSpace::Oklab => ColorSpace::Srgb, + ColorSpace::Srgb => ColorSpace::Oklab, + }; + cx.notify(); + })), + ), + ), + ) + .child( + div() + .flex() + .flex_1() + .gap_3() + .child( + div() + .size_full() + .rounded_xl() + .flex() + .items_center() + .justify_center() + .bg(gpui::red()) + .text_color(gpui::white()) + .child("Solid Color"), + ) + .child( + div() + .size_full() + .rounded_xl() + .flex() + .items_center() + .justify_center() + .bg(gpui::blue()) + .text_color(gpui::white()) + .child("Solid Color"), + ), + ) + .child( + div() + .flex() + .flex_1() + .gap_3() + .h_24() + .text_color(gpui::white()) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 45., + linear_color_stop(gpui::red(), 0.), + linear_color_stop(gpui::blue(), 1.), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( 
+ 135., + linear_color_stop(gpui::red(), 0.), + linear_color_stop(gpui::green(), 1.), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 225., + linear_color_stop(gpui::green(), 0.), + linear_color_stop(gpui::blue(), 1.), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 315., + linear_color_stop(gpui::green(), 0.), + linear_color_stop(gpui::yellow(), 1.), + ) + .color_space(color_space)), + ), + ) + .child( + div() + .flex() + .flex_1() + .gap_3() + .h_24() + .text_color(gpui::white()) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 0., + linear_color_stop(gpui::red(), 0.), + linear_color_stop(gpui::white(), 1.), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 90., + linear_color_stop(gpui::blue(), 0.), + linear_color_stop(gpui::white(), 1.), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 180., + linear_color_stop(gpui::green(), 0.), + linear_color_stop(gpui::white(), 1.), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 360., + linear_color_stop(gpui::yellow(), 0.), + linear_color_stop(gpui::white(), 1.), + ) + .color_space(color_space)), + ), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 0., + linear_color_stop(gpui::green(), 0.05), + linear_color_stop(gpui::yellow(), 0.95), + ) + .color_space(color_space)), + ) + .child( + div().flex_1().rounded_xl().bg(linear_gradient( + 90., + linear_color_stop(gpui::blue(), 0.05), + linear_color_stop(gpui::red(), 0.95), + ) + .color_space(color_space)), + ) + .child( + div() + .flex() + .flex_1() + .gap_3() + .child( + div().flex().flex_1().gap_3().child( + div().flex_1().rounded_xl().bg(linear_gradient( + 90., + linear_color_stop(gpui::blue(), 0.5), + linear_color_stop(gpui::red(), 0.5), + ) + .color_space(color_space)), + ), + ) + .child( 
+ div().flex_1().rounded_xl().bg(linear_gradient( + 180., + linear_color_stop(gpui::green(), 0.), + linear_color_stop(gpui::blue(), 0.5), + ) + .color_space(color_space)), + ), + ) + .child(div().h_24().child(canvas( + move |_, _| {}, + move |bounds, _, cx| { + let size = size(bounds.size.width * 0.8, px(80.)); + let square_bounds = Bounds { + origin: point( + bounds.size.width.half() - size.width.half(), + bounds.origin.y, + ), + size, + }; + let height = square_bounds.size.height; + let horizontal_offset = height; + let vertical_offset = px(30.); + let mut path = gpui::Path::new(square_bounds.bottom_left()); + path.line_to(square_bounds.origin + point(horizontal_offset, vertical_offset)); + path.line_to( + square_bounds.top_right() + point(-horizontal_offset, vertical_offset), + ); + path.line_to(square_bounds.bottom_right()); + path.line_to(square_bounds.bottom_left()); + cx.paint_path( + path, + linear_gradient( + 180., + linear_color_stop(gpui::red(), 0.), + linear_color_stop(gpui::blue(), 1.), + ) + .color_space(color_space), + ); + }, + ))) + } +} + +fn main() { + App::new().run(|cx: &mut AppContext| { + cx.open_window( + WindowOptions { + focus: true, + ..Default::default() + }, + |cx| cx.new_view(|_| GradientViewer::new()), + ) + .unwrap(); + cx.activate(true); + }); +} diff --git a/crates/gpui/examples/hello_world.rs b/crates/gpui/examples/hello_world.rs index 961212fa62..9045c7be04 100644 --- a/crates/gpui/examples/hello_world.rs +++ b/crates/gpui/examples/hello_world.rs @@ -1,4 +1,7 @@ -use gpui::*; +use gpui::{ + div, prelude::*, px, rgb, size, App, AppContext, Bounds, SharedString, ViewContext, + WindowBounds, WindowOptions, +}; struct HelloWorld { text: SharedString, @@ -8,8 +11,10 @@ impl Render for HelloWorld { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { div() .flex() - .bg(rgb(0x2e7d32)) - .size(Length::Definite(Pixels(300.0).into())) + .flex_col() + .gap_3() + .bg(rgb(0x505050)) + .size(px(500.0)) .justify_center() 
.items_center() .shadow_lg() @@ -18,12 +23,23 @@ impl Render for HelloWorld { .text_xl() .text_color(rgb(0xffffff)) .child(format!("Hello, {}!", &self.text)) + .child( + div() + .flex() + .gap_2() + .child(div().size_8().bg(gpui::red())) + .child(div().size_8().bg(gpui::green())) + .child(div().size_8().bg(gpui::blue())) + .child(div().size_8().bg(gpui::yellow())) + .child(div().size_8().bg(gpui::black())) + .child(div().size_8().bg(gpui::white())), + ) } } fn main() { App::new().run(|cx: &mut AppContext| { - let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx); + let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); cx.open_window( WindowOptions { window_bounds: Some(WindowBounds::Windowed(bounds)), diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index 19a4e9313f..4aeea3affc 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -1,9 +1,14 @@ +use std::fs; use std::path::PathBuf; use std::str::FromStr; use std::sync::Arc; -use gpui::*; -use std::fs; +use anyhow::Result; +use gpui::{ + actions, div, img, prelude::*, px, rgb, size, App, AppContext, AssetSource, Bounds, + ImageSource, KeyBinding, Menu, MenuItem, Point, SharedString, SharedUri, TitlebarOptions, + ViewContext, WindowBounds, WindowContext, WindowOptions, +}; struct Assets { base: PathBuf, @@ -55,7 +60,7 @@ impl RenderOnce for ImageContainer { .size_full() .gap_4() .child(self.text) - .child(img(self.src).w(px(256.0)).h(px(256.0))), + .child(img(self.src).size(px(256.0))), ) } } @@ -75,7 +80,7 @@ impl Render for ImageShowcase { .justify_center() .items_center() .gap_8() - .bg(rgb(0xFFFFFF)) + .bg(rgb(0xffffff)) .child( div() .flex() diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index d52697c43f..7544800e5e 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -1,6 +1,13 @@ use std::ops::Range; -use gpui::*; +use gpui::{ + actions, black, div, 
fill, hsla, opaque_grey, point, prelude::*, px, relative, rgb, rgba, size, + white, yellow, App, AppContext, Bounds, ClipboardItem, CursorStyle, ElementId, + ElementInputHandler, FocusHandle, FocusableView, GlobalElementId, KeyBinding, Keystroke, + LayoutId, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad, Pixels, Point, + ShapedLine, SharedString, Style, TextRun, UTF16Selection, UnderlineStyle, View, ViewContext, + ViewInputHandler, WindowBounds, WindowContext, WindowOptions, +}; use unicode_segmentation::*; actions!( @@ -15,7 +22,10 @@ actions!( SelectAll, Home, End, - ShowCharacterPalette + ShowCharacterPalette, + Paste, + Cut, + Copy, ] ); @@ -107,6 +117,28 @@ impl TextInput { cx.show_character_palette(); } + fn paste(&mut self, _: &Paste, cx: &mut ViewContext) { + if let Some(text) = cx.read_from_clipboard().and_then(|item| item.text()) { + self.replace_text_in_range(None, &text.replace("\n", " "), cx); + } + } + + fn copy(&mut self, _: &Copy, cx: &mut ViewContext) { + if !self.selected_range.is_empty() { + cx.write_to_clipboard(ClipboardItem::new_string( + (&self.content[self.selected_range.clone()]).to_string(), + )); + } + } + fn cut(&mut self, _: &Copy, cx: &mut ViewContext) { + if !self.selected_range.is_empty() { + cx.write_to_clipboard(ClipboardItem::new_string( + (&self.content[self.selected_range.clone()]).to_string(), + )); + self.replace_text_in_range(None, "", cx) + } + } + fn move_to(&mut self, offset: usize, cx: &mut ViewContext) { self.selected_range = offset..offset; cx.notify() @@ -219,9 +251,11 @@ impl ViewInputHandler for TextInput { fn text_for_range( &mut self, range_utf16: Range, + actual_range: &mut Option>, _cx: &mut ViewContext, ) -> Option { let range = self.range_from_utf16(&range_utf16); + actual_range.replace(self.range_to_utf16(&range)); Some(self.content[range].to_string()) } @@ -436,7 +470,7 @@ impl Element for TextElement { bounds.bottom(), ), ), - rgba(0x3311FF30), + rgba(0x3311ff30), )), None, ) @@ -497,6 
+531,9 @@ impl Render for TextInput { .on_action(cx.listener(Self::home)) .on_action(cx.listener(Self::end)) .on_action(cx.listener(Self::show_character_palette)) + .on_action(cx.listener(Self::paste)) + .on_action(cx.listener(Self::cut)) + .on_action(cx.listener(Self::copy)) .on_mouse_down(MouseButton::Left, cx.listener(Self::on_mouse_down)) .on_mouse_up(MouseButton::Left, cx.listener(Self::on_mouse_up)) .on_mouse_up_out(MouseButton::Left, cx.listener(Self::on_mouse_up)) @@ -581,8 +618,8 @@ impl Render for InputExample { format!( "{:} {}", ks.unparse(), - if let Some(ime_key) = ks.ime_key.as_ref() { - format!("-> {:?}", ime_key) + if let Some(key_char) = ks.key_char.as_ref() { + format!("-> {:?}", key_char) } else { "".to_owned() } @@ -602,6 +639,9 @@ fn main() { KeyBinding::new("shift-left", SelectLeft, None), KeyBinding::new("shift-right", SelectRight, None), KeyBinding::new("cmd-a", SelectAll, None), + KeyBinding::new("cmd-v", Paste, None), + KeyBinding::new("cmd-c", Copy, None), + KeyBinding::new("cmd-x", Cut, None), KeyBinding::new("home", Home, None), KeyBinding::new("end", End, None), KeyBinding::new("ctrl-cmd-space", ShowCharacterPalette, None), diff --git a/crates/gpui/examples/opacity.rs b/crates/gpui/examples/opacity.rs index 6ab28f7f8c..1ebdee544c 100644 --- a/crates/gpui/examples/opacity.rs +++ b/crates/gpui/examples/opacity.rs @@ -1,6 +1,10 @@ use std::{fs, path::PathBuf, time::Duration}; -use gpui::*; +use anyhow::Result; +use gpui::{ + div, hsla, img, point, prelude::*, px, rgb, size, svg, App, AppContext, AssetSource, Bounds, + BoxShadow, ClickEvent, SharedString, Task, Timer, ViewContext, WindowBounds, WindowOptions, +}; struct Assets { base: PathBuf, @@ -76,7 +80,7 @@ impl Render for HelloWorld { .flex() .flex_row() .size_full() - .bg(rgb(0xE0E0E0)) + .bg(rgb(0xe0e0e0)) .text_xl() .child( div() diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index 6e5fe25dfd..6b9f4952c6 100644 --- 
a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -49,17 +49,17 @@ impl PaintingViewer { let height = square_bounds.size.height; let horizontal_offset = height; let vertical_offset = px(30.); - let mut path = Path::new(square_bounds.lower_left()); + let mut path = Path::new(square_bounds.bottom_left()); path.curve_to( square_bounds.origin + point(horizontal_offset, vertical_offset), square_bounds.origin + point(px(0.0), vertical_offset), ); - path.line_to(square_bounds.upper_right() + point(-horizontal_offset, vertical_offset)); + path.line_to(square_bounds.top_right() + point(-horizontal_offset, vertical_offset)); path.curve_to( - square_bounds.lower_right(), - square_bounds.upper_right() + point(px(0.0), vertical_offset), + square_bounds.bottom_right(), + square_bounds.top_right() + point(px(0.0), vertical_offset), ); - path.line_to(square_bounds.lower_left()); + path.line_to(square_bounds.bottom_left()); lines.push(path); Self { diff --git a/crates/gpui/examples/set_menus.rs b/crates/gpui/examples/set_menus.rs index b06f279ce5..e37c207e72 100644 --- a/crates/gpui/examples/set_menus.rs +++ b/crates/gpui/examples/set_menus.rs @@ -1,4 +1,6 @@ -use gpui::*; +use gpui::{ + actions, div, prelude::*, rgb, App, AppContext, Menu, MenuItem, ViewContext, WindowOptions, +}; struct SetMenus; diff --git a/crates/gpui/examples/shadow.rs b/crates/gpui/examples/shadow.rs index cdf3ba42e9..5519b4b839 100644 --- a/crates/gpui/examples/shadow.rs +++ b/crates/gpui/examples/shadow.rs @@ -1,22 +1,574 @@ -use gpui::*; +use gpui::{ + div, hsla, point, prelude::*, px, relative, rgb, size, App, AppContext, Bounds, BoxShadow, Div, + SharedString, ViewContext, WindowBounds, WindowOptions, +}; + +use smallvec::smallvec; struct Shadow {} +impl Shadow { + fn base() -> Div { + div() + .size_16() + .bg(rgb(0xffffff)) + .rounded_full() + .border_1() + .border_color(hsla(0.0, 0.0, 0.0, 0.1)) + } + + fn square() -> Div { + div() + .size_16() + .bg(rgb(0xffffff)) + 
.border_1() + .border_color(hsla(0.0, 0.0, 0.0, 0.1)) + } + + fn rounded_small() -> Div { + div() + .size_16() + .bg(rgb(0xffffff)) + .rounded(px(4.)) + .border_1() + .border_color(hsla(0.0, 0.0, 0.0, 0.1)) + } + + fn rounded_medium() -> Div { + div() + .size_16() + .bg(rgb(0xffffff)) + .rounded(px(8.)) + .border_1() + .border_color(hsla(0.0, 0.0, 0.0, 0.1)) + } + + fn rounded_large() -> Div { + div() + .size_16() + .bg(rgb(0xffffff)) + .rounded(px(12.)) + .border_1() + .border_color(hsla(0.0, 0.0, 0.0, 0.1)) + } +} + +fn example(label: impl Into, example: impl IntoElement) -> impl IntoElement { + let label = label.into(); + + div() + .flex() + .flex_col() + .justify_center() + .items_center() + .w(relative(1. / 6.)) + .border_r_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .child( + div() + .flex() + .items_center() + .justify_center() + .flex_1() + .py_12() + .child(example), + ) + .child( + div() + .w_full() + .border_t_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .p_1() + .flex() + .items_center() + .child(label), + ) +} + impl Render for Shadow { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { div() - .flex() + .id("shadow-example") + .overflow_y_scroll() .bg(rgb(0xffffff)) .size_full() - .justify_center() - .items_center() - .child(div().size_8().shadow_sm()) + .text_xs() + .child(div().flex().flex_col().w_full().children(vec![ + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .flex_row() + .children(vec![ + example( + "Square", + Shadow::square() + .shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded 4", + Shadow::rounded_small() + .shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded 8", + Shadow::rounded_medium() + .shadow(smallvec![BoxShadow { + color: 
hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded 16", + Shadow::rounded_large() + .shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Circle", + Shadow::base() + .shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + ]), + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .w_full() + .children(vec![ + example("None", Shadow::base()), + // Small shadow + example("Small", Shadow::base().shadow_sm()), + // Medium shadow + example("Medium", Shadow::base().shadow_md()), + // Large shadow + example("Large", Shadow::base().shadow_lg()), + example("Extra Large", Shadow::base().shadow_xl()), + example("2X Large", Shadow::base().shadow_2xl()), + ]), + // Horizontal list of increasing blur radii + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Blur 0", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(0.), + spread_radius: px(0.), + }]), + ), + example( + "Blur 2", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(2.), + spread_radius: px(0.), + }]), + ), + example( + "Blur 4", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(4.), + spread_radius: px(0.), + }]), + ), + example( + "Blur 8", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Blur 16", + Shadow::base().shadow(smallvec![BoxShadow { + color: 
hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(16.), + spread_radius: px(0.), + }]), + ), + ]), + // Horizontal list of increasing spread radii + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Spread 0", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Spread 2", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(2.), + }]), + ), + example( + "Spread 4", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(4.), + }]), + ), + example( + "Spread 8", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(8.), + }]), + ), + example( + "Spread 16", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(16.), + }]), + ), + ]), + // Square spread examples + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Square Spread 0", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Square Spread 8", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(8.), + }]), + ), + example( + "Square Spread 16", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: 
px(16.), + }]), + ), + ]), + // Rounded large spread examples + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Rounded Large Spread 0", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded Large Spread 8", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(8.), + }]), + ), + example( + "Rounded Large Spread 16", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.0, 0.0, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(16.), + }]), + ), + ]), + // Directional shadows + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Left", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(-8.), px(0.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Right", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(8.), px(0.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Top", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(-8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Bottom", + Shadow::base().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + ]), + // Square directional shadows + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Square Left", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: 
point(px(-8.), px(0.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Square Right", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(8.), px(0.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Square Top", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(-8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Square Bottom", + Shadow::square().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + ]), + // Rounded large directional shadows + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Rounded Large Left", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(-8.), px(0.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded Large Right", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(8.), px(0.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded Large Top", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(-8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + example( + "Rounded Large Bottom", + Shadow::rounded_large().shadow(smallvec![BoxShadow { + color: hsla(0.0, 0.5, 0.5, 0.3), + offset: point(px(0.), px(8.)), + blur_radius: px(8.), + spread_radius: px(0.), + }]), + ), + ]), + // Multiple shadows for different shapes + div() + .border_b_1() + .border_color(hsla(0.0, 0.0, 0.0, 1.0)) + .flex() + .children(vec![ + example( + "Circle Multiple", + Shadow::base().shadow(smallvec![ + BoxShadow { + color: hsla(0.0 / 360., 1.0, 0.5, 0.3), // Red + offset: 
point(px(0.), px(-12.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(60.0 / 360., 1.0, 0.5, 0.3), // Yellow + offset: point(px(12.), px(0.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(120.0 / 360., 1.0, 0.5, 0.3), // Green + offset: point(px(0.), px(12.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(240.0 / 360., 1.0, 0.5, 0.3), // Blue + offset: point(px(-12.), px(0.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + ]), + ), + example( + "Square Multiple", + Shadow::square().shadow(smallvec![ + BoxShadow { + color: hsla(0.0 / 360., 1.0, 0.5, 0.3), // Red + offset: point(px(0.), px(-12.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(60.0 / 360., 1.0, 0.5, 0.3), // Yellow + offset: point(px(12.), px(0.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(120.0 / 360., 1.0, 0.5, 0.3), // Green + offset: point(px(0.), px(12.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(240.0 / 360., 1.0, 0.5, 0.3), // Blue + offset: point(px(-12.), px(0.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + ]), + ), + example( + "Rounded Large Multiple", + Shadow::rounded_large().shadow(smallvec![ + BoxShadow { + color: hsla(0.0 / 360., 1.0, 0.5, 0.3), // Red + offset: point(px(0.), px(-12.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(60.0 / 360., 1.0, 0.5, 0.3), // Yellow + offset: point(px(12.), px(0.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(120.0 / 360., 1.0, 0.5, 0.3), // Green + offset: point(px(0.), px(12.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + BoxShadow { + color: hsla(240.0 / 360., 1.0, 0.5, 0.3), // Blue + offset: point(px(-12.), px(0.)), + blur_radius: px(8.), + spread_radius: px(2.), + }, + ]), + ), + ]), + ])) } } fn main() { App::new().run(|cx: 
&mut AppContext| { - let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx); + let bounds = Bounds::centered(None, size(px(1000.0), px(800.0)), cx); cx.open_window( WindowOptions { window_bounds: Some(WindowBounds::Windowed(bounds)), @@ -25,5 +577,7 @@ fn main() { |cx| cx.new_view(|_cx| Shadow {}), ) .unwrap(); + + cx.activate(true); }); } diff --git a/crates/gpui/examples/svg/svg.rs b/crates/gpui/examples/svg/svg.rs index 79b83b31e1..45d918f633 100644 --- a/crates/gpui/examples/svg/svg.rs +++ b/crates/gpui/examples/svg/svg.rs @@ -1,7 +1,11 @@ +use std::fs; use std::path::PathBuf; -use gpui::*; -use std::fs; +use anyhow::Result; +use gpui::{ + div, prelude::*, px, rgb, size, svg, App, AppContext, AssetSource, Bounds, SharedString, + ViewContext, WindowBounds, WindowOptions, +}; struct Assets { base: PathBuf, diff --git a/crates/gpui/examples/text_wrapper.rs b/crates/gpui/examples/text_wrapper.rs index cb06425928..c211ece035 100644 --- a/crates/gpui/examples/text_wrapper.rs +++ b/crates/gpui/examples/text_wrapper.rs @@ -1,4 +1,6 @@ -use gpui::*; +use gpui::{ + div, prelude::*, px, size, App, AppContext, Bounds, ViewContext, WindowBounds, WindowOptions, +}; struct HelloWorld {} diff --git a/crates/gpui/examples/uniform_list.rs b/crates/gpui/examples/uniform_list.rs index 2994c0d8fc..33dcc2a6ae 100644 --- a/crates/gpui/examples/uniform_list.rs +++ b/crates/gpui/examples/uniform_list.rs @@ -1,4 +1,7 @@ -use gpui::*; +use gpui::{ + div, prelude::*, px, rgb, size, uniform_list, App, AppContext, Bounds, ViewContext, + WindowBounds, WindowOptions, +}; struct UniformListExample {} diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs index 78a47782c9..8f8faa5ad0 100644 --- a/crates/gpui/examples/window.rs +++ b/crates/gpui/examples/window.rs @@ -1,5 +1,7 @@ -use gpui::*; -use prelude::FluentBuilder as _; +use gpui::{ + div, prelude::*, px, rgb, size, App, AppContext, Bounds, SharedString, Timer, ViewContext, + WindowBounds, WindowContext, 
WindowKind, WindowOptions, +}; struct SubWindow { custom_titlebar: bool, diff --git a/crates/gpui/examples/window_positioning.rs b/crates/gpui/examples/window_positioning.rs index 5505478140..5c39bf5d3f 100644 --- a/crates/gpui/examples/window_positioning.rs +++ b/crates/gpui/examples/window_positioning.rs @@ -1,4 +1,8 @@ -use gpui::*; +use gpui::{ + div, point, prelude::*, px, rgb, App, AppContext, Bounds, DisplayId, Hsla, Pixels, + SharedString, Size, ViewContext, WindowBackgroundAppearance, WindowBounds, WindowKind, + WindowOptions, +}; struct WindowContent { text: SharedString, @@ -86,7 +90,7 @@ fn main() { .unwrap(); let bounds = Bounds { - origin: screen.bounds().upper_right() + origin: screen.bounds().top_right() - point(size.width + margin_offset, -margin_offset), size, }; @@ -101,7 +105,7 @@ fn main() { .unwrap(); let bounds = Bounds { - origin: screen.bounds().lower_left() + origin: screen.bounds().bottom_left() - point(-margin_offset, size.height + margin_offset), size, }; @@ -116,7 +120,7 @@ fn main() { .unwrap(); let bounds = Bounds { - origin: screen.bounds().lower_right() + origin: screen.bounds().bottom_right() - point(size.width + margin_offset, size.height + margin_offset), size, }; diff --git a/crates/gpui/examples/window_shadow.rs b/crates/gpui/examples/window_shadow.rs index 122231f6b5..919773fdb7 100644 --- a/crates/gpui/examples/window_shadow.rs +++ b/crates/gpui/examples/window_shadow.rs @@ -1,15 +1,16 @@ -use gpui::*; -use prelude::FluentBuilder; +use gpui::{ + black, canvas, div, green, point, prelude::*, px, rgb, size, transparent_black, white, App, + AppContext, Bounds, CursorStyle, Decorations, Hsla, MouseButton, Pixels, Point, ResizeEdge, + Size, ViewContext, WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowOptions, +}; struct WindowShadow {} -/* -Things to do: -1. We need a way of calculating which edge or corner the mouse is on, - and then dispatch on that -2. 
We need to improve the shadow rendering significantly -3. We need to implement the techniques in here in Zed -*/ +// Things to do: +// 1. We need a way of calculating which edge or corner the mouse is on, +// and then dispatch on that +// 2. We need to improve the shadow rendering significantly +// 3. We need to implement the techniques in here in Zed impl Render for WindowShadow { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { @@ -128,7 +129,7 @@ impl Render for WindowShadow { div() .flex() .bg(white()) - .size(Length::Definite(Pixels(300.0).into())) + .size(px(300.0)) .justify_center() .items_center() .shadow_lg() diff --git a/crates/gpui/src/action.rs b/crates/gpui/src/action.rs index b81ba6136f..338397a551 100644 --- a/crates/gpui/src/action.rs +++ b/crates/gpui/src/action.rs @@ -62,6 +62,14 @@ pub trait Action: 'static + Send { fn build(value: serde_json::Value) -> Result> where Self: Sized; + + /// A list of alternate, deprecated names for this action. + fn deprecated_aliases() -> &'static [&'static str] + where + Self: Sized, + { + &[] + } } impl std::fmt::Debug for dyn Action { @@ -85,6 +93,7 @@ pub(crate) struct ActionRegistry { builders_by_name: HashMap, names_by_type_id: HashMap, all_names: Vec, // So we can return a static slice. 
+ deprecations: Vec<(SharedString, SharedString)>, } impl Default for ActionRegistry { @@ -93,6 +102,7 @@ impl Default for ActionRegistry { builders_by_name: Default::default(), names_by_type_id: Default::default(), all_names: Default::default(), + deprecations: Default::default(), }; this.load_actions(); @@ -111,6 +121,7 @@ pub type MacroActionBuilder = fn() -> ActionData; #[doc(hidden)] pub struct ActionData { pub name: &'static str, + pub aliases: &'static [&'static str], pub type_id: TypeId, pub build: ActionBuilder, } @@ -134,6 +145,7 @@ impl ActionRegistry { pub(crate) fn load_action(&mut self) { self.insert_action(ActionData { name: A::debug_name(), + aliases: A::deprecated_aliases(), type_id: TypeId::of::(), build: A::build, }); @@ -142,6 +154,10 @@ impl ActionRegistry { fn insert_action(&mut self, action: ActionData) { let name: SharedString = action.name.into(); self.builders_by_name.insert(name.clone(), action.build); + for &alias in action.aliases { + self.builders_by_name.insert(alias.into(), action.build); + self.deprecations.push((alias.into(), name.clone())); + } self.names_by_type_id.insert(action.type_id, name.clone()); self.all_names.push(name); } @@ -174,6 +190,10 @@ impl ActionRegistry { pub fn all_action_names(&self) -> &[SharedString] { self.all_names.as_slice() } + + pub fn action_deprecations(&self) -> &[(SharedString, SharedString)] { + self.deprecations.as_slice() + } } /// Defines unit structs that can be used as actions. @@ -185,8 +205,7 @@ macro_rules! actions { #[doc = "The `"] #[doc = stringify!($name)] #[doc = "` action, see [`gpui::actions!`]"] - #[derive(::std::cmp::PartialEq, ::std::clone::Clone, ::std::default::Default, ::std::fmt::Debug, gpui::private::serde_derive::Deserialize)] - #[serde(crate = "gpui::private::serde")] + #[derive(::std::clone::Clone,::std::cmp::PartialEq, ::std::default::Default)] pub struct $name; gpui::__impl_action!($namespace, $name, $name, @@ -207,7 +226,32 @@ macro_rules! 
actions { /// `impl_action_as!` #[macro_export] macro_rules! action_as { - ($namespace:path, $name:ident as $visual_name:tt) => { + ($namespace:path, $name:ident as $visual_name:ident) => { + #[doc = "The `"] + #[doc = stringify!($name)] + #[doc = "` action, see [`gpui::actions!`]"] + #[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default)] + pub struct $name; + + gpui::__impl_action!( + $namespace, + $name, + $visual_name, + fn build( + _: gpui::private::serde_json::Value, + ) -> gpui::Result<::std::boxed::Box> { + Ok(Box::new(Self)) + } + ); + + gpui::register_action!($name); + }; +} + +/// Defines a unit struct that can be used as an action, with some deprecated aliases. +#[macro_export] +macro_rules! action_aliases { + ($namespace:path, $name:ident, [$($alias:ident),* $(,)?]) => { #[doc = "The `"] #[doc = stringify!($name)] #[doc = "` action, see [`gpui::actions!`]"] @@ -224,11 +268,16 @@ macro_rules! action_as { gpui::__impl_action!( $namespace, $name, - $visual_name, + $name, fn build( _: gpui::private::serde_json::Value, ) -> gpui::Result<::std::boxed::Box> { Ok(Box::new(Self)) + }, + fn deprecated_aliases() -> &'static [&'static str] { + &[ + $(concat!(stringify!($namespace), "::", stringify!($alias))),* + ] } ); @@ -277,7 +326,7 @@ macro_rules! impl_action_as { #[doc(hidden)] #[macro_export] macro_rules! __impl_action { - ($namespace:path, $name:ident, $visual_name:tt, $build:item) => { + ($namespace:path, $name:ident, $visual_name:tt, $($items:item),*) => { impl gpui::Action for $name { fn name(&self) -> &'static str { @@ -299,8 +348,6 @@ macro_rules! __impl_action { ) } - $build - fn partial_eq(&self, action: &dyn gpui::Action) -> bool { action .as_any() @@ -315,6 +362,8 @@ macro_rules! 
__impl_action { fn as_any(&self) -> &dyn ::std::any::Any { self } + + $($items)* } }; } diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 0776e5c72e..a0ec1f9933 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -5,7 +5,10 @@ use std::{ ops::{Deref, DerefMut}, path::{Path, PathBuf}, rc::{Rc, Weak}, - sync::{atomic::Ordering::SeqCst, Arc}, + sync::{ + atomic::{AtomicUsize, Ordering::SeqCst}, + Arc, + }, time::Duration, }; @@ -16,6 +19,7 @@ use futures::{ future::{LocalBoxFuture, Shared}, Future, FutureExt, }; +use parking_lot::RwLock; use slotmap::SlotMap; pub use async_context::*; @@ -29,12 +33,13 @@ use util::ResultExt; use crate::{ current_platform, hash, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle, - Asset, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId, - Entity, EventEmitter, ForegroundExecutor, Global, KeyBinding, Keymap, Keystroke, LayoutId, - Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, Point, - PromptBuilder, PromptHandle, PromptLevel, Render, RenderablePromptHandle, Reservation, - SharedString, SubscriberSet, Subscription, SvgRenderer, Task, TextSystem, View, ViewContext, - Window, WindowAppearance, WindowContext, WindowHandle, WindowId, + Asset, AssetSource, BackgroundExecutor, Bounds, ClipboardItem, Context, DispatchPhase, + DisplayId, Entity, EventEmitter, FocusHandle, FocusId, ForegroundExecutor, Global, KeyBinding, + Keymap, Keystroke, LayoutId, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, + PlatformDisplay, Point, PromptBuilder, PromptHandle, PromptLevel, Render, + RenderablePromptHandle, Reservation, ScreenCaptureSource, SharedString, SubscriberSet, + Subscription, SvgRenderer, Task, TextSystem, View, ViewContext, Window, WindowAppearance, + WindowContext, WindowHandle, WindowId, }; mod async_context; @@ -242,6 +247,7 @@ pub struct AppContext { pub(crate) new_view_observers: SubscriberSet, pub(crate) 
windows: SlotMap>, pub(crate) window_handles: FxHashMap, + pub(crate) focus_handles: Arc>>, pub(crate) keymap: Rc>, pub(crate) keyboard_layout: SharedString, pub(crate) global_action_listeners: @@ -302,8 +308,9 @@ impl AppContext { entities, new_view_observers: SubscriberSet::new(), new_model_observers: SubscriberSet::new(), - window_handles: FxHashMap::default(), windows: SlotMap::with_key(), + window_handles: FxHashMap::default(), + focus_handles: Arc::new(RwLock::new(SlotMap::with_key())), keymap: Rc::new(RefCell::new(Keymap::default())), keyboard_layout, global_action_listeners: FxHashMap::default(), @@ -439,6 +446,7 @@ impl AppContext { self.defer(move |_| activate()); subscription } + pub(crate) fn observe_internal( &mut self, entity: &E, @@ -569,6 +577,12 @@ impl AppContext { }) } + /// Obtain a new [`FocusHandle`], which allows you to track and manipulate the keyboard focus + /// for elements rendered within this window. + pub fn focus_handle(&self) -> FocusHandle { + FocusHandle::new(&self.focus_handles) + } + /// Instructs the platform to activate the application by bringing it to the foreground. pub fn activate(&self, ignoring_other_apps: bool) { self.platform.activate(ignoring_other_apps); @@ -599,6 +613,13 @@ impl AppContext { self.platform.primary_display() } + /// Returns a list of available screen capture sources. + pub fn screen_capture_sources( + &self, + ) -> oneshot::Receiver>>> { + self.platform.screen_capture_sources() + } + /// Returns the display with the given ID, if one exists. pub fn find_display(&self, id: DisplayId) -> Option> { self.displays() @@ -837,28 +858,25 @@ impl AppContext { /// Repeatedly called during `flush_effects` to handle a focused handle being dropped. 
fn release_dropped_focus_handles(&mut self) { - for window_handle in self.windows() { - window_handle - .update(self, |_, cx| { - let mut blur_window = false; - let focus = cx.window.focus; - cx.window.focus_handles.write().retain(|handle_id, count| { - if count.load(SeqCst) == 0 { - if focus == Some(handle_id) { - blur_window = true; - } - false - } else { - true - } - }); - - if blur_window { - cx.blur(); + self.focus_handles + .clone() + .write() + .retain(|handle_id, count| { + if count.load(SeqCst) == 0 { + for window_handle in self.windows() { + window_handle + .update(self, |_, cx| { + if cx.window.focus == Some(handle_id) { + cx.blur(); + } + }) + .unwrap(); } - }) - .unwrap(); - } + false + } else { + true + } + }); } fn apply_notify_effect(&mut self, emitter: EntityId) { @@ -1208,6 +1226,11 @@ impl AppContext { self.actions.all_action_names() } + /// Get a list of all deprecated action aliases and their canonical names. + pub fn action_deprecations(&self) -> &[(SharedString, SharedString)] { + self.actions.action_deprecations() + } + /// Register a callback to be invoked when the application is about to quit. /// It is not possible to cancel the quit event at this point. pub fn on_app_quit( @@ -1395,6 +1418,11 @@ impl AppContext { pub fn get_name(&self) -> &'static str { self.name.as_ref().unwrap() } + + /// Returns `true` if the platform file picker supports selecting a mix of files and directories. 
+ pub fn can_select_mixed_files_and_dirs(&self) -> bool { + self.platform.can_select_mixed_files_and_dirs() + } } impl Context for AppContext { @@ -1472,7 +1500,7 @@ impl Context for AppContext { fn update_window(&mut self, handle: AnyWindowHandle, update: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> T, + F: FnOnce(AnyView, &mut WindowContext) -> T, { self.update(|cx| { let mut window = cx @@ -1578,7 +1606,7 @@ pub struct AnyDrag { pub view: AnyView, /// The value of the dragged item, to be dropped - pub value: Box, + pub value: Arc, /// This is used to render the dragged item in the same place /// on the original element that the drag was initiated @@ -1594,6 +1622,12 @@ pub struct AnyTooltip { /// The absolute position of the mouse when the tooltip was deployed. pub mouse_position: Point, + + /// Whether the tooltitp can be hovered or not. + pub hoverable: bool, + + /// Bounds of the element that triggered the tooltip appearance. + pub origin_bounds: Bounds, } /// A keystroke event, and potentially the associated action diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index be35776595..c24ebc29bc 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -84,7 +84,7 @@ impl Context for AsyncAppContext { fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> T, + F: FnOnce(AnyView, &mut WindowContext) -> T, { let app = self.app.upgrade().context("app was released")?; let mut lock = app.borrow_mut(); @@ -349,7 +349,7 @@ impl Context for AsyncWindowContext { fn update_window(&mut self, window: AnyWindowHandle, update: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> T, + F: FnOnce(AnyView, &mut WindowContext) -> T, { self.app.update_window(window, update) } @@ -369,7 +369,7 @@ impl Context for AsyncWindowContext { impl VisualContext for AsyncWindowContext { fn new_view( &mut self, - 
build_view_state: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view_state: impl FnOnce(&mut ViewContext) -> V, ) -> Self::Result> where V: 'static + Render, @@ -381,7 +381,7 @@ impl VisualContext for AsyncWindowContext { fn update_view( &mut self, view: &View, - update: impl FnOnce(&mut V, &mut ViewContext<'_, V>) -> R, + update: impl FnOnce(&mut V, &mut ViewContext) -> R, ) -> Self::Result { self.window .update(self, |_, cx| cx.update_view(view, update)) @@ -389,7 +389,7 @@ impl VisualContext for AsyncWindowContext { fn replace_root_view( &mut self, - build_view: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view: impl FnOnce(&mut ViewContext) -> V, ) -> Self::Result> where V: 'static + Render, diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index 07aa466295..8bf9c67269 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -117,8 +117,9 @@ impl EntityMap { pub fn read(&self, model: &Model) -> &T { self.assert_valid_context(model); - self.entities[model.entity_id] - .downcast_ref() + self.entities + .get(model.entity_id) + .and_then(|entity| entity.downcast_ref()) .unwrap_or_else(|| double_lease_panic::("read")) } diff --git a/crates/gpui/src/app/model_context.rs b/crates/gpui/src/app/model_context.rs index d3f07b25eb..25b68fbdd5 100644 --- a/crates/gpui/src/app/model_context.rs +++ b/crates/gpui/src/app/model_context.rs @@ -263,7 +263,7 @@ impl<'a, T> Context for ModelContext<'a, T> { fn update_window(&mut self, window: AnyWindowHandle, update: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> R, + F: FnOnce(AnyView, &mut WindowContext) -> R, { self.app.update_window(window, update) } diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 34449c91ec..f371648f75 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -4,8 +4,8 @@ use crate::{ Element, Empty, Entity, EventEmitter, 
ForegroundExecutor, Global, InputEvent, Keystroke, Model, ModelContext, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, - TestPlatform, TestWindow, TextSystem, View, ViewContext, VisualContext, WindowBounds, - WindowContext, WindowHandle, WindowOptions, + TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, View, ViewContext, + VisualContext, WindowBounds, WindowContext, WindowHandle, WindowOptions, }; use anyhow::{anyhow, bail}; use futures::{channel::oneshot, Stream, StreamExt}; @@ -77,7 +77,7 @@ impl Context for TestAppContext { fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> T, + F: FnOnce(AnyView, &mut WindowContext) -> T, { let mut lock = self.app.borrow_mut(); lock.update_window(window, f) @@ -287,6 +287,12 @@ impl TestAppContext { self.test_window(window_handle).simulate_resize(size); } + /// Causes the given sources to be returned if the application queries for screen + /// capture sources. + pub fn set_screen_capture_sources(&self, sources: Vec) { + self.test_platform.set_screen_capture_sources(sources); + } + /// Returns all windows open in the test. pub fn windows(&self) -> Vec { self.app.borrow().windows().clone() @@ -538,12 +544,15 @@ impl Model { impl View { /// Returns a future that resolves when the view is next updated. 
- pub fn next_notification(&self, cx: &TestAppContext) -> impl Future { + pub fn next_notification( + &self, + advance_clock_by: Duration, + cx: &TestAppContext, + ) -> impl Future { use postage::prelude::{Sink as _, Stream as _}; let (mut tx, mut rx) = postage::mpsc::channel(1); - let mut cx = cx.app.app.borrow_mut(); - let subscription = cx.observe(self, move |_, _| { + let subscription = cx.app.app.borrow_mut().observe(self, move |_, _| { tx.try_send(()).ok(); }); @@ -553,6 +562,8 @@ impl View { Duration::from_secs(1) }; + cx.executor().advance_clock(advance_clock_by); + async move { let notification = crate::util::timeout(duration, rx.recv()) .await @@ -905,7 +916,7 @@ impl Context for VisualTestContext { fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> T, + F: FnOnce(AnyView, &mut WindowContext) -> T, { self.cx.update_window(window, f) } @@ -925,7 +936,7 @@ impl Context for VisualTestContext { impl VisualContext for VisualTestContext { fn new_view( &mut self, - build_view: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view: impl FnOnce(&mut ViewContext) -> V, ) -> Self::Result> where V: 'static + Render, @@ -938,7 +949,7 @@ impl VisualContext for VisualTestContext { fn update_view( &mut self, view: &View, - update: impl FnOnce(&mut V, &mut ViewContext<'_, V>) -> R, + update: impl FnOnce(&mut V, &mut ViewContext) -> R, ) -> Self::Result { self.window .update(&mut self.cx, |_, cx| cx.update_view(view, update)) @@ -947,7 +958,7 @@ impl VisualContext for VisualTestContext { fn replace_root_view( &mut self, - build_view: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view: impl FnOnce(&mut ViewContext) -> V, ) -> Self::Result> where V: 'static + Render, @@ -982,7 +993,7 @@ impl AnyWindowHandle { pub fn build_view( &self, cx: &mut TestAppContext, - build_view: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view: impl FnOnce(&mut ViewContext) -> V, ) -> View { self.update(cx, |_, 
cx| cx.new_view(build_view)).unwrap() } diff --git a/crates/gpui/src/color.rs b/crates/gpui/src/color.rs index 9c831d0875..19182b088b 100644 --- a/crates/gpui/src/color.rs +++ b/crates/gpui/src/color.rs @@ -314,7 +314,7 @@ pub fn hsla(h: f32, s: f32, l: f32, a: f32) -> Hsla { } /// Pure black in [`Hsla`] -pub fn black() -> Hsla { +pub const fn black() -> Hsla { Hsla { h: 0., s: 0., @@ -324,7 +324,7 @@ pub fn black() -> Hsla { } /// Transparent black in [`Hsla`] -pub fn transparent_black() -> Hsla { +pub const fn transparent_black() -> Hsla { Hsla { h: 0., s: 0., @@ -334,7 +334,7 @@ pub fn transparent_black() -> Hsla { } /// Transparent black in [`Hsla`] -pub fn transparent_white() -> Hsla { +pub const fn transparent_white() -> Hsla { Hsla { h: 0., s: 0., @@ -354,7 +354,7 @@ pub fn opaque_grey(lightness: f32, opacity: f32) -> Hsla { } /// Pure white in [`Hsla`] -pub fn white() -> Hsla { +pub const fn white() -> Hsla { Hsla { h: 0., s: 0., @@ -364,7 +364,7 @@ pub fn white() -> Hsla { } /// The color red in [`Hsla`] -pub fn red() -> Hsla { +pub const fn red() -> Hsla { Hsla { h: 0., s: 1., @@ -374,9 +374,9 @@ pub fn red() -> Hsla { } /// The color blue in [`Hsla`] -pub fn blue() -> Hsla { +pub const fn blue() -> Hsla { Hsla { - h: 0.6, + h: 0.6666666667, s: 1., l: 0.5, a: 1., @@ -384,19 +384,19 @@ pub fn blue() -> Hsla { } /// The color green in [`Hsla`] -pub fn green() -> Hsla { +pub const fn green() -> Hsla { Hsla { - h: 0.33, + h: 0.3333333333, s: 1., - l: 0.5, + l: 0.25, a: 1., } } /// The color yellow in [`Hsla`] -pub fn yellow() -> Hsla { +pub const fn yellow() -> Hsla { Hsla { - h: 0.16, + h: 0.1666666667, s: 1., l: 0.5, a: 1., @@ -410,32 +410,32 @@ impl Hsla { } /// The color red - pub fn red() -> Self { + pub const fn red() -> Self { red() } /// The color green - pub fn green() -> Self { + pub const fn green() -> Self { green() } /// The color blue - pub fn blue() -> Self { + pub const fn blue() -> Self { blue() } /// The color black - pub fn black() -> Self 
{ + pub const fn black() -> Self { black() } /// The color white - pub fn white() -> Self { + pub const fn white() -> Self { white() } /// The color transparent black - pub fn transparent_black() -> Self { + pub const fn transparent_black() -> Self { transparent_black() } @@ -548,6 +548,164 @@ impl<'de> Deserialize<'de> for Hsla { } } +#[derive(Debug, Clone, Copy, PartialEq)] +#[repr(C)] +pub(crate) enum BackgroundTag { + Solid = 0, + LinearGradient = 1, +} + +/// A color space for color interpolation. +/// +/// References: +/// - https://developer.mozilla.org/en-US/docs/Web/CSS/color-interpolation-method +/// - https://www.w3.org/TR/css-color-4/#typedef-color-space +#[derive(Debug, Clone, Copy, PartialEq, Default)] +#[repr(C)] +pub enum ColorSpace { + #[default] + /// The sRGB color space. + Srgb = 0, + /// The Oklab color space. + Oklab = 1, +} + +impl Display for ColorSpace { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + ColorSpace::Srgb => write!(f, "sRGB"), + ColorSpace::Oklab => write!(f, "Oklab"), + } + } +} + +/// A background color, which can be either a solid color or a linear gradient. +#[derive(Debug, Clone, Copy, PartialEq)] +#[repr(C)] +pub struct Background { + pub(crate) tag: BackgroundTag, + pub(crate) color_space: ColorSpace, + pub(crate) solid: Hsla, + pub(crate) angle: f32, + pub(crate) colors: [LinearColorStop; 2], + /// Padding for alignment for repr(C) layout. + pad: u32, +} + +impl Eq for Background {} +impl Default for Background { + fn default() -> Self { + Self { + tag: BackgroundTag::Solid, + solid: Hsla::default(), + color_space: ColorSpace::default(), + angle: 0.0, + colors: [LinearColorStop::default(), LinearColorStop::default()], + pad: 0, + } + } +} + +/// Creates a LinearGradient background color. +/// +/// The gradient line's angle of direction. A value of `0.` is equivalent to to top; increasing values rotate clockwise from there. +/// +/// The `angle` is in degrees value in the range 0.0 to 360.0. 
+/// +/// https://developer.mozilla.org/en-US/docs/Web/CSS/gradient/linear-gradient +pub fn linear_gradient( + angle: f32, + from: impl Into, + to: impl Into, +) -> Background { + Background { + tag: BackgroundTag::LinearGradient, + angle, + colors: [from.into(), to.into()], + ..Default::default() + } +} + +/// A color stop in a linear gradient. +/// +/// https://developer.mozilla.org/en-US/docs/Web/CSS/gradient/linear-gradient#linear-color-stop +#[derive(Debug, Clone, Copy, Default, PartialEq)] +#[repr(C)] +pub struct LinearColorStop { + /// The color of the color stop. + pub color: Hsla, + /// The percentage of the gradient, in the range 0.0 to 1.0. + pub percentage: f32, +} + +/// Creates a new linear color stop. +/// +/// The percentage of the gradient, in the range 0.0 to 1.0. +pub fn linear_color_stop(color: impl Into, percentage: f32) -> LinearColorStop { + LinearColorStop { + color: color.into(), + percentage, + } +} + +impl LinearColorStop { + /// Returns a new color stop with the same color, but with a modified alpha value. + pub fn opacity(&self, factor: f32) -> Self { + Self { + percentage: self.percentage, + color: self.color.opacity(factor), + } + } +} + +impl Background { + /// Use specified color space for color interpolation. + /// + /// https://developer.mozilla.org/en-US/docs/Web/CSS/color-interpolation-method + pub fn color_space(mut self, color_space: ColorSpace) -> Self { + self.color_space = color_space; + self + } + + /// Returns a new background color with the same hue, saturation, and lightness, but with a modified alpha value. + pub fn opacity(&self, factor: f32) -> Self { + let mut background = *self; + background.solid = background.solid.opacity(factor); + background.colors = [ + self.colors[0].opacity(factor), + self.colors[1].opacity(factor), + ]; + background + } + + /// Returns whether the background color is transparent. 
+ pub fn is_transparent(&self) -> bool { + match self.tag { + BackgroundTag::Solid => self.solid.is_transparent(), + BackgroundTag::LinearGradient => self.colors.iter().all(|c| c.color.is_transparent()), + } + } +} + +impl From for Background { + fn from(value: Hsla) -> Self { + Background { + tag: BackgroundTag::Solid, + solid: value, + ..Default::default() + } + } +} +impl From for Background { + fn from(value: Rgba) -> Self { + Background { + tag: BackgroundTag::Solid, + solid: Hsla::from(value), + ..Default::default() + } + } +} + #[cfg(test)] mod tests { use serde_json::json; @@ -595,4 +753,32 @@ mod tests { assert_eq!(actual, rgba(0xdeadbeef)) } + + #[test] + fn test_background_solid() { + let color = Hsla::from(rgba(0xff0099ff)); + let mut background = Background::from(color); + assert_eq!(background.tag, BackgroundTag::Solid); + assert_eq!(background.solid, color); + + assert_eq!(background.opacity(0.5).solid, color.opacity(0.5)); + assert_eq!(background.is_transparent(), false); + background.solid = hsla(0.0, 0.0, 0.0, 0.0); + assert_eq!(background.is_transparent(), true); + } + + #[test] + fn test_background_linear_gradient() { + let from = linear_color_stop(rgba(0xff0099ff), 0.0); + let to = linear_color_stop(rgba(0x00ff99ff), 1.0); + let background = linear_gradient(90.0, from, to); + assert_eq!(background.tag, BackgroundTag::LinearGradient); + assert_eq!(background.colors[0], from); + assert_eq!(background.colors[1], to); + + assert_eq!(background.opacity(0.5).colors[0], from.opacity(0.5)); + assert_eq!(background.opacity(0.5).colors[1], to.opacity(0.5)); + assert_eq!(background.is_transparent(), false); + assert_eq!(background.opacity(0.0).is_transparent(), true); + } } diff --git a/crates/gpui/src/element.rs b/crates/gpui/src/element.rs index f0c5119033..38c4c83904 100644 --- a/crates/gpui/src/element.rs +++ b/crates/gpui/src/element.rs @@ -500,7 +500,7 @@ impl AnyElement { if !focus_assigned { if let Some(focus_id) = cx.window.next_frame.focus { - 
return FocusHandle::for_id(focus_id, &cx.window.focus_handles); + return FocusHandle::for_id(focus_id, &cx.focus_handles); } } diff --git a/crates/gpui/src/elements/anchored.rs b/crates/gpui/src/elements/anchored.rs index 7872ba6349..4dba2b580a 100644 --- a/crates/gpui/src/elements/anchored.rs +++ b/crates/gpui/src/elements/anchored.rs @@ -2,8 +2,8 @@ use smallvec::SmallVec; use taffy::style::{Display, Position}; use crate::{ - point, AnyElement, Bounds, Edges, Element, GlobalElementId, IntoElement, LayoutId, - ParentElement, Pixels, Point, Size, Style, WindowContext, + point, AnyElement, Axis, Bounds, Corner, Edges, Element, GlobalElementId, IntoElement, + LayoutId, ParentElement, Pixels, Point, Size, Style, WindowContext, }; /// The state that the anchored element element uses to track its children. @@ -15,10 +15,11 @@ pub struct AnchoredState { /// will avoid overflowing the window bounds. pub struct Anchored { children: SmallVec<[AnyElement; 2]>, - anchor_corner: AnchorCorner, + anchor_corner: Corner, fit_mode: AnchoredFitMode, anchor_position: Option>, position_mode: AnchoredPositionMode, + offset: Option>, } /// anchored gives you an element that will avoid overflowing the window bounds. @@ -26,16 +27,17 @@ pub struct Anchored { pub fn anchored() -> Anchored { Anchored { children: SmallVec::new(), - anchor_corner: AnchorCorner::TopLeft, + anchor_corner: Corner::TopLeft, fit_mode: AnchoredFitMode::SwitchAnchor, anchor_position: None, position_mode: AnchoredPositionMode::Window, + offset: None, } } impl Anchored { /// Sets which corner of the anchored element should be anchored to the current position. - pub fn anchor(mut self, anchor: AnchorCorner) -> Self { + pub fn anchor(mut self, anchor: Corner) -> Self { self.anchor_corner = anchor; self } @@ -47,6 +49,13 @@ impl Anchored { self } + /// Offset the final position by this amount. + /// Useful when you want to anchor to an element but offset from it, such as in PopoverMenu. 
+ pub fn offset(mut self, offset: Point) -> Self { + self.offset = Some(offset); + self + } + /// Sets the position mode for this anchored element. Local will have this /// interpret its [`Anchored::position`] as relative to the parent element. /// While Window will have it interpret the position as relative to the window. @@ -120,7 +129,7 @@ impl Element for Anchored { for child_layout_id in &request_layout.child_layout_ids { let child_bounds = cx.layout_bounds(*child_layout_id); child_min = child_min.min(&child_bounds.origin); - child_max = child_max.max(&child_bounds.lower_right()); + child_max = child_max.max(&child_bounds.bottom_right()); } let size: Size = (child_max - child_min).into(); @@ -129,6 +138,7 @@ impl Element for Anchored { self.anchor_corner, size, bounds, + self.offset, ); let limits = Bounds { @@ -140,19 +150,23 @@ impl Element for Anchored { let mut anchor_corner = self.anchor_corner; if desired.left() < limits.left() || desired.right() > limits.right() { - let switched = anchor_corner - .switch_axis(Axis::Horizontal) - .get_bounds(origin, size); + let switched = Bounds::from_corner_and_size( + anchor_corner.other_side_corner_along(Axis::Horizontal), + origin, + size, + ); if !(switched.left() < limits.left() || switched.right() > limits.right()) { - anchor_corner = anchor_corner.switch_axis(Axis::Horizontal); + anchor_corner = anchor_corner.other_side_corner_along(Axis::Horizontal); desired = switched } } if desired.top() < limits.top() || desired.bottom() > limits.bottom() { - let switched = anchor_corner - .switch_axis(Axis::Vertical) - .get_bounds(origin, size); + let switched = Bounds::from_corner_and_size( + anchor_corner.other_side_corner_along(Axis::Vertical), + origin, + size, + ); if !(switched.top() < limits.top() || switched.bottom() > limits.bottom()) { desired = switched; } @@ -214,11 +228,6 @@ impl IntoElement for Anchored { } } -enum Axis { - Horizontal, - Vertical, -} - /// Which algorithm to use when fitting the anchored 
element to be inside the window. #[derive(Copy, Clone, PartialEq)] pub enum AnchoredFitMode { @@ -243,83 +252,29 @@ impl AnchoredPositionMode { fn get_position_and_bounds( &self, anchor_position: Option>, - anchor_corner: AnchorCorner, + anchor_corner: Corner, size: Size, bounds: Bounds, + offset: Option>, ) -> (Point, Bounds) { + let offset = offset.unwrap_or_default(); + match self { AnchoredPositionMode::Window => { let anchor_position = anchor_position.unwrap_or(bounds.origin); - let bounds = anchor_corner.get_bounds(anchor_position, size); + let bounds = + Bounds::from_corner_and_size(anchor_corner, anchor_position + offset, size); (anchor_position, bounds) } AnchoredPositionMode::Local => { let anchor_position = anchor_position.unwrap_or_default(); - let bounds = anchor_corner.get_bounds(bounds.origin + anchor_position, size); + let bounds = Bounds::from_corner_and_size( + anchor_corner, + bounds.origin + anchor_position + offset, + size, + ); (anchor_position, bounds) } } } } - -/// Which corner of the anchored element should be considered the anchor. -#[derive(Clone, Copy, PartialEq, Eq)] -pub enum AnchorCorner { - /// The top left corner - TopLeft, - /// The top right corner - TopRight, - /// The bottom left corner - BottomLeft, - /// The bottom right corner - BottomRight, -} - -impl AnchorCorner { - fn get_bounds(&self, origin: Point, size: Size) -> Bounds { - let origin = match self { - Self::TopLeft => origin, - Self::TopRight => Point { - x: origin.x - size.width, - y: origin.y, - }, - Self::BottomLeft => Point { - x: origin.x, - y: origin.y - size.height, - }, - Self::BottomRight => Point { - x: origin.x - size.width, - y: origin.y - size.height, - }, - }; - - Bounds { origin, size } - } - - /// Get the point corresponding to this anchor corner in `bounds`. 
- pub fn corner(&self, bounds: Bounds) -> Point { - match self { - Self::TopLeft => bounds.origin, - Self::TopRight => bounds.upper_right(), - Self::BottomLeft => bounds.lower_left(), - Self::BottomRight => bounds.lower_right(), - } - } - - fn switch_axis(self, axis: Axis) -> Self { - match axis { - Axis::Vertical => match self { - AnchorCorner::TopLeft => AnchorCorner::BottomLeft, - AnchorCorner::TopRight => AnchorCorner::BottomRight, - AnchorCorner::BottomLeft => AnchorCorner::TopLeft, - AnchorCorner::BottomRight => AnchorCorner::TopRight, - }, - Axis::Horizontal => match self { - AnchorCorner::TopLeft => AnchorCorner::TopRight, - AnchorCorner::TopRight => AnchorCorner::TopLeft, - AnchorCorner::BottomLeft => AnchorCorner::BottomRight, - AnchorCorner::BottomRight => AnchorCorner::BottomLeft, - }, - } - } -} diff --git a/crates/gpui/src/elements/animation.rs b/crates/gpui/src/elements/animation.rs index bffa52cfba..2cab9e8fa4 100644 --- a/crates/gpui/src/elements/animation.rs +++ b/crates/gpui/src/elements/animation.rs @@ -1,6 +1,6 @@ use std::time::{Duration, Instant}; -use crate::{AnyElement, Element, ElementId, GlobalElementId, IntoElement}; +use crate::{AnyElement, Element, ElementId, GlobalElementId, IntoElement, WindowContext}; pub use easing::*; @@ -104,7 +104,7 @@ impl Element for AnimationElement { fn request_layout( &mut self, global_id: Option<&GlobalElementId>, - cx: &mut crate::WindowContext, + cx: &mut WindowContext, ) -> (crate::LayoutId, Self::RequestLayoutState) { cx.with_element_state(global_id.unwrap(), |state, cx| { let state = state.unwrap_or_else(|| AnimationState { @@ -145,7 +145,7 @@ impl Element for AnimationElement { _id: Option<&GlobalElementId>, _bounds: crate::Bounds, element: &mut Self::RequestLayoutState, - cx: &mut crate::WindowContext, + cx: &mut WindowContext, ) -> Self::PrepaintState { element.prepaint(cx); } @@ -156,7 +156,7 @@ impl Element for AnimationElement { _bounds: crate::Bounds, element: &mut Self::RequestLayoutState, _: 
&mut Self::PrepaintState, - cx: &mut crate::WindowContext, + cx: &mut WindowContext, ) { element.paint(cx); } diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 6928ca74ee..755ffabf16 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -35,6 +35,7 @@ use std::{ mem, ops::DerefMut, rc::Rc, + sync::Arc, time::Duration, }; use taffy::style::Overflow; @@ -61,6 +62,7 @@ pub struct DragMoveEvent { /// The bounds of this element. pub bounds: Bounds, drag: PhantomData, + dragged_item: Arc, } impl DragMoveEvent { @@ -71,6 +73,11 @@ impl DragMoveEvent { .and_then(|drag| drag.value.downcast_ref::()) .expect("DragMoveEvent is only valid when the stored active drag is of the same type.") } + + /// An item that is about to be dropped. + pub fn dragged_item(&self) -> &dyn Any { + self.dragged_item.as_ref() + } } impl Interactivity { @@ -243,20 +250,20 @@ impl Interactivity { { self.mouse_move_listeners .push(Box::new(move |event, phase, hitbox, cx| { - if phase == DispatchPhase::Capture - && cx - .active_drag - .as_ref() - .is_some_and(|drag| drag.value.as_ref().type_id() == TypeId::of::()) - { - (listener)( - &DragMoveEvent { - event: event.clone(), - bounds: hitbox.bounds, - drag: PhantomData, - }, - cx, - ); + if phase == DispatchPhase::Capture { + if let Some(drag) = &cx.active_drag { + if drag.value.as_ref().type_id() == TypeId::of::() { + (listener)( + &DragMoveEvent { + event: event.clone(), + bounds: hitbox.bounds, + drag: PhantomData, + dragged_item: Arc::clone(&drag.value), + }, + cx, + ); + } + } } })); } @@ -454,7 +461,7 @@ impl Interactivity { "calling on_drag more than once on the same element is not supported" ); self.drag_listener = Some(( - Box::new(value), + Arc::new(value), Box::new(move |value, offset, cx| { constructor(value.downcast_ref().unwrap(), offset, cx).into() }), @@ -1186,7 +1193,7 @@ impl Element for Div { for (ix, child_layout_id) in 
request_layout.child_layout_ids.iter().enumerate() { let child_bounds = cx.layout_bounds(*child_layout_id); child_min = child_min.min(&child_bounds.origin); - child_max = child_max.max(&child_bounds.lower_right()); + child_max = child_max.max(&child_bounds.bottom_right()); state.child_bounds.push(child_bounds); if let Some(requested) = requested.as_ref() { @@ -1201,7 +1208,7 @@ impl Element for Div { for child_layout_id in &request_layout.child_layout_ids { let child_bounds = cx.layout_bounds(*child_layout_id); child_min = child_min.min(&child_bounds.origin); - child_max = child_max.max(&child_bounds.lower_right()); + child_max = child_max.max(&child_bounds.bottom_right()); } (child_max - child_min).into() }; @@ -1292,7 +1299,7 @@ pub struct Interactivity { pub(crate) drop_listeners: Vec<(TypeId, DropListener)>, pub(crate) can_drop_predicate: Option, pub(crate) click_listeners: Vec, - pub(crate) drag_listener: Option<(Box, DragListener)>, + pub(crate) drag_listener: Option<(Arc, DragListener)>, pub(crate) hover_listener: Option>, pub(crate) tooltip_builder: Option, pub(crate) occlude_mouse: bool, @@ -1916,6 +1923,7 @@ impl Interactivity { cx.on_mouse_event({ let active_tooltip = active_tooltip.clone(); let hitbox = hitbox.clone(); + let source_bounds = hitbox.bounds; let tooltip_id = self.tooltip_id; move |_: &MouseMoveEvent, phase, cx| { let is_hovered = @@ -1945,6 +1953,8 @@ impl Interactivity { tooltip: Some(AnyTooltip { view: build_tooltip(cx), mouse_position: cx.mouse_position(), + hoverable: tooltip_is_hoverable, + origin_bounds: source_bounds, }), _task: None, }); @@ -2492,7 +2502,7 @@ impl ScrollAnchor { } } /// Request scroll to this item on the next frame. 
- pub fn scroll_to(&self, cx: &mut WindowContext<'_>) { + pub fn scroll_to(&self, cx: &mut WindowContext) { let this = self.clone(); cx.on_next_frame(move |_| { diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 895904c801..3a1b1d92fb 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -8,7 +8,8 @@ use anyhow::{anyhow, Result}; use futures::{AsyncReadExt, Future}; use image::{ - codecs::gif::GifDecoder, AnimationDecoder, Frame, ImageBuffer, ImageError, ImageFormat, + codecs::{gif::GifDecoder, webp::WebPDecoder}, + AnimationDecoder, DynamicImage, Frame, ImageBuffer, ImageError, ImageFormat, Rgba, }; use smallvec::SmallVec; use std::{ @@ -542,6 +543,34 @@ impl Asset for ImageAssetLoader { frames } + ImageFormat::WebP => { + let mut decoder = WebPDecoder::new(Cursor::new(&bytes))?; + + if decoder.has_animation() { + let _ = decoder.set_background_color(Rgba([0, 0, 0, 0])); + let mut frames = SmallVec::new(); + + for frame in decoder.into_frames() { + let mut frame = frame?; + // Convert from RGBA to BGRA. + for pixel in frame.buffer_mut().chunks_exact_mut(4) { + pixel.swap(0, 2); + } + frames.push(frame); + } + + frames + } else { + let mut data = DynamicImage::from_decoder(decoder)?.into_rgba8(); + + // Convert from RGBA to BGRA. 
+ for pixel in data.chunks_exact_mut(4) { + pixel.swap(0, 2); + } + + SmallVec::from_elem(Frame::new(data), 1) + } + } _ => { let mut data = image::load_from_memory_with_format(&bytes, format)?.into_rgba8(); diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 47f0a82774..e652f30373 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -716,7 +716,7 @@ impl Element for List { fn request_layout( &mut self, _id: Option<&GlobalElementId>, - cx: &mut crate::WindowContext, + cx: &mut WindowContext, ) -> (crate::LayoutId, Self::RequestLayoutState) { let layout_id = match self.sizing_behavior { ListSizingBehavior::Infer => { @@ -827,7 +827,7 @@ impl Element for List { bounds: Bounds, _: &mut Self::RequestLayoutState, prepaint: &mut Self::PrepaintState, - cx: &mut crate::WindowContext, + cx: &mut WindowContext, ) { cx.with_content_mask(Some(ContentMask { bounds }), |cx| { for item in &mut prepaint.layout.item_layouts { diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 56b551737a..489a798014 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -263,7 +263,7 @@ impl TextLayout { .line_height .to_pixels(font_size.into(), cx.rem_size()); - let runs = if let Some(runs) = runs { + let mut runs = if let Some(runs) = runs { runs } else { vec![text_style.to_run(text.len())] @@ -306,7 +306,7 @@ impl TextLayout { let mut line_wrapper = cx.text_system().line_wrapper(text_style.font(), font_size); let text = if let Some(truncate_width) = truncate_width { - line_wrapper.truncate_line(text.clone(), truncate_width, ellipsis) + line_wrapper.truncate_line(text.clone(), truncate_width, ellipsis, &mut runs) } else { text.clone() }; @@ -472,9 +472,9 @@ pub struct InteractiveText { element_id: ElementId, text: StyledText, click_listener: - Option], InteractiveTextClickEvent, &mut WindowContext<'_>)>>, - hover_listener: Option, MouseMoveEvent, &mut 
WindowContext<'_>)>>, - tooltip_builder: Option) -> Option>>, + Option], InteractiveTextClickEvent, &mut WindowContext)>>, + hover_listener: Option, MouseMoveEvent, &mut WindowContext)>>, + tooltip_builder: Option Option>>, clickable_ranges: Vec>, } @@ -510,7 +510,7 @@ impl InteractiveText { pub fn on_click( mut self, ranges: Vec>, - listener: impl Fn(usize, &mut WindowContext<'_>) + 'static, + listener: impl Fn(usize, &mut WindowContext) + 'static, ) -> Self { self.click_listener = Some(Box::new(move |ranges, event, cx| { for (range_ix, range) in ranges.iter().enumerate() { @@ -528,7 +528,7 @@ impl InteractiveText { /// index of the hovered character, or None if the mouse leaves the text. pub fn on_hover( mut self, - listener: impl Fn(Option, MouseMoveEvent, &mut WindowContext<'_>) + 'static, + listener: impl Fn(Option, MouseMoveEvent, &mut WindowContext) + 'static, ) -> Self { self.hover_listener = Some(Box::new(listener)); self @@ -537,7 +537,7 @@ impl InteractiveText { /// tooltip lets you specify a tooltip for a given character index in the string. 
pub fn tooltip( mut self, - builder: impl Fn(usize, &mut WindowContext<'_>) -> Option + 'static, + builder: impl Fn(usize, &mut WindowContext) -> Option + 'static, ) -> Self { self.tooltip_builder = Some(Rc::new(builder)); self @@ -675,6 +675,7 @@ impl Element for InteractiveText { if let Some(tooltip_builder) = self.tooltip_builder.clone() { let hitbox = hitbox.clone(); + let source_bounds = hitbox.bounds; let active_tooltip = interactive_state.active_tooltip.clone(); let pending_mouse_down = interactive_state.mouse_down_index.clone(); let text_layout = text_layout.clone(); @@ -708,6 +709,8 @@ impl Element for InteractiveText { tooltip: Some(AnyTooltip { view: tooltip, mouse_position: cx.mouse_position(), + hoverable: true, + origin_bounds: source_bounds, }), _task: None, } diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 703d9bebe6..4c2b449b41 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -219,7 +219,7 @@ impl Element for UniformList { let padded_bounds = Bounds::from_corners( bounds.origin + point(border.left + padding.left, border.top + padding.top), - bounds.lower_right() + bounds.bottom_right() - point(border.right + padding.right, border.bottom + padding.bottom), ); @@ -261,7 +261,7 @@ impl Element for UniformList { let padded_bounds = Bounds::from_corners( bounds.origin + point(border.left + padding.left, border.top), - bounds.lower_right() - point(border.right + padding.right, border.bottom), + bounds.bottom_right() - point(border.right + padding.right, border.bottom), ); if let Some(handle) = self.scroll_handle.as_mut() { diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 3035892d7a..1015b2bcc6 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -1,6 +1,10 @@ use crate::{AppContext, PlatformDispatcher}; +use async_task::Runnable; use futures::channel::mpsc; use smol::prelude::*; +use 
std::mem::ManuallyDrop; +use std::panic::Location; +use std::thread::{self, ThreadId}; use std::{ fmt::Debug, marker::PhantomData, @@ -46,7 +50,10 @@ pub struct ForegroundExecutor { /// the task to continue running, but with no way to return a value. #[must_use] #[derive(Debug)] -pub enum Task { +pub struct Task(TaskState); + +#[derive(Debug)] +enum TaskState { /// A task that is ready to return a value Ready(Option), @@ -57,14 +64,14 @@ pub enum Task { impl Task { /// Creates a new task that will resolve with the value pub fn ready(val: T) -> Self { - Task::Ready(Some(val)) + Task(TaskState::Ready(Some(val))) } /// Detaching a task runs it to completion in the background pub fn detach(self) { match self { - Task::Ready(_) => {} - Task::Spawned(task) => task.detach(), + Task(TaskState::Ready(_)) => {} + Task(TaskState::Spawned(task)) => task.detach(), } } } @@ -90,8 +97,8 @@ impl Future for Task { fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { match unsafe { self.get_unchecked_mut() } { - Task::Ready(val) => Poll::Ready(val.take().unwrap()), - Task::Spawned(task) => task.poll(cx), + Task(TaskState::Ready(val)) => Poll::Ready(val.take().unwrap()), + Task(TaskState::Spawned(task)) => task.poll(cx), } } } @@ -159,7 +166,7 @@ impl BackgroundExecutor { let (runnable, task) = async_task::spawn(future, move |runnable| dispatcher.dispatch(runnable, label)); runnable.schedule(); - Task::Spawned(task) + Task(TaskState::Spawned(task)) } /// Used by the test harness to run an async test in a synchronous fashion. @@ -328,12 +335,15 @@ impl BackgroundExecutor { /// Depending on other concurrent tasks the elapsed duration may be longer /// than requested. 
pub fn timer(&self, duration: Duration) -> Task<()> { + if duration.is_zero() { + return Task::ready(()); + } let (runnable, task) = async_task::spawn(async move {}, { let dispatcher = self.dispatcher.clone(); move |runnable| dispatcher.dispatch_after(duration, runnable) }); runnable.schedule(); - Task::Spawned(task) + Task(TaskState::Spawned(task)) } /// in tests, start_waiting lets you indicate which task is waiting (for debugging only) @@ -437,25 +447,93 @@ impl ForegroundExecutor { } /// Enqueues the given Task to run on the main thread at some point in the future. + #[track_caller] pub fn spawn(&self, future: impl Future + 'static) -> Task where R: 'static, { let dispatcher = self.dispatcher.clone(); + + #[track_caller] fn inner( dispatcher: Arc, future: AnyLocalFuture, ) -> Task { - let (runnable, task) = async_task::spawn_local(future, move |runnable| { + let (runnable, task) = spawn_local_with_source_location(future, move |runnable| { dispatcher.dispatch_on_main_thread(runnable) }); runnable.schedule(); - Task::Spawned(task) + Task(TaskState::Spawned(task)) } inner::(dispatcher, Box::pin(future)) } } +/// Variant of `async_task::spawn_local` that includes the source location of the spawn in panics. +/// +/// Copy-modified from: +/// https://github.com/smol-rs/async-task/blob/ca9dbe1db9c422fd765847fa91306e30a6bb58a9/src/runnable.rs#L405 +#[track_caller] +fn spawn_local_with_source_location( + future: Fut, + schedule: S, +) -> (Runnable<()>, async_task::Task) +where + Fut: Future + 'static, + Fut::Output: 'static, + S: async_task::Schedule<()> + Send + Sync + 'static, +{ + #[inline] + fn thread_id() -> ThreadId { + std::thread_local! 
{ + static ID: ThreadId = thread::current().id(); + } + ID.try_with(|id| *id) + .unwrap_or_else(|_| thread::current().id()) + } + + struct Checked { + id: ThreadId, + inner: ManuallyDrop, + location: &'static Location<'static>, + } + + impl Drop for Checked { + fn drop(&mut self) { + assert!( + self.id == thread_id(), + "local task dropped by a thread that didn't spawn it. Task spawned at {}", + self.location + ); + unsafe { + ManuallyDrop::drop(&mut self.inner); + } + } + } + + impl Future for Checked { + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + assert!( + self.id == thread_id(), + "local task polled by a thread that didn't spawn it. Task spawned at {}", + self.location + ); + unsafe { self.map_unchecked_mut(|c| &mut *c.inner).poll(cx) } + } + } + + // Wrap the future into one that checks which thread it's on. + let future = Checked { + id: thread_id(), + inner: ManuallyDrop::new(future), + location: Location::caller(), + }; + + unsafe { async_task::spawn_unchecked(future, schedule) } +} + /// Scope manages a set of tasks that are enqueued and waited on together. See [`BackgroundExecutor::scoped`]. pub struct Scope<'a> { executor: BackgroundExecutor, diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 9e0b9b9014..8d726f6d28 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -10,13 +10,13 @@ use std::{ cmp::{self, PartialOrd}, fmt, hash::Hash, - ops::{Add, Div, Mul, MulAssign, Sub}, + ops::{Add, Div, Mul, MulAssign, Neg, Sub}, }; use crate::{AppContext, DisplayId}; -/// An axis along which a measurement can be made. -#[derive(Copy, Clone, PartialEq, Eq, Debug)] +/// Axis in a 2D cartesian space. 
+#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub enum Axis { /// The y axis, or up and down Vertical, @@ -46,7 +46,7 @@ pub trait Along { fn apply_along(&self, axis: Axis, f: impl FnOnce(Self::Unit) -> Self::Unit) -> Self; } -/// Describes a location in a 2D cartesian coordinate space. +/// Describes a location in a 2D cartesian space. /// /// It holds two public fields, `x` and `y`, which represent the coordinates in the space. /// The type `T` for the coordinates can be any type that implements `Default`, `Clone`, and `Debug`. @@ -54,11 +54,25 @@ pub trait Along { /// # Examples /// /// ``` -/// # use zed::Point; +/// # use gpui::Point; /// let point = Point { x: 10, y: 20 }; /// println!("{:?}", point); // Outputs: Point { x: 10, y: 20 } /// ``` -#[derive(Refineable, Default, Add, AddAssign, Sub, SubAssign, Copy, Debug, PartialEq, Eq, Hash)] +#[derive( + Refineable, + Default, + Add, + AddAssign, + Sub, + SubAssign, + Copy, + Debug, + PartialEq, + Eq, + Serialize, + Deserialize, + Hash, +)] #[refineable(Debug)] #[repr(C)] pub struct Point { @@ -82,7 +96,7 @@ pub struct Point { /// # Examples /// /// ``` -/// # use zed::Point; +/// # use gpui::Point; /// let p = point(10, 20); /// assert_eq!(p.x, 10); /// assert_eq!(p.y, 20); @@ -123,7 +137,7 @@ impl Point { /// # Examples /// /// ``` - /// # use zed::Point; + /// # use gpui::Point; /// let p = Point { x: 3, y: 4 }; /// let p_float = p.map(|coord| coord as f32); /// assert_eq!(p_float, Point { x: 3.0, y: 4.0 }); @@ -177,7 +191,7 @@ impl Point { /// # Examples /// /// ``` - /// # use zed::{Point, Pixels, ScaledPixels}; + /// # use gpui::{Point, Pixels, ScaledPixels}; /// let p = Point { x: Pixels(10.0), y: Pixels(20.0) }; /// let scaled_p = p.scale(1.5); /// assert_eq!(scaled_p, Point { x: ScaledPixels(15.0), y: ScaledPixels(30.0) }); @@ -194,8 +208,7 @@ impl Point { /// # Examples /// /// ``` - /// # use zed::Point; - /// # use zed::Pixels; + /// # use gpui::{Pixels, Point}; /// let p = 
Point { x: Pixels(3.0), y: Pixels(4.0) }; /// assert_eq!(p.magnitude(), 5.0); /// ``` @@ -258,7 +271,7 @@ where /// # Examples /// /// ``` - /// # use zed::Point; + /// # use gpui::Point; /// let p1 = Point { x: 3, y: 7 }; /// let p2 = Point { x: 5, y: 2 }; /// let max_point = p1.max(&p2); @@ -288,7 +301,7 @@ where /// # Examples /// /// ``` - /// # use zed::Point; + /// # use gpui::Point; /// let p1 = Point { x: 3, y: 7 }; /// let p2 = Point { x: 5, y: 2 }; /// let min_point = p1.min(&p2); @@ -324,7 +337,7 @@ where /// # Examples /// /// ``` - /// # use zed::Point; + /// # use gpui::Point; /// let p = Point { x: 10, y: 20 }; /// let min = Point { x: 0, y: 5 }; /// let max = Point { x: 15, y: 25 }; @@ -373,7 +386,7 @@ pub struct Size { /// # Examples /// /// ``` -/// # use zed::Size; +/// # use gpui::Size; /// let my_size = size(10, 20); /// assert_eq!(my_size.width, 10); /// assert_eq!(my_size.height, 20); @@ -402,7 +415,7 @@ where /// # Examples /// /// ``` - /// # use zed::Size; + /// # use gpui::Size; /// let my_size = Size { width: 10, height: 20 }; /// let my_new_size = my_size.map(|dimension| dimension as f32 * 1.5); /// assert_eq!(my_new_size, Size { width: 15.0, height: 30.0 }); @@ -445,7 +458,7 @@ impl Size { /// # Examples /// /// ``` - /// # use zed::{Size, Pixels, ScaledPixels}; + /// # use gpui::{Size, Pixels, ScaledPixels}; /// let size = Size { width: Pixels(100.0), height: Pixels(50.0) }; /// let scaled_size = size.scale(2.0); /// assert_eq!(scaled_size, Size { width: ScaledPixels(200.0), height: ScaledPixels(100.0) }); @@ -499,7 +512,7 @@ where /// # Examples /// /// ``` - /// # use zed::Size; + /// # use gpui::Size; /// let size1 = Size { width: 30, height: 40 }; /// let size2 = Size { width: 50, height: 20 }; /// let max_size = size1.max(&size2); @@ -519,6 +532,7 @@ where }, } } + /// Returns a new `Size` with the minimum width and height from `self` and `other`. 
/// /// # Arguments @@ -528,7 +542,7 @@ where /// # Examples /// /// ``` - /// # use zed::Size; + /// # use gpui::Size; /// let size1 = Size { width: 30, height: 40 }; /// let size2 = Size { width: 50, height: 20 }; /// let min_size = size1.min(&size2); @@ -680,13 +694,13 @@ impl Size { /// Represents a rectangular area in a 2D space with an origin point and a size. /// /// The `Bounds` struct is generic over a type `T` which represents the type of the coordinate system. -/// The origin is represented as a `Point` which defines the upper-left corner of the rectangle, +/// The origin is represented as a `Point` which defines the top left corner of the rectangle, /// and the size is represented as a `Size` which defines the width and height of the rectangle. /// /// # Examples /// /// ``` -/// # use zed::{Bounds, Point, Size}; +/// # use gpui::{Bounds, Point, Size}; /// let origin = Point { x: 0, y: 0 }; /// let size = Size { width: 10, height: 20 }; /// let bounds = Bounds::new(origin, size); @@ -694,7 +708,7 @@ impl Size { /// assert_eq!(bounds.origin, origin); /// assert_eq!(bounds.size, size); /// ``` -#[derive(Refineable, Clone, Default, Debug, Eq, PartialEq, Hash)] +#[derive(Refineable, Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)] #[refineable(Debug)] #[repr(C)] pub struct Bounds { @@ -704,6 +718,11 @@ pub struct Bounds { pub size: Size, } +/// Create a bounds with the given origin and size +pub fn bounds(origin: Point, size: Size) -> Bounds { + Bounds { origin, size } +} + impl Bounds { /// Generate a centered bounds for the given display or primary display if none is provided pub fn centered(display_id: Option, size: Size, cx: &AppContext) -> Self { @@ -712,13 +731,7 @@ impl Bounds { .or_else(|| cx.primary_display()); display - .map(|display| { - let center = display.bounds().center(); - Bounds { - origin: point(center.x - size.width / 2., center.y - size.height / 2.), - size, - } - }) + .map(|display| 
Bounds::centered_at(display.bounds().center(), size)) .unwrap_or_else(|| Bounds { origin: point(px(0.), px(0.)), size, @@ -742,47 +755,8 @@ impl Bounds { impl Bounds where - T: Clone + Debug + Sub + Default, + T: Clone + Debug + Default, { - /// Constructs a `Bounds` from two corner points: the upper-left and lower-right corners. - /// - /// This function calculates the origin and size of the `Bounds` based on the provided corner points. - /// The origin is set to the upper-left corner, and the size is determined by the difference between - /// the x and y coordinates of the lower-right and upper-left points. - /// - /// # Arguments - /// - /// * `upper_left` - A `Point` representing the upper-left corner of the rectangle. - /// * `lower_right` - A `Point` representing the lower-right corner of the rectangle. - /// - /// # Returns - /// - /// Returns a `Bounds` that encompasses the area defined by the two corner points. - /// - /// # Examples - /// - /// ``` - /// # use zed::{Bounds, Point}; - /// let upper_left = Point { x: 0, y: 0 }; - /// let lower_right = Point { x: 10, y: 10 }; - /// let bounds = Bounds::from_corners(upper_left, lower_right); - /// - /// assert_eq!(bounds.origin, upper_left); - /// assert_eq!(bounds.size.width, 10); - /// assert_eq!(bounds.size.height, 10); - /// ``` - pub fn from_corners(upper_left: Point, lower_right: Point) -> Self { - let origin = Point { - x: upper_left.x.clone(), - y: upper_left.y.clone(), - }; - let size = Size { - width: lower_right.x - upper_left.x, - height: lower_right.y - upper_left.y, - }; - Bounds { origin, size } - } - /// Creates a new `Bounds` with the specified origin and size. /// /// # Arguments @@ -800,7 +774,87 @@ where impl Bounds where - T: Clone + Debug + PartialOrd + Add + Sub + Default + Half, + T: Clone + Debug + Sub + Default, +{ + /// Constructs a `Bounds` from two corner points: the top left and bottom right corners. 
+ /// + /// This function calculates the origin and size of the `Bounds` based on the provided corner points. + /// The origin is set to the top left corner, and the size is determined by the difference between + /// the x and y coordinates of the bottom right and top left points. + /// + /// # Arguments + /// + /// * `upper_left` - A `Point` representing the top left corner of the rectangle. + /// * `bottom_right` - A `Point` representing the bottom right corner of the rectangle. + /// + /// # Returns + /// + /// Returns a `Bounds` that encompasses the area defined by the two corner points. + /// + /// # Examples + /// + /// ``` + /// # use gpui::{Bounds, Point}; + /// let upper_left = Point { x: 0, y: 0 }; + /// let bottom_right = Point { x: 10, y: 10 }; + /// let bounds = Bounds::from_corners(upper_left, bottom_right); + /// + /// assert_eq!(bounds.origin, upper_left); + /// assert_eq!(bounds.size.width, 10); + /// assert_eq!(bounds.size.height, 10); + /// ``` + pub fn from_corners(upper_left: Point, bottom_right: Point) -> Self { + let origin = Point { + x: upper_left.x.clone(), + y: upper_left.y.clone(), + }; + let size = Size { + width: bottom_right.x - upper_left.x, + height: bottom_right.y - upper_left.y, + }; + Bounds { origin, size } + } + + /// Constructs a `Bounds` from a corner point and size. The specified corner will be placed at + /// the specified origin. + pub fn from_corner_and_size(corner: Corner, origin: Point, size: Size) -> Bounds { + let origin = match corner { + Corner::TopLeft => origin, + Corner::TopRight => Point { + x: origin.x - size.width.clone(), + y: origin.y, + }, + Corner::BottomLeft => Point { + x: origin.x, + y: origin.y - size.height.clone(), + }, + Corner::BottomRight => Point { + x: origin.x - size.width.clone(), + y: origin.y - size.height.clone(), + }, + }; + + Bounds { origin, size } + } +} + +impl Bounds +where + T: Clone + Debug + Sub + Default + Half, +{ + /// Creates a new bounds centered at the given point. 
+ pub fn centered_at(center: Point, size: Size) -> Self { + let origin = Point { + x: center.x - size.width.half(), + y: center.y - size.height.half(), + }; + Self::new(origin, size) + } +} + +impl Bounds +where + T: Clone + Debug + PartialOrd + Add + Default, { /// Checks if this `Bounds` intersects with another `Bounds`. /// @@ -818,7 +872,7 @@ where /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds1 = Bounds { /// origin: Point { x: 0, y: 0 }, /// size: Size { width: 10, height: 10 }, @@ -836,15 +890,80 @@ where /// assert_eq!(bounds1.intersects(&bounds3), false); // Non-overlapping bounds /// ``` pub fn intersects(&self, other: &Bounds) -> bool { - let my_lower_right = self.lower_right(); - let their_lower_right = other.lower_right(); + let my_lower_right = self.bottom_right(); + let their_lower_right = other.bottom_right(); self.origin.x < their_lower_right.x && my_lower_right.x > other.origin.x && self.origin.y < their_lower_right.y && my_lower_right.y > other.origin.y } +} +impl Bounds +where + T: Clone + Debug + Add + Default + Half, +{ + /// Returns the center point of the bounds. + /// + /// Calculates the center by taking the origin's x and y coordinates and adding half the width and height + /// of the bounds, respectively. The center is represented as a `Point` where `T` is the type of the + /// coordinate system. + /// + /// # Returns + /// + /// A `Point` representing the center of the bounds. 
+ /// + /// # Examples + /// + /// ``` + /// # use gpui::{Bounds, Point, Size}; + /// let bounds = Bounds { + /// origin: Point { x: 0, y: 0 }, + /// size: Size { width: 10, height: 20 }, + /// }; + /// let center = bounds.center(); + /// assert_eq!(center, Point { x: 5, y: 10 }); + /// ``` + pub fn center(&self) -> Point { + Point { + x: self.origin.x.clone() + self.size.width.clone().half(), + y: self.origin.y.clone() + self.size.height.clone().half(), + } + } +} + +impl Bounds +where + T: Clone + Debug + Add + Default, +{ + /// Calculates the half perimeter of a rectangle defined by the bounds. + /// + /// The half perimeter is calculated as the sum of the width and the height of the rectangle. + /// This method is generic over the type `T` which must implement the `Sub` trait to allow + /// calculation of the width and height from the bounds' origin and size, as well as the `Add` trait + /// to sum the width and height for the half perimeter. + /// + /// # Examples + /// + /// ``` + /// # use gpui::{Bounds, Point, Size}; + /// let bounds = Bounds { + /// origin: Point { x: 0, y: 0 }, + /// size: Size { width: 10, height: 20 }, + /// }; + /// let half_perimeter = bounds.half_perimeter(); + /// assert_eq!(half_perimeter, 30); + /// ``` + pub fn half_perimeter(&self) -> T { + self.size.width.clone() + self.size.height.clone() + } +} + +impl Bounds +where + T: Clone + Debug + Add + Sub + Default, +{ /// Dilates the bounds by a specified amount in all directions. 
/// /// This method expands the bounds by the given `amount`, increasing the size @@ -860,7 +979,7 @@ where /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let mut bounds = Bounds { /// origin: Point { x: 10, y: 10 }, /// size: Size { width: 10, height: 10 }, @@ -871,79 +990,36 @@ where /// size: Size { width: 20, height: 20 }, /// }); /// ``` - pub fn dilate(&mut self, amount: T) { - self.origin.x = self.origin.x.clone() - amount.clone(); - self.origin.y = self.origin.y.clone() - amount.clone(); - let double_amount = amount.clone() + amount; - self.size.width = self.size.width.clone() + double_amount.clone(); - self.size.height = self.size.height.clone() + double_amount; - } - - /// inset the bounds by a specified amount - /// Note that this may panic if T does not support negative values - pub fn inset(&self, amount: T) -> Self { - let mut result = self.clone(); - result.dilate(T::default() - amount); - result - } - - /// Returns the center point of the bounds. - /// - /// Calculates the center by taking the origin's x and y coordinates and adding half the width and height - /// of the bounds, respectively. The center is represented as a `Point` where `T` is the type of the - /// coordinate system. - /// - /// # Returns - /// - /// A `Point` representing the center of the bounds. 
- /// - /// # Examples - /// - /// ``` - /// # use zed::{Bounds, Point, Size}; - /// let bounds = Bounds { - /// origin: Point { x: 0, y: 0 }, - /// size: Size { width: 10, height: 20 }, - /// }; - /// let center = bounds.center(); - /// assert_eq!(center, Point { x: 5, y: 10 }); - /// ``` - pub fn center(&self) -> Point { - Point { - x: self.origin.x.clone() + self.size.width.clone().half(), - y: self.origin.y.clone() + self.size.height.clone().half(), + pub fn dilate(&self, amount: T) -> Bounds { + let double_amount = amount.clone() + amount.clone(); + Bounds { + origin: self.origin.clone() - point(amount.clone(), amount), + size: self.size.clone() + size(double_amount.clone(), double_amount), } } - /// Calculates the half perimeter of a rectangle defined by the bounds. - /// - /// The half perimeter is calculated as the sum of the width and the height of the rectangle. - /// This method is generic over the type `T` which must implement the `Sub` trait to allow - /// calculation of the width and height from the bounds' origin and size, as well as the `Add` trait - /// to sum the width and height for the half perimeter. - /// - /// # Examples - /// - /// ``` - /// # use zed::{Bounds, Point, Size}; - /// let bounds = Bounds { - /// origin: Point { x: 0, y: 0 }, - /// size: Size { width: 10, height: 20 }, - /// }; - /// let half_perimeter = bounds.half_perimeter(); - /// assert_eq!(half_perimeter, 30); - /// ``` - pub fn half_perimeter(&self) -> T { - self.size.width.clone() + self.size.height.clone() + /// Extends the bounds different amounts in each direction. + pub fn extend(&self, amount: Edges) -> Bounds { + Bounds { + origin: self.origin.clone() - point(amount.left.clone(), amount.top.clone()), + size: self.size.clone() + + size( + amount.left.clone() + amount.right.clone(), + amount.top.clone() + amount.bottom.clone(), + ), + } } +} - /// centered_at creates a new bounds centered at the given point. 
- pub fn centered_at(center: Point, size: Size) -> Self { - let origin = Point { - x: center.x - size.width.half(), - y: center.y - size.height.half(), - }; - Self::new(origin, size) +impl Bounds +where + T: Clone + Debug + Add + Sub + Neg + Default, +{ + /// Inset the bounds by a specified amount. Equivalent to `dilate` with the amount negated. + /// + /// Note that this may panic if T does not support negative values. + pub fn inset(&self, amount: T) -> Self { + self.dilate(-amount) } } @@ -965,7 +1041,7 @@ impl + Sub + Sub Self { let upper_left = self.origin.max(&other.origin); - let lower_right = self.lower_right().min(&other.lower_right()); - Self::from_corners(upper_left, lower_right) + let bottom_right = self.bottom_right().min(&other.bottom_right()); + Self::from_corners(upper_left, bottom_right) } /// Computes the union of two `Bounds`. @@ -1004,7 +1080,7 @@ impl + Sub + Sub Self { let top_left = self.origin.min(&other.origin); - let bottom_right = self.lower_right().max(&other.lower_right()); + let bottom_right = self.bottom_right().max(&other.bottom_right()); Bounds::from_corners(top_left, bottom_right) } } +impl Bounds +where + T: Clone + Debug + Add + Sub + Default, +{ + /// Computes the space available within outer bounds. 
+ pub fn space_within(&self, outer: &Self) -> Edges { + Edges { + top: self.top().clone() - outer.top().clone(), + right: outer.right().clone() - self.right().clone(), + bottom: outer.bottom().clone() - self.bottom().clone(), + left: self.left().clone() - outer.left().clone(), + } + } +} + impl Mul for Bounds where T: Mul + Clone + Default + Debug, @@ -1070,6 +1161,34 @@ where } } +impl Add> for Bounds +where + T: Add + Default + Clone + Debug, +{ + type Output = Self; + + fn add(self, rhs: Point) -> Self { + Self { + origin: self.origin + rhs, + size: self.size, + } + } +} + +impl Sub> for Bounds +where + T: Sub + Default + Clone + Debug, +{ + type Output = Self; + + fn sub(self, rhs: Point) -> Self { + Self { + origin: self.origin - rhs, + size: self.size, + } + } +} + impl Bounds where T: Add + Clone + Default + Debug, @@ -1110,77 +1229,103 @@ where self.origin.x.clone() + self.size.width.clone() } - /// Returns the upper-right corner point of the bounds. + /// Returns the top right corner point of the bounds. /// /// # Returns /// - /// A `Point` representing the upper-right corner of the bounds. + /// A `Point` representing the top right corner of the bounds. /// /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds = Bounds { /// origin: Point { x: 0, y: 0 }, /// size: Size { width: 10, height: 20 }, /// }; - /// let upper_right = bounds.upper_right(); - /// assert_eq!(upper_right, Point { x: 10, y: 0 }); + /// let top_right = bounds.top_right(); + /// assert_eq!(top_right, Point { x: 10, y: 0 }); /// ``` - pub fn upper_right(&self) -> Point { + pub fn top_right(&self) -> Point { Point { x: self.origin.x.clone() + self.size.width.clone(), y: self.origin.y.clone(), } } - /// Returns the lower-right corner point of the bounds. + /// Returns the bottom right corner point of the bounds. /// /// # Returns /// - /// A `Point` representing the lower-right corner of the bounds. 
+ /// A `Point` representing the bottom right corner of the bounds. /// /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds = Bounds { /// origin: Point { x: 0, y: 0 }, /// size: Size { width: 10, height: 20 }, /// }; - /// let lower_right = bounds.lower_right(); - /// assert_eq!(lower_right, Point { x: 10, y: 20 }); + /// let bottom_right = bounds.bottom_right(); + /// assert_eq!(bottom_right, Point { x: 10, y: 20 }); /// ``` - pub fn lower_right(&self) -> Point { + pub fn bottom_right(&self) -> Point { Point { x: self.origin.x.clone() + self.size.width.clone(), y: self.origin.y.clone() + self.size.height.clone(), } } - /// Returns the lower-left corner point of the bounds. + /// Returns the bottom left corner point of the bounds. /// /// # Returns /// - /// A `Point` representing the lower-left corner of the bounds. + /// A `Point` representing the bottom left corner of the bounds. /// /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds = Bounds { /// origin: Point { x: 0, y: 0 }, /// size: Size { width: 10, height: 20 }, /// }; - /// let lower_left = bounds.lower_left(); - /// assert_eq!(lower_left, Point { x: 0, y: 20 }); + /// let bottom_left = bounds.bottom_left(); + /// assert_eq!(bottom_left, Point { x: 0, y: 20 }); /// ``` - pub fn lower_left(&self) -> Point { + pub fn bottom_left(&self) -> Point { Point { x: self.origin.x.clone(), y: self.origin.y.clone() + self.size.height.clone(), } } + + /// Returns the requested corner point of the bounds. + /// + /// # Returns + /// + /// A `Point` representing the corner of the bounds requested by the parameter. 
+ /// + /// # Examples + /// + /// ``` + /// # use gpui::{Bounds, Corner, Point, Size}; + /// let bounds = Bounds { + /// origin: Point { x: 0, y: 0 }, + /// size: Size { width: 10, height: 20 }, + /// }; + /// let bottom_left = bounds.corner(Corner::BottomLeft); + /// assert_eq!(bottom_left, Point { x: 0, y: 20 }); + /// ``` + pub fn corner(&self, corner: Corner) -> Point { + match corner { + Corner::TopLeft => self.origin.clone(), + Corner::TopRight => self.top_right(), + Corner::BottomLeft => self.bottom_left(), + Corner::BottomRight => self.bottom_right(), + } + } } impl Bounds @@ -1205,7 +1350,7 @@ where /// # Examples /// /// ``` - /// # use zed::{Point, Bounds}; + /// # use gpui::{Point, Bounds}; /// let bounds = Bounds { /// origin: Point { x: 0, y: 0 }, /// size: Size { width: 10, height: 10 }, @@ -1240,7 +1385,7 @@ where /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds = Bounds { /// origin: Point { x: 10.0, y: 10.0 }, /// size: Size { width: 10.0, height: 20.0 }, @@ -1267,7 +1412,7 @@ where /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds = Bounds { /// origin: Point { x: 10.0, y: 10.0 }, /// size: Size { width: 10.0, height: 20.0 }, @@ -1291,7 +1436,7 @@ where /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size}; + /// # use gpui::{Bounds, Point, Size}; /// let bounds = Bounds { /// origin: Point { x: 10.0, y: 10.0 }, /// size: Size { width: 10.0, height: 20.0 }, @@ -1366,7 +1511,7 @@ impl Bounds { /// # Examples /// /// ``` - /// # use zed::{Bounds, Point, Size, Pixels}; + /// # use gpui::{Bounds, Point, Size, Pixels}; /// let bounds = Bounds { /// origin: Point { x: Pixels(10.0), y: Pixels(20.0) }, /// size: Size { width: Pixels(30.0), height: Pixels(40.0) }, @@ -1419,7 +1564,7 @@ impl Copy for Bounds {} /// # Examples /// /// ``` -/// # use zed::Edges; +/// # use gpui::Edges; /// let edges
Edges { /// top: 10.0, /// right: 20.0, @@ -1495,7 +1640,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::Edges; /// let uniform_edges = Edges::all(10.0); /// assert_eq!(uniform_edges.top, 10.0); /// assert_eq!(uniform_edges.right, 10.0); @@ -1528,7 +1673,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::Edges; /// let edges = Edges { top: 10, right: 20, bottom: 30, left: 40 }; /// let edges_float = edges.map(|&value| value as f32 * 1.1); /// assert_eq!(edges_float, Edges { top: 11.0, right: 22.0, bottom: 33.0, left: 44.0 }); @@ -1560,7 +1705,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::Edges; /// let edges = Edges { /// top: 10, /// right: 0, @@ -1592,7 +1737,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::Edges; /// let auto_edges = Edges::auto(); /// assert_eq!(auto_edges.top, Length::Auto); /// assert_eq!(auto_edges.right, Length::Auto); @@ -1620,7 +1765,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::Edges; /// let no_edges = Edges::zero(); /// assert_eq!(no_edges.top, Length::Definite(DefiniteLength::from(Pixels(0.)))); /// assert_eq!(no_edges.right, Length::Definite(DefiniteLength::from(Pixels(0.)))); @@ -1650,12 +1795,12 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::{px, Edges}; /// let no_edges = Edges::zero(); - /// assert_eq!(no_edges.top, DefiniteLength::from(zed::px(0.))); - /// assert_eq!(no_edges.right, DefiniteLength::from(zed::px(0.))); - /// assert_eq!(no_edges.bottom, DefiniteLength::from(zed::px(0.))); - /// assert_eq!(no_edges.left, DefiniteLength::from(zed::px(0.))); + /// assert_eq!(no_edges.top, DefiniteLength::from(px(0.))); + /// assert_eq!(no_edges.right, DefiniteLength::from(px(0.))); + /// assert_eq!(no_edges.bottom, DefiniteLength::from(px(0.))); + /// assert_eq!(no_edges.left, 
DefiniteLength::from(px(0.))); /// ``` pub fn zero() -> Self { Self { @@ -1683,7 +1828,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::{Edges, DefiniteLength, px, AbsoluteLength, Size}; + /// # use gpui::{Edges, DefiniteLength, px, AbsoluteLength, Size}; /// let edges = Edges { /// top: DefiniteLength::Absolute(AbsoluteLength::Pixels(px(10.0))), /// right: DefiniteLength::Fraction(0.5), @@ -1725,7 +1870,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::Edges; + /// # use gpui::Edges; /// let no_edges = Edges::zero(); /// assert_eq!(no_edges.top, AbsoluteLength::Pixels(Pixels(0.0))); /// assert_eq!(no_edges.right, AbsoluteLength::Pixels(Pixels(0.0))); @@ -1757,7 +1902,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::{Edges, AbsoluteLength, Pixels, px}; + /// # use gpui::{Edges, AbsoluteLength, Pixels, px}; /// let edges = Edges { /// top: AbsoluteLength::Pixels(px(10.0)), /// right: AbsoluteLength::Rems(rems(1.0)), @@ -1798,7 +1943,7 @@ impl Edges { /// # Examples /// /// ``` - /// # use zed::{Edges, Pixels}; + /// # use gpui::{Edges, Pixels}; /// let edges = Edges { /// top: Pixels(10.0), /// right: Pixels(20.0), @@ -1848,6 +1993,64 @@ impl From for Edges { } } +/// Identifies a corner of a 2d box. +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum Corner { + /// The top left corner + TopLeft, + /// The top right corner + TopRight, + /// The bottom left corner + BottomLeft, + /// The bottom right corner + BottomRight, +} + +impl Corner { + /// Returns the directly opposite corner. 
+ /// + /// # Examples + /// + /// ``` + /// # use gpui::Corner; + /// assert_eq!(Corner::TopLeft.opposite_corner(), Corner::BottomRight); + /// ``` + pub fn opposite_corner(self) -> Self { + match self { + Corner::TopLeft => Corner::BottomRight, + Corner::TopRight => Corner::BottomLeft, + Corner::BottomLeft => Corner::TopRight, + Corner::BottomRight => Corner::TopLeft, + } + } + + /// Returns the corner across from this corner, moving along the specified axis. + /// + /// # Examples + /// + /// ``` + /// # use gpui::{Axis, Corner}; + /// let result = Corner::TopLeft.other_side_corner_along(Axis::Horizontal); + /// assert_eq!(result, Corner::TopRight); + /// ``` + pub fn other_side_corner_along(self, axis: Axis) -> Self { + match axis { + Axis::Vertical => match self { + Corner::TopLeft => Corner::BottomLeft, + Corner::TopRight => Corner::BottomRight, + Corner::BottomLeft => Corner::TopLeft, + Corner::BottomRight => Corner::TopRight, + }, + Axis::Horizontal => match self { + Corner::TopLeft => Corner::TopRight, + Corner::TopRight => Corner::TopLeft, + Corner::BottomLeft => Corner::BottomRight, + Corner::BottomRight => Corner::BottomLeft, + }, + } + } +} + /// Represents the corners of a box in a 2D space, such as border radius. /// /// Each field represents the size of the corner on one side of the box: `top_left`, `top_right`, `bottom_right`, and `bottom_left`. @@ -1886,7 +2089,7 @@ where /// # Examples /// /// ``` - /// # use zed::Corners; + /// # use gpui::Corners; /// let uniform_corners = Corners::all(5.0); /// assert_eq!(uniform_corners.top_left, 5.0); /// assert_eq!(uniform_corners.top_right, 5.0); @@ -1901,6 +2104,33 @@ where bottom_left: value, } } + + /// Returns the requested corner. + /// + /// # Returns + /// + /// The value of the corner requested by the parameter.
+ /// + /// # Examples + /// + /// ``` + /// # use gpui::{Corner, Corners}; + /// let corners = Corners { + /// top_left: 1, + /// top_right: 2, + /// bottom_left: 3, + /// bottom_right: 4 + /// }; + /// assert_eq!(corners.corner(Corner::BottomLeft), 3); + /// ``` + pub fn corner(&self, corner: Corner) -> T { + match corner { + Corner::TopLeft => self.top_left.clone(), + Corner::TopRight => self.top_right.clone(), + Corner::BottomLeft => self.bottom_left.clone(), + Corner::BottomRight => self.bottom_right.clone(), + } + } } impl Corners { @@ -1923,7 +2153,7 @@ impl Corners { /// # Examples /// /// ``` - /// # use zed::{Corners, AbsoluteLength, Pixels, Size}; + /// # use gpui::{Corners, AbsoluteLength, Pixels, Size}; /// let corners = Corners { /// top_left: AbsoluteLength::Pixels(Pixels(15.0)), /// top_right: AbsoluteLength::Rems(Rems(1.0)), @@ -1967,7 +2197,7 @@ impl Corners { /// # Examples /// /// ``` - /// # use zed::{Corners, Pixels}; + /// # use gpui::{Corners, Pixels}; /// let corners = Corners { /// top_left: Pixels(10.0), /// top_right: Pixels(20.0), @@ -2020,7 +2250,7 @@ impl Corners { /// # Examples /// /// ``` - /// # use zed::{Corners, Pixels}; + /// # use gpui::{Corners, Pixels}; /// let corners = Corners { /// top_left: Pixels(10.0), /// top_right: Pixels(20.0), @@ -2174,7 +2404,7 @@ impl From for Radians { /// # Examples /// /// ``` -/// use zed::Pixels; +/// use gpui::Pixels; /// /// // Define a length of 10 pixels /// let length = Pixels(10.0); @@ -2486,7 +2716,7 @@ impl DevicePixels { /// # Examples /// /// ``` - /// # use zed::DevicePixels; + /// # use gpui::DevicePixels; /// let pixels = DevicePixels(10); // 10 device pixels /// let bytes_per_pixel = 4; // Assume each pixel is represented by 4 bytes (e.g., RGBA) /// let total_bytes = pixels.to_bytes(bytes_per_pixel); @@ -2698,7 +2928,7 @@ impl AbsoluteLength { /// # Examples /// /// ``` - /// # use zed::{AbsoluteLength, Pixels}; + /// # use gpui::{AbsoluteLength, Pixels}; /// let
length_in_pixels = AbsoluteLength::Pixels(Pixels(42.0)); /// let length_in_rems = AbsoluteLength::Rems(Rems(2.0)); /// let rem_size = Pixels(16.0); @@ -2751,7 +2981,7 @@ impl DefiniteLength { /// # Examples /// /// ``` - /// # use zed::{DefiniteLength, AbsoluteLength, Pixels, px, rems}; + /// # use gpui::{DefiniteLength, AbsoluteLength, Pixels, px, rems}; /// let length_in_pixels = DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0))); /// let length_in_rems = DefiniteLength::Absolute(AbsoluteLength::Rems(rems(2.0))); /// let length_as_fraction = DefiniteLength::Fraction(0.5); diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 2952f4af8a..fd2617f393 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -56,7 +56,7 @@ //! and [`test`] modules for more details. //! //! Currently, the best way to learn about these APIs is to read the Zed source code, ask us about it at a fireside hack, or drop -//! a question in the [Zed Discord](https://discord.gg/zed-community). We're working on improving the documentation, creating more examples, +//! a question in the [Zed Discord](https://zed.dev/community-links). We're working on improving the documentation, creating more examples, //! and will be publishing more guides to GPUI on our [blog](https://zed.dev/blog). #![deny(missing_docs)] @@ -202,7 +202,7 @@ pub trait Context { /// Update a window for the given handle. fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result where - F: FnOnce(AnyView, &mut WindowContext<'_>) -> T; + F: FnOnce(AnyView, &mut WindowContext) -> T; /// Read a window off of the application context. fn read_window( @@ -231,7 +231,7 @@ pub trait VisualContext: Context { /// Construct a new view in the window referenced by this context. 
fn new_view( &mut self, - build_view: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view: impl FnOnce(&mut ViewContext) -> V, ) -> Self::Result> where V: 'static + Render; @@ -240,13 +240,13 @@ pub trait VisualContext: Context { fn update_view( &mut self, view: &View, - update: impl FnOnce(&mut V, &mut ViewContext<'_, V>) -> R, + update: impl FnOnce(&mut V, &mut ViewContext) -> R, ) -> Self::Result; /// Replace the root view of a window with a new view. fn replace_root_view( &mut self, - build_view: impl FnOnce(&mut ViewContext<'_, V>) -> V, + build_view: impl FnOnce(&mut ViewContext) -> V, ) -> Self::Result> where V: 'static + Render; @@ -344,15 +344,15 @@ impl Flatten for Result { } } +/// Information about the GPU GPUI is running on. #[derive(Default, Debug)] -/// Information about the GPU GPUI is running on -pub struct GPUSpecs { - /// true if the GPU is really a fake (like llvmpipe) running on the CPU +pub struct GpuSpecs { + /// Whether the GPU is really a fake (like `llvmpipe`) running on the CPU. pub is_software_emulated: bool, - /// Name of the device as reported by vulkan + /// The name of the device, as reported by Vulkan. pub device_name: String, - /// Name of the driver as reported by vulkan + /// The name of the driver, as reported by Vulkan. pub driver_name: String, - /// Further driver info as reported by vulkan + /// Further information about the driver, as reported by Vulkan. pub driver_info: String, } diff --git a/crates/gpui/src/input.rs b/crates/gpui/src/input.rs index 161401ecc6..2fb27ac7fc 100644 --- a/crates/gpui/src/input.rs +++ b/crates/gpui/src/input.rs @@ -9,8 +9,12 @@ use std::ops::Range; /// See [`InputHandler`] for details on how to implement each method. 
pub trait ViewInputHandler: 'static + Sized { /// See [`InputHandler::text_for_range`] for details - fn text_for_range(&mut self, range: Range, cx: &mut ViewContext) - -> Option; + fn text_for_range( + &mut self, + range: Range, + adjusted_range: &mut Option>, + cx: &mut ViewContext, + ) -> Option; /// See [`InputHandler::selected_text_range`] for details fn selected_text_range( @@ -89,10 +93,12 @@ impl InputHandler for ElementInputHandler { fn text_for_range( &mut self, range_utf16: Range, + adjusted_range: &mut Option>, cx: &mut WindowContext, ) -> Option { - self.view - .update(cx, |view, cx| view.text_for_range(range_utf16, cx)) + self.view.update(cx, |view, cx| { + view.text_for_range(range_utf16, adjusted_range, cx) + }) } fn replace_text_in_range( diff --git a/crates/gpui/src/interactive.rs b/crates/gpui/src/interactive.rs index daa9b30877..a6cff28ffb 100644 --- a/crates/gpui/src/interactive.rs +++ b/crates/gpui/src/interactive.rs @@ -468,7 +468,7 @@ mod test { use crate::{ self as gpui, div, FocusHandle, InteractiveElement, IntoElement, KeyBinding, Keystroke, - ParentElement, Render, TestAppContext, VisualContext, + ParentElement, Render, TestAppContext, ViewContext, VisualContext, }; struct TestView { @@ -480,7 +480,7 @@ mod test { actions!(test, [TestAction]); impl Render for TestView { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { div().id("testview").child( div() .key_context("parent") diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index ad40e284bb..c248d2a387 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -134,8 +134,6 @@ impl Keymap { /// If a user has disabled a binding with `"x": null` it will not be returned. Disabled /// bindings are evaluated with the same precedence rules so you can disable a rule in /// a given context only. 
- /// - /// In the case of multi-key bindings, the pub fn bindings_for_input( &self, input: &[Keystroke], diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index d9016afb68..5a4f9b93a0 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -28,7 +28,7 @@ mod windows; use crate::{ point, Action, AnyWindowHandle, AsyncWindowContext, BackgroundExecutor, Bounds, DevicePixels, - DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GPUSpecs, GlyphId, + DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, ScaledPixels, Scene, SharedString, Size, SvgRenderer, SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE, @@ -46,6 +46,7 @@ use smallvec::SmallVec; use std::borrow::Cow; use std::hash::{Hash, Hasher}; use std::io::Cursor; +use std::ops; use std::time::{Duration, Instant}; use std::{ fmt::{self, Debug}, @@ -70,6 +71,9 @@ pub(crate) use test::*; #[cfg(target_os = "windows")] pub(crate) use windows::*; +#[cfg(any(test, feature = "test-support"))] +pub use test::TestScreenCaptureSource; + #[cfg(target_os = "macos")] pub(crate) fn current_platform(headless: bool) -> Rc { Rc::new(MacPlatform::new(headless)) @@ -149,6 +153,10 @@ pub(crate) trait Platform: 'static { None } + fn screen_capture_sources( + &self, + ) -> oneshot::Receiver>>>; + fn open_window( &self, handle: AnyWindowHandle, @@ -167,6 +175,7 @@ pub(crate) trait Platform: 'static { options: PathPromptOptions, ) -> oneshot::Receiver>>>; fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver>>; + fn can_select_mixed_files_and_dirs(&self) -> bool; fn reveal_path(&self, path: &Path); fn open_with_system(&self, path: &Path); @@ -228,6 +237,25 @@ pub trait PlatformDisplay: Send + Sync + Debug { } } +/// A source of on-screen video content that can be captured. 
+pub trait ScreenCaptureSource { + /// Returns the video resolution of this source. + fn resolution(&self) -> Result>; + + /// Start capture video from this source, invoking the given callback + /// with each frame. + fn stream( + &self, + frame_callback: Box, + ) -> oneshot::Receiver>>; +} + +/// A video stream captured from a screen. +pub trait ScreenCaptureStream {} + +/// A frame of video captured from a screen. +pub struct ScreenCaptureFrame(pub PlatformScreenCaptureFrame); + /// An opaque identifier for a hardware display #[derive(PartialEq, Eq, Hash, Copy, Clone)] pub struct DisplayId(pub(crate) u32); @@ -393,6 +421,9 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn get_raw_handle(&self) -> windows::HWND; // Linux specific methods + fn inner_window_bounds(&self) -> WindowBounds { + self.window_bounds() + } fn request_decorations(&self, _decorations: WindowDecorations) {} fn show_window_menu(&self, _position: Point) {} fn start_window_move(&self) {} @@ -405,7 +436,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { WindowControls::default() } fn set_client_inset(&self, _inset: Pixels) {} - fn gpu_specs(&self) -> Option; + fn gpu_specs(&self) -> Option; fn update_ime_position(&self, _bounds: Bounds); @@ -561,6 +592,42 @@ pub(crate) trait PlatformAtlas: Send + Sync { key: &AtlasKey, build: &mut dyn FnMut() -> Result, Cow<'a, [u8]>)>>, ) -> Result>; + fn remove(&self, key: &AtlasKey); +} + +struct AtlasTextureList { + textures: Vec>, + free_list: Vec, +} + +impl Default for AtlasTextureList { + fn default() -> Self { + Self { + textures: Vec::default(), + free_list: Vec::default(), + } + } +} + +impl ops::Index for AtlasTextureList { + type Output = Option; + + fn index(&self, index: usize) -> &Self::Output { + &self.textures[index] + } +} + +impl AtlasTextureList { + #[allow(unused)] + fn drain(&mut self) -> std::vec::Drain> { + self.free_list.clear(); + self.textures.drain(..) 
+ } + + #[allow(dead_code)] + fn iter_mut(&mut self) -> impl DoubleEndedIterator { + self.textures.iter_mut().flatten() + } } #[derive(Clone, Debug, PartialEq, Eq)] @@ -643,9 +710,13 @@ impl PlatformInputHandler { } #[cfg_attr(any(target_os = "linux", target_os = "freebsd"), allow(dead_code))] - fn text_for_range(&mut self, range_utf16: Range) -> Option { + fn text_for_range( + &mut self, + range_utf16: Range, + adjusted: &mut Option>, + ) -> Option { self.cx - .update(|cx| self.handler.text_for_range(range_utf16, cx)) + .update(|cx| self.handler.text_for_range(range_utf16, adjusted, cx)) .ok() .flatten() } @@ -712,6 +783,7 @@ impl PlatformInputHandler { /// A struct representing a selection in a text buffer, in UTF16 characters. /// This is different from a range because the head may be before the tail. +#[derive(Debug)] pub struct UTF16Selection { /// The range of text in the document this selection corresponds to /// in UTF16 characters. @@ -749,6 +821,7 @@ pub trait InputHandler: 'static { fn text_for_range( &mut self, range_utf16: Range, + adjusted_range: &mut Option>, cx: &mut WindowContext, ) -> Option; diff --git a/crates/gpui/src/platform/blade.rs b/crates/gpui/src/platform/blade.rs index 736c1888d8..9d966d8a4e 100644 --- a/crates/gpui/src/platform/blade.rs +++ b/crates/gpui/src/platform/blade.rs @@ -1,5 +1,11 @@ +#[cfg(target_os = "macos")] +mod apple_compat; mod blade_atlas; +mod blade_context; mod blade_renderer; +#[cfg(target_os = "macos")] +pub(crate) use apple_compat::*; pub(crate) use blade_atlas::*; +pub(crate) use blade_context::*; pub(crate) use blade_renderer::*; diff --git a/crates/gpui/src/platform/blade/apple_compat.rs b/crates/gpui/src/platform/blade/apple_compat.rs new file mode 100644 index 0000000000..b1baab8854 --- /dev/null +++ b/crates/gpui/src/platform/blade/apple_compat.rs @@ -0,0 +1,60 @@ +use super::{BladeContext, BladeRenderer, BladeSurfaceConfig}; +use blade_graphics as gpu; +use std::{ffi::c_void, ptr::NonNull}; + 
+#[derive(Clone)] +pub struct Context { + inner: BladeContext, +} +impl Default for Context { + fn default() -> Self { + Self { + inner: BladeContext::new().unwrap(), + } + } +} + +pub type Renderer = BladeRenderer; + +pub unsafe fn new_renderer( + context: Context, + _native_window: *mut c_void, + native_view: *mut c_void, + bounds: crate::Size, + transparent: bool, +) -> Renderer { + use raw_window_handle as rwh; + struct RawWindow { + view: *mut c_void, + } + + impl rwh::HasWindowHandle for RawWindow { + fn window_handle(&self) -> Result { + let view = NonNull::new(self.view).unwrap(); + let handle = rwh::AppKitWindowHandle::new(view); + Ok(unsafe { rwh::WindowHandle::borrow_raw(handle.into()) }) + } + } + impl rwh::HasDisplayHandle for RawWindow { + fn display_handle(&self) -> Result { + let handle = rwh::AppKitDisplayHandle::new(); + Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) }) + } + } + + BladeRenderer::new( + &context.inner, + &RawWindow { + view: native_view as *mut _, + }, + BladeSurfaceConfig { + size: gpu::Extent { + width: bounds.width as u32, + height: bounds.height as u32, + depth: 1, + }, + transparent, + }, + ) + .unwrap() +} diff --git a/crates/gpui/src/platform/blade/blade_atlas.rs b/crates/gpui/src/platform/blade/blade_atlas.rs index e6d5dc8ee9..fb703f2a41 100644 --- a/crates/gpui/src/platform/blade/blade_atlas.rs +++ b/crates/gpui/src/platform/blade/blade_atlas.rs @@ -1,6 +1,6 @@ use crate::{ - AtlasKey, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, DevicePixels, PlatformAtlas, - Point, Size, + platform::AtlasTextureList, AtlasKey, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, + DevicePixels, PlatformAtlas, Point, Size, }; use anyhow::Result; use blade_graphics as gpu; @@ -67,7 +67,7 @@ impl BladeAtlas { pub(crate) fn clear_textures(&self, texture_kind: AtlasTextureKind) { let mut lock = self.0.lock(); let textures = &mut lock.storage[texture_kind]; - for texture in textures { + for texture in textures.iter_mut() { 
texture.clear(); } } @@ -130,19 +130,48 @@ impl PlatformAtlas for BladeAtlas { Ok(Some(tile)) } } + + fn remove(&self, key: &AtlasKey) { + let mut lock = self.0.lock(); + + let Some(id) = lock.tiles_by_key.remove(key).map(|tile| tile.texture_id) else { + return; + }; + + let Some(texture_slot) = lock.storage[id.kind].textures.get_mut(id.index as usize) else { + return; + }; + + if let Some(mut texture) = texture_slot.take() { + texture.decrement_ref_count(); + if texture.is_unreferenced() { + lock.storage[id.kind] + .free_list + .push(texture.id.index as usize); + texture.destroy(&lock.gpu); + } else { + *texture_slot = Some(texture); + } + } + } } impl BladeAtlasState { fn allocate(&mut self, size: Size, texture_kind: AtlasTextureKind) -> AtlasTile { - let textures = &mut self.storage[texture_kind]; - textures - .iter_mut() - .rev() - .find_map(|texture| texture.allocate(size)) - .unwrap_or_else(|| { - let texture = self.push_texture(size, texture_kind); - texture.allocate(size).unwrap() - }) + { + let textures = &mut self.storage[texture_kind]; + + if let Some(tile) = textures + .iter_mut() + .rev() + .find_map(|texture| texture.allocate(size)) + { + return tile; + } + } + + let texture = self.push_texture(size, texture_kind); + texture.allocate(size).unwrap() } fn push_texture( @@ -185,6 +214,7 @@ impl BladeAtlasState { }, array_layer_count: 1, mip_level_count: 1, + sample_count: 1, dimension: gpu::TextureDimension::D2, usage, }); @@ -198,21 +228,30 @@ impl BladeAtlasState { }, ); - let textures = &mut self.storage[kind]; + let texture_list = &mut self.storage[kind]; + let index = texture_list.free_list.pop(); + let atlas_texture = BladeAtlasTexture { id: AtlasTextureId { - index: textures.len() as u32, + index: index.unwrap_or(texture_list.textures.len()) as u32, kind, }, allocator: etagere::BucketedAtlasAllocator::new(size.into()), format, raw, raw_view, + live_atlas_keys: 0, }; self.initializations.push(atlas_texture.id); - textures.push(atlas_texture); - 
textures.last_mut().unwrap() + + if let Some(ix) = index { + texture_list.textures[ix] = Some(atlas_texture); + texture_list.textures.get_mut(ix).unwrap().as_mut().unwrap() + } else { + texture_list.textures.push(Some(atlas_texture)); + texture_list.textures.last_mut().unwrap().as_mut().unwrap() + } } fn upload_texture(&mut self, id: AtlasTextureId, bounds: Bounds, bytes: &[u8]) { @@ -230,7 +269,7 @@ impl BladeAtlasState { fn flush(&mut self, encoder: &mut gpu::CommandEncoder) { self.flush_initializations(encoder); - let mut transfers = encoder.transfer(); + let mut transfers = encoder.transfer("atlas"); for upload in self.uploads.drain(..) { let texture = &self.storage[upload.id]; transfers.copy_buffer_to_texture( @@ -258,13 +297,13 @@ impl BladeAtlasState { #[derive(Default)] struct BladeAtlasStorage { - monochrome_textures: Vec, - polychrome_textures: Vec, - path_textures: Vec, + monochrome_textures: AtlasTextureList, + polychrome_textures: AtlasTextureList, + path_textures: AtlasTextureList, } impl ops::Index for BladeAtlasStorage { - type Output = Vec; + type Output = AtlasTextureList; fn index(&self, kind: AtlasTextureKind) -> &Self::Output { match kind { crate::AtlasTextureKind::Monochrome => &self.monochrome_textures, @@ -292,19 +331,19 @@ impl ops::Index for BladeAtlasStorage { crate::AtlasTextureKind::Polychrome => &self.polychrome_textures, crate::AtlasTextureKind::Path => &self.path_textures, }; - &textures[id.index as usize] + textures[id.index as usize].as_ref().unwrap() } } impl BladeAtlasStorage { fn destroy(&mut self, gpu: &gpu::Context) { - for mut texture in self.monochrome_textures.drain(..) { + for mut texture in self.monochrome_textures.drain().flatten() { texture.destroy(gpu); } - for mut texture in self.polychrome_textures.drain(..) { + for mut texture in self.polychrome_textures.drain().flatten() { texture.destroy(gpu); } - for mut texture in self.path_textures.drain(..) 
{ + for mut texture in self.path_textures.drain().flatten() { texture.destroy(gpu); } } @@ -316,6 +355,7 @@ struct BladeAtlasTexture { raw: gpu::Texture, raw_view: gpu::TextureView, format: gpu::TextureFormat, + live_atlas_keys: u32, } impl BladeAtlasTexture { @@ -334,6 +374,7 @@ impl BladeAtlasTexture { size, }, }; + self.live_atlas_keys += 1; Some(tile) } @@ -345,6 +386,14 @@ impl BladeAtlasTexture { fn bytes_per_pixel(&self) -> u8 { self.format.block_info().size } + + fn decrement_ref_count(&mut self) { + self.live_atlas_keys -= 1; + } + + fn is_unreferenced(&mut self) -> bool { + self.live_atlas_keys == 0 + } } impl From> for etagere::Size { diff --git a/crates/gpui/src/platform/blade/blade_context.rs b/crates/gpui/src/platform/blade/blade_context.rs new file mode 100644 index 0000000000..f03fff01e0 --- /dev/null +++ b/crates/gpui/src/platform/blade/blade_context.rs @@ -0,0 +1,24 @@ +use blade_graphics as gpu; +use std::sync::Arc; + +#[cfg_attr(target_os = "macos", derive(Clone))] +pub struct BladeContext { + pub(super) gpu: Arc, +} + +impl BladeContext { + pub fn new() -> anyhow::Result { + let gpu = Arc::new( + unsafe { + gpu::Context::init(gpu::ContextDesc { + presentation: true, + validation: false, + device_id: 0, //TODO: hook up to user settings + ..Default::default() + }) + } + .map_err(|e| anyhow::anyhow!("{:?}", e))?, + ); + Ok(Self { gpu }) + } +} diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs index 5c37caf2cb..ee8ffdfda7 100644 --- a/crates/gpui/src/platform/blade/blade_renderer.rs +++ b/crates/gpui/src/platform/blade/blade_renderer.rs @@ -1,9 +1,9 @@ // Doing `if let` gives you nice scoping with passes/encoders #![allow(irrefutable_let_patterns)] -use super::{BladeAtlas, PATH_TEXTURE_FORMAT}; +use super::{BladeAtlas, BladeContext, PATH_TEXTURE_FORMAT}; use crate::{ - AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels, GPUSpecs, Hsla, + AtlasTextureKind, AtlasTile, 
Background, Bounds, ContentMask, DevicePixels, GpuSpecs, MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline, }; @@ -11,8 +11,6 @@ use bytemuck::{Pod, Zeroable}; use collections::HashMap; #[cfg(target_os = "macos")] use media::core_video::CVMetalTextureCache; -#[cfg(target_os = "macos")] -use std::{ffi::c_void, ptr::NonNull}; use blade_graphics as gpu; use blade_util::{BufferBelt, BufferBeltDescriptor}; @@ -20,66 +18,6 @@ use std::{mem, sync::Arc}; const MAX_FRAME_TIME_MS: u32 = 10000; -#[cfg(target_os = "macos")] -#[derive(Clone, Default)] -pub struct Context {} -#[cfg(target_os = "macos")] -pub type Renderer = BladeRenderer; - -#[cfg(target_os = "macos")] -pub unsafe fn new_renderer( - _context: self::Context, - _native_window: *mut c_void, - native_view: *mut c_void, - bounds: crate::Size, - transparent: bool, -) -> Renderer { - use raw_window_handle as rwh; - struct RawWindow { - view: *mut c_void, - } - - impl rwh::HasWindowHandle for RawWindow { - fn window_handle(&self) -> Result { - let view = NonNull::new(self.view).unwrap(); - let handle = rwh::AppKitWindowHandle::new(view); - Ok(unsafe { rwh::WindowHandle::borrow_raw(handle.into()) }) - } - } - impl rwh::HasDisplayHandle for RawWindow { - fn display_handle(&self) -> Result { - let handle = rwh::AppKitDisplayHandle::new(); - Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) }) - } - } - - let gpu = Arc::new( - gpu::Context::init_windowed( - &RawWindow { - view: native_view as *mut _, - }, - gpu::ContextDesc { - validation: cfg!(debug_assertions), - capture: false, - overlay: false, - }, - ) - .unwrap(), - ); - - BladeRenderer::new( - gpu, - BladeSurfaceConfig { - size: gpu::Extent { - width: bounds.width as u32, - height: bounds.height as u32, - depth: 1, - }, - transparent, - }, - ) -} - #[repr(C)] #[derive(Clone, Copy, Pod, Zeroable)] struct GlobalParams { @@ -174,7 +112,7 @@ struct ShaderSurfacesData { #[repr(C)] 
struct PathSprite { bounds: Bounds, - color: Hsla, + color: Background, tile: AtlasTile, } @@ -236,8 +174,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_quad"), + fragment: Some(shader.at("fs_quad")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), shadows: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "shadows", @@ -249,8 +188,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_shadow"), + fragment: Some(shader.at("fs_shadow")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), path_rasterization: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "path_rasterization", @@ -262,12 +202,13 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_path_rasterization"), + fragment: Some(shader.at("fs_path_rasterization")), color_targets: &[gpu::ColorTargetState { format: PATH_TEXTURE_FORMAT, blend: Some(gpu::BlendState::ADDITIVE), write_mask: gpu::ColorWrites::default(), }], + multisample_state: gpu::MultisampleState::default(), }), paths: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "paths", @@ -279,8 +220,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_path"), + fragment: Some(shader.at("fs_path")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), underlines: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "underlines", @@ -292,8 +234,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_underline"), + fragment: Some(shader.at("fs_underline")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), mono_sprites: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "mono-sprites", @@ -305,8 +248,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: 
shader.at("fs_mono_sprite"), + fragment: Some(shader.at("fs_mono_sprite")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), poly_sprites: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "poly-sprites", @@ -318,8 +262,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_poly_sprite"), + fragment: Some(shader.at("fs_poly_sprite")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), surfaces: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "surfaces", @@ -331,8 +276,9 @@ impl BladePipelines { ..Default::default() }, depth_stencil: None, - fragment: shader.at("fs_surface"), + fragment: Some(shader.at("fs_surface")), color_targets, + multisample_state: gpu::MultisampleState::default(), }), } } @@ -354,10 +300,14 @@ pub struct BladeSurfaceConfig { pub transparent: bool, } +//Note: we could see some of these fields moved into `BladeContext` +// so that they are shared between windows. E.g. `pipelines`. +// But that is complicated by the fact that pipelines depend on +// the format and alpha mode. 
pub struct BladeRenderer { gpu: Arc, + surface: gpu::Surface, surface_config: gpu::SurfaceConfig, - alpha_mode: gpu::AlphaMode, command_encoder: gpu::CommandEncoder, last_sync_point: Option, pipelines: BladePipelines, @@ -370,7 +320,11 @@ pub struct BladeRenderer { } impl BladeRenderer { - pub fn new(gpu: Arc, config: BladeSurfaceConfig) -> Self { + pub fn new( + context: &BladeContext, + window: &I, + config: BladeSurfaceConfig, + ) -> anyhow::Result { let surface_config = gpu::SurfaceConfig { size: config.size, usage: gpu::TextureUsage::TARGET, @@ -379,20 +333,23 @@ impl BladeRenderer { allow_exclusive_full_screen: false, transparent: config.transparent, }; - let surface_info = gpu.resize(surface_config); + let surface = context + .gpu + .create_surface_configured(window, surface_config) + .unwrap(); - let command_encoder = gpu.create_command_encoder(gpu::CommandEncoderDesc { + let command_encoder = context.gpu.create_command_encoder(gpu::CommandEncoderDesc { name: "main", buffer_count: 2, }); - let pipelines = BladePipelines::new(&gpu, surface_info); + let pipelines = BladePipelines::new(&context.gpu, surface.info()); let instance_belt = BufferBelt::new(BufferBeltDescriptor { memory: gpu::Memory::Shared, min_chunk_size: 0x1000, alignment: 0x40, // Vulkan `minStorageBufferOffsetAlignment` on Intel Xe }); - let atlas = Arc::new(BladeAtlas::new(&gpu)); - let atlas_sampler = gpu.create_sampler(gpu::SamplerDesc { + let atlas = Arc::new(BladeAtlas::new(&context.gpu)); + let atlas_sampler = context.gpu.create_sampler(gpu::SamplerDesc { name: "atlas", mag_filter: gpu::FilterMode::Linear, min_filter: gpu::FilterMode::Linear, @@ -401,14 +358,16 @@ impl BladeRenderer { #[cfg(target_os = "macos")] let core_video_texture_cache = unsafe { - use foreign_types::ForeignType as _; - CVMetalTextureCache::new(gpu.metal_device().as_ptr()).unwrap() + CVMetalTextureCache::new( + objc2::rc::Retained::as_ptr(&context.gpu.metal_device()) as *mut _ + ) + .unwrap() }; - Self { - gpu, + 
Ok(Self { + gpu: Arc::clone(&context.gpu), + surface, surface_config, - alpha_mode: surface_info.alpha, command_encoder, last_sync_point: None, pipelines, @@ -418,7 +377,7 @@ impl BladeRenderer { atlas_sampler, #[cfg(target_os = "macos")] core_video_texture_cache, - } + }) } fn wait_for_gpu(&mut self) { @@ -431,16 +390,29 @@ impl BladeRenderer { } pub fn update_drawable_size(&mut self, size: Size) { + self.update_drawable_size_impl(size, false); + } + + /// Like `update_drawable_size` but skips the check that the size has changed. This is useful in + /// cases like restoring a window from minimization where the size is the same but the + /// renderer's swap chain needs to be recreated. + #[cfg_attr(any(target_os = "macos", target_os = "linux"), allow(dead_code))] + pub fn update_drawable_size_even_if_unchanged(&mut self, size: Size) { + self.update_drawable_size_impl(size, true); + } + + fn update_drawable_size_impl(&mut self, size: Size, always_resize: bool) { let gpu_size = gpu::Extent { width: size.width.0 as u32, height: size.height.0 as u32, depth: 1, }; - if gpu_size != self.surface_config.size { + if always_resize || gpu_size != self.surface_config.size { self.wait_for_gpu(); self.surface_config.size = gpu_size; - self.gpu.resize(self.surface_config); + self.gpu + .reconfigure_surface(&mut self.surface, self.surface_config); } } @@ -448,10 +420,10 @@ impl BladeRenderer { if transparent != self.surface_config.transparent { self.wait_for_gpu(); self.surface_config.transparent = transparent; - let surface_info = self.gpu.resize(self.surface_config); + self.gpu + .reconfigure_surface(&mut self.surface, self.surface_config); self.pipelines.destroy(&self.gpu); - self.pipelines = BladePipelines::new(&self.gpu, surface_info); - self.alpha_mode = surface_info.alpha; + self.pipelines = BladePipelines::new(&self.gpu, self.surface.info()); } } @@ -465,10 +437,10 @@ impl BladeRenderer { } #[cfg_attr(target_os = "macos", allow(dead_code))] - pub fn gpu_specs(&self) -> 
GPUSpecs { + pub fn gpu_specs(&self) -> GpuSpecs { let info = self.gpu.device_information(); - GPUSpecs { + GpuSpecs { is_software_emulated: info.is_software_emulated, device_name: info.device_name.clone(), driver_name: info.driver_name.clone(), @@ -478,13 +450,12 @@ impl BladeRenderer { #[cfg(target_os = "macos")] pub fn layer(&self) -> metal::MetalLayer { - self.gpu.metal_layer().unwrap() + unsafe { foreign_types::ForeignType::from_ptr(self.layer_ptr()) } } #[cfg(target_os = "macos")] pub fn layer_ptr(&self) -> *mut metal::CAMetalLayer { - use metal::foreign_types::ForeignType as _; - self.gpu.metal_layer().unwrap().as_ptr() + objc2::rc::Retained::as_ptr(&self.surface.metal_layer()) as *mut _ } #[profiling::function] @@ -526,14 +497,17 @@ impl BladeRenderer { }; let vertex_buf = unsafe { self.instance_belt.alloc_typed(&vertices, &self.gpu) }; - let mut pass = self.command_encoder.render(gpu::RenderTargetSet { - colors: &[gpu::RenderTarget { - view: tex_info.raw_view, - init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), - finish_op: gpu::FinishOp::Store, - }], - depth_stencil: None, - }); + let mut pass = self.command_encoder.render( + "paths", + gpu::RenderTargetSet { + colors: &[gpu::RenderTarget { + view: tex_info.raw_view, + init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), + finish_op: gpu::FinishOp::Store, + }], + depth_stencil: None, + }, + ); let mut encoder = pass.with(&self.pipelines.path_rasterization); encoder.bind( @@ -554,6 +528,7 @@ impl BladeRenderer { self.instance_belt.destroy(&self.gpu); self.gpu.destroy_command_encoder(&mut self.command_encoder); self.pipelines.destroy(&self.gpu); + self.gpu.destroy_surface(&mut self.surface); } pub fn draw(&mut self, scene: &Scene) { @@ -563,7 +538,7 @@ impl BladeRenderer { let frame = { profiling::scope!("acquire frame"); - self.gpu.acquire_frame() + self.surface.acquire_frame() }; self.command_encoder.init_texture(frame.texture()); @@ -572,21 +547,24 @@ impl BladeRenderer { 
self.surface_config.size.width as f32, self.surface_config.size.height as f32, ], - premultiplied_alpha: match self.alpha_mode { + premultiplied_alpha: match self.surface.info().alpha { gpu::AlphaMode::Ignored | gpu::AlphaMode::PostMultiplied => 0, gpu::AlphaMode::PreMultiplied => 1, }, pad: 0, }; - if let mut pass = self.command_encoder.render(gpu::RenderTargetSet { - colors: &[gpu::RenderTarget { - view: frame.texture_view(), - init_op: gpu::InitOp::Clear(gpu::TextureColor::TransparentBlack), - finish_op: gpu::FinishOp::Store, - }], - depth_stencil: None, - }) { + if let mut pass = self.command_encoder.render( + "main", + gpu::RenderTargetSet { + colors: &[gpu::RenderTarget { + view: frame.texture_view(), + init_op: gpu::InitOp::Clear(gpu::TextureColor::TransparentBlack), + finish_op: gpu::FinishOp::Store, + }], + depth_stencil: None, + }, + ) { profiling::scope!("render pass"); for batch in scene.batches() { match batch { @@ -709,45 +687,59 @@ impl BladeRenderer { #[cfg(target_os = "macos")] { - let (t_y, t_cb_cr) = { + let (t_y, t_cb_cr) = unsafe { use core_foundation::base::TCFType as _; use std::ptr; assert_eq!( - surface.image_buffer.pixel_format_type(), - media::core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange - ); + surface.image_buffer.pixel_format_type(), + media::core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange + ); - let y_texture = unsafe { - self.core_video_texture_cache - .create_texture_from_image( - surface.image_buffer.as_concrete_TypeRef(), - ptr::null(), - metal::MTLPixelFormat::R8Unorm, - surface.image_buffer.plane_width(0), - surface.image_buffer.plane_height(0), - 0, - ) - .unwrap() - }; - let cb_cr_texture = unsafe { - self.core_video_texture_cache - .create_texture_from_image( - surface.image_buffer.as_concrete_TypeRef(), - ptr::null(), - metal::MTLPixelFormat::RG8Unorm, - surface.image_buffer.plane_width(1), - surface.image_buffer.plane_height(1), - 1, - ) - .unwrap() - }; + let y_texture = self + 
.core_video_texture_cache + .create_texture_from_image( + surface.image_buffer.as_concrete_TypeRef(), + ptr::null(), + metal::MTLPixelFormat::R8Unorm, + surface.image_buffer.plane_width(0), + surface.image_buffer.plane_height(0), + 0, + ) + .unwrap(); + let cb_cr_texture = self + .core_video_texture_cache + .create_texture_from_image( + surface.image_buffer.as_concrete_TypeRef(), + ptr::null(), + metal::MTLPixelFormat::RG8Unorm, + surface.image_buffer.plane_width(1), + surface.image_buffer.plane_height(1), + 1, + ) + .unwrap(); ( gpu::TextureView::from_metal_texture( - y_texture.as_texture_ref(), + &objc2::rc::Retained::retain( + foreign_types::ForeignTypeRef::as_ptr( + y_texture.as_texture_ref(), + ) + as *mut objc2::runtime::ProtocolObject< + dyn objc2_metal::MTLTexture, + >, + ) + .unwrap(), ), gpu::TextureView::from_metal_texture( - cb_cr_texture.as_texture_ref(), + &objc2::rc::Retained::retain( + foreign_types::ForeignTypeRef::as_ptr( + cb_cr_texture.as_texture_ref(), + ) + as *mut objc2::runtime::ProtocolObject< + dyn objc2_metal::MTLTexture, + >, + ) + .unwrap(), ), ) }; diff --git a/crates/gpui/src/platform/blade/shaders.wgsl b/crates/gpui/src/platform/blade/shaders.wgsl index 6099cbd93a..d497c40d7a 100644 --- a/crates/gpui/src/platform/blade/shaders.wgsl +++ b/crates/gpui/src/platform/blade/shaders.wgsl @@ -15,18 +15,21 @@ struct Bounds { origin: vec2, size: vec2, } + struct Corners { top_left: f32, top_right: f32, bottom_right: f32, bottom_left: f32, } + struct Edges { top: f32, right: f32, bottom: f32, left: f32, } + struct Hsla { h: f32, s: f32, @@ -34,6 +37,24 @@ struct Hsla { a: f32, } +struct LinearColorStop { + color: Hsla, + percentage: f32, +} + +struct Background { + // 0u is Solid + // 1u is LinearGradient + tag: u32, + // 0u is sRGB linear color + // 1u is Oklab color + color_space: u32, + solid: Hsla, + angle: f32, + colors: array, + pad: u32, +} + struct AtlasTextureId { index: u32, kind: u32, @@ -43,6 +64,7 @@ struct AtlasBounds { origin: 
vec2, size: vec2, } + struct AtlasTile { texture_id: AtlasTextureId, tile_id: u32, @@ -96,6 +118,24 @@ fn srgb_to_linear(srgb: vec3) -> vec3 { return select(higher, lower, cutoff); } +fn linear_to_srgb(linear: vec3) -> vec3 { + let cutoff = linear < vec3(0.0031308); + let higher = vec3(1.055) * pow(linear, vec3(1.0 / 2.4)) - vec3(0.055); + let lower = linear * vec3(12.92); + return select(higher, lower, cutoff); +} + +/// Convert a linear color to sRGBA space. +fn linear_to_srgba(color: vec4) -> vec4 { + return vec4(linear_to_srgb(color.rgb), color.a); +} + +/// Convert a sRGBA color to linear space. +fn srgba_to_linear(color: vec4) -> vec4 { + return vec4(srgb_to_linear(color.rgb), color.a); +} + +/// Hsla to linear RGBA conversion. fn hsla_to_rgba(hsla: Hsla) -> vec4 { let h = hsla.h * 6.0; // Now, it's an angle but scaled in [0, 6) range let s = hsla.s; @@ -135,6 +175,43 @@ fn hsla_to_rgba(hsla: Hsla) -> vec4 { return vec4(linear, a); } +/// Convert a linear sRGB to Oklab space. +/// Reference: https://bottosson.github.io/posts/oklab/#converting-from-linear-srgb-to-oklab +fn linear_srgb_to_oklab(color: vec4) -> vec4 { + let l = 0.4122214708 * color.r + 0.5363325363 * color.g + 0.0514459929 * color.b; + let m = 0.2119034982 * color.r + 0.6806995451 * color.g + 0.1073969566 * color.b; + let s = 0.0883024619 * color.r + 0.2817188376 * color.g + 0.6299787005 * color.b; + + let l_ = pow(l, 1.0 / 3.0); + let m_ = pow(m, 1.0 / 3.0); + let s_ = pow(s, 1.0 / 3.0); + + return vec4( + 0.2104542553 * l_ + 0.7936177850 * m_ - 0.0040720468 * s_, + 1.9779984951 * l_ - 2.4285922050 * m_ + 0.4505937099 * s_, + 0.0259040371 * l_ + 0.7827717662 * m_ - 0.8086757660 * s_, + color.a + ); +} + +/// Convert an Oklab color to linear sRGB space. 
+fn oklab_to_linear_srgb(color: vec4) -> vec4 { + let l_ = color.r + 0.3963377774 * color.g + 0.2158037573 * color.b; + let m_ = color.r - 0.1055613458 * color.g - 0.0638541728 * color.b; + let s_ = color.r - 0.0894841775 * color.g - 1.2914855480 * color.b; + + let l = l_ * l_ * l_; + let m = m_ * m_ * m_; + let s = s_ * s_ * s_; + + return vec4( + 4.0767416621 * l - 3.3077115913 * m + 0.2309699292 * s, + -1.2684380046 * l + 2.6097574011 * m - 0.3413193965 * s, + -0.0041960863 * l - 0.7034186147 * m + 1.7076147010 * s, + color.a + ); +} + fn over(below: vec4, above: vec4) -> vec4 { let alpha = above.a + below.a * (1.0 - above.a); let color = (above.rgb * above.a + below.rgb * below.a * (1.0 - above.a)) / alpha; @@ -197,6 +274,94 @@ fn blend_color(color: vec4, alpha_factor: f32) -> vec4 { return vec4(color.rgb * multiplier, alpha); } + +struct GradientColor { + solid: vec4, + color0: vec4, + color1: vec4, +} + +fn prepare_gradient_color(tag: u32, color_space: u32, + solid: Hsla, colors: array) -> GradientColor { + var result = GradientColor(); + + if (tag == 0u) { + result.solid = hsla_to_rgba(solid); + } else if (tag == 1u) { + // The hsla_to_rgba is returns a linear sRGB color + result.color0 = hsla_to_rgba(colors[0].color); + result.color1 = hsla_to_rgba(colors[1].color); + + // Prepare color space in vertex for avoid conversion + // in fragment shader for performance reasons + if (color_space == 0u) { + // sRGB + result.color0 = linear_to_srgba(result.color0); + result.color1 = linear_to_srgba(result.color1); + } else if (color_space == 1u) { + // Oklab + result.color0 = linear_srgb_to_oklab(result.color0); + result.color1 = linear_srgb_to_oklab(result.color1); + } + } + + return result; +} + +fn gradient_color(background: Background, position: vec2, bounds: Bounds, + sold_color: vec4, color0: vec4, color1: vec4) -> vec4 { + var background_color = vec4(0.0); + + switch (background.tag) { + default: { + return sold_color; + } + case 1u: { + // Linear gradient 
background. + // -90 degrees to match the CSS gradient angle. + let radians = (background.angle % 360.0 - 90.0) * M_PI_F / 180.0; + var direction = vec2(cos(radians), sin(radians)); + let stop0_percentage = background.colors[0].percentage; + let stop1_percentage = background.colors[1].percentage; + + // Expand the short side to be the same as the long side + if (bounds.size.x > bounds.size.y) { + direction.y *= bounds.size.y / bounds.size.x; + } else { + direction.x *= bounds.size.x / bounds.size.y; + } + + // Get the t value for the linear gradient with the color stop percentages. + let half_size = bounds.size / 2.0; + let center = bounds.origin + half_size; + let center_to_point = position - center; + var t = dot(center_to_point, direction) / length(direction); + // Check the direct to determine the use x or y + if (abs(direction.x) > abs(direction.y)) { + t = (t + half_size.x) / bounds.size.x; + } else { + t = (t + half_size.y) / bounds.size.y; + } + + // Adjust t based on the stop percentages + t = (t - stop0_percentage) / (stop1_percentage - stop0_percentage); + t = clamp(t, 0.0, 1.0); + + switch (background.color_space) { + default: { + background_color = srgba_to_linear(mix(color0, color1, t)); + } + case 1u: { + let oklab_color = mix(color0, color1, t); + background_color = oklab_to_linear_srgb(oklab_color); + } + } + } + } + + return background_color; +} + // --- quads --- // struct Quad { @@ -204,7 +369,7 @@ struct Quad { pad: u32, bounds: Bounds, content_mask: Bounds, - background: Hsla, + background: Background, border_color: Hsla, corner_radii: Corners, border_widths: Edges, @@ -213,11 +378,13 @@ var b_quads: array; struct QuadVarying { @builtin(position) position: vec4, - @location(0) @interpolate(flat) background_color: vec4, - @location(1) @interpolate(flat) border_color: vec4, - @location(2) @interpolate(flat) quad_id: u32, - //TODO: use `clip_distance` once Naga supports it - @location(3) clip_distances: vec4, + @location(0) @interpolate(flat) 
border_color: vec4, + @location(1) @interpolate(flat) quad_id: u32, + // TODO: use `clip_distance` once Naga supports it + @location(2) clip_distances: vec4, + @location(3) @interpolate(flat) background_solid: vec4, + @location(4) @interpolate(flat) background_color0: vec4, + @location(5) @interpolate(flat) background_color1: vec4, } @vertex @@ -227,7 +394,16 @@ fn vs_quad(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) insta var out = QuadVarying(); out.position = to_device_position(unit_vertex, quad.bounds); - out.background_color = hsla_to_rgba(quad.background); + + let gradient = prepare_gradient_color( + quad.background.tag, + quad.background.color_space, + quad.background.solid, + quad.background.colors + ); + out.background_solid = gradient.solid; + out.background_color0 = gradient.color0; + out.background_color1 = gradient.color1; out.border_color = hsla_to_rgba(quad.border_color); out.quad_id = instance_id; out.clip_distances = distance_from_clip_rect(unit_vertex, quad.bounds, quad.content_mask); @@ -242,21 +418,23 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4 { } let quad = b_quads[input.quad_id]; + let half_size = quad.bounds.size / 2.0; + let center = quad.bounds.origin + half_size; + let center_to_point = input.position.xy - center; + + let background_color = gradient_color(quad.background, input.position.xy, quad.bounds, + input.background_solid, input.background_color0, input.background_color1); + // Fast path when the quad is not rounded and doesn't have any border. 
if (quad.corner_radii.top_left == 0.0 && quad.corner_radii.bottom_left == 0.0 && quad.corner_radii.top_right == 0.0 && quad.corner_radii.bottom_right == 0.0 && quad.border_widths.top == 0.0 && quad.border_widths.left == 0.0 && quad.border_widths.right == 0.0 && quad.border_widths.bottom == 0.0) { - return blend_color(input.background_color, 1.0); + return blend_color(background_color, 1.0); } - let half_size = quad.bounds.size / 2.0; - let center = quad.bounds.origin + half_size; - let center_to_point = input.position.xy - center; - let corner_radius = pick_corner_radius(center_to_point, quad.corner_radii); - let rounded_edge_to_point = abs(center_to_point) - half_size + corner_radius; let distance = length(max(vec2(0.0), rounded_edge_to_point)) + @@ -277,13 +455,13 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4 { border_width = vertical_border; } - var color = input.background_color; + var color = background_color; if (border_width > 0.0) { let inset_distance = distance + border_width; // Blend the border on top of the background and then linearly interpolate // between the two as we slide inside the background. 
- let blended_border = over(input.background_color, input.border_color); - color = mix(blended_border, input.background_color, + let blended_border = over(background_color, input.border_color); + color = mix(blended_border, background_color, saturate(0.5 - inset_distance)); } @@ -408,7 +586,7 @@ fn fs_path_rasterization(input: PathRasterizationVarying) -> @location(0) f32 { struct PathSprite { bounds: Bounds, - color: Hsla, + color: Background, tile: AtlasTile, } var b_path_sprites: array; @@ -416,7 +594,10 @@ var b_path_sprites: array; struct PathVarying { @builtin(position) position: vec4, @location(0) tile_position: vec2, - @location(1) color: vec4, + @location(1) @interpolate(flat) instance_id: u32, + @location(2) @interpolate(flat) color_solid: vec4, + @location(3) @interpolate(flat) color0: vec4, + @location(4) @interpolate(flat) color1: vec4, } @vertex @@ -428,7 +609,17 @@ fn vs_path(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) insta var out = PathVarying(); out.position = to_device_position(unit_vertex, sprite.bounds); out.tile_position = to_tile_position(unit_vertex, sprite.tile); - out.color = hsla_to_rgba(sprite.color); + out.instance_id = instance_id; + + let gradient = prepare_gradient_color( + sprite.color.tag, + sprite.color.color_space, + sprite.color.solid, + sprite.color.colors + ); + out.color_solid = gradient.solid; + out.color0 = gradient.color0; + out.color1 = gradient.color1; return out; } @@ -436,7 +627,11 @@ fn vs_path(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) insta fn fs_path(input: PathVarying) -> @location(0) vec4 { let sample = textureSample(t_sprite, s_sprite, input.tile_position).r; let mask = 1.0 - abs(1.0 - sample % 2.0); - return blend_color(input.color, mask); + let sprite = b_path_sprites[input.instance_id]; + let background = sprite.color; + let color = gradient_color(background, input.position.xy, sprite.bounds, + input.color_solid, input.color0, input.color1); + return 
blend_color(color, mask); } // --- underlines --- // diff --git a/crates/gpui/src/platform/keystroke.rs b/crates/gpui/src/platform/keystroke.rs index f61beab9e9..af1e5179db 100644 --- a/crates/gpui/src/platform/keystroke.rs +++ b/crates/gpui/src/platform/keystroke.rs @@ -12,14 +12,15 @@ pub struct Keystroke { /// e.g. for option-s, key is "s" pub key: String, - /// ime_key is the character inserted by the IME engine when that key was pressed. - /// e.g. for option-s, ime_key is "ß" - pub ime_key: Option, + /// key_char is the character that could have been typed when + /// this binding was pressed. + /// e.g. for s this is "s", for option-s "ß", and cmd-s None + pub key_char: Option, } impl Keystroke { /// When matching a key we cannot know whether the user intended to type - /// the ime_key or the key itself. On some non-US keyboards keys we use in our + /// the key_char or the key itself. On some non-US keyboards keys we use in our /// bindings are behind option (for example `$` is typed `alt-ç` on a Czech keyboard), /// and on some keyboards the IME handler converts a sequence of keys into a /// specific character (for example `"` is typed as `" space` on a brazilian keyboard). @@ -27,10 +28,10 @@ impl Keystroke { /// This method assumes that `self` was typed and `target' is in the keymap, and checks /// both possibilities for self against the target. 
pub(crate) fn should_match(&self, target: &Keystroke) -> bool { - if let Some(ime_key) = self - .ime_key + if let Some(key_char) = self + .key_char .as_ref() - .filter(|ime_key| ime_key != &&self.key) + .filter(|key_char| key_char != &&self.key) { let ime_modifiers = Modifiers { control: self.modifiers.control, @@ -38,7 +39,7 @@ impl Keystroke { ..Default::default() }; - if &target.key == ime_key && target.modifiers == ime_modifiers { + if &target.key == key_char && target.modifiers == ime_modifiers { return true; } } @@ -47,9 +48,9 @@ impl Keystroke { } /// key syntax is: - /// [ctrl-][alt-][shift-][cmd-][fn-]key[->ime_key] - /// ime_key syntax is only used for generating test events, - /// when matching a key with an ime_key set will be matched without it. + /// [ctrl-][alt-][shift-][cmd-][fn-]key[->key_char] + /// key_char syntax is only used for generating test events, + /// when matching a key with an key_char set will be matched without it. pub fn parse(source: &str) -> anyhow::Result { let mut control = false; let mut alt = false; @@ -57,7 +58,7 @@ impl Keystroke { let mut platform = false; let mut function = false; let mut key = None; - let mut ime_key = None; + let mut key_char = None; let mut components = source.split('-').peekable(); while let Some(component) = components.next() { @@ -74,7 +75,7 @@ impl Keystroke { break; } else if next.len() > 1 && next.starts_with('>') { key = Some(String::from(component)); - ime_key = Some(String::from(&next[1..])); + key_char = Some(String::from(&next[1..])); components.next(); } else { return Err(anyhow!("Invalid keystroke `{}`", source)); @@ -118,7 +119,7 @@ impl Keystroke { function, }, key, - ime_key, + key_char: key_char, }) } @@ -154,7 +155,7 @@ impl Keystroke { /// Returns true if this keystroke left /// the ime system in an incomplete state. 
pub fn is_ime_in_progress(&self) -> bool { - self.ime_key.is_none() + self.key_char.is_none() && (is_printable_key(&self.key) || self.key.is_empty()) && !(self.modifiers.platform || self.modifiers.control @@ -162,17 +163,17 @@ impl Keystroke { || self.modifiers.alt) } - /// Returns a new keystroke with the ime_key filled. + /// Returns a new keystroke with the key_char filled. /// This is used for dispatch_keystroke where we want users to /// be able to simulate typing "space", etc. pub fn with_simulated_ime(mut self) -> Self { - if self.ime_key.is_none() + if self.key_char.is_none() && !self.modifiers.platform && !self.modifiers.control && !self.modifiers.function && !self.modifiers.alt { - self.ime_key = match self.key.as_str() { + self.key_char = match self.key.as_str() { "space" => Some(" ".into()), "tab" => Some("\t".into()), "enter" => Some("\n".into()), @@ -222,6 +223,8 @@ fn is_printable_key(key: &str) -> bool { | "insert" | "home" | "end" + | "back" + | "forward" | "escape" ) } diff --git a/crates/gpui/src/platform/linux.rs b/crates/gpui/src/platform/linux.rs index 0499869361..089b52cf1e 100644 --- a/crates/gpui/src/platform/linux.rs +++ b/crates/gpui/src/platform/linux.rs @@ -20,3 +20,5 @@ pub(crate) use text_system::*; pub(crate) use wayland::*; #[cfg(feature = "x11")] pub(crate) use x11::*; + +pub(crate) type PlatformScreenCaptureFrame = (); diff --git a/crates/gpui/src/platform/linux/headless/client.rs b/crates/gpui/src/platform/linux/headless/client.rs index d0cfaa9fbb..71fdc26d9e 100644 --- a/crates/gpui/src/platform/linux/headless/client.rs +++ b/crates/gpui/src/platform/linux/headless/client.rs @@ -47,6 +47,10 @@ impl LinuxClient for HeadlessClient { f(&mut self.0.borrow_mut().common) } + fn keyboard_layout(&self) -> String { + "unknown".to_string() + } + fn displays(&self) -> Vec> { vec![] } diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index a2e9af691b..d5823e091a 100644 --- 
a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -1,61 +1,55 @@ -#![allow(unused)] - -use std::any::{type_name, Any}; -use std::cell::{self, RefCell}; -use std::env; -use std::ffi::OsString; -use std::fs::File; -use std::io::Read; -use std::ops::{Deref, DerefMut}; -use std::os::fd::{AsFd, AsRawFd, FromRawFd}; -use std::panic::Location; -use std::rc::Weak; use std::{ + env, path::{Path, PathBuf}, process::Command, rc::Rc, sync::Arc, +}; +#[cfg(any(feature = "wayland", feature = "x11"))] +use std::{ + ffi::OsString, + fs::File, + io::Read as _, + os::fd::{AsFd, AsRawFd, FromRawFd}, time::Duration, }; -use anyhow::anyhow; +use anyhow::{anyhow, Context as _}; use async_task::Runnable; -use calloop::channel::Channel; -use calloop::{EventLoop, LoopHandle, LoopSignal}; -use flume::{Receiver, Sender}; +use calloop::{channel::Channel, LoopSignal}; use futures::channel::oneshot; -use parking_lot::Mutex; -use util::ResultExt; - +use util::ResultExt as _; #[cfg(any(feature = "wayland", feature = "x11"))] use xkbcommon::xkb::{self, Keycode, Keysym, State}; -use crate::platform::NoopTextSystem; use crate::{ px, Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId, - ForegroundExecutor, Keymap, Keystroke, LinuxDispatcher, Menu, MenuItem, Modifiers, OwnedMenu, - PathPromptOptions, Pixels, Platform, PlatformDisplay, PlatformInputHandler, PlatformTextSystem, - PlatformWindow, Point, PromptLevel, Result, SemanticVersion, SharedString, Size, Task, - WindowAppearance, WindowOptions, WindowParams, + ForegroundExecutor, Keymap, LinuxDispatcher, Menu, MenuItem, OwnedMenu, PathPromptOptions, + Pixels, Platform, PlatformDisplay, PlatformTextSystem, PlatformWindow, Point, Result, + ScreenCaptureSource, Task, WindowAppearance, WindowParams, }; - +#[cfg(any(feature = "wayland", feature = "x11"))] pub(crate) const SCROLL_LINES: f32 = 3.0; // Values match the defaults on GTK. 
// Taken from https://github.com/GNOME/gtk/blob/main/gtk/gtksettings.c#L320 +#[cfg(any(feature = "wayland", feature = "x11"))] pub(crate) const DOUBLE_CLICK_INTERVAL: Duration = Duration::from_millis(400); pub(crate) const DOUBLE_CLICK_DISTANCE: Pixels = px(5.0); pub(crate) const KEYRING_LABEL: &str = "zed-github-account"; +#[cfg(any(feature = "wayland", feature = "x11"))] const FILE_PICKER_PORTAL_MISSING: &str = "Couldn't open file picker due to missing xdg-desktop-portal implementation."; pub trait LinuxClient { fn compositor_name(&self) -> &'static str; fn with_common(&self, f: impl FnOnce(&mut LinuxCommon) -> R) -> R; + fn keyboard_layout(&self) -> String; fn displays(&self) -> Vec>; - fn primary_display(&self) -> Option>; + #[allow(unused)] fn display(&self, id: DisplayId) -> Option>; + fn primary_display(&self) -> Option>; fn open_window( &self, @@ -82,6 +76,7 @@ pub(crate) struct PlatformHandlers { pub(crate) app_menu_action: Option>, pub(crate) will_open_app_menu: Option>, pub(crate) validate_app_menu_command: Option bool>>, + pub(crate) keyboard_layout_change: Option>, } pub(crate) struct LinuxCommon { @@ -98,9 +93,9 @@ pub(crate) struct LinuxCommon { impl LinuxCommon { pub fn new(signal: LoopSignal) -> (Self, Channel) { let (main_sender, main_receiver) = calloop::channel::channel::(); + #[cfg(any(feature = "wayland", feature = "x11"))] let text_system = Arc::new(crate::CosmicTextSystem::new()); - #[cfg(not(any(feature = "wayland", feature = "x11")))] let text_system = Arc::new(crate::NoopTextSystem::new()); @@ -139,11 +134,11 @@ impl Platform for P { } fn keyboard_layout(&self) -> String { - "unknown".into() + self.keyboard_layout() } - fn on_keyboard_layout_change(&self, _callback: Box) { - // todo(linux) + fn on_keyboard_layout_change(&self, callback: Box) { + self.with_common(|common| common.callbacks.keyboard_layout_change = Some(callback)); } fn run(&self, on_finish_launching: Box) { @@ -218,7 +213,7 @@ impl Platform for P { } } - fn activate(&self, 
ignoring_other_apps: bool) { + fn activate(&self, _ignoring_other_apps: bool) { log::info!("activate is not implemented on Linux, ignoring the call") } @@ -242,6 +237,14 @@ impl Platform for P { self.displays() } + fn screen_capture_sources( + &self, + ) -> oneshot::Receiver>>> { + let (mut tx, rx) = oneshot::channel(); + tx.send(Err(anyhow!("screen capture not implemented"))).ok(); + rx + } + fn active_window(&self) -> Option { self.active_window() } @@ -273,7 +276,7 @@ impl Platform for P { let (done_tx, done_rx) = oneshot::channel(); #[cfg(not(any(feature = "wayland", feature = "x11")))] - done_tx.send(Ok(None)); + let _ = (done_tx.send(Ok(None)), options); #[cfg(any(feature = "wayland", feature = "x11"))] self.foreground_executor() @@ -298,7 +301,7 @@ impl Platform for P { ashpd::Error::PortalNotFound(_) => anyhow!(FILE_PICKER_PORTAL_MISSING), err => err.into(), }; - done_tx.send(Err(result)); + let _ = done_tx.send(Err(result)); return; } }; @@ -314,7 +317,7 @@ impl Platform for P { Err(ashpd::Error::Response(_)) => Ok(None), Err(e) => Err(e.into()), }; - done_tx.send(result); + let _ = done_tx.send(result); }) .detach(); done_rx @@ -324,7 +327,7 @@ impl Platform for P { let (done_tx, done_rx) = oneshot::channel(); #[cfg(not(any(feature = "wayland", feature = "x11")))] - done_tx.send(Ok(None)); + let _ = (done_tx.send(Ok(None)), directory); #[cfg(any(feature = "wayland", feature = "x11"))] self.foreground_executor() @@ -348,7 +351,7 @@ impl Platform for P { } err => err.into(), }; - done_tx.send(Err(result)); + let _ = done_tx.send(Err(result)); return; } }; @@ -361,7 +364,7 @@ impl Platform for P { Err(ashpd::Error::Response(_)) => Ok(None), Err(e) => Err(e.into()), }; - done_tx.send(result); + let _ = done_tx.send(result); } }) .detach(); @@ -369,19 +372,24 @@ impl Platform for P { done_rx } + fn can_select_mixed_files_and_dirs(&self) -> bool { + // org.freedesktop.portal.FileChooser only supports "pick files" and "pick directories". 
+ false + } + fn reveal_path(&self, path: &Path) { self.reveal_path(path.to_owned()); } fn open_with_system(&self, path: &Path) { - let executor = self.background_executor().clone(); let path = path.to_owned(); - executor + self.background_executor() .spawn(async move { let _ = std::process::Command::new("xdg-open") .arg(path) .spawn() - .expect("Failed to open file with xdg-open"); + .context("invoking xdg-open") + .log_err(); }) .detach(); } @@ -418,7 +426,7 @@ impl Platform for P { fn app_path(&self) -> Result { // get the path of the executable of the current process - let exe_path = std::env::current_exe()?; + let exe_path = env::current_exe()?; Ok(exe_path) } @@ -432,9 +440,9 @@ impl Platform for P { self.with_common(|common| Some(common.menus.clone())) } - fn set_dock_menu(&self, menu: Vec, keymap: &Keymap) {} + fn set_dock_menu(&self, _menu: Vec, _keymap: &Keymap) {} - fn path_for_auxiliary_executable(&self, name: &str) -> Result { + fn path_for_auxiliary_executable(&self, _name: &str) -> Result { Err(anyhow::Error::msg( "Platform::path_for_auxiliary_executable is not implemented yet", )) @@ -601,6 +609,7 @@ pub(super) fn reveal_path_internal( .detach(); } +#[allow(unused)] pub(super) fn is_within_click_distance(a: Point, b: Point) -> bool { let diff = a - b; diff.x.abs() <= DOUBLE_CLICK_DISTANCE && diff.y.abs() <= DOUBLE_CLICK_DISTANCE @@ -609,7 +618,7 @@ pub(super) fn is_within_click_distance(a: Point, b: Point) -> bo #[cfg(any(feature = "wayland", feature = "x11"))] pub(super) fn get_xkb_compose_state(cx: &xkb::Context) -> Option { let mut locales = Vec::default(); - if let Some(locale) = std::env::var_os("LC_CTYPE") { + if let Some(locale) = env::var_os("LC_CTYPE") { locales.push(locale); } locales.push(OsString::from("C")); @@ -637,12 +646,13 @@ pub(super) unsafe fn read_fd(mut fd: filedescriptor::FileDescriptor) -> Result String { // Based on cursor names from https://gitlab.gnome.org/GNOME/adwaita-icon-theme (GNOME) // and 
https://github.com/KDE/breeze (KDE). Both of them seem to be also derived from // Web CSS cursor names: https://developer.mozilla.org/en-US/docs/Web/CSS/cursor#values match self { - CursorStyle::Arrow => "arrow", + CursorStyle::Arrow => "left_ptr", CursorStyle::IBeam => "text", CursorStyle::Crosshair => "crosshair", CursorStyle::ClosedHand => "grabbing", @@ -669,10 +679,12 @@ impl CursorStyle { } #[cfg(any(feature = "wayland", feature = "x11"))] -impl Keystroke { - pub(super) fn from_xkb(state: &State, modifiers: Modifiers, keycode: Keycode) -> Self { - let mut modifiers = modifiers; - +impl crate::Keystroke { + pub(super) fn from_xkb( + state: &State, + mut modifiers: crate::Modifiers, + keycode: Keycode, + ) -> Self { let key_utf32 = state.key_get_utf32(keycode); let key_utf8 = state.key_get_utf8(keycode); let key_sym = state.key_get_one_sym(keycode); @@ -684,6 +696,8 @@ impl Keystroke { Keysym::ISO_Left_Tab => "tab".to_owned(), Keysym::KP_Prior => "pageup".to_owned(), Keysym::KP_Next => "pagedown".to_owned(), + Keysym::XF86_Back => "back".to_owned(), + Keysym::XF86_Forward => "forward".to_owned(), Keysym::comma => ",".to_owned(), Keysym::period => ".".to_owned(), @@ -740,14 +754,14 @@ impl Keystroke { } } - // Ignore control characters (and DEL) for the purposes of ime_key - let ime_key = + // Ignore control characters (and DEL) for the purposes of key_char + let key_char = (key_utf32 >= 32 && key_utf32 != 127 && !key_utf8.is_empty()).then_some(key_utf8); - Keystroke { + Self { modifiers, key, - ime_key, + key_char, } } @@ -761,7 +775,6 @@ impl Keystroke { Keysym::dead_acute => Some("´".to_owned()), Keysym::dead_circumflex => Some("^".to_owned()), Keysym::dead_tilde => Some("~".to_owned()), - Keysym::dead_perispomeni => Some("͂".to_owned()), Keysym::dead_macron => Some("¯".to_owned()), Keysym::dead_breve => Some("˘".to_owned()), Keysym::dead_abovedot => Some("˙".to_owned()), @@ -779,9 +792,7 @@ impl Keystroke { Keysym::dead_horn => Some("̛".to_owned()), 
Keysym::dead_stroke => Some("̶̶".to_owned()), Keysym::dead_abovecomma => Some("̓̓".to_owned()), - Keysym::dead_psili => Some("᾿".to_owned()), Keysym::dead_abovereversedcomma => Some("ʽ".to_owned()), - Keysym::dead_dasia => Some("῾".to_owned()), Keysym::dead_doublegrave => Some("̏".to_owned()), Keysym::dead_belowring => Some("˳".to_owned()), Keysym::dead_belowmacron => Some("̱".to_owned()), @@ -815,7 +826,7 @@ impl Keystroke { } #[cfg(any(feature = "wayland", feature = "x11"))] -impl Modifiers { +impl crate::Modifiers { pub(super) fn from_xkb(keymap_state: &State) -> Self { let shift = keymap_state.mod_name_is_active(xkb::MOD_NAME_SHIFT, xkb::STATE_MODS_EFFECTIVE); let alt = keymap_state.mod_name_is_active(xkb::MOD_NAME_ALT, xkb::STATE_MODS_EFFECTIVE); @@ -823,7 +834,7 @@ impl Modifiers { keymap_state.mod_name_is_active(xkb::MOD_NAME_CTRL, xkb::STATE_MODS_EFFECTIVE); let platform = keymap_state.mod_name_is_active(xkb::MOD_NAME_LOGO, xkb::STATE_MODS_EFFECTIVE); - Modifiers { + Self { shift, alt, control, diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index ab87bb2024..624114b089 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -1,12 +1,16 @@ -use std::cell::{RefCell, RefMut}; -use std::hash::Hash; -use std::os::fd::{AsRawFd, BorrowedFd}; -use std::path::PathBuf; -use std::rc::{Rc, Weak}; -use std::time::{Duration, Instant}; +use std::{ + cell::{RefCell, RefMut}, + hash::Hash, + os::fd::{AsRawFd, BorrowedFd}, + path::PathBuf, + rc::{Rc, Weak}, + time::{Duration, Instant}, +}; -use calloop::timer::{TimeoutAction, Timer}; -use calloop::{EventLoop, LoopHandle}; +use calloop::{ + timer::{TimeoutAction, Timer}, + EventLoop, LoopHandle, +}; use calloop_wayland_source::WaylandSource; use collections::HashMap; use filedescriptor::Pipe; @@ -64,30 +68,28 @@ use xkbcommon::xkb::{self, Keycode, KEYMAP_COMPILE_NO_FLAGS}; use 
super::display::WaylandDisplay; use super::window::{ImeInput, WaylandWindowStatePtr}; -use crate::platform::linux::wayland::clipboard::{ - Clipboard, DataOffer, FILE_LIST_MIME_TYPE, TEXT_MIME_TYPE, -}; -use crate::platform::linux::wayland::cursor::Cursor; -use crate::platform::linux::wayland::serial::{SerialKind, SerialTracker}; -use crate::platform::linux::wayland::window::WaylandWindow; -use crate::platform::linux::xdg_desktop_portal::{Event as XDPEvent, XDPEventSource}; -use crate::platform::linux::LinuxClient; + use crate::platform::linux::{ get_xkb_compose_state, is_within_click_distance, open_uri_internal, read_fd, reveal_path_internal, + wayland::{ + clipboard::{Clipboard, DataOffer, FILE_LIST_MIME_TYPE, TEXT_MIME_TYPE}, + cursor::Cursor, + serial::{SerialKind, SerialTracker}, + window::WaylandWindow, + }, + xdg_desktop_portal::{Event as XDPEvent, XDPEventSource}, + LinuxClient, }; -use crate::platform::PlatformWindow; +use crate::platform::{blade::BladeContext, PlatformWindow}; use crate::{ - point, px, size, Bounds, DevicePixels, FileDropEvent, ForegroundExecutor, MouseExitEvent, Size, - DOUBLE_CLICK_INTERVAL, SCROLL_LINES, + point, px, size, AnyWindowHandle, Bounds, CursorStyle, DevicePixels, DisplayId, FileDropEvent, + ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon, Modifiers, + ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, + MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScaledPixels, + ScrollDelta, ScrollWheelEvent, Size, TouchPhase, WindowParams, DOUBLE_CLICK_INTERVAL, + SCROLL_LINES, }; -use crate::{ - AnyWindowHandle, CursorStyle, DisplayId, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, - ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, - NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, - ScrollWheelEvent, TouchPhase, -}; -use crate::{LinuxCommon, WindowParams}; /// Used to 
convert evdev scancode to xkb scancode const MIN_KEYCODE: u32 = 8; @@ -186,6 +188,7 @@ pub struct Output { pub(crate) struct WaylandClientState { serial_tracker: SerialTracker, globals: Globals, + gpu_context: BladeContext, wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, wl_keyboard: Option, @@ -459,6 +462,8 @@ impl WaylandClient { }) .unwrap(); + let gpu_context = BladeContext::new().expect("Unable to init GPU context"); + let seat = seat.unwrap(); let globals = Globals::new( globals, @@ -496,7 +501,7 @@ impl WaylandClient { XDPEvent::CursorTheme(theme) => { if let Some(client) = client.0.upgrade() { let mut client = client.borrow_mut(); - client.cursor.set_theme(theme.as_str(), None); + client.cursor.set_theme(theme.as_str()); } } XDPEvent::CursorSize(size) => { @@ -512,6 +517,7 @@ impl WaylandClient { let mut state = Rc::new(RefCell::new(WaylandClientState { serial_tracker: SerialTracker::new(), globals, + gpu_context, wl_seat: seat, wl_pointer: None, wl_keyboard: None, @@ -582,6 +588,19 @@ impl WaylandClient { } impl LinuxClient for WaylandClient { + fn keyboard_layout(&self) -> String { + let state = self.0.borrow(); + if let Some(keymap_state) = &state.keymap_state { + let layout_idx = keymap_state.serialize_layout(xkbcommon::xkb::STATE_LAYOUT_EFFECTIVE); + keymap_state + .get_keymap() + .layout_get_name(layout_idx) + .to_string() + } else { + "unknown".to_string() + } + } + fn displays(&self) -> Vec> { self.0 .borrow() @@ -627,6 +646,7 @@ impl LinuxClient for WaylandClient { let (window, surface_id) = WaylandWindow::new( handle, state.globals.clone(), + &state.gpu_context, WaylandClientStatePtr(Rc::downgrade(&self.0)), params, state.common.appearance, @@ -649,15 +669,16 @@ impl LinuxClient for WaylandClient { if let Some(cursor_shape_device) = &state.cursor_shape_device { cursor_shape_device.set_shape(serial, style.to_shape()); - } else if state.mouse_focused_window.is_some() { + } else if let Some(focused_window) = 
&state.mouse_focused_window { // cursor-shape-v1 isn't supported, set the cursor using a surface. let wl_pointer = state .wl_pointer .clone() .expect("window is focused by pointer"); + let scale = focused_window.primary_output_scale(); state .cursor - .set_icon(&wl_pointer, serial, &style.to_icon_name()); + .set_icon(&wl_pointer, serial, &style.to_icon_name(), scale); } } } @@ -1131,6 +1152,13 @@ impl Dispatch for WaylandClientStatePtr { }; state.keymap_state = Some(xkb::State::new(&keymap)); state.compose_state = get_xkb_compose_state(&xkb_context); + + if let Some(mut callback) = state.common.callbacks.keyboard_layout_change.take() { + drop(state); + callback(); + state = client.borrow_mut(); + state.common.callbacks.keyboard_layout_change = Some(callback); + } } wl_keyboard::Event::Enter { surface, .. } => { state.keyboard_focused_window = get_window(&mut state, &surface.id()); @@ -1168,9 +1196,21 @@ impl Dispatch for WaylandClientStatePtr { let focused_window = state.keyboard_focused_window.clone(); let keymap_state = state.keymap_state.as_mut().unwrap(); + let old_layout = + keymap_state.serialize_layout(xkbcommon::xkb::STATE_LAYOUT_EFFECTIVE); keymap_state.update_mask(mods_depressed, mods_latched, mods_locked, 0, 0, group); state.modifiers = Modifiers::from_xkb(keymap_state); + if group != old_layout { + if let Some(mut callback) = state.common.callbacks.keyboard_layout_change.take() + { + drop(state); + callback(); + state = client.borrow_mut(); + state.common.callbacks.keyboard_layout_change = Some(callback); + } + } + let Some(focused_window) = focused_window else { return; }; @@ -1208,7 +1248,7 @@ impl Dispatch for WaylandClientStatePtr { compose.feed(keysym); match compose.status() { xkb::Status::Composing => { - keystroke.ime_key = None; + keystroke.key_char = None; state.pre_edit_text = compose.utf8().or(Keystroke::underlying_dead_key(keysym)); let pre_edit = @@ -1220,7 +1260,7 @@ impl Dispatch for WaylandClientStatePtr { xkb::Status::Composed => { 
state.pre_edit_text.take(); - keystroke.ime_key = compose.utf8(); + keystroke.key_char = compose.utf8(); if let Some(keysym) = compose.keysym() { keystroke.key = xkb::keysym_get_name(keysym); } @@ -1340,7 +1380,7 @@ impl Dispatch for WaylandClientStatePtr { keystroke: Keystroke { modifiers: Modifiers::default(), key: commit_text.clone(), - ime_key: Some(commit_text), + key_char: Some(commit_text), }, is_held: false, })); @@ -1439,9 +1479,13 @@ impl Dispatch for WaylandClientStatePtr { if let Some(cursor_shape_device) = &state.cursor_shape_device { cursor_shape_device.set_shape(serial, style.to_shape()); } else { - state - .cursor - .set_icon(&wl_pointer, serial, &style.to_icon_name()); + let scale = window.primary_output_scale(); + state.cursor.set_icon( + &wl_pointer, + serial, + &style.to_icon_name(), + scale, + ); } } drop(state); diff --git a/crates/gpui/src/platform/linux/wayland/cursor.rs b/crates/gpui/src/platform/linux/wayland/cursor.rs index 6a52765042..09aa414deb 100644 --- a/crates/gpui/src/platform/linux/wayland/cursor.rs +++ b/crates/gpui/src/platform/linux/wayland/cursor.rs @@ -9,6 +9,7 @@ use wayland_cursor::{CursorImageBuffer, CursorTheme}; pub(crate) struct Cursor { theme: Option, theme_name: Option, + theme_size: u32, surface: WlSurface, size: u32, shm: WlShm, @@ -27,6 +28,7 @@ impl Cursor { Self { theme: CursorTheme::load(&connection, globals.shm.clone(), size).log_err(), theme_name: None, + theme_size: size, surface: globals.compositor.create_surface(&globals.qh, ()), shm: globals.shm.clone(), connection: connection.clone(), @@ -34,26 +36,26 @@ impl Cursor { } } - pub fn set_theme(&mut self, theme_name: &str, size: Option) { - if let Some(size) = size { - self.size = size; - } - if let Some(theme) = - CursorTheme::load_from_name(&self.connection, self.shm.clone(), theme_name, self.size) - .log_err() + pub fn set_theme(&mut self, theme_name: &str) { + if let Some(theme) = CursorTheme::load_from_name( + &self.connection, + self.shm.clone(), + 
theme_name, + self.theme_size, + ) + .log_err() { self.theme = Some(theme); self.theme_name = Some(theme_name.to_string()); } else if let Some(theme) = - CursorTheme::load(&self.connection, self.shm.clone(), self.size).log_err() + CursorTheme::load(&self.connection, self.shm.clone(), self.theme_size).log_err() { self.theme = Some(theme); self.theme_name = None; } } - pub fn set_size(&mut self, size: u32) { - self.size = size; + fn set_theme_size(&mut self, theme_size: u32) { self.theme = self .theme_name .as_ref() @@ -62,14 +64,29 @@ impl Cursor { &self.connection, self.shm.clone(), name.as_str(), - self.size, + theme_size, ) .log_err() }) - .or_else(|| CursorTheme::load(&self.connection, self.shm.clone(), self.size).log_err()); + .or_else(|| { + CursorTheme::load(&self.connection, self.shm.clone(), theme_size).log_err() + }); } - pub fn set_icon(&mut self, wl_pointer: &WlPointer, serial_id: u32, mut cursor_icon_name: &str) { + pub fn set_size(&mut self, size: u32) { + self.size = size; + self.set_theme_size(size); + } + + pub fn set_icon( + &mut self, + wl_pointer: &WlPointer, + serial_id: u32, + mut cursor_icon_name: &str, + scale: i32, + ) { + self.set_theme_size(self.size * scale as u32); + if let Some(theme) = &mut self.theme { let mut buffer: Option<&CursorImageBuffer>; @@ -91,7 +108,15 @@ impl Cursor { let (width, height) = buffer.dimensions(); let (hot_x, hot_y) = buffer.hotspot(); - wl_pointer.set_cursor(serial_id, Some(&self.surface), hot_x as i32, hot_y as i32); + self.surface.set_buffer_scale(scale); + + wl_pointer.set_cursor( + serial_id, + Some(&self.surface), + hot_x as i32 / scale, + hot_y as i32 / scale, + ); + self.surface.attach(Some(&buffer), 0, 0); self.surface.damage(0, 0, width as i32, height as i32); self.surface.commit(); diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index 8d4516b3f3..4cdc8929a5 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ 
b/crates/gpui/src/platform/linux/wayland/window.rs @@ -1,8 +1,10 @@ -use std::cell::{Ref, RefCell, RefMut}; -use std::ffi::c_void; -use std::ptr::NonNull; -use std::rc::Rc; -use std::sync::Arc; +use std::{ + cell::{Ref, RefCell, RefMut}, + ffi::c_void, + ptr::NonNull, + rc::Rc, + sync::Arc, +}; use blade_graphics as gpu; use collections::HashMap; @@ -19,13 +21,14 @@ use wayland_protocols::xdg::shell::client::xdg_surface; use wayland_protocols::xdg::shell::client::xdg_toplevel::{self}; use wayland_protocols_plasma::blur::client::org_kde_kwin_blur; -use crate::platform::blade::{BladeRenderer, BladeSurfaceConfig}; -use crate::platform::linux::wayland::display::WaylandDisplay; -use crate::platform::linux::wayland::serial::SerialKind; -use crate::platform::{PlatformAtlas, PlatformInputHandler, PlatformWindow}; +use crate::platform::{ + blade::{BladeContext, BladeRenderer, BladeSurfaceConfig}, + linux::wayland::{display::WaylandDisplay, serial::SerialKind}, + PlatformAtlas, PlatformInputHandler, PlatformWindow, +}; use crate::scene::Scene; use crate::{ - px, size, AnyWindowHandle, Bounds, Decorations, GPUSpecs, Globals, Modifiers, Output, Pixels, + px, size, AnyWindowHandle, Bounds, Decorations, Globals, GpuSpecs, Modifiers, Output, Pixels, PlatformDisplay, PlatformInput, Point, PromptLevel, RequestFrameOptions, ResizeEdge, ScaledPixels, Size, Tiling, WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, WindowParams, @@ -123,37 +126,28 @@ impl WaylandWindowState { viewport: Option, client: WaylandClientStatePtr, globals: Globals, + gpu_context: &BladeContext, options: WindowParams, ) -> anyhow::Result { - let raw = RawWindow { - window: surface.id().as_ptr().cast::(), - display: surface - .backend() - .upgrade() - .unwrap() - .display_ptr() - .cast::(), - }; - let gpu = Arc::new( - unsafe { - gpu::Context::init_windowed( - &raw, - gpu::ContextDesc { - validation: false, - capture: false, - overlay: false, - 
}, - ) - } - .map_err(|e| anyhow::anyhow!("{:?}", e))?, - ); - let config = BladeSurfaceConfig { - size: gpu::Extent { - width: options.bounds.size.width.0 as u32, - height: options.bounds.size.height.0 as u32, - depth: 1, - }, - transparent: true, + let renderer = { + let raw_window = RawWindow { + window: surface.id().as_ptr().cast::(), + display: surface + .backend() + .upgrade() + .unwrap() + .display_ptr() + .cast::(), + }; + let config = BladeSurfaceConfig { + size: gpu::Extent { + width: options.bounds.size.width.0 as u32, + height: options.bounds.size.height.0 as u32, + depth: 1, + }, + transparent: true, + }; + BladeRenderer::new(gpu_context, &raw_window, config)? }; Ok(Self { @@ -168,7 +162,7 @@ impl WaylandWindowState { globals, outputs: HashMap::default(), display: None, - renderer: BladeRenderer::new(gpu, config), + renderer, bounds: options.bounds, scale: 1.0, input_handler: None, @@ -194,6 +188,23 @@ impl WaylandWindowState { self.decorations == WindowDecorations::Client || self.background_appearance != WindowBackgroundAppearance::Opaque } + + pub fn primary_output_scale(&mut self) -> i32 { + let mut scale = 1; + let mut current_output = self.display.take(); + for (id, output) in self.outputs.iter() { + if let Some((_, output_data)) = ¤t_output { + if output.scale > output_data.scale { + current_output = Some((id.clone(), output.clone())); + } + } else { + current_output = Some((id.clone(), output.clone())); + } + scale = scale.max(output.scale); + } + self.display = current_output; + scale + } } pub(crate) struct WaylandWindow(pub WaylandWindowStatePtr); @@ -249,6 +260,7 @@ impl WaylandWindow { pub fn new( handle: AnyWindowHandle, globals: Globals, + gpu_context: &BladeContext, client: WaylandClientStatePtr, params: WindowParams, appearance: WindowAppearance, @@ -291,6 +303,7 @@ impl WaylandWindow { viewport, client, globals, + gpu_context, params, )?)), callbacks: Rc::new(RefCell::new(Callbacks::default())), @@ -560,7 +573,7 @@ impl 
WaylandWindowStatePtr { state.outputs.insert(id, output.clone()); - let scale = primary_output_scale(&mut state); + let scale = state.primary_output_scale(); // We use `PreferredBufferScale` instead to set the scale if it's available if state.surface.version() < wl_surface::EVT_PREFERRED_BUFFER_SCALE_SINCE { @@ -572,7 +585,7 @@ impl WaylandWindowStatePtr { wl_surface::Event::Leave { output } => { state.outputs.remove(&output.id()); - let scale = primary_output_scale(&mut state); + let scale = state.primary_output_scale(); // We use `PreferredBufferScale` instead to set the scale if it's available if state.surface.version() < wl_surface::EVT_PREFERRED_BUFFER_SCALE_SINCE { @@ -687,11 +700,11 @@ impl WaylandWindowStatePtr { } } if let PlatformInput::KeyDown(event) = input { - if let Some(ime_key) = &event.keystroke.ime_key { + if let Some(key_char) = &event.keystroke.key_char { let mut state = self.state.borrow_mut(); if let Some(mut input_handler) = state.input_handler.take() { drop(state); - input_handler.replace_text_in_range(None, ime_key); + input_handler.replace_text_in_range(None, key_char); self.state.borrow_mut().input_handler = Some(input_handler); } } @@ -719,6 +732,10 @@ impl WaylandWindowStatePtr { (fun)() } } + + pub fn primary_output_scale(&self) -> i32 { + self.state.borrow_mut().primary_output_scale() + } } fn extract_states<'a, S: TryFrom + 'a>(states: &'a [u8]) -> impl Iterator + 'a @@ -732,23 +749,6 @@ where .flat_map(S::try_from) } -fn primary_output_scale(state: &mut RefMut) -> i32 { - let mut scale = 1; - let mut current_output = state.display.take(); - for (id, output) in state.outputs.iter() { - if let Some((_, output_data)) = ¤t_output { - if output.scale > output_data.scale { - current_output = Some((id.clone(), output.clone())); - } - } else { - current_output = Some((id.clone(), output.clone())); - } - scale = scale.max(output.scale); - } - state.display = current_output; - scale -} - impl rwh::HasWindowHandle for WaylandWindow { fn 
window_handle(&self) -> Result, rwh::HandleError> { unimplemented!() @@ -781,6 +781,19 @@ impl PlatformWindow for WaylandWindow { } } + fn inner_window_bounds(&self) -> WindowBounds { + let state = self.borrow(); + if state.fullscreen { + WindowBounds::Fullscreen(state.window_bounds) + } else if state.maximized { + WindowBounds::Maximized(state.window_bounds) + } else { + let inset = state.inset.unwrap_or(px(0.)); + drop(state); + WindowBounds::Windowed(self.bounds().inset(inset)) + } + } + fn content_size(&self) -> Size { self.borrow().bounds.size } @@ -1015,7 +1028,7 @@ impl PlatformWindow for WaylandWindow { state.client.update_ime_position(bounds); } - fn gpu_specs(&self) -> Option { + fn gpu_specs(&self) -> Option { self.borrow().renderer.gpu_specs().into() } } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 82ef39fc6b..1bd7d9a305 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,46 +1,43 @@ use core::str; -use std::cell::RefCell; -use std::collections::{BTreeMap, HashSet}; -use std::ops::Deref; -use std::path::PathBuf; -use std::rc::{Rc, Weak}; -use std::time::{Duration, Instant}; +use std::{ + cell::RefCell, + collections::{BTreeMap, HashSet}, + ops::Deref, + path::PathBuf, + rc::{Rc, Weak}, + time::{Duration, Instant}, +}; -use calloop::generic::{FdWrapper, Generic}; -use calloop::{EventLoop, LoopHandle, RegistrationToken}; +use calloop::{ + generic::{FdWrapper, Generic}, + EventLoop, LoopHandle, RegistrationToken, +}; +use anyhow::Context as _; use collections::HashMap; use http_client::Url; use smallvec::SmallVec; use util::ResultExt; -use x11rb::connection::{Connection, RequestConnection}; -use x11rb::cursor; -use x11rb::errors::ConnectionError; -use x11rb::protocol::randr::ConnectionExt as _; -use x11rb::protocol::xinput::ConnectionExt; -use x11rb::protocol::xkb::ConnectionExt as _; -use x11rb::protocol::xproto::{ - AtomEnum, 
ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, ConnectionExt as _, - EventMask, KeyPressEvent, +use x11rb::{ + connection::{Connection, RequestConnection}, + cursor, + errors::ConnectionError, + protocol::randr::ConnectionExt as _, + protocol::xinput::ConnectionExt, + protocol::xkb::ConnectionExt as _, + protocol::xproto::{ + AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, + ConnectionExt as _, EventMask, KeyPressEvent, + }, + protocol::{randr, render, xinput, xkb, xproto, Event}, + resource_manager::Database, + wrapper::ConnectionExt as _, + xcb_ffi::XCBConnection, }; -use x11rb::protocol::{randr, render, xinput, xkb, xproto, Event}; -use x11rb::resource_manager::Database; -use x11rb::wrapper::ConnectionExt as _; -use x11rb::xcb_ffi::XCBConnection; -use xim::{x11rb::X11rbClient, Client}; -use xim::{AttributeName, InputStyle}; +use xim::{x11rb::X11rbClient, AttributeName, Client, InputStyle}; use xkbc::x11::ffi::{XKB_X11_MIN_MAJOR_XKB_VERSION, XKB_X11_MIN_MINOR_XKB_VERSION}; -use xkbcommon::xkb::{self as xkbc, LayoutIndex, ModMask}; - -use crate::platform::linux::LinuxClient; -use crate::platform::{LinuxCommon, PlatformWindow}; -use crate::{ - modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, - DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, - Platform, PlatformDisplay, PlatformInput, Point, RequestFrameOptions, ScaledPixels, - ScrollDelta, Size, TouchPhase, WindowParams, X11Window, -}; +use xkbcommon::xkb::{self as xkbc, LayoutIndex, ModMask, STATE_LAYOUT_EFFECTIVE}; use super::{ button_or_scroll_from_event_detail, get_valuator_axis_index, modifiers_from_state, @@ -48,10 +45,23 @@ use super::{ }; use super::{X11Display, X11WindowStatePtr, XcbAtoms}; use super::{XimCallbackEvent, XimHandler}; -use crate::platform::linux::platform::{DOUBLE_CLICK_INTERVAL, SCROLL_LINES}; -use crate::platform::linux::xdg_desktop_portal::{Event as 
XDPEvent, XDPEventSource}; -use crate::platform::linux::{ - get_xkb_compose_state, is_within_click_distance, open_uri_internal, reveal_path_internal, + +use crate::platform::{ + blade::BladeContext, + linux::{ + get_xkb_compose_state, is_within_click_distance, open_uri_internal, + platform::{DOUBLE_CLICK_INTERVAL, SCROLL_LINES}, + reveal_path_internal, + xdg_desktop_portal::{Event as XDPEvent, XDPEventSource}, + LinuxClient, + }, + LinuxCommon, PlatformWindow, +}; +use crate::{ + modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, + DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, + Platform, PlatformDisplay, PlatformInput, Point, RequestFrameOptions, ScaledPixels, + ScrollDelta, Size, TouchPhase, WindowParams, X11Window, }; /// Value for DeviceId parameters which selects all devices. @@ -157,6 +167,8 @@ pub struct X11ClientState { pub(crate) last_location: Point, pub(crate) current_count: usize, + gpu_context: BladeContext, + pub(crate) scale_factor: f32, xkb_context: xkbc::Context, @@ -178,7 +190,7 @@ pub struct X11ClientState { pub(crate) compose_state: Option, pub(crate) pre_edit_text: Option, pub(crate) composing: bool, - pub(crate) pre_ime_key_down: Option, + pub(crate) pre_key_char_down: Option, pub(crate) cursor_handle: cursor::Handle, pub(crate) cursor_styles: HashMap, pub(crate) cursor_cache: HashMap, @@ -359,6 +371,8 @@ impl X11Client { let compose_state = get_xkb_compose_state(&xkb_context); let resource_database = x11rb::resource_manager::new_from_default(&xcb_connection).unwrap(); + let gpu_context = BladeContext::new().expect("Unable to init GPU context"); + let scale_factor = resource_database .get_value("Xft.dpi", "Xft.dpi") .ok() @@ -427,6 +441,7 @@ impl X11Client { last_mouse_button: None, last_location: Point::new(px(0.0), px(0.0)), current_count: 0, + gpu_context, scale_factor, xkb_context, @@ -446,7 +461,7 @@ impl X11Client { compose_state, pre_edit_text: None, 
- pre_ime_key_down: None, + pre_key_char_down: None, composing: false, cursor_handle, @@ -776,11 +791,11 @@ impl X11Client { }, }; let window = self.get_window(event.window)?; - window.configure(bounds); + window.configure(bounds).unwrap(); } Event::PropertyNotify(event) => { let window = self.get_window(event.window)?; - window.property_notify(event); + window.property_notify(event).unwrap(); } Event::FocusIn(event) => { let window = self.get_window(event.event)?; @@ -825,6 +840,8 @@ impl X11Client { } Event::XkbStateNotify(event) => { let mut state = self.0.borrow_mut(); + let old_layout = state.xkb.serialize_layout(STATE_LAYOUT_EFFECTIVE); + let new_layout = u32::from(event.group); state.xkb.update_mask( event.base_mods.into(), event.latched_mods.into(), @@ -838,6 +855,17 @@ impl X11Client { latched_layout: event.latched_group as u32, locked_layout: event.locked_group.into(), }; + + if new_layout != old_layout { + if let Some(mut callback) = state.common.callbacks.keyboard_layout_change.take() + { + drop(state); + callback(); + state = self.0.borrow_mut(); + state.common.callbacks.keyboard_layout_change = Some(callback); + } + } + let modifiers = Modifiers::from_xkb(&state.xkb); if state.modifiers == modifiers { drop(state); @@ -858,7 +886,7 @@ impl X11Client { let modifiers = modifiers_from_state(event.state); state.modifiers = modifiers; - state.pre_ime_key_down.take(); + state.pre_key_char_down.take(); let keystroke = { let code = event.detail.into(); let xkb_state = state.previous_xkb_state.clone(); @@ -880,13 +908,13 @@ impl X11Client { match compose_state.status() { xkbc::Status::Composed => { state.pre_edit_text.take(); - keystroke.ime_key = compose_state.utf8(); + keystroke.key_char = compose_state.utf8(); if let Some(keysym) = compose_state.keysym() { keystroke.key = xkbc::keysym_get_name(keysym); } } xkbc::Status::Composing => { - keystroke.ime_key = None; + keystroke.key_char = None; state.pre_edit_text = compose_state .utf8() 
.or(crate::Keystroke::underlying_dead_key(keysym)); @@ -1156,7 +1184,7 @@ impl X11Client { match event { Event::KeyPress(event) | Event::KeyRelease(event) => { let mut state = self.0.borrow_mut(); - state.pre_ime_key_down = Some(Keystroke::from_xkb( + state.pre_key_char_down = Some(Keystroke::from_xkb( &state.xkb, state.modifiers, event.detail.into(), @@ -1187,11 +1215,11 @@ impl X11Client { fn xim_handle_commit(&self, window: xproto::Window, text: String) -> Option<()> { let window = self.get_window(window).unwrap(); let mut state = self.0.borrow_mut(); - let keystroke = state.pre_ime_key_down.take(); + let keystroke = state.pre_key_char_down.take(); state.composing = false; drop(state); if let Some(mut keystroke) = keystroke { - keystroke.ime_key = Some(text.clone()); + keystroke.key_char = Some(text.clone()); window.handle_input(PlatformInput::KeyDown(crate::KeyDownEvent { keystroke, is_held: false, @@ -1250,6 +1278,16 @@ impl LinuxClient for X11Client { f(&mut self.0.borrow_mut().common) } + fn keyboard_layout(&self) -> String { + let state = self.0.borrow(); + let layout_idx = state.xkb.serialize_layout(STATE_LAYOUT_EFFECTIVE); + state + .xkb + .get_keymap() + .layout_get_name(layout_idx) + .to_string() + } + fn displays(&self) -> Vec> { let state = self.0.borrow(); let setup = state.xcb_connection.setup(); @@ -1258,11 +1296,9 @@ impl LinuxClient for X11Client { .iter() .enumerate() .filter_map(|(root_id, _)| { - Some(Rc::new(X11Display::new( - &state.xcb_connection, - state.scale_factor, - root_id, - )?) 
as Rc) + Some(Rc::new( + X11Display::new(&state.xcb_connection, state.scale_factor, root_id).ok()?, + ) as Rc) }) .collect() } @@ -1283,11 +1319,9 @@ impl LinuxClient for X11Client { fn display(&self, id: DisplayId) -> Option> { let state = self.0.borrow(); - Some(Rc::new(X11Display::new( - &state.xcb_connection, - state.scale_factor, - id.0 as usize, - )?)) + Some(Rc::new( + X11Display::new(&state.xcb_connection, state.scale_factor, id.0 as usize).ok()?, + )) } fn open_window( @@ -1302,6 +1336,7 @@ impl LinuxClient for X11Client { handle, X11ClientStatePtr(Rc::downgrade(&self.0)), state.common.foreground_executor.clone(), + &state.gpu_context, params, &state.xcb_connection, state.client_side_decorations_supported, @@ -1421,9 +1456,10 @@ impl LinuxClient for X11Client { ..Default::default() }, ) - .expect("failed to change window cursor") - .check() - .unwrap(); + .anyhow() + .and_then(|cookie| cookie.check().anyhow()) + .context("setting cursor style") + .log_err(); } fn open_uri(&self, uri: &str) { @@ -1567,7 +1603,7 @@ impl LinuxClient for X11Client { } } -// Adatpted from: +// Adapted from: // https://docs.rs/winit/0.29.11/src/winit/platform_impl/linux/x11/monitor.rs.html#103-111 pub fn mode_refresh_rate(mode: &randr::ModeInfo) -> Duration { if mode.dot_clock == 0 || mode.htotal == 0 || mode.vtotal == 0 { diff --git a/crates/gpui/src/platform/linux/x11/display.rs b/crates/gpui/src/platform/linux/x11/display.rs index 871d709fa9..4983e2f5a3 100644 --- a/crates/gpui/src/platform/linux/x11/display.rs +++ b/crates/gpui/src/platform/linux/x11/display.rs @@ -13,12 +13,17 @@ pub(crate) struct X11Display { impl X11Display { pub(crate) fn new( - xc: &XCBConnection, + xcb: &XCBConnection, scale_factor: f32, x_screen_index: usize, - ) -> Option { - let screen = xc.setup().roots.get(x_screen_index).unwrap(); - Some(Self { + ) -> anyhow::Result { + let Some(screen) = xcb.setup().roots.get(x_screen_index) else { + return Err(anyhow::anyhow!( + "No screen found with index {}", 
+ x_screen_index + )); + }; + Ok(Self { x_screen_index, bounds: Bounds { origin: Default::default(), diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index 15712233c2..40eab6a343 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -1,8 +1,8 @@ -use anyhow::Context; +use anyhow::{anyhow, Context}; +use crate::platform::blade::{BladeContext, BladeRenderer, BladeSurfaceConfig}; use crate::{ - platform::blade::{BladeRenderer, BladeSurfaceConfig}, - px, size, AnyWindowHandle, Bounds, Decorations, DevicePixels, ForegroundExecutor, GPUSpecs, + px, size, AnyWindowHandle, Bounds, Decorations, DevicePixels, ForegroundExecutor, GpuSpecs, Modifiers, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PromptLevel, RequestFrameOptions, ResizeEdge, ScaledPixels, Scene, Size, Tiling, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowDecorations, @@ -14,6 +14,8 @@ use raw_window_handle as rwh; use util::{maybe, ResultExt}; use x11rb::{ connection::Connection, + cookie::{Cookie, VoidCookie}, + errors::ConnectionError, properties::WmSizeHints, protocol::{ sync, @@ -25,7 +27,7 @@ use x11rb::{ }; use std::{ - cell::RefCell, ffi::c_void, mem::size_of, num::NonZeroU32, ops::Div, ptr::NonNull, rc::Rc, + cell::RefCell, ffi::c_void, fmt::Display, num::NonZeroU32, ops::Div, ptr::NonNull, rc::Rc, sync::Arc, }; @@ -53,6 +55,7 @@ x11rb::atom_manager! { WM_PROTOCOLS, WM_DELETE_WINDOW, WM_CHANGE_STATE, + _NET_WM_PID, _NET_WM_NAME, _NET_WM_STATE, _NET_WM_STATE_MAXIMIZED_VERT, @@ -77,17 +80,16 @@ x11rb::atom_manager! 
{ } } -fn query_render_extent(xcb_connection: &XCBConnection, x_window: xproto::Window) -> gpu::Extent { - let reply = xcb_connection - .get_geometry(x_window) - .unwrap() - .reply() - .unwrap(); - gpu::Extent { +fn query_render_extent( + xcb: &Rc, + x_window: xproto::Window, +) -> anyhow::Result { + let reply = get_reply(|| "X11 GetGeometry failed.", xcb.get_geometry(x_window))?; + Ok(gpu::Extent { width: reply.width as u32, height: reply.height as u32, depth: 1, - } + }) } impl ResizeEdge { @@ -148,7 +150,7 @@ impl EdgeConstraints { } } -#[derive(Debug)] +#[derive(Copy, Clone, Debug)] struct Visual { id: xproto::Visualid, colormap: u32, @@ -163,8 +165,8 @@ struct VisualSet { black_pixel: u32, } -fn find_visuals(xcb_connection: &XCBConnection, screen_index: usize) -> VisualSet { - let screen = &xcb_connection.setup().roots[screen_index]; +fn find_visuals(xcb: &XCBConnection, screen_index: usize) -> VisualSet { + let screen = &xcb.setup().roots[screen_index]; let mut set = VisualSet { inherit: Visual { id: screen.root_visual, @@ -246,7 +248,6 @@ pub struct X11WindowState { x_root_window: xproto::Window, pub(crate) counter_id: sync::Counter, pub(crate) last_sync_counter: Option, - _raw: RawWindow, bounds: Bounds, scale_factor: f32, renderer: BladeRenderer, @@ -277,13 +278,16 @@ impl X11WindowState { pub(crate) struct X11WindowStatePtr { pub state: Rc>, pub(crate) callbacks: Rc>, - xcb_connection: Rc, + xcb: Rc, x_window: xproto::Window, } impl rwh::HasWindowHandle for RawWindow { fn window_handle(&self) -> Result { - let non_zero = NonZeroU32::new(self.window_id).unwrap(); + let Some(non_zero) = NonZeroU32::new(self.window_id) else { + log::error!("RawWindow.window_id zero when getting window handle."); + return Err(rwh::HandleError::Unavailable); + }; let mut handle = rwh::XcbWindowHandle::new(non_zero); handle.visual_id = NonZeroU32::new(self.visual_id); Ok(unsafe { rwh::WindowHandle::borrow_raw(handle.into()) }) @@ -291,7 +295,10 @@ impl rwh::HasWindowHandle for 
RawWindow { } impl rwh::HasDisplayHandle for RawWindow { fn display_handle(&self) -> Result { - let non_zero = NonNull::new(self.connection).unwrap(); + let Some(non_zero) = NonNull::new(self.connection) else { + log::error!("Null RawWindow.connection when getting display handle."); + return Err(rwh::HandleError::Unavailable); + }; let handle = rwh::XcbDisplayHandle::new(Some(non_zero), self.screen_id as i32); Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) }) } @@ -308,14 +315,52 @@ impl rwh::HasDisplayHandle for X11Window { } } +fn check_reply( + failure_context: F, + result: Result>, ConnectionError>, +) -> anyhow::Result<()> +where + C: Display + Send + Sync + 'static, + F: FnOnce() -> C, +{ + result + .map_err(|connection_error| anyhow!(connection_error)) + .and_then(|response| { + response + .check() + .map_err(|error_response| anyhow!(error_response)) + }) + .with_context(failure_context) +} + +fn get_reply( + failure_context: F, + result: Result, O>, ConnectionError>, +) -> anyhow::Result +where + C: Display + Send + Sync + 'static, + F: FnOnce() -> C, + O: x11rb::x11_utils::TryParse, +{ + result + .map_err(|connection_error| anyhow!(connection_error)) + .and_then(|response| { + response + .reply() + .map_err(|error_response| anyhow!(error_response)) + }) + .with_context(failure_context) +} + impl X11WindowState { #[allow(clippy::too_many_arguments)] pub fn new( handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, + gpu_context: &BladeContext, params: WindowParams, - xcb_connection: &Rc, + xcb: &Rc, client_side_decorations_supported: bool, x_main_screen_index: usize, x_window: xproto::Window, @@ -327,7 +372,7 @@ impl X11WindowState { .display_id .map_or(x_main_screen_index, |did| did.0 as usize); - let visual_set = find_visuals(&xcb_connection, x_screen_index); + let visual_set = find_visuals(&xcb, x_screen_index); let visual = match visual_set.transparent { Some(visual) => visual, @@ -341,12 +386,12 @@ impl 
X11WindowState { let colormap = if visual.colormap != 0 { visual.colormap } else { - let id = xcb_connection.generate_id().unwrap(); + let id = xcb.generate_id()?; log::info!("Creating colormap {}", id); - xcb_connection - .create_colormap(xproto::ColormapAlloc::NONE, id, visual_set.root, visual.id) - .unwrap() - .check()?; + check_reply( + || format!("X11 CreateColormap failed. id: {}", id), + xcb.create_colormap(xproto::ColormapAlloc::NONE, id, visual_set.root, visual.id), + )?; id }; @@ -370,8 +415,12 @@ impl X11WindowState { bounds.size.height = 600.into(); } - xcb_connection - .create_window( + check_reply( + || { + format!("X11 CreateWindow failed. depth: {}, x_window: {}, visual_set.root: {}, bounds.origin.x.0: {}, bounds.origin.y.0: {}, bounds.size.width.0: {}, bounds.size.height.0: {}", + visual.depth, x_window, visual_set.root, bounds.origin.x.0 + 2, bounds.origin.y.0, bounds.size.width.0, bounds.size.height.0) + }, + xcb.create_window( visual.depth, x_window, visual_set.root, @@ -383,189 +432,206 @@ impl X11WindowState { xproto::WindowClass::INPUT_OUTPUT, visual.id, &win_aux, - ) - .unwrap() - .check().with_context(|| { - format!("CreateWindow request to X server failed. 
depth: {}, x_window: {}, visual_set.root: {}, bounds.origin.x.0: {}, bounds.origin.y.0: {}, bounds.size.width.0: {}, bounds.size.height.0: {}", - visual.depth, x_window, visual_set.root, bounds.origin.x.0 + 2, bounds.origin.y.0, bounds.size.width.0, bounds.size.height.0) - })?; + ), + )?; - if let Some(size) = params.window_min_size { - let mut size_hints = WmSizeHints::new(); - size_hints.min_size = Some((size.width.0 as i32, size.height.0 as i32)); - size_hints - .set_normal_hints(xcb_connection, x_window) - .unwrap(); - } - - let reply = xcb_connection - .get_geometry(x_window) - .unwrap() - .reply() - .unwrap(); - if reply.x == 0 && reply.y == 0 { - bounds.origin.x.0 += 2; - // Work around a bug where our rendered content appears - // outside the window bounds when opened at the default position - // (14px, 49px on X + Gnome + Ubuntu 22). - xcb_connection - .configure_window( - x_window, - &xproto::ConfigureWindowAux::new() - .x(bounds.origin.x.0) - .y(bounds.origin.y.0), - ) - .unwrap(); - } - if let Some(titlebar) = params.titlebar { - if let Some(title) = titlebar.title { - xcb_connection - .change_property8( - xproto::PropMode::REPLACE, - x_window, - xproto::AtomEnum::WM_NAME, - xproto::AtomEnum::STRING, - title.as_bytes(), - ) - .unwrap(); - } - } - if params.kind == WindowKind::PopUp { - xcb_connection - .change_property32( + // Collect errors during setup, so that window can be destroyed on failure. + let setup_result = maybe!({ + let pid = std::process::id(); + check_reply( + || "X11 ChangeProperty for _NET_WM_PID failed.", + xcb.change_property32( xproto::PropMode::REPLACE, x_window, - atoms._NET_WM_WINDOW_TYPE, + atoms._NET_WM_PID, + xproto::AtomEnum::CARDINAL, + &[pid], + ), + )?; + + if let Some(size) = params.window_min_size { + let mut size_hints = WmSizeHints::new(); + let min_size = (size.width.0 as i32, size.height.0 as i32); + size_hints.min_size = Some(min_size); + check_reply( + || { + format!( + "X11 change of WM_SIZE_HINTS failed. 
min_size: {:?}", + min_size + ) + }, + size_hints.set_normal_hints(xcb, x_window), + )?; + } + + let reply = get_reply(|| "X11 GetGeometry failed.", xcb.get_geometry(x_window))?; + if reply.x == 0 && reply.y == 0 { + bounds.origin.x.0 += 2; + // Work around a bug where our rendered content appears + // outside the window bounds when opened at the default position + // (14px, 49px on X + Gnome + Ubuntu 22). + let x = bounds.origin.x.0; + let y = bounds.origin.y.0; + check_reply( + || format!("X11 ConfigureWindow failed. x: {}, y: {}", x, y), + xcb.configure_window(x_window, &xproto::ConfigureWindowAux::new().x(x).y(y)), + )?; + } + if let Some(titlebar) = params.titlebar { + if let Some(title) = titlebar.title { + check_reply( + || "X11 ChangeProperty8 on window title failed.", + xcb.change_property8( + xproto::PropMode::REPLACE, + x_window, + xproto::AtomEnum::WM_NAME, + xproto::AtomEnum::STRING, + title.as_bytes(), + ), + )?; + } + } + if params.kind == WindowKind::PopUp { + check_reply( + || "X11 ChangeProperty32 setting window type for pop-up failed.", + xcb.change_property32( + xproto::PropMode::REPLACE, + x_window, + atoms._NET_WM_WINDOW_TYPE, + xproto::AtomEnum::ATOM, + &[atoms._NET_WM_WINDOW_TYPE_NOTIFICATION], + ), + )?; + } + + check_reply( + || "X11 ChangeProperty32 setting protocols failed.", + xcb.change_property32( + xproto::PropMode::REPLACE, + x_window, + atoms.WM_PROTOCOLS, xproto::AtomEnum::ATOM, - &[atoms._NET_WM_WINDOW_TYPE_NOTIFICATION], - ) - .unwrap(); + &[atoms.WM_DELETE_WINDOW, atoms._NET_WM_SYNC_REQUEST], + ), + )?; + + get_reply( + || "X11 sync protocol initialize failed.", + sync::initialize(xcb, 3, 1), + )?; + let sync_request_counter = xcb.generate_id()?; + check_reply( + || "X11 sync CreateCounter failed.", + sync::create_counter(xcb, sync_request_counter, sync::Int64 { lo: 0, hi: 0 }), + )?; + + check_reply( + || "X11 ChangeProperty32 setting sync request counter failed.", + xcb.change_property32( + xproto::PropMode::REPLACE, + 
x_window, + atoms._NET_WM_SYNC_REQUEST_COUNTER, + xproto::AtomEnum::CARDINAL, + &[sync_request_counter], + ), + )?; + + check_reply( + || "X11 XiSelectEvents failed.", + xcb.xinput_xi_select_events( + x_window, + &[xinput::EventMask { + deviceid: XINPUT_ALL_DEVICE_GROUPS, + mask: vec![ + xinput::XIEventMask::MOTION + | xinput::XIEventMask::BUTTON_PRESS + | xinput::XIEventMask::BUTTON_RELEASE + | xinput::XIEventMask::ENTER + | xinput::XIEventMask::LEAVE, + ], + }], + ), + )?; + + check_reply( + || "X11 XiSelectEvents for device changes failed.", + xcb.xinput_xi_select_events( + x_window, + &[xinput::EventMask { + deviceid: XINPUT_ALL_DEVICES, + mask: vec![ + xinput::XIEventMask::HIERARCHY | xinput::XIEventMask::DEVICE_CHANGED, + ], + }], + ), + )?; + + xcb.flush().with_context(|| "X11 Flush failed.")?; + + let renderer = { + let raw_window = RawWindow { + connection: as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection( + xcb, + ) as *mut _, + screen_id: x_screen_index, + window_id: x_window, + visual_id: visual.id, + }; + let config = BladeSurfaceConfig { + // Note: this has to be done after the GPU init, or otherwise + // the sizes are immediately invalidated. + size: query_render_extent(xcb, x_window)?, + // We set it to transparent by default, even if we have client-side + // decorations, since those seem to work on X11 even without `true` here. + // If the window appearance changes, then the renderer will get updated + // too + transparent: false, + }; + BladeRenderer::new(gpu_context, &raw_window, config)? 
+ }; + + check_reply(|| "X11 MapWindow failed.", xcb.map_window(x_window))?; + let display = Rc::new(X11Display::new(xcb, scale_factor, x_screen_index)?); + + Ok(Self { + client, + executor, + display, + x_root_window: visual_set.root, + bounds: bounds.to_pixels(scale_factor), + scale_factor, + renderer, + atoms: *atoms, + input_handler: None, + active: false, + hovered: false, + fullscreen: false, + maximized_vertical: false, + maximized_horizontal: false, + hidden: false, + appearance, + handle, + background_appearance: WindowBackgroundAppearance::Opaque, + destroyed: false, + client_side_decorations_supported, + decorations: WindowDecorations::Server, + last_insets: [0, 0, 0, 0], + edge_constraints: None, + counter_id: sync_request_counter, + last_sync_counter: None, + }) + }); + + if setup_result.is_err() { + check_reply( + || "X11 DestroyWindow failed while cleaning it up after setup failure.", + xcb.destroy_window(x_window), + )?; + xcb.flush() + .with_context(|| "X11 Flush failed while cleaning it up after setup failure.")?; } - xcb_connection - .change_property32( - xproto::PropMode::REPLACE, - x_window, - atoms.WM_PROTOCOLS, - xproto::AtomEnum::ATOM, - &[atoms.WM_DELETE_WINDOW, atoms._NET_WM_SYNC_REQUEST], - ) - .unwrap(); - - sync::initialize(xcb_connection, 3, 1).unwrap(); - let sync_request_counter = xcb_connection.generate_id().unwrap(); - sync::create_counter( - xcb_connection, - sync_request_counter, - sync::Int64 { lo: 0, hi: 0 }, - ) - .unwrap(); - - xcb_connection - .change_property32( - xproto::PropMode::REPLACE, - x_window, - atoms._NET_WM_SYNC_REQUEST_COUNTER, - xproto::AtomEnum::CARDINAL, - &[sync_request_counter], - ) - .unwrap(); - - xcb_connection - .xinput_xi_select_events( - x_window, - &[xinput::EventMask { - deviceid: XINPUT_ALL_DEVICE_GROUPS, - mask: vec![ - xinput::XIEventMask::MOTION - | xinput::XIEventMask::BUTTON_PRESS - | xinput::XIEventMask::BUTTON_RELEASE - | xinput::XIEventMask::ENTER - | xinput::XIEventMask::LEAVE, - ], - }], 
- ) - .unwrap(); - - xcb_connection - .xinput_xi_select_events( - x_window, - &[xinput::EventMask { - deviceid: XINPUT_ALL_DEVICES, - mask: vec![ - xinput::XIEventMask::HIERARCHY, - xinput::XIEventMask::DEVICE_CHANGED, - ], - }], - ) - .unwrap(); - - xcb_connection.flush().unwrap(); - - let raw = RawWindow { - connection: as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection( - xcb_connection, - ) as *mut _, - screen_id: x_screen_index, - window_id: x_window, - visual_id: visual.id, - }; - let gpu = Arc::new( - unsafe { - gpu::Context::init_windowed( - &raw, - gpu::ContextDesc { - validation: false, - capture: false, - overlay: false, - }, - ) - } - .map_err(|e| anyhow::anyhow!("{:?}", e))?, - ); - - let config = BladeSurfaceConfig { - // Note: this has to be done after the GPU init, or otherwise - // the sizes are immediately invalidated. - size: query_render_extent(xcb_connection, x_window), - // We set it to transparent by default, even if we have client-side - // decorations, since those seem to work on X11 even without `true` here. 
- // If the window appearance changes, then the renderer will get updated - // too - transparent: false, - }; - xcb_connection.map_window(x_window).unwrap(); - - Ok(Self { - client, - executor, - display: Rc::new( - X11Display::new(xcb_connection, scale_factor, x_screen_index).unwrap(), - ), - _raw: raw, - x_root_window: visual_set.root, - bounds: bounds.to_pixels(scale_factor), - scale_factor, - renderer: BladeRenderer::new(gpu, config), - atoms: *atoms, - input_handler: None, - active: false, - hovered: false, - fullscreen: false, - maximized_vertical: false, - maximized_horizontal: false, - hidden: false, - appearance, - handle, - background_appearance: WindowBackgroundAppearance::Opaque, - destroyed: false, - client_side_decorations_supported, - decorations: WindowDecorations::Server, - last_insets: [0, 0, 0, 0], - edge_constraints: None, - counter_id: sync_request_counter, - last_sync_counter: None, - }) + setup_result } fn content_size(&self) -> Size { @@ -577,6 +643,28 @@ impl X11WindowState { } } +/// A handle to an X11 window which destroys it on Drop. 
+pub struct X11WindowHandle { + id: xproto::Window, + xcb: Rc, +} + +impl Drop for X11WindowHandle { + fn drop(&mut self) { + maybe!({ + check_reply( + || "X11 DestroyWindow failed while dropping X11WindowHandle.", + self.xcb.destroy_window(self.id), + )?; + self.xcb + .flush() + .with_context(|| "X11 Flush failed while dropping X11WindowHandle.")?; + anyhow::Ok(()) + }) + .log_err(); + } +} + pub(crate) struct X11Window(pub X11WindowStatePtr); impl Drop for X11Window { @@ -585,13 +673,17 @@ impl Drop for X11Window { state.renderer.destroy(); let destroy_x_window = maybe!({ - self.0.xcb_connection.unmap_window(self.0.x_window)?; - self.0.xcb_connection.destroy_window(self.0.x_window)?; - self.0.xcb_connection.flush()?; + check_reply( + || "X11 DestroyWindow failure.", + self.0.xcb.destroy_window(self.0.x_window), + )?; + self.0 + .xcb + .flush() + .with_context(|| "X11 Flush failed after calling DestroyWindow.")?; anyhow::Ok(()) }) - .context("unmapping and destroying X11 window") .log_err(); if destroy_x_window.is_some() { @@ -626,8 +718,9 @@ impl X11Window { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, + gpu_context: &BladeContext, params: WindowParams, - xcb_connection: &Rc, + xcb: &Rc, client_side_decorations_supported: bool, x_main_screen_index: usize, x_window: xproto::Window, @@ -640,8 +733,9 @@ impl X11Window { handle, client, executor, + gpu_context, params, - xcb_connection, + xcb, client_side_decorations_supported, x_main_screen_index, x_window, @@ -650,17 +744,23 @@ impl X11Window { appearance, )?)), callbacks: Rc::new(RefCell::new(Callbacks::default())), - xcb_connection: xcb_connection.clone(), + xcb: xcb.clone(), x_window, }; let state = ptr.state.borrow_mut(); - ptr.set_wm_properties(state); + ptr.set_wm_properties(state)?; Ok(Self(ptr)) } - fn set_wm_hints(&self, wm_hint_property_state: WmHintPropertyState, prop1: u32, prop2: u32) { + fn set_wm_hints C>( + &self, + failure_context: F, + wm_hint_property_state: 
WmHintPropertyState, + prop1: u32, + prop2: u32, + ) -> anyhow::Result<()> { let state = self.0.state.borrow(); let message = ClientMessageEvent::new( 32, @@ -668,51 +768,45 @@ impl X11Window { state.atoms._NET_WM_STATE, [wm_hint_property_state as u32, prop1, prop2, 1, 0], ); - self.0 - .xcb_connection - .send_event( + check_reply( + failure_context, + self.0.xcb.send_event( false, state.x_root_window, EventMask::SUBSTRUCTURE_REDIRECT | EventMask::SUBSTRUCTURE_NOTIFY, message, - ) - .unwrap() - .check() - .unwrap(); + ), + ) } - fn get_root_position(&self, position: Point) -> TranslateCoordinatesReply { + fn get_root_position( + &self, + position: Point, + ) -> anyhow::Result { let state = self.0.state.borrow(); - self.0 - .xcb_connection - .translate_coordinates( + get_reply( + || "X11 TranslateCoordinates failed.", + self.0.xcb.translate_coordinates( self.0.x_window, state.x_root_window, (position.x.0 * state.scale_factor) as i16, (position.y.0 * state.scale_factor) as i16, - ) - .unwrap() - .reply() - .unwrap() + ), + ) } - fn send_moveresize(&self, flag: u32) { + fn send_moveresize(&self, flag: u32) -> anyhow::Result<()> { let state = self.0.state.borrow(); - self.0 - .xcb_connection - .ungrab_pointer(x11rb::CURRENT_TIME) - .unwrap() - .check() - .unwrap(); + check_reply( + || "X11 UngrabPointer before move/resize of window failed.", + self.0.xcb.ungrab_pointer(x11rb::CURRENT_TIME), + )?; - let pointer = self - .0 - .xcb_connection - .query_pointer(self.0.x_window) - .unwrap() - .reply() - .unwrap(); + let pointer = get_reply( + || "X11 QueryPointer before move/resize of window failed.", + self.0.xcb.query_pointer(self.0.x_window), + )?; let message = ClientMessageEvent::new( 32, self.0.x_window, @@ -725,17 +819,21 @@ impl X11Window { 0, ], ); - self.0 - .xcb_connection - .send_event( + check_reply( + || "X11 SendEvent to move/resize window failed.", + self.0.xcb.send_event( false, state.x_root_window, EventMask::SUBSTRUCTURE_REDIRECT |
EventMask::SUBSTRUCTURE_NOTIFY, message, - ) - .unwrap(); + ), + )?; - self.0.xcb_connection.flush().unwrap(); + self.flush() + } + + fn flush(&self) -> anyhow::Result<()> { + self.0.xcb.flush().with_context(|| "X11 Flush failed.") } } @@ -751,51 +849,56 @@ impl X11WindowStatePtr { } } - pub fn property_notify(&self, event: xproto::PropertyNotifyEvent) { + pub fn property_notify(&self, event: xproto::PropertyNotifyEvent) -> anyhow::Result<()> { let mut state = self.state.borrow_mut(); if event.atom == state.atoms._NET_WM_STATE { - self.set_wm_properties(state); + self.set_wm_properties(state)?; } else if event.atom == state.atoms._GTK_EDGE_CONSTRAINTS { - self.set_edge_constraints(state); + self.set_edge_constraints(state)?; } + Ok(()) } - fn set_edge_constraints(&self, mut state: std::cell::RefMut) { - let reply = self - .xcb_connection - .get_property( + fn set_edge_constraints( + &self, + mut state: std::cell::RefMut, + ) -> anyhow::Result<()> { + let reply = get_reply( + || "X11 GetProperty for _GTK_EDGE_CONSTRAINTS failed.", + self.xcb.get_property( false, self.x_window, state.atoms._GTK_EDGE_CONSTRAINTS, xproto::AtomEnum::CARDINAL, 0, 4, - ) - .unwrap() - .reply() - .unwrap(); + ), + )?; if reply.value_len != 0 { let atom = u32::from_ne_bytes(reply.value[0..4].try_into().unwrap()); let edge_constraints = EdgeConstraints::from_atom(atom); state.edge_constraints.replace(edge_constraints); } + + Ok(()) } - fn set_wm_properties(&self, mut state: std::cell::RefMut) { - let reply = self - .xcb_connection - .get_property( + fn set_wm_properties( + &self, + mut state: std::cell::RefMut, + ) -> anyhow::Result<()> { + let reply = get_reply( + || "X11 GetProperty for _NET_WM_STATE failed.", + self.xcb.get_property( false, self.x_window, state.atoms._NET_WM_STATE, xproto::AtomEnum::ATOM, 0, u32::MAX, - ) - .unwrap() - .reply() - .unwrap(); + ), + )?; let atoms = reply .value @@ -821,6 +924,8 @@ impl X11WindowStatePtr { state.hidden = true; } } + + Ok(()) } pub fn 
close(&self) { @@ -846,9 +951,9 @@ impl X11WindowStatePtr { if let PlatformInput::KeyDown(event) = input { let mut state = self.state.borrow_mut(); if let Some(mut input_handler) = state.input_handler.take() { - if let Some(ime_key) = &event.keystroke.ime_key { + if let Some(key_char) = &event.keystroke.key_char { drop(state); - input_handler.replace_text_in_range(None, ime_key); + input_handler.replace_text_in_range(None, key_char); state = self.state.borrow_mut(); } state.input_handler = Some(input_handler); @@ -912,7 +1017,7 @@ impl X11WindowStatePtr { bounds } - pub fn configure(&self, bounds: Bounds) { + pub fn configure(&self, bounds: Bounds) -> anyhow::Result<()> { let mut resize_args = None; let is_resize; { @@ -930,7 +1035,7 @@ impl X11WindowStatePtr { state.bounds = bounds; } - let gpu_size = query_render_extent(&self.xcb_connection, self.x_window); + let gpu_size = query_render_extent(&self.xcb, self.x_window)?; if true { state.renderer.update_drawable_size(size( DevicePixels(gpu_size.width as i32), @@ -939,7 +1044,10 @@ impl X11WindowStatePtr { resize_args = Some((state.content_size(), state.scale_factor)); } if let Some(value) = state.last_sync_counter.take() { - sync::set_counter(&self.xcb_connection, state.counter_id, value).unwrap(); + check_reply( + || "X11 sync SetCounter failed.", + sync::set_counter(&self.xcb, state.counter_id, value), + )?; } } @@ -951,9 +1059,11 @@ impl X11WindowStatePtr { } if !is_resize { if let Some(ref mut fun) = callbacks.moved { - fun() + fun(); } } + + Ok(()) } pub fn set_active(&self, focus: bool) { @@ -1003,6 +1113,30 @@ impl PlatformWindow for X11Window { } } + fn inner_window_bounds(&self) -> WindowBounds { + let state = self.0.state.borrow(); + if self.is_maximized() { + WindowBounds::Maximized(state.bounds) + } else { + let mut bounds = state.bounds; + let [left, right, top, bottom] = state.last_insets; + + let [left, right, top, bottom] = [ + Pixels((left as f32) / state.scale_factor), + Pixels((right as f32) / 
state.scale_factor), + Pixels((top as f32) / state.scale_factor), + Pixels((bottom as f32) / state.scale_factor), + ]; + + bounds.origin.x += left; + bounds.origin.y += top; + bounds.size.width -= left + right; + bounds.size.height -= top + bottom; + + WindowBounds::Windowed(bounds) + } + } + fn content_size(&self) -> Size { // We divide by the scale factor here because this value is queried to determine how much to draw, // but it will be multiplied later by the scale to adjust for scaling. @@ -1025,13 +1159,11 @@ impl PlatformWindow for X11Window { } fn mouse_position(&self) -> Point { - let reply = self - .0 - .xcb_connection - .query_pointer(self.0.x_window) - .unwrap() - .reply() - .unwrap(); + let reply = get_reply( + || "X11 QueryPointer failed.", + self.0.xcb.query_pointer(self.0.x_window), + ) + .unwrap(); Point::new((reply.root_x as u32).into(), (reply.root_y as u32).into()) } @@ -1073,7 +1205,7 @@ impl PlatformWindow for X11Window { data, ); self.0 - .xcb_connection + .xcb .send_event( false, self.0.state.borrow().x_root_window, @@ -1082,14 +1214,14 @@ impl PlatformWindow for X11Window { ) .log_err(); self.0 - .xcb_connection + .xcb .set_input_focus( xproto::InputFocus::POINTER_ROOT, self.0.x_window, xproto::Time::CURRENT_TIME, ) .log_err(); - self.0.xcb_connection.flush().unwrap(); + self.flush().unwrap(); } fn is_active(&self) -> bool { @@ -1101,28 +1233,30 @@ impl PlatformWindow for X11Window { } fn set_title(&mut self, title: &str) { - self.0 - .xcb_connection - .change_property8( + check_reply( + || "X11 ChangeProperty8 on WM_NAME failed.", + self.0.xcb.change_property8( xproto::PropMode::REPLACE, self.0.x_window, xproto::AtomEnum::WM_NAME, xproto::AtomEnum::STRING, title.as_bytes(), - ) - .unwrap(); + ), + ) + .log_err(); - self.0 - .xcb_connection - .change_property8( + check_reply( + || "X11 ChangeProperty8 on _NET_WM_NAME failed.", + self.0.xcb.change_property8( xproto::PropMode::REPLACE, self.0.x_window, 
self.0.state.borrow().atoms._NET_WM_NAME, self.0.state.borrow().atoms.UTF8_STRING, title.as_bytes(), - ) - .unwrap(); - self.0.xcb_connection.flush().unwrap(); + ), + ) + .log_err(); + self.flush().log_err(); } fn set_app_id(&mut self, app_id: &str) { @@ -1131,18 +1265,17 @@ impl PlatformWindow for X11Window { data.push(b'\0'); data.extend(app_id.bytes()); // class - self.0 - .xcb_connection - .change_property8( + check_reply( + || "X11 ChangeProperty8 for WM_CLASS failed.", + self.0.xcb.change_property8( xproto::PropMode::REPLACE, self.0.x_window, xproto::AtomEnum::WM_CLASS, xproto::AtomEnum::STRING, &data, - ) - .unwrap() - .check() - .unwrap(); + ), + ) + .unwrap(); } fn set_edited(&mut self, _edited: bool) { @@ -1169,35 +1302,38 @@ impl PlatformWindow for X11Window { state.atoms.WM_CHANGE_STATE, [WINDOW_ICONIC_STATE, 0, 0, 0, 0], ); - self.0 - .xcb_connection - .send_event( + check_reply( + || "X11 SendEvent to minimize window failed.", + self.0.xcb.send_event( false, state.x_root_window, EventMask::SUBSTRUCTURE_REDIRECT | EventMask::SUBSTRUCTURE_NOTIFY, message, - ) - .unwrap() - .check() - .unwrap(); + ), + ) + .unwrap(); } fn zoom(&self) { let state = self.0.state.borrow(); self.set_wm_hints( + || "X11 SendEvent to maximize a window failed.", WmHintPropertyState::Toggle, state.atoms._NET_WM_STATE_MAXIMIZED_VERT, state.atoms._NET_WM_STATE_MAXIMIZED_HORZ, - ); + ) + .unwrap(); } fn toggle_fullscreen(&self) { let state = self.0.state.borrow(); self.set_wm_hints( + || "X11 SendEvent to fullscreen a window failed.", WmHintPropertyState::Toggle, state.atoms._NET_WM_STATE_FULLSCREEN, xproto::AtomEnum::NONE.into(), - ); + ) + .unwrap(); } fn is_fullscreen(&self) -> bool { @@ -1253,14 +1389,13 @@ impl PlatformWindow for X11Window { fn show_window_menu(&self, position: Point) { let state = self.0.state.borrow(); - self.0 - .xcb_connection - .ungrab_pointer(x11rb::CURRENT_TIME) - .unwrap() - .check() - .unwrap(); + check_reply( + || "X11 UngrabPointer failed.", + 
self.0.xcb.ungrab_pointer(x11rb::CURRENT_TIME), + ) + .unwrap(); - let coords = self.get_root_position(position); + let coords = self.get_root_position(position).unwrap(); let message = ClientMessageEvent::new( 32, self.0.x_window, @@ -1273,26 +1408,25 @@ impl PlatformWindow for X11Window { 0, ], ); - self.0 - .xcb_connection - .send_event( + check_reply( + || "X11 SendEvent to show window menu failed.", + self.0.xcb.send_event( false, state.x_root_window, EventMask::SUBSTRUCTURE_REDIRECT | EventMask::SUBSTRUCTURE_NOTIFY, message, - ) - .unwrap() - .check() - .unwrap(); + ), + ) + .unwrap(); } fn start_window_move(&self) { const MOVERESIZE_MOVE: u32 = 8; - self.send_moveresize(MOVERESIZE_MOVE); + self.send_moveresize(MOVERESIZE_MOVE).unwrap(); } fn start_window_resize(&self, edge: ResizeEdge) { - self.send_moveresize(edge.to_moveresize()); + self.send_moveresize(edge.to_moveresize()).unwrap(); } fn window_decorations(&self) -> crate::Decorations { @@ -1355,9 +1489,9 @@ impl PlatformWindow for X11Window { if state.last_insets != insets { state.last_insets = insets; - self.0 - .xcb_connection - .change_property( + check_reply( + || "X11 ChangeProperty for _GTK_FRAME_EXTENTS failed.", + self.0.xcb.change_property( xproto::PropMode::REPLACE, self.0.x_window, state.atoms._GTK_FRAME_EXTENTS, @@ -1365,10 +1499,9 @@ impl PlatformWindow for X11Window { size_of::() as u8 * 8, 4, bytemuck::cast_slice::(&insets), - ) - .unwrap() - .check() - .unwrap(); + ), + ) + .unwrap(); } } @@ -1390,20 +1523,19 @@ impl PlatformWindow for X11Window { WindowDecorations::Client => [1 << 1, 0, 0, 0, 0], }; - self.0 - .xcb_connection - .change_property( + check_reply( + || "X11 ChangeProperty for _MOTIF_WM_HINTS failed.", + self.0.xcb.change_property( xproto::PropMode::REPLACE, self.0.x_window, state.atoms._MOTIF_WM_HINTS, state.atoms._MOTIF_WM_HINTS, - std::mem::size_of::() as u8 * 8, + size_of::() as u8 * 8, 5, bytemuck::cast_slice::(&hints_data), - ) - .unwrap() - .check() - .unwrap(); + ), 
+ ) + .unwrap(); match decorations { WindowDecorations::Server => { @@ -1432,7 +1564,7 @@ impl PlatformWindow for X11Window { client.update_ime_position(bounds); } - fn gpu_specs(&self) -> Option { + fn gpu_specs(&self) -> Option { self.0.state.borrow().renderer.gpu_specs().into() } } diff --git a/crates/gpui/src/platform/linux/xdg_desktop_portal.rs b/crates/gpui/src/platform/linux/xdg_desktop_portal.rs index 64aa3975b8..722947a299 100644 --- a/crates/gpui/src/platform/linux/xdg_desktop_portal.rs +++ b/crates/gpui/src/platform/linux/xdg_desktop_portal.rs @@ -42,11 +42,13 @@ impl XDPEventSource { { sender.send(Event::CursorTheme(initial_theme))?; } + + // If u32 is used here, it throws invalid type error if let Ok(initial_size) = settings - .read::("org.gnome.desktop.interface", "cursor-size") + .read::("org.gnome.desktop.interface", "cursor-size") .await { - sender.send(Event::CursorSize(initial_size))?; + sender.send(Event::CursorSize(initial_size as u32))?; } if let Ok(mut cursor_theme_changed) = settings @@ -69,7 +71,7 @@ impl XDPEventSource { } if let Ok(mut cursor_size_changed) = settings - .receive_setting_changed_with_args::( + .receive_setting_changed_with_args::( "org.gnome.desktop.interface", "cursor-size", ) @@ -80,7 +82,7 @@ impl XDPEventSource { .spawn(async move { while let Some(size) = cursor_size_changed.next().await { let size = size?; - sender.send(Event::CursorSize(size))?; + sender.send(Event::CursorSize(size as u32))?; } anyhow::Ok(()) }) diff --git a/crates/gpui/src/platform/mac.rs b/crates/gpui/src/platform/mac.rs index 396fd49d04..bd3d8f35ac 100644 --- a/crates/gpui/src/platform/mac.rs +++ b/crates/gpui/src/platform/mac.rs @@ -4,12 +4,14 @@ mod dispatcher; mod display; mod display_link; mod events; +mod screen_capture; #[cfg(not(feature = "macos-blade"))] mod metal_atlas; #[cfg(not(feature = "macos-blade"))] pub mod metal_renderer; +use media::core_video::CVImageBuffer; #[cfg(not(feature = "macos-blade"))] use metal_renderer as renderer; @@ 
-49,6 +51,9 @@ pub(crate) use window::*; #[cfg(feature = "font-kit")] pub(crate) use text_system::*; +/// A frame of video captured from a screen. +pub(crate) type PlatformScreenCaptureFrame = CVImageBuffer; + trait BoolExt { fn to_objc(self) -> BOOL; } diff --git a/crates/gpui/src/platform/mac/events.rs b/crates/gpui/src/platform/mac/events.rs index aeff08ada8..56256aeffd 100644 --- a/crates/gpui/src/platform/mac/events.rs +++ b/crates/gpui/src/platform/mac/events.rs @@ -245,7 +245,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { .charactersIgnoringModifiers() .to_str() .to_string(); - let mut ime_key = None; + let mut key_char = None; let first_char = characters.chars().next().map(|ch| ch as u16); let modifiers = native_event.modifierFlags(); @@ -260,11 +260,20 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { #[allow(non_upper_case_globals)] let key = match first_char { - Some(SPACE_KEY) => "space".to_string(), + Some(SPACE_KEY) => { + key_char = Some(" ".to_string()); + "space".to_string() + } + Some(TAB_KEY) => { + key_char = Some("\t".to_string()); + "tab".to_string() + } + Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => { + key_char = Some("\n".to_string()); + "enter".to_string() + } Some(BACKSPACE_KEY) => "backspace".to_string(), - Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter".to_string(), Some(ESCAPE_KEY) => "escape".to_string(), - Some(TAB_KEY) => "tab".to_string(), Some(SHIFT_TAB_KEY) => "tab".to_string(), Some(NSUpArrowFunctionKey) => "up".to_string(), Some(NSDownArrowFunctionKey) => "down".to_string(), @@ -313,7 +322,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { let mut chars_with_shift = chars_for_modified_key(native_event.keyCode(), SHIFT_MOD); let always_use_cmd_layout = always_use_command_layout(); - // Handle Dvorak+QWERTY / Russian / Armeniam + // Handle Dvorak+QWERTY / Russian / Armenian if command || always_use_cmd_layout { let chars_with_cmd = chars_for_modified_key(native_event.keyCode(), 
CMD_MOD); let chars_with_both = @@ -332,6 +341,18 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { chars_ignoring_modifiers = chars_with_cmd; } + if !control && !command && !function { + let mut mods = NO_MOD; + if shift { + mods |= SHIFT_MOD; + } + if alt { + mods |= OPTION_MOD; + } + + key_char = Some(chars_for_modified_key(native_event.keyCode(), mods)); + } + let mut key = if shift && chars_ignoring_modifiers .chars() @@ -345,20 +366,6 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { chars_ignoring_modifiers }; - if always_use_cmd_layout || alt { - let mut mods = NO_MOD; - if shift { - mods |= SHIFT_MOD; - } - if alt { - mods |= OPTION_MOD; - } - let alt_key = chars_for_modified_key(native_event.keyCode(), mods); - if alt_key != key { - ime_key = Some(alt_key); - } - }; - key } }; @@ -372,7 +379,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { function, }, key, - ime_key, + key_char, } } diff --git a/crates/gpui/src/platform/mac/metal_atlas.rs b/crates/gpui/src/platform/mac/metal_atlas.rs index 89a6987752..ca595c5ce3 100644 --- a/crates/gpui/src/platform/mac/metal_atlas.rs +++ b/crates/gpui/src/platform/mac/metal_atlas.rs @@ -1,6 +1,6 @@ use crate::{ - AtlasKey, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, DevicePixels, PlatformAtlas, - Point, Size, + platform::AtlasTextureList, AtlasKey, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, + DevicePixels, PlatformAtlas, Point, Size, }; use anyhow::{anyhow, Result}; use collections::FxHashMap; @@ -42,7 +42,7 @@ impl MetalAtlas { AtlasTextureKind::Polychrome => &mut lock.polychrome_textures, AtlasTextureKind::Path => &mut lock.path_textures, }; - for texture in textures { + for texture in textures.iter_mut() { texture.clear(); } } @@ -50,9 +50,9 @@ impl MetalAtlas { struct MetalAtlasState { device: AssertSend, - monochrome_textures: Vec, - polychrome_textures: Vec, - path_textures: Vec, + monochrome_textures: AtlasTextureList, + polychrome_textures: 
AtlasTextureList, + path_textures: AtlasTextureList, tiles_by_key: FxHashMap, } @@ -78,6 +78,38 @@ impl PlatformAtlas for MetalAtlas { Ok(Some(tile)) } } + + fn remove(&self, key: &AtlasKey) { + let mut lock = self.0.lock(); + let Some(id) = lock.tiles_by_key.get(key).map(|v| v.texture_id) else { + return; + }; + + let textures = match id.kind { + AtlasTextureKind::Monochrome => &mut lock.monochrome_textures, + AtlasTextureKind::Polychrome => &mut lock.polychrome_textures, + AtlasTextureKind::Path => &mut lock.polychrome_textures, + }; + + let Some(texture_slot) = textures + .textures + .iter_mut() + .find(|texture| texture.as_ref().is_some_and(|v| v.id == id)) + else { + return; + }; + + if let Some(mut texture) = texture_slot.take() { + texture.decrement_ref_count(); + + if texture.is_unreferenced() { + textures.free_list.push(id.index as usize); + lock.tiles_by_key.remove(key); + } else { + *texture_slot = Some(texture); + } + } + } } impl MetalAtlasState { @@ -86,20 +118,24 @@ impl MetalAtlasState { size: Size, texture_kind: AtlasTextureKind, ) -> Option { - let textures = match texture_kind { - AtlasTextureKind::Monochrome => &mut self.monochrome_textures, - AtlasTextureKind::Polychrome => &mut self.polychrome_textures, - AtlasTextureKind::Path => &mut self.path_textures, - }; + { + let textures = match texture_kind { + AtlasTextureKind::Monochrome => &mut self.monochrome_textures, + AtlasTextureKind::Polychrome => &mut self.polychrome_textures, + AtlasTextureKind::Path => &mut self.path_textures, + }; - textures - .iter_mut() - .rev() - .find_map(|texture| texture.allocate(size)) - .or_else(|| { - let texture = self.push_texture(size, texture_kind); - texture.allocate(size) - }) + if let Some(tile) = textures + .iter_mut() + .rev() + .find_map(|texture| texture.allocate(size)) + { + return Some(tile); + } + } + + let texture = self.push_texture(size, texture_kind); + texture.allocate(size) } fn push_texture( @@ -140,21 +176,31 @@ impl MetalAtlasState { 
texture_descriptor.set_usage(usage); let metal_texture = self.device.new_texture(&texture_descriptor); - let textures = match kind { + let texture_list = match kind { AtlasTextureKind::Monochrome => &mut self.monochrome_textures, AtlasTextureKind::Polychrome => &mut self.polychrome_textures, AtlasTextureKind::Path => &mut self.path_textures, }; + + let index = texture_list.free_list.pop(); + let atlas_texture = MetalAtlasTexture { id: AtlasTextureId { - index: textures.len() as u32, + index: index.unwrap_or(texture_list.textures.len()) as u32, kind, }, allocator: etagere::BucketedAtlasAllocator::new(size.into()), metal_texture: AssertSend(metal_texture), + live_atlas_keys: 0, }; - textures.push(atlas_texture); - textures.last_mut().unwrap() + + if let Some(ix) = index { + texture_list.textures[ix] = Some(atlas_texture); + texture_list.textures.get_mut(ix).unwrap().as_mut().unwrap() + } else { + texture_list.textures.push(Some(atlas_texture)); + texture_list.textures.last_mut().unwrap().as_mut().unwrap() + } } fn texture(&self, id: AtlasTextureId) -> &MetalAtlasTexture { @@ -163,7 +209,7 @@ impl MetalAtlasState { crate::AtlasTextureKind::Polychrome => &self.polychrome_textures, crate::AtlasTextureKind::Path => &self.path_textures, }; - &textures[id.index as usize] + textures[id.index as usize].as_ref().unwrap() } } @@ -171,6 +217,7 @@ struct MetalAtlasTexture { id: AtlasTextureId, allocator: BucketedAtlasAllocator, metal_texture: AssertSend, + live_atlas_keys: u32, } impl MetalAtlasTexture { @@ -189,6 +236,7 @@ impl MetalAtlasTexture { }, padding: 0, }; + self.live_atlas_keys += 1; Some(tile) } @@ -215,6 +263,14 @@ impl MetalAtlasTexture { _ => unimplemented!(), } } + + fn decrement_ref_count(&mut self) { + self.live_atlas_keys -= 1; + } + + fn is_unreferenced(&mut self) -> bool { + self.live_atlas_keys == 0 + } } impl From> for etagere::Size { diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs index 
f42a2e2df7..c290d12f7e 100644 --- a/crates/gpui/src/platform/mac/metal_renderer.rs +++ b/crates/gpui/src/platform/mac/metal_renderer.rs @@ -1,7 +1,7 @@ use super::metal_atlas::MetalAtlas; use crate::{ - point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels, - Hsla, MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite, + point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Background, Bounds, ContentMask, + DevicePixels, MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline, }; use anyhow::{anyhow, Result}; @@ -1242,7 +1242,7 @@ enum PathRasterizationInputIndex { #[repr(C)] pub struct PathSprite { pub bounds: Bounds, - pub color: Hsla, + pub color: Background, pub tile: AtlasTile, } diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index b744c658ce..da645750a5 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -1,16 +1,16 @@ use super::{ attributed_string::{NSAttributedString, NSMutableAttributedString}, events::key_to_native, - BoolExt, + renderer, screen_capture, BoolExt, }; use crate::{ hash, Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem, ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, Keymap, MacDispatcher, MacDisplay, MacWindow, Menu, MenuItem, PathPromptOptions, Platform, PlatformDisplay, - PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task, WindowAppearance, - WindowParams, + PlatformTextSystem, PlatformWindow, Result, ScreenCaptureSource, SemanticVersion, Task, + WindowAppearance, WindowParams, }; -use anyhow::anyhow; +use anyhow::{anyhow, Context as _}; use block::ConcreteBlock; use cocoa::{ appkit::{ @@ -57,8 +57,7 @@ use std::{ sync::Arc, }; use strum::IntoEnumIterator; - -use super::renderer; +use util::ResultExt; 
#[allow(non_upper_case_globals)] const NSUTF8StringEncoding: NSUInteger = 4; @@ -343,8 +342,10 @@ impl MacPlatform { ns_string(key_to_native(&keystroke.key).as_ref()), ) .autorelease(); - let _: () = - msg_send![item, setAllowsAutomaticKeyEquivalentLocalization: NO]; + if MacPlatform::os_version().unwrap() >= SemanticVersion::new(12, 0, 0) { + let _: () = + msg_send![item, setAllowsAutomaticKeyEquivalentLocalization: NO]; + } item.setKeyEquivalentModifierMask_(mask); } // For multi-keystroke bindings, render the keystroke as part of the title. @@ -550,6 +551,12 @@ impl Platform for MacPlatform { .collect() } + fn screen_capture_sources( + &self, + ) -> oneshot::Receiver>>> { + screen_capture::get_sources() + } + fn active_window(&self) -> Option { MacWindow::active_window() } @@ -752,6 +759,10 @@ impl Platform for MacPlatform { done_rx } + fn can_select_mixed_files_and_dirs(&self) -> bool { + true + } + fn reveal_path(&self, path: &Path) { unsafe { let path = path.to_path_buf(); @@ -773,15 +784,16 @@ impl Platform for MacPlatform { } fn open_with_system(&self, path: &Path) { - let path = path.to_path_buf(); + let path = path.to_owned(); self.0 .lock() .background_executor .spawn(async move { - std::process::Command::new("open") + let _ = std::process::Command::new("open") .arg(path) .spawn() - .expect("Failed to open file"); + .context("invoking open command") + .log_err(); }) .detach(); } @@ -842,7 +854,9 @@ impl Platform for MacPlatform { let app: id = msg_send![APP_CLASS, sharedApplication]; let mut state = self.0.lock(); let actions = &mut state.menu_actions; - app.setMainMenu_(self.create_menu_bar(menus, NSWindow::delegate(app), actions, keymap)); + let menu = self.create_menu_bar(menus, NSWindow::delegate(app), actions, keymap); + drop(state); + app.setMainMenu_(menu); } } diff --git a/crates/gpui/src/platform/mac/screen_capture.rs b/crates/gpui/src/platform/mac/screen_capture.rs new file mode 100644 index 0000000000..a2b535996f --- /dev/null +++ 
b/crates/gpui/src/platform/mac/screen_capture.rs @@ -0,0 +1,239 @@ +use crate::{ + platform::{ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream}, + px, size, Pixels, Size, +}; +use anyhow::{anyhow, Result}; +use block::ConcreteBlock; +use cocoa::{ + base::{id, nil, YES}, + foundation::NSArray, +}; +use core_foundation::base::TCFType; +use ctor::ctor; +use futures::channel::oneshot; +use media::core_media::{CMSampleBuffer, CMSampleBufferRef}; +use metal::NSInteger; +use objc::{ + class, + declare::ClassDecl, + msg_send, + runtime::{Class, Object, Sel}, + sel, sel_impl, +}; +use std::{cell::RefCell, ffi::c_void, mem, ptr, rc::Rc}; + +#[derive(Clone)] +pub struct MacScreenCaptureSource { + sc_display: id, +} + +pub struct MacScreenCaptureStream { + sc_stream: id, + sc_stream_output: id, +} + +#[link(name = "ScreenCaptureKit", kind = "framework")] +extern "C" {} + +static mut DELEGATE_CLASS: *const Class = ptr::null(); +static mut OUTPUT_CLASS: *const Class = ptr::null(); +const FRAME_CALLBACK_IVAR: &str = "frame_callback"; + +#[allow(non_upper_case_globals)] +const SCStreamOutputTypeScreen: NSInteger = 0; + +impl ScreenCaptureSource for MacScreenCaptureSource { + fn resolution(&self) -> Result> { + unsafe { + let width: i64 = msg_send![self.sc_display, width]; + let height: i64 = msg_send![self.sc_display, height]; + Ok(size(px(width as f32), px(height as f32))) + } + } + + fn stream( + &self, + frame_callback: Box, + ) -> oneshot::Receiver>> { + unsafe { + let stream: id = msg_send![class!(SCStream), alloc]; + let filter: id = msg_send![class!(SCContentFilter), alloc]; + let configuration: id = msg_send![class!(SCStreamConfiguration), alloc]; + let delegate: id = msg_send![DELEGATE_CLASS, alloc]; + let output: id = msg_send![OUTPUT_CLASS, alloc]; + + let excluded_windows = NSArray::array(nil); + let filter: id = msg_send![filter, initWithDisplay:self.sc_display excludingWindows:excluded_windows]; + let configuration: id = msg_send![configuration, init]; + 
let delegate: id = msg_send![delegate, init]; + let output: id = msg_send![output, init]; + + output.as_mut().unwrap().set_ivar( + FRAME_CALLBACK_IVAR, + Box::into_raw(Box::new(frame_callback)) as *mut c_void, + ); + + let stream: id = msg_send![stream, initWithFilter:filter configuration:configuration delegate:delegate]; + + let (mut tx, rx) = oneshot::channel(); + + let mut error: id = nil; + let _: () = msg_send![stream, addStreamOutput:output type:SCStreamOutputTypeScreen sampleHandlerQueue:0 error:&mut error as *mut id]; + if error != nil { + let message: id = msg_send![error, localizedDescription]; + tx.send(Err(anyhow!("failed to add stream output {message:?}"))) + .ok(); + return rx; + } + + let tx = Rc::new(RefCell::new(Some(tx))); + let handler = ConcreteBlock::new({ + move |error: id| { + let result = if error == nil { + let stream = MacScreenCaptureStream { + sc_stream: stream, + sc_stream_output: output, + }; + Ok(Box::new(stream) as Box) + } else { + let message: id = msg_send![error, localizedDescription]; + Err(anyhow!("failed to stop screen capture stream {message:?}")) + }; + if let Some(tx) = tx.borrow_mut().take() { + tx.send(result).ok(); + } + } + }); + let handler = handler.copy(); + let _: () = msg_send![stream, startCaptureWithCompletionHandler:handler]; + rx + } + } +} + +impl Drop for MacScreenCaptureSource { + fn drop(&mut self) { + unsafe { + let _: () = msg_send![self.sc_display, release]; + } + } +} + +impl ScreenCaptureStream for MacScreenCaptureStream {} + +impl Drop for MacScreenCaptureStream { + fn drop(&mut self) { + unsafe { + let mut error: id = nil; + let _: () = msg_send![self.sc_stream, removeStreamOutput:self.sc_stream_output type:SCStreamOutputTypeScreen error:&mut error as *mut _]; + if error != nil { + let message: id = msg_send![error, localizedDescription]; + log::error!("failed to add stream output {message:?}"); + } + + let handler = ConcreteBlock::new(move |error: id| { + if error != nil { + let message: id = 
msg_send![error, localizedDescription]; + log::error!("failed to stop screen capture stream {message:?}"); + } + }); + let block = handler.copy(); + let _: () = msg_send![self.sc_stream, stopCaptureWithCompletionHandler:block]; + let _: () = msg_send![self.sc_stream, release]; + let _: () = msg_send![self.sc_stream_output, release]; + } + } +} + +pub(crate) fn get_sources() -> oneshot::Receiver>>> { + unsafe { + let (mut tx, rx) = oneshot::channel(); + let tx = Rc::new(RefCell::new(Some(tx))); + + let block = ConcreteBlock::new(move |shareable_content: id, error: id| { + let Some(mut tx) = tx.borrow_mut().take() else { + return; + }; + let result = if error == nil { + let displays: id = msg_send![shareable_content, displays]; + let mut result = Vec::new(); + for i in 0..displays.count() { + let display = displays.objectAtIndex(i); + let source = MacScreenCaptureSource { + sc_display: msg_send![display, retain], + }; + result.push(Box::new(source) as Box); + } + Ok(result) + } else { + let msg: id = msg_send![error, localizedDescription]; + Err(anyhow!("Failed to register: {:?}", msg)) + }; + tx.send(result).ok(); + }); + let block = block.copy(); + + let _: () = msg_send![ + class!(SCShareableContent), + getShareableContentExcludingDesktopWindows:YES + onScreenWindowsOnly:YES + completionHandler:block]; + rx + } +} + +#[ctor] +unsafe fn build_classes() { + let mut decl = ClassDecl::new("GPUIStreamDelegate", class!(NSObject)).unwrap(); + decl.add_method( + sel!(outputVideoEffectDidStartForStream:), + output_video_effect_did_start_for_stream as extern "C" fn(&Object, Sel, id), + ); + decl.add_method( + sel!(outputVideoEffectDidStopForStream:), + output_video_effect_did_stop_for_stream as extern "C" fn(&Object, Sel, id), + ); + decl.add_method( + sel!(stream:didStopWithError:), + stream_did_stop_with_error as extern "C" fn(&Object, Sel, id, id), + ); + DELEGATE_CLASS = decl.register(); + + let mut decl = ClassDecl::new("GPUIStreamOutput", class!(NSObject)).unwrap(); + 
decl.add_method( + sel!(stream:didOutputSampleBuffer:ofType:), + stream_did_output_sample_buffer_of_type as extern "C" fn(&Object, Sel, id, id, NSInteger), + ); + decl.add_ivar::<*mut c_void>(FRAME_CALLBACK_IVAR); + + OUTPUT_CLASS = decl.register(); +} + +extern "C" fn output_video_effect_did_start_for_stream(_this: &Object, _: Sel, _stream: id) {} + +extern "C" fn output_video_effect_did_stop_for_stream(_this: &Object, _: Sel, _stream: id) {} + +extern "C" fn stream_did_stop_with_error(_this: &Object, _: Sel, _stream: id, _error: id) {} + +extern "C" fn stream_did_output_sample_buffer_of_type( + this: &Object, + _: Sel, + _stream: id, + sample_buffer: id, + buffer_type: NSInteger, +) { + if buffer_type != SCStreamOutputTypeScreen { + return; + } + + unsafe { + let sample_buffer = sample_buffer as CMSampleBufferRef; + let sample_buffer = CMSampleBuffer::wrap_under_get_rule(sample_buffer); + if let Some(buffer) = sample_buffer.image_buffer() { + let callback: Box> = + Box::from_raw(*this.get_ivar::<*mut c_void>(FRAME_CALLBACK_IVAR) as *mut _); + callback(ScreenCaptureFrame(buffer)); + mem::forget(callback); + } + } +} diff --git a/crates/gpui/src/platform/mac/shaders.metal b/crates/gpui/src/platform/mac/shaders.metal index 464e4b5903..7ee5d63add 100644 --- a/crates/gpui/src/platform/mac/shaders.metal +++ b/crates/gpui/src/platform/mac/shaders.metal @@ -4,6 +4,10 @@ using namespace metal; float4 hsla_to_rgba(Hsla hsla); +float3 srgb_to_linear(float3 color); +float3 linear_to_srgb(float3 color); +float4 srgb_to_oklab(float4 color); +float4 oklab_to_srgb(float4 color); float4 to_device_position(float2 unit_vertex, Bounds_ScaledPixels bounds, constant Size_DevicePixels *viewport_size); float4 to_device_position_transformed(float2 unit_vertex, Bounds_ScaledPixels bounds, @@ -21,20 +25,34 @@ float2 erf(float2 x); float blur_along_x(float x, float y, float sigma, float corner, float2 half_size); float4 over(float4 below, float4 above); +float radians(float degrees); 
+float4 gradient_color(Background background, float2 position, Bounds_ScaledPixels bounds, + float4 solid_color, float4 color0, float4 color1); + +struct GradientColor { + float4 solid; + float4 color0; + float4 color1; +}; +GradientColor prepare_gradient_color(uint tag, uint color_space, Hsla solid, Hsla color0, Hsla color1); struct QuadVertexOutput { - float4 position [[position]]; - float4 background_color [[flat]]; - float4 border_color [[flat]]; uint quad_id [[flat]]; + float4 position [[position]]; + float4 border_color [[flat]]; + float4 background_solid [[flat]]; + float4 background_color0 [[flat]]; + float4 background_color1 [[flat]]; float clip_distance [[clip_distance]][4]; }; struct QuadFragmentInput { - float4 position [[position]]; - float4 background_color [[flat]]; - float4 border_color [[flat]]; uint quad_id [[flat]]; + float4 position [[position]]; + float4 border_color [[flat]]; + float4 background_solid [[flat]]; + float4 background_color0 [[flat]]; + float4 background_color1 [[flat]]; }; vertex QuadVertexOutput quad_vertex(uint unit_vertex_id [[vertex_id]], @@ -51,13 +69,23 @@ vertex QuadVertexOutput quad_vertex(uint unit_vertex_id [[vertex_id]], to_device_position(unit_vertex, quad.bounds, viewport_size); float4 clip_distance = distance_from_clip_rect(unit_vertex, quad.bounds, quad.content_mask.bounds); - float4 background_color = hsla_to_rgba(quad.background); float4 border_color = hsla_to_rgba(quad.border_color); + + GradientColor gradient = prepare_gradient_color( + quad.background.tag, + quad.background.color_space, + quad.background.solid, + quad.background.colors[0].color, + quad.background.colors[1].color + ); + return QuadVertexOutput{ - device_position, - background_color, - border_color, quad_id, + device_position, + border_color, + gradient.solid, + gradient.color0, + gradient.color1, {clip_distance.x, clip_distance.y, clip_distance.z, clip_distance.w}}; } @@ -65,6 +93,11 @@ fragment float4 quad_fragment(QuadFragmentInput input 
[[stage_in]], constant Quad *quads [[buffer(QuadInputIndex_Quads)]]) { Quad quad = quads[input.quad_id]; + float2 half_size = float2(quad.bounds.size.width, quad.bounds.size.height) / 2.; + float2 center = float2(quad.bounds.origin.x, quad.bounds.origin.y) + half_size; + float2 center_to_point = input.position.xy - center; + float4 color = gradient_color(quad.background, input.position.xy, quad.bounds, + input.background_solid, input.background_color0, input.background_color1); // Fast path when the quad is not rounded and doesn't have any border. if (quad.corner_radii.top_left == 0. && quad.corner_radii.bottom_left == 0. && @@ -72,14 +105,9 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]], quad.corner_radii.bottom_right == 0. && quad.border_widths.top == 0. && quad.border_widths.left == 0. && quad.border_widths.right == 0. && quad.border_widths.bottom == 0.) { - return input.background_color; + return color; } - float2 half_size = - float2(quad.bounds.size.width, quad.bounds.size.height) / 2.; - float2 center = - float2(quad.bounds.origin.x, quad.bounds.origin.y) + half_size; - float2 center_to_point = input.position.xy - center; float corner_radius; if (center_to_point.x < 0.) { if (center_to_point.y < 0.) { @@ -118,15 +146,12 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]], border_width = vertical_border; } - float4 color; - if (border_width == 0.) { - color = input.background_color; - } else { + if (border_width != 0.) { float inset_distance = distance + border_width; // Blend the border on top of the background and then linearly interpolate // between the two as we slide inside the background. 
- float4 blended_border = over(input.background_color, input.border_color); - color = mix(blended_border, input.background_color, + float4 blended_border = over(color, input.border_color); + color = mix(blended_border, color, saturate(0.5 - inset_distance)); } @@ -202,21 +227,27 @@ fragment float4 shadow_fragment(ShadowFragmentInput input [[stage_in]], } } - // The signal is only non-zero in a limited range, so don't waste samples - float low = point.y - half_size.y; - float high = point.y + half_size.y; - float start = clamp(-3. * shadow.blur_radius, low, high); - float end = clamp(3. * shadow.blur_radius, low, high); + float alpha; + if (shadow.blur_radius == 0.) { + float distance = quad_sdf(input.position.xy, shadow.bounds, shadow.corner_radii); + alpha = saturate(0.5 - distance); + } else { + // The signal is only non-zero in a limited range, so don't waste samples + float low = point.y - half_size.y; + float high = point.y + half_size.y; + float start = clamp(-3. * shadow.blur_radius, low, high); + float end = clamp(3. 
* shadow.blur_radius, low, high); - // Accumulate samples (we can get away with surprisingly few samples) - float step = (end - start) / 4.; - float y = start + step * 0.5; - float alpha = 0.; - for (int i = 0; i < 4; i++) { - alpha += blur_along_x(point.x, point.y - y, shadow.blur_radius, - corner_radius, half_size) * - gaussian(y, shadow.blur_radius) * step; - y += step; + // Accumulate samples (we can get away with surprisingly few samples) + float step = (end - start) / 4.; + float y = start + step * 0.5; + alpha = 0.; + for (int i = 0; i < 4; i++) { + alpha += blur_along_x(point.x, point.y - y, shadow.blur_radius, + corner_radius, half_size) * + gaussian(y, shadow.blur_radius) * step; + y += step; + } } return input.color * float4(1., 1., 1., alpha); @@ -437,7 +468,10 @@ fragment float4 path_rasterization_fragment(PathRasterizationFragmentInput input struct PathSpriteVertexOutput { float4 position [[position]]; float2 tile_position; - float4 color [[flat]]; + uint sprite_id [[flat]]; + float4 solid_color [[flat]]; + float4 color0 [[flat]]; + float4 color1 [[flat]]; }; vertex PathSpriteVertexOutput path_sprite_vertex( @@ -456,8 +490,23 @@ vertex PathSpriteVertexOutput path_sprite_vertex( float4 device_position = to_device_position(unit_vertex, sprite.bounds, viewport_size); float2 tile_position = to_tile_position(unit_vertex, sprite.tile, atlas_size); - float4 color = hsla_to_rgba(sprite.color); - return PathSpriteVertexOutput{device_position, tile_position, color}; + + GradientColor gradient = prepare_gradient_color( + sprite.color.tag, + sprite.color.color_space, + sprite.color.solid, + sprite.color.colors[0].color, + sprite.color.colors[1].color + ); + + return PathSpriteVertexOutput{ + device_position, + tile_position, + sprite_id, + gradient.solid, + gradient.color0, + gradient.color1 + }; } fragment float4 path_sprite_fragment( @@ -469,7 +518,10 @@ fragment float4 path_sprite_fragment( float4 sample = atlas_texture.sample(atlas_texture_sampler, 
input.tile_position); float mask = 1. - abs(1. - fmod(sample.r, 2.)); - float4 color = input.color; + PathSprite sprite = sprites[input.sprite_id]; + Background background = sprite.color; + float4 color = gradient_color(background, input.position.xy, sprite.bounds, + input.solid_color, input.color0, input.color1); color.a *= mask; return color; } @@ -574,6 +626,56 @@ float4 hsla_to_rgba(Hsla hsla) { return rgba; } +float3 srgb_to_linear(float3 color) { + return pow(color, float3(2.2)); +} + +float3 linear_to_srgb(float3 color) { + return pow(color, float3(1.0 / 2.2)); +} + +// Converts a sRGB color to the Oklab color space. +// Reference: https://bottosson.github.io/posts/oklab/#converting-from-linear-srgb-to-oklab +float4 srgb_to_oklab(float4 color) { + // Convert non-linear sRGB to linear sRGB + color = float4(srgb_to_linear(color.rgb), color.a); + + float l = 0.4122214708 * color.r + 0.5363325363 * color.g + 0.0514459929 * color.b; + float m = 0.2119034982 * color.r + 0.6806995451 * color.g + 0.1073969566 * color.b; + float s = 0.0883024619 * color.r + 0.2817188376 * color.g + 0.6299787005 * color.b; + + float l_ = pow(l, 1.0/3.0); + float m_ = pow(m, 1.0/3.0); + float s_ = pow(s, 1.0/3.0); + + return float4( + 0.2104542553 * l_ + 0.7936177850 * m_ - 0.0040720468 * s_, + 1.9779984951 * l_ - 2.4285922050 * m_ + 0.4505937099 * s_, + 0.0259040371 * l_ + 0.7827717662 * m_ - 0.8086757660 * s_, + color.a + ); +} + +// Converts an Oklab color to the sRGB color space. 
+float4 oklab_to_srgb(float4 color) { + float l_ = color.r + 0.3963377774 * color.g + 0.2158037573 * color.b; + float m_ = color.r - 0.1055613458 * color.g - 0.0638541728 * color.b; + float s_ = color.r - 0.0894841775 * color.g - 1.2914855480 * color.b; + + float l = l_ * l_ * l_; + float m = m_ * m_ * m_; + float s = s_ * s_ * s_; + + float3 linear_rgb = float3( + 4.0767416621 * l - 3.3077115913 * m + 0.2309699292 * s, + -1.2684380046 * l + 2.6097574011 * m - 0.3413193965 * s, + -0.0041960863 * l - 0.7034186147 * m + 1.7076147010 * s + ); + + // Convert linear sRGB to non-linear sRGB + return float4(linear_to_srgb(linear_rgb), color.a); +} + float4 to_device_position(float2 unit_vertex, Bounds_ScaledPixels bounds, constant Size_DevicePixels *input_viewport_size) { float2 position = @@ -691,3 +793,81 @@ float4 over(float4 below, float4 above) { result.a = alpha; return result; } + +GradientColor prepare_gradient_color(uint tag, uint color_space, Hsla solid, + Hsla color0, Hsla color1) { + GradientColor out; + if (tag == 0) { + out.solid = hsla_to_rgba(solid); + } else if (tag == 1) { + out.color0 = hsla_to_rgba(color0); + out.color1 = hsla_to_rgba(color1); + + // Prepare color space in vertex for avoid conversion + // in fragment shader for performance reasons + if (color_space == 1) { + // Oklab + out.color0 = srgb_to_oklab(out.color0); + out.color1 = srgb_to_oklab(out.color1); + } + } + + return out; +} + +float4 gradient_color(Background background, + float2 position, + Bounds_ScaledPixels bounds, + float4 solid_color, float4 color0, float4 color1) { + float4 color; + + switch (background.tag) { + case 0: + color = solid_color; + break; + case 1: { + // -90 degrees to match the CSS gradient angle. 
+ float radians = (fmod(background.angle, 360.0) - 90.0) * (M_PI_F / 180.0); + float2 direction = float2(cos(radians), sin(radians)); + + // Expand the short side to be the same as the long side + if (bounds.size.width > bounds.size.height) { + direction.y *= bounds.size.height / bounds.size.width; + } else { + direction.x *= bounds.size.width / bounds.size.height; + } + + // Get the t value for the linear gradient with the color stop percentages. + float2 half_size = float2(bounds.size.width, bounds.size.height) / 2.; + float2 center = float2(bounds.origin.x, bounds.origin.y) + half_size; + float2 center_to_point = position - center; + float t = dot(center_to_point, direction) / length(direction); + // Check the direct to determine the use x or y + if (abs(direction.x) > abs(direction.y)) { + t = (t + half_size.x) / bounds.size.width; + } else { + t = (t + half_size.y) / bounds.size.height; + } + + // Adjust t based on the stop percentages + t = (t - background.colors[0].percentage) + / (background.colors[1].percentage + - background.colors[0].percentage); + t = clamp(t, 0.0, 1.0); + + switch (background.color_space) { + case 0: + color = mix(color0, color1, t); + break; + case 1: { + float4 oklab_color = mix(color0, color1, t); + color = oklab_to_srgb(oklab_color); + break; + } + } + break; + } + } + + return color; +} diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index e5a04191a3..04fda6294a 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -38,6 +38,7 @@ use std::{ cell::Cell, ffi::{c_void, CStr}, mem, + ops::Range, path::PathBuf, ptr::{self, NonNull}, rc::Rc, @@ -151,10 +152,6 @@ unsafe fn build_classes() { sel!(flagsChanged:), handle_view_event as extern "C" fn(&Object, Sel, id), ); - decl.add_method( - sel!(cancelOperation:), - cancel_operation as extern "C" fn(&Object, Sel, id), - ); decl.add_method( sel!(makeBackingLayer), @@ -330,6 +327,7 @@ struct 
MacWindowState { traffic_light_position: Option>, previous_modifiers_changed_event: Option, keystroke_for_do_command: Option, + do_command_handled: Option, external_files_dragged: bool, // Whether the next left-mouse click is also the focusing click. first_mouse: bool, @@ -608,6 +606,7 @@ impl MacWindow { .and_then(|titlebar| titlebar.traffic_light_position), previous_modifiers_changed_event: None, keystroke_for_do_command: None, + do_command_handled: None, external_files_dragged: false, first_mouse: false, fullscreen_restore_bounds: Bounds::default(), @@ -1103,15 +1102,21 @@ impl PlatformWindow for MacWindow { self.0.lock().renderer.sprite_atlas().clone() } - fn gpu_specs(&self) -> Option { + fn gpu_specs(&self) -> Option { None } fn update_ime_position(&self, _bounds: Bounds) { - unsafe { - let input_context: id = msg_send![class!(NSTextInputContext), currentInputContext]; - let _: () = msg_send![input_context, invalidateCharacterCoordinates]; - } + let executor = self.0.lock().executor.clone(); + executor + .spawn(async move { + unsafe { + let input_context: id = + msg_send![class!(NSTextInputContext), currentInputContext]; + let _: () = msg_send![input_context, invalidateCharacterCoordinates]; + } + }) + .detach() } } @@ -1250,14 +1255,25 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: // otherwise we only send to the input handler if we don't have a matching binding. // The input handler may call `do_command_by_selector` if it doesn't know how to handle // a key. If it does so, it will return YES so we won't send the key twice. - if is_composing || event.keystroke.key.is_empty() { - window_state.as_ref().lock().keystroke_for_do_command = Some(event.keystroke.clone()); + // We also do this for non-printing keys (like arrow keys and escape) as the IME menu + // may need them even if there is no marked text; + // however we skip keys with control or the input handler adds control-characters to the buffer. 
+ if is_composing || (event.keystroke.key_char.is_none() && !event.keystroke.modifiers.control) { + { + let mut lock = window_state.as_ref().lock(); + lock.keystroke_for_do_command = Some(event.keystroke.clone()); + lock.do_command_handled.take(); + drop(lock); + } + let handled: BOOL = unsafe { let input_context: id = msg_send![this, inputContext]; msg_send![input_context, handleEvent: native_event] }; window_state.as_ref().lock().keystroke_for_do_command.take(); - if handled == YES { + if let Some(handled) = window_state.as_ref().lock().do_command_handled.take() { + return handled as BOOL; + } else if handled == YES { return YES; } @@ -1283,18 +1299,17 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: } if event.is_held { - let handled = with_input_handler(&this, |input_handler| { - if !input_handler.apple_press_and_hold_enabled() { - input_handler.replace_text_in_range( - None, - &event.keystroke.ime_key.unwrap_or(event.keystroke.key), - ); + if let Some(key_char) = event.keystroke.key_char.as_ref() { + let handled = with_input_handler(&this, |input_handler| { + if !input_handler.apple_press_and_hold_enabled() { + input_handler.replace_text_in_range(None, &key_char); + return YES; + } + NO + }); + if handled == Some(YES) { return YES; } - NO - }); - if handled == Some(YES) { - return YES; } } @@ -1377,6 +1392,14 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { }; match &event { + PlatformInput::MouseDown(_) => { + drop(lock); + unsafe { + let input_context: id = msg_send![this, inputContext]; + msg_send![input_context, handleEvent: native_event] + } + lock = window_state.as_ref().lock(); + } PlatformInput::MouseMove( event @ MouseMoveEvent { pressed_button: Some(_), @@ -1428,29 +1451,6 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { } } -// Allows us to receive `cmd-.` (the shortcut for closing a dialog) -// https://bugs.eclipse.org/bugs/show_bug.cgi?id=300620#c6 -extern 
"C" fn cancel_operation(this: &Object, _sel: Sel, _sender: id) { - let window_state = unsafe { get_window_state(this) }; - let mut lock = window_state.as_ref().lock(); - - let keystroke = Keystroke { - modifiers: Default::default(), - key: ".".into(), - ime_key: None, - }; - let event = PlatformInput::KeyDown(KeyDownEvent { - keystroke: keystroke.clone(), - is_held: false, - }); - - if let Some(mut callback) = lock.event_callback.take() { - drop(lock); - callback(event); - window_state.lock().event_callback = Some(callback); - } -} - extern "C" fn window_did_change_occlusion_state(this: &Object, _: Sel, _: id) { let window_state = unsafe { get_window_state(this) }; let lock = &mut *window_state.lock(); @@ -1683,7 +1683,10 @@ extern "C" fn first_rect_for_character_range( let lock = state.lock(); let mut frame = NSWindow::frame(lock.native_window); let content_layout_rect: CGRect = msg_send![lock.native_window, contentLayoutRect]; - frame.origin.y -= frame.size.height - content_layout_rect.size.height; + let style_mask: NSWindowStyleMask = msg_send![lock.native_window, styleMask]; + if !style_mask.contains(NSWindowStyleMask::NSFullSizeContentViewWindowMask) { + frame.origin.y -= frame.size.height - content_layout_rect.size.height; + } frame }; with_input_handler(this, |input_handler| { @@ -1755,15 +1758,21 @@ extern "C" fn attributed_substring_for_proposed_range( this: &Object, _: Sel, range: NSRange, - _actual_range: *mut c_void, + actual_range: *mut c_void, ) -> id { with_input_handler(this, |input_handler| { let range = range.to_range()?; if range.is_empty() { return None; } + let mut adjusted: Option> = None; - let selected_text = input_handler.text_for_range(range.clone())?; + let selected_text = input_handler.text_for_range(range.clone(), &mut adjusted)?; + if let Some(adjusted) = adjusted { + if adjusted != range { + unsafe { (actual_range as *mut NSRange).write(NSRange::from(adjusted)) }; + } + } unsafe { let string: id = msg_send![class!(NSAttributedString), 
alloc]; let string: id = msg_send![string, initWithString: ns_string(&selected_text)]; @@ -1784,10 +1793,11 @@ extern "C" fn do_command_by_selector(this: &Object, _: Sel, _: Sel) { drop(lock); if let Some((keystroke, mut callback)) = keystroke.zip(event_callback.as_mut()) { - (callback)(PlatformInput::KeyDown(KeyDownEvent { + let handled = (callback)(PlatformInput::KeyDown(KeyDownEvent { keystroke, is_held: false, })); + state.as_ref().lock().do_command_handled = Some(!handled.propagate); } state.as_ref().lock().event_callback = event_callback; diff --git a/crates/gpui/src/platform/test.rs b/crates/gpui/src/platform/test.rs index d17739239e..70462cb5e2 100644 --- a/crates/gpui/src/platform/test.rs +++ b/crates/gpui/src/platform/test.rs @@ -7,3 +7,5 @@ pub(crate) use dispatcher::*; pub(crate) use display::*; pub(crate) use platform::*; pub(crate) use window::*; + +pub use platform::TestScreenCaptureSource; diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index aadbe9b595..50ad24a520 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -1,7 +1,7 @@ use crate::{ - AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, Keymap, - Platform, PlatformDisplay, PlatformTextSystem, Task, TestDisplay, TestWindow, WindowAppearance, - WindowParams, + px, size, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, + Keymap, Platform, PlatformDisplay, PlatformTextSystem, ScreenCaptureFrame, ScreenCaptureSource, + ScreenCaptureStream, Task, TestDisplay, TestWindow, WindowAppearance, WindowParams, }; use anyhow::Result; use collections::VecDeque; @@ -31,6 +31,7 @@ pub(crate) struct TestPlatform { #[cfg(any(target_os = "linux", target_os = "freebsd"))] current_primary_item: Mutex>, pub(crate) prompts: RefCell, + screen_capture_sources: RefCell>, pub opened_url: RefCell>, pub text_system: Arc, #[cfg(target_os = "windows")] 
@@ -38,6 +39,31 @@ pub(crate) struct TestPlatform { weak: Weak, } +#[derive(Clone)] +/// A fake screen capture source, used for testing. +pub struct TestScreenCaptureSource {} + +pub struct TestScreenCaptureStream {} + +impl ScreenCaptureSource for TestScreenCaptureSource { + fn resolution(&self) -> Result> { + Ok(size(px(1.), px(1.))) + } + + fn stream( + &self, + _frame_callback: Box, + ) -> oneshot::Receiver>> { + let (mut tx, rx) = oneshot::channel(); + let stream = TestScreenCaptureStream {}; + tx.send(Ok(Box::new(stream) as Box)) + .ok(); + rx + } +} + +impl ScreenCaptureStream for TestScreenCaptureStream {} + #[derive(Default)] pub(crate) struct TestPrompts { multiple_choice: VecDeque>, @@ -72,6 +98,7 @@ impl TestPlatform { background_executor: executor, foreground_executor, prompts: Default::default(), + screen_capture_sources: Default::default(), active_cursor: Default::default(), active_display: Rc::new(TestDisplay::new()), active_window: Default::default(), @@ -114,6 +141,10 @@ impl TestPlatform { !self.prompts.borrow().multiple_choice.is_empty() } + pub(crate) fn set_screen_capture_sources(&self, sources: Vec) { + *self.screen_capture_sources.borrow_mut() = sources; + } + pub(crate) fn prompt(&self, msg: &str, detail: Option<&str>) -> oneshot::Receiver { let (tx, rx) = oneshot::channel(); self.background_executor() @@ -202,6 +233,20 @@ impl Platform for TestPlatform { Some(self.active_display.clone()) } + fn screen_capture_sources( + &self, + ) -> oneshot::Receiver>>> { + let (mut tx, rx) = oneshot::channel(); + tx.send(Ok(self + .screen_capture_sources + .borrow() + .iter() + .map(|source| Box::new(source.clone()) as Box) + .collect())) + .ok(); + rx + } + fn active_window(&self) -> Option { self.active_window .borrow() @@ -254,6 +299,10 @@ impl Platform for TestPlatform { rx } + fn can_select_mixed_files_and_dirs(&self) -> bool { + true + } + fn reveal_path(&self, _path: &std::path::Path) { unimplemented!() } @@ -330,6 +379,13 @@ impl Platform for 
TestPlatform { } } +impl TestScreenCaptureSource { + /// Create a fake screen capture source, for testing. + pub fn new() -> Self { + Self {} + } +} + #[cfg(target_os = "windows")] impl Drop for TestPlatform { fn drop(&mut self) { diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index d8ec6a718b..89aab79c1d 100644 --- a/crates/gpui/src/platform/test/window.rs +++ b/crates/gpui/src/platform/test/window.rs @@ -1,5 +1,5 @@ use crate::{ - AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GPUSpecs, + AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GpuSpecs, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, RequestFrameOptions, ScaledPixels, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowParams, @@ -276,7 +276,7 @@ impl PlatformWindow for TestWindow { fn update_ime_position(&self, _bounds: Bounds) {} - fn gpu_specs(&self) -> Option { + fn gpu_specs(&self) -> Option { None } } @@ -339,4 +339,9 @@ impl PlatformAtlas for TestAtlas { Ok(Some(state.tiles[key].clone())) } + + fn remove(&self, key: &AtlasKey) { + let mut state = self.0.lock(); + state.tiles.remove(key); + } } diff --git a/crates/gpui/src/platform/windows.rs b/crates/gpui/src/platform/windows.rs index 84cf107c70..51d09f0013 100644 --- a/crates/gpui/src/platform/windows.rs +++ b/crates/gpui/src/platform/windows.rs @@ -21,3 +21,5 @@ pub(crate) use window::*; pub(crate) use wrapper::*; pub(crate) use windows::Win32::Foundation::HWND; + +pub(crate) type PlatformScreenCaptureFrame = (); diff --git a/crates/gpui/src/platform/windows/clipboard.rs b/crates/gpui/src/platform/windows/clipboard.rs index 7e607b6a8c..2ef2c04ca7 100644 --- a/crates/gpui/src/platform/windows/clipboard.rs +++ b/crates/gpui/src/platform/windows/clipboard.rs @@ -357,7 +357,7 @@ impl From for image::ImageFormat { ImageFormat::Jpeg => 
image::ImageFormat::Jpeg, ImageFormat::Webp => image::ImageFormat::WebP, ImageFormat::Gif => image::ImageFormat::Gif, - // ImageFormat::Svg => todo!(), + // TODO: ImageFormat::Svg ImageFormat::Bmp => image::ImageFormat::Bmp, ImageFormat::Tiff => image::ImageFormat::Tiff, _ => unreachable!(), diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 1f501f3341..efaf7a148a 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -7,6 +7,7 @@ use windows::Win32::{ Graphics::Gdi::*, System::SystemServices::*, UI::{ + Controls::*, HiDpi::*, Input::{Ime::*, KeyboardAndMouse::*}, WindowsAndMessaging::*, @@ -32,7 +33,7 @@ pub(crate) fn handle_msg( WM_ACTIVATE => handle_activate_msg(handle, wparam, state_ptr), WM_CREATE => handle_create_msg(handle, state_ptr), WM_MOVE => handle_move_msg(handle, lparam, state_ptr), - WM_SIZE => handle_size_msg(lparam, state_ptr), + WM_SIZE => handle_size_msg(wparam, lparam, state_ptr), WM_ENTERSIZEMOVE | WM_ENTERMENULOOP => handle_size_move_loop(handle), WM_EXITSIZEMOVE | WM_EXITMENULOOP => handle_size_move_loop_exit(handle), WM_TIMER => handle_timer_msg(handle, wparam, state_ptr), @@ -43,8 +44,10 @@ pub(crate) fn handle_msg( WM_PAINT => handle_paint_msg(handle, state_ptr), WM_CLOSE => handle_close_msg(state_ptr), WM_DESTROY => handle_destroy_msg(handle, state_ptr), - WM_MOUSEMOVE => handle_mouse_move_msg(lparam, wparam, state_ptr), + WM_MOUSEMOVE => handle_mouse_move_msg(handle, lparam, wparam, state_ptr), + WM_MOUSELEAVE => handle_mouse_leave_msg(state_ptr), WM_NCMOUSEMOVE => handle_nc_mouse_move_msg(handle, lparam, state_ptr), + WM_NCMOUSELEAVE => handle_nc_mouse_leave_msg(state_ptr), WM_NCLBUTTONDOWN => { handle_nc_mouse_down_msg(handle, MouseButton::Left, wparam, lparam, state_ptr) } @@ -134,13 +137,31 @@ fn handle_move_msg( Some(0) } -fn handle_size_msg(lparam: LPARAM, state_ptr: Rc) -> Option { +fn handle_size_msg( + wparam: WPARAM, + 
lparam: LPARAM, + state_ptr: Rc, +) -> Option { + let mut lock = state_ptr.state.borrow_mut(); + + // Don't resize the renderer when the window is minimized, but record that it was minimized so + // that on restore the swap chain can be recreated via `update_drawable_size_even_if_unchanged`. + if wparam.0 == SIZE_MINIMIZED as usize { + lock.restore_from_minimized = lock.callbacks.request_frame.take(); + return Some(0); + } + let width = lparam.loword().max(1) as i32; let height = lparam.hiword().max(1) as i32; - let mut lock = state_ptr.state.borrow_mut(); let new_size = size(DevicePixels(width), DevicePixels(height)); let scale_factor = lock.scale_factor; - lock.renderer.update_drawable_size(new_size); + if lock.restore_from_minimized.is_some() { + lock.renderer + .update_drawable_size_even_if_unchanged(new_size); + lock.callbacks.request_frame = lock.restore_from_minimized.take(); + } else { + lock.renderer.update_drawable_size(new_size); + } let new_size = new_size.to_pixels(scale_factor); lock.logical_size = new_size; if let Some(mut callback) = lock.callbacks.resize.take() { @@ -234,10 +255,32 @@ fn handle_destroy_msg(handle: HWND, state_ptr: Rc) -> Opt } fn handle_mouse_move_msg( + handle: HWND, lparam: LPARAM, wparam: WPARAM, state_ptr: Rc, ) -> Option { + let mut lock = state_ptr.state.borrow_mut(); + if !lock.hovered { + lock.hovered = true; + unsafe { + TrackMouseEvent(&mut TRACKMOUSEEVENT { + cbSize: std::mem::size_of::() as u32, + dwFlags: TME_LEAVE, + hwndTrack: handle, + dwHoverTime: HOVER_DEFAULT, + }) + .log_err() + }; + if let Some(mut callback) = lock.callbacks.hovered_status_change.take() { + drop(lock); + callback(true); + state_ptr.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + } + } else { + drop(lock); + } + let mut lock = state_ptr.state.borrow_mut(); if let Some(mut callback) = lock.callbacks.input.take() { let scale_factor = lock.scale_factor; @@ -272,6 +315,30 @@ fn handle_mouse_move_msg( Some(1) } +fn 
handle_nc_mouse_leave_msg(state_ptr: Rc) -> Option { + let mut lock = state_ptr.state.borrow_mut(); + lock.hovered = false; + if let Some(mut callback) = lock.callbacks.hovered_status_change.take() { + drop(lock); + callback(false); + state_ptr.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + } + + Some(0) +} + +fn handle_mouse_leave_msg(state_ptr: Rc) -> Option { + let mut lock = state_ptr.state.borrow_mut(); + lock.hovered = false; + if let Some(mut callback) = lock.callbacks.hovered_status_change.take() { + drop(lock); + callback(false); + state_ptr.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + } + + Some(0) +} + fn handle_syskeydown_msg( wparam: WPARAM, lparam: LPARAM, @@ -386,7 +453,7 @@ fn handle_char_msg( return Some(1); }; drop(lock); - let ime_key = keystroke.ime_key.clone(); + let key_char = keystroke.key_char.clone(); let event = KeyDownEvent { keystroke, is_held: lparam.0 & (0x1 << 30) > 0, @@ -397,7 +464,7 @@ fn handle_char_msg( if dispatch_event_result.default_prevented || !dispatch_event_result.propagate { return Some(0); } - let Some(ime_char) = ime_key else { + let Some(ime_char) = key_char else { return Some(1); }; with_input_handler(&state_ptr, |input_handler| { @@ -917,6 +984,27 @@ fn handle_nc_mouse_move_msg( return None; } + let mut lock = state_ptr.state.borrow_mut(); + if !lock.hovered { + lock.hovered = true; + unsafe { + TrackMouseEvent(&mut TRACKMOUSEEVENT { + cbSize: std::mem::size_of::() as u32, + dwFlags: TME_LEAVE | TME_NONCLIENT, + hwndTrack: handle, + dwHoverTime: HOVER_DEFAULT, + }) + .log_err() + }; + if let Some(mut callback) = lock.callbacks.hovered_status_change.take() { + drop(lock); + callback(true); + state_ptr.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + } + } else { + drop(lock); + } + let mut lock = state_ptr.state.borrow_mut(); if let Some(mut callback) = lock.callbacks.input.take() { let scale_factor = lock.scale_factor; @@ -1160,6 +1248,8 @@ fn 
parse_syskeydown_msg_keystroke(wparam: WPARAM) -> Option { VK_END => "end", VK_PRIOR => "pageup", VK_NEXT => "pagedown", + VK_BROWSER_BACK => "back", + VK_BROWSER_FORWARD => "forward", VK_ESCAPE => "escape", VK_INSERT => "insert", VK_DELETE => "delete", @@ -1170,7 +1260,7 @@ fn parse_syskeydown_msg_keystroke(wparam: WPARAM) -> Option { Some(Keystroke { modifiers, key, - ime_key: None, + key_char: None, }) } @@ -1196,6 +1286,8 @@ fn parse_keydown_msg_keystroke(wparam: WPARAM) -> Option { VK_END => "end", VK_PRIOR => "pageup", VK_NEXT => "pagedown", + VK_BROWSER_BACK => "back", + VK_BROWSER_FORWARD => "forward", VK_ESCAPE => "escape", VK_INSERT => "insert", VK_DELETE => "delete", @@ -1216,7 +1308,7 @@ fn parse_keydown_msg_keystroke(wparam: WPARAM) -> Option { return Some(KeystrokeOrModifier::Keystroke(Keystroke { modifiers, key: format!("f{}", offset + 1), - ime_key: None, + key_char: None, })); }; return None; @@ -1227,7 +1319,7 @@ fn parse_keydown_msg_keystroke(wparam: WPARAM) -> Option { Some(KeystrokeOrModifier::Keystroke(Keystroke { modifiers, key, - ime_key: None, + key_char: None, })) } @@ -1249,7 +1341,7 @@ fn parse_char_msg_keystroke(wparam: WPARAM) -> Option { Some(Keystroke { modifiers, key, - ime_key: Some(first_char.to_string()), + key_char: Some(first_char.to_string()), }) } } @@ -1323,7 +1415,7 @@ fn basic_vkcode_to_string(code: u16, modifiers: Modifiers) -> Option Some(Keystroke { modifiers, key, - ime_key: None, + key_char: None, }) } diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 29443afabb..b01851ab86 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -6,7 +6,7 @@ use std::{ sync::Arc, }; -use ::util::ResultExt; +use ::util::{paths::SanitizedPath, ResultExt}; use anyhow::{anyhow, Context, Result}; use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; @@ -28,11 +28,12 @@ use windows::{ 
UI::ViewManagement::UISettings, }; -use crate::*; +use crate::{platform::blade::BladeContext, *}; pub(crate) struct WindowsPlatform { state: RefCell, raw_window_handles: RwLock>, + gpu_context: BladeContext, // The below members will never change throughout the entire lifecycle of the app. icon: HICON, main_receiver: flume::Receiver, @@ -47,6 +48,7 @@ pub(crate) struct WindowsPlatform { pub(crate) struct WindowsPlatformState { callbacks: PlatformCallbacks, + menus: Vec, // NOTE: standard cursor handles don't need to close. pub(crate) current_cursor: HCURSOR, } @@ -69,6 +71,7 @@ impl WindowsPlatformState { Self { callbacks, current_cursor, + menus: Vec::new(), } } } @@ -94,12 +97,14 @@ impl WindowsPlatform { let icon = load_icon().unwrap_or_default(); let state = RefCell::new(WindowsPlatformState::new()); let raw_window_handles = RwLock::new(SmallVec::new()); + let gpu_context = BladeContext::new().expect("Unable to init GPU context"); let windows_version = WindowsVersion::new().expect("Error retrieve windows version"); let validation_number = rand::random::(); Self { state, raw_window_handles, + gpu_context, icon, main_receiver, dispatch_event, @@ -292,7 +297,7 @@ impl Platform for WindowsPlatform { pid, app_path.display(), ); - let restart_process = std::process::Command::new("powershell.exe") + let restart_process = util::command::new_std_command("powershell.exe") .arg("-command") .arg(script) .spawn(); @@ -325,6 +330,14 @@ impl Platform for WindowsPlatform { WindowsDisplay::primary_monitor().map(|display| Rc::new(display) as Rc) } + fn screen_capture_sources( + &self, + ) -> oneshot::Receiver>>> { + let (mut tx, rx) = oneshot::channel(); + tx.send(Err(anyhow!("screen capture not implemented"))).ok(); + rx + } + fn active_window(&self) -> Option { let active_window_hwnd = unsafe { GetActiveWindow() }; self.try_get_windows_inner_from_hwnd(active_window_hwnd) @@ -336,7 +349,12 @@ impl Platform for WindowsPlatform { handle: AnyWindowHandle, options: WindowParams, ) 
-> Result> { - let window = WindowsWindow::new(handle, options, self.generate_creation_info())?; + let window = WindowsWindow::new( + handle, + options, + self.generate_creation_info(), + &self.gpu_context, + )?; let handle = window.get_raw_handle(); self.raw_window_handles.write().push(handle); @@ -389,6 +407,11 @@ impl Platform for WindowsPlatform { rx } + fn can_select_mixed_files_and_dirs(&self) -> bool { + // The FOS_PICKFOLDERS flag toggles between "only files" and "only folders". + false + } + fn reveal_path(&self, path: &Path) { let Ok(file_full_path) = path.canonicalize() else { log::error!("unable to parse file path"); @@ -433,8 +456,15 @@ impl Platform for WindowsPlatform { self.state.borrow_mut().callbacks.reopen = Some(callback); } + fn set_menus(&self, menus: Vec, _keymap: &Keymap) { + self.state.borrow_mut().menus = menus.into_iter().map(|menu| menu.owned()).collect(); + } + + fn get_menus(&self) -> Option> { + Some(self.state.borrow().menus.clone()) + } + // todo(windows) - fn set_menus(&self, _menus: Vec, _keymap: &Keymap) {} fn set_dock_menu(&self, _menus: Vec, _keymap: &Keymap) {} fn on_app_menu_action(&self, callback: Box) { @@ -645,13 +675,11 @@ fn file_save_dialog(directory: PathBuf) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? }; if !directory.to_string_lossy().is_empty() { if let Some(full_path) = directory.canonicalize().log_err() { - let full_path = full_path.to_string_lossy(); - let full_path_str = full_path.trim_start_matches("\\\\?\\"); - if !full_path_str.is_empty() { - let path_item: IShellItem = - unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_str), None)? }; - unsafe { dialog.SetFolder(&path_item).log_err() }; - } + let full_path = SanitizedPath::from(full_path); + let full_path_string = full_path.to_string(); + let path_item: IShellItem = + unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_string), None)? 
}; + unsafe { dialog.SetFolder(&path_item).log_err() }; } } unsafe { diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index f2600d3c6f..e2389f0dba 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -27,7 +27,7 @@ use windows::{ }, }; -use crate::platform::blade::BladeRenderer; +use crate::platform::blade::{BladeContext, BladeRenderer}; use crate::*; pub(crate) struct WindowsWindow(pub Rc); @@ -38,10 +38,12 @@ pub struct WindowsWindowState { pub fullscreen_restore_bounds: Bounds, pub border_offset: WindowBorderOffset, pub scale_factor: f32, + pub restore_from_minimized: Option>, pub callbacks: Callbacks, pub input_handler: Option, pub system_key_handled: bool, + pub hovered: bool, pub renderer: BladeRenderer, @@ -76,6 +78,7 @@ impl WindowsWindowState { cs: &CREATESTRUCTW, current_cursor: HCURSOR, display: WindowsDisplay, + gpu_context: &BladeContext, ) -> Result { let scale_factor = { let monitor_dpi = unsafe { GetDpiForWindow(hwnd) } as f32; @@ -91,10 +94,12 @@ impl WindowsWindowState { size: logical_size, }; let border_offset = WindowBorderOffset::default(); - let renderer = windows_renderer::windows_renderer(hwnd, transparent)?; + let restore_from_minimized = None; + let renderer = windows_renderer::init(gpu_context, hwnd, transparent)?; let callbacks = Callbacks::default(); let input_handler = None; let system_key_handled = false; + let hovered = false; let click_state = ClickState::new(); let system_settings = WindowsSystemSettings::new(display); let nc_button_pressed = None; @@ -107,9 +112,11 @@ impl WindowsWindowState { fullscreen_restore_bounds, border_offset, scale_factor, + restore_from_minimized, callbacks, input_handler, system_key_handled, + hovered, renderer, click_state, system_settings, @@ -221,6 +228,7 @@ impl WindowsWindowStatePtr { cs, context.current_cursor, context.display, + context.gpu_context, )?); Ok(Rc::new_cyclic(|this| Self { @@ 
-326,6 +334,7 @@ pub(crate) struct Callbacks { pub(crate) request_frame: Option>, pub(crate) input: Option DispatchEventResult>>, pub(crate) active_status_change: Option>, + pub(crate) hovered_status_change: Option>, pub(crate) resize: Option, f32)>>, pub(crate) moved: Option>, pub(crate) should_close: Option bool>>, @@ -333,7 +342,7 @@ pub(crate) struct Callbacks { pub(crate) appearance_changed: Option>, } -struct WindowCreateContext { +struct WindowCreateContext<'a> { inner: Option>>, handle: AnyWindowHandle, hide_title_bar: bool, @@ -345,6 +354,7 @@ struct WindowCreateContext { windows_version: WindowsVersion, validation_number: usize, main_receiver: flume::Receiver, + gpu_context: &'a BladeContext, } impl WindowsWindow { @@ -352,6 +362,7 @@ impl WindowsWindow { handle: AnyWindowHandle, params: WindowParams, creation_info: WindowCreationInfo, + gpu_context: &BladeContext, ) -> Result { let WindowCreationInfo { icon, @@ -403,6 +414,7 @@ impl WindowsWindow { windows_version, validation_number, main_receiver, + gpu_context, }; let lpparam = Some(&context as *const _ as *const _); let creation_result = unsafe { @@ -635,9 +647,8 @@ impl PlatformWindow for WindowsWindow { self.0.hwnd == unsafe { GetActiveWindow() } } - // is_hovered is unused on Windows. See WindowContext::is_window_hovered. 
fn is_hovered(&self) -> bool { - false + self.0.state.borrow().hovered } fn set_title(&mut self, title: &str) { @@ -728,7 +739,9 @@ impl PlatformWindow for WindowsWindow { self.0.state.borrow_mut().callbacks.active_status_change = Some(callback); } - fn on_hover_status_change(&self, _: Box) {} + fn on_hover_status_change(&self, callback: Box) { + self.0.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + } fn on_resize(&self, callback: Box, f32)>) { self.0.state.borrow_mut().callbacks.resize = Some(callback); @@ -762,7 +775,7 @@ impl PlatformWindow for WindowsWindow { self.0.hwnd } - fn gpu_specs(&self) -> Option { + fn gpu_specs(&self) -> Option { Some(self.0.state.borrow().renderer.gpu_specs()) } @@ -1060,7 +1073,7 @@ unsafe extern "system" fn wnd_proc( let weak = Box::new(Rc::downgrade(creation_result.as_ref().unwrap())); unsafe { set_window_long(hwnd, GWLP_USERDATA, Box::into_raw(weak) as isize) }; ctx.inner = Some(creation_result); - return LRESULT(1); + return unsafe { DefWindowProcW(hwnd, msg, wparam, lparam) }; } let ptr = unsafe { get_window_long(hwnd, GWLP_USERDATA) } as *mut Weak; if ptr.is_null() { @@ -1228,38 +1241,24 @@ fn set_window_composition_attribute(hwnd: HWND, color: Option, state: u32 } mod windows_renderer { - use std::{num::NonZeroIsize, sync::Arc}; - - use blade_graphics as gpu; + use crate::platform::blade::{BladeContext, BladeRenderer, BladeSurfaceConfig}; use raw_window_handle as rwh; + use std::num::NonZeroIsize; use windows::Win32::{Foundation::HWND, UI::WindowsAndMessaging::GWLP_HINSTANCE}; - use crate::{ - get_window_long, - platform::blade::{BladeRenderer, BladeSurfaceConfig}, - }; + use crate::get_window_long; - pub(super) fn windows_renderer(hwnd: HWND, transparent: bool) -> anyhow::Result { + pub(super) fn init( + context: &BladeContext, + hwnd: HWND, + transparent: bool, + ) -> anyhow::Result { let raw = RawWindow { hwnd }; - let gpu: Arc = Arc::new( - unsafe { - gpu::Context::init_windowed( - &raw, - 
gpu::ContextDesc { - validation: false, - capture: false, - overlay: false, - }, - ) - } - .map_err(|e| anyhow::anyhow!("{:?}", e))?, - ); let config = BladeSurfaceConfig { - size: gpu::Extent::default(), + size: Default::default(), transparent, }; - - Ok(BladeRenderer::new(gpu, config)) + BladeRenderer::new(context, &raw, config) } struct RawWindow { diff --git a/crates/gpui/src/prelude.rs b/crates/gpui/src/prelude.rs index e1cc14e93e..f00b4e958b 100644 --- a/crates/gpui/src/prelude.rs +++ b/crates/gpui/src/prelude.rs @@ -3,7 +3,7 @@ //! application to avoid having to import each trait individually. pub use crate::{ - util::FluentBuilder, BorrowAppContext, BorrowWindow, Context, Element, FocusableElement, + util::FluentBuilder, BorrowAppContext, BorrowWindow, Context as _, Element, FocusableElement, InteractiveElement, IntoElement, ParentElement, Refineable, Render, RenderOnce, StatefulInteractiveElement, Styled, StyledImage, VisualContext, }; diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index 9787ec5d87..778a5d1f27 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -2,8 +2,8 @@ #![cfg_attr(windows, allow(dead_code))] use crate::{ - bounds_tree::BoundsTree, point, AtlasTextureId, AtlasTile, Bounds, ContentMask, Corners, Edges, - Hsla, Pixels, Point, Radians, ScaledPixels, Size, + bounds_tree::BoundsTree, point, AtlasTextureId, AtlasTile, Background, Bounds, ContentMask, + Corners, Edges, Hsla, Pixels, Point, Radians, ScaledPixels, Size, }; use std::{fmt::Debug, iter::Peekable, ops::Range, slice}; @@ -128,13 +128,15 @@ impl Scene { } pub fn finish(&mut self) { - self.shadows.sort(); - self.quads.sort(); - self.paths.sort(); - self.underlines.sort(); - self.monochrome_sprites.sort(); - self.polychrome_sprites.sort(); - self.surfaces.sort(); + self.shadows.sort_by_key(|shadow| shadow.order); + self.quads.sort_by_key(|quad| quad.order); + self.paths.sort_by_key(|path| path.order); + self.underlines.sort_by_key(|underline| 
underline.order); + self.monochrome_sprites + .sort_by_key(|sprite| (sprite.order, sprite.tile.tile_id)); + self.polychrome_sprites + .sort_by_key(|sprite| (sprite.order, sprite.tile.tile_id)); + self.surfaces.sort_by_key(|surface| surface.order); } #[cfg_attr( @@ -196,7 +198,7 @@ pub(crate) enum PaintOperation { EndLayer, } -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq)] +#[derive(Clone)] pub(crate) enum Primitive { Shadow(Shadow), Quad(Quad), @@ -449,38 +451,26 @@ pub(crate) enum PrimitiveBatch<'a> { Surfaces(&'a [PaintSurface]), } -#[derive(Default, Debug, Clone, Eq, PartialEq)] +#[derive(Default, Debug, Clone)] #[repr(C)] pub(crate) struct Quad { pub order: DrawOrder, pub pad: u32, // align to 8 bytes pub bounds: Bounds, pub content_mask: ContentMask, - pub background: Hsla, + pub background: Background, pub border_color: Hsla, pub corner_radii: Corners, pub border_widths: Edges, } -impl Ord for Quad { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.order.cmp(&other.order) - } -} - -impl PartialOrd for Quad { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - impl From for Primitive { fn from(quad: Quad) -> Self { Primitive::Quad(quad) } } -#[derive(Debug, Clone, Eq, PartialEq)] +#[derive(Debug, Clone)] #[repr(C)] pub(crate) struct Underline { pub order: DrawOrder, @@ -492,25 +482,13 @@ pub(crate) struct Underline { pub wavy: bool, } -impl Ord for Underline { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.order.cmp(&other.order) - } -} - -impl PartialOrd for Underline { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - impl From for Primitive { fn from(underline: Underline) -> Self { Primitive::Underline(underline) } } -#[derive(Debug, Clone, Eq, PartialEq)] +#[derive(Debug, Clone)] #[repr(C)] pub(crate) struct Shadow { pub order: DrawOrder, @@ -521,18 +499,6 @@ pub(crate) struct Shadow { pub color: Hsla, } -impl Ord for Shadow { - fn cmp(&self, other: &Self) -> 
std::cmp::Ordering { - self.order.cmp(&other.order) - } -} - -impl PartialOrd for Shadow { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - impl From for Primitive { fn from(shadow: Shadow) -> Self { Primitive::Shadow(shadow) @@ -642,7 +608,7 @@ impl Default for TransformationMatrix { } } -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug)] #[repr(C)] pub(crate) struct MonochromeSprite { pub order: DrawOrder, @@ -654,28 +620,13 @@ pub(crate) struct MonochromeSprite { pub transformation: TransformationMatrix, } -impl Ord for MonochromeSprite { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - match self.order.cmp(&other.order) { - std::cmp::Ordering::Equal => self.tile.tile_id.cmp(&other.tile.tile_id), - order => order, - } - } -} - -impl PartialOrd for MonochromeSprite { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - impl From for Primitive { fn from(sprite: MonochromeSprite) -> Self { Primitive::MonochromeSprite(sprite) } } -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug)] #[repr(C)] pub(crate) struct PolychromeSprite { pub order: DrawOrder, @@ -687,22 +638,6 @@ pub(crate) struct PolychromeSprite { pub corner_radii: Corners, pub tile: AtlasTile, } -impl Eq for PolychromeSprite {} - -impl Ord for PolychromeSprite { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - match self.order.cmp(&other.order) { - std::cmp::Ordering::Equal => self.tile.tile_id.cmp(&other.tile.tile_id), - order => order, - } - } -} - -impl PartialOrd for PolychromeSprite { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} impl From for Primitive { fn from(sprite: PolychromeSprite) -> Self { @@ -710,7 +645,7 @@ impl From for Primitive { } } -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug)] pub(crate) struct PaintSurface { pub order: DrawOrder, pub bounds: Bounds, @@ -719,18 +654,6 @@ pub(crate) struct PaintSurface { pub image_buffer: 
media::core_video::CVImageBuffer, } -impl Ord for PaintSurface { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.order.cmp(&other.order) - } -} - -impl PartialOrd for PaintSurface { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - impl From for Primitive { fn from(surface: PaintSurface) -> Self { Primitive::Surface(surface) @@ -748,7 +671,7 @@ pub struct Path { pub(crate) bounds: Bounds

, pub(crate) content_mask: ContentMask

, pub(crate) vertices: Vec>, - pub(crate) color: Hsla, + pub(crate) color: Background, start: Point

, current: Point

, contour_count: usize, @@ -859,26 +782,6 @@ impl Path { } } -impl Eq for Path {} - -impl PartialEq for Path { - fn eq(&self, other: &Self) -> bool { - self.order == other.order - } -} - -impl Ord for Path { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.order.cmp(&other.order) - } -} - -impl PartialOrd for Path { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - impl From> for Primitive { fn from(path: Path) -> Self { Primitive::Path(path) diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index 1fbd337bd0..b5210efa50 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -81,6 +81,12 @@ impl<'a> PartialEq<&'a str> for SharedString { } } +impl From<&SharedString> for SharedString { + fn from(value: &SharedString) -> Self { + value.clone() + } +} + impl From for Arc { fn from(val: SharedString) -> Self { match val.0 { diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index cfe1034891..87c3de5ca2 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -5,10 +5,11 @@ use std::{ }; use crate::{ - black, phi, point, quad, rems, size, AbsoluteLength, Bounds, ContentMask, Corners, - CornersRefinement, CursorStyle, DefiniteLength, DevicePixels, Edges, EdgesRefinement, Font, - FontFallbacks, FontFeatures, FontStyle, FontWeight, Hsla, Length, Pixels, Point, - PointRefinement, Rgba, SharedString, Size, SizeRefinement, Styled, TextRun, WindowContext, + black, phi, point, quad, rems, size, AbsoluteLength, Background, BackgroundTag, Bounds, + ContentMask, Corners, CornersRefinement, CursorStyle, DefiniteLength, DevicePixels, Edges, + EdgesRefinement, Font, FontFallbacks, FontFeatures, FontStyle, FontWeight, Hsla, Length, + Pixels, Point, PointRefinement, Rgba, SharedString, Size, SizeRefinement, Styled, TextRun, + WindowContext, }; use collections::HashSet; use refineable::Refineable; @@ -508,7 +509,7 @@ impl Style { } => None, _ => { let 
mut min = bounds.origin; - let mut max = bounds.lower_right(); + let mut max = bounds.bottom_right(); if self .border_color @@ -529,12 +530,12 @@ impl Style { // x visible, y hidden (true, false) => Bounds::from_corners( point(min.x, bounds.origin.y), - point(max.x, bounds.lower_right().y), + point(max.x, bounds.bottom_right().y), ), // x hidden, y visible (false, true) => Bounds::from_corners( point(bounds.origin.x, min.y), - point(bounds.lower_right().x, max.y), + point(bounds.bottom_right().x, max.y), ), // both hidden (false, false) => Bounds::from_corners(min, max), @@ -572,7 +573,17 @@ impl Style { let background_color = self.background.as_ref().and_then(Fill::color); if background_color.map_or(false, |color| !color.is_transparent()) { - let mut border_color = background_color.unwrap_or_default(); + let mut border_color = match background_color { + Some(color) => match color.tag { + BackgroundTag::Solid => color.solid, + BackgroundTag::LinearGradient => color + .colors + .first() + .map(|stop| stop.color) + .unwrap_or_default(), + }, + None => Hsla::default(), + }; border_color.a = 0.; cx.paint_quad(quad( bounds, @@ -593,19 +604,19 @@ impl Style { let top_bounds = Bounds::from_corners( bounds.origin, - bounds.upper_right() + point(Pixels::ZERO, max_border_width.max(max_corner_radius)), + bounds.top_right() + point(Pixels::ZERO, max_border_width.max(max_corner_radius)), ); let bottom_bounds = Bounds::from_corners( - bounds.lower_left() - point(Pixels::ZERO, max_border_width.max(max_corner_radius)), - bounds.lower_right(), + bounds.bottom_left() - point(Pixels::ZERO, max_border_width.max(max_corner_radius)), + bounds.bottom_right(), ); let left_bounds = Bounds::from_corners( - top_bounds.lower_left(), + top_bounds.bottom_left(), bottom_bounds.origin + point(max_border_width, Pixels::ZERO), ); let right_bounds = Bounds::from_corners( - top_bounds.lower_right() - point(max_border_width, Pixels::ZERO), - bottom_bounds.upper_right(), + top_bounds.bottom_right() - 
point(max_border_width, Pixels::ZERO), + bottom_bounds.top_right(), ); let mut background = self.border_color.unwrap_or_default(); @@ -737,12 +748,14 @@ pub struct StrikethroughStyle { #[derive(Clone, Debug)] pub enum Fill { /// A solid color fill. - Color(Hsla), + Color(Background), } impl Fill { /// Unwrap this fill into a solid color, if it is one. - pub fn color(&self) -> Option { + /// + /// If the fill is not a solid color, this method returns `None`. + pub fn color(&self) -> Option { match self { Fill::Color(color) => Some(*color), } @@ -751,13 +764,13 @@ impl Fill { impl Default for Fill { fn default() -> Self { - Self::Color(Hsla::default()) + Self::Color(Background::default()) } } impl From for Fill { fn from(color: Hsla) -> Self { - Self::Color(color) + Self::Color(color.into()) } } @@ -767,6 +780,12 @@ impl From for Fill { } } +impl From for Fill { + fn from(background: Background) -> Self { + Self::Color(background) + } +} + impl From for HighlightStyle { fn from(other: TextStyle) -> Self { Self::from(&other) diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index 27e7e55982..b695aac21c 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -356,7 +356,7 @@ impl WindowTextSystem { }); } - let layout = self.layout_line(text.as_ref(), font_size, runs)?; + let layout = self.layout_line(&text, font_size, runs)?; Ok(ShapedLine { layout, @@ -483,12 +483,16 @@ impl WindowTextSystem { /// Subsets of the line can be styled independently with the `runs` parameter. /// Generally, you should prefer to use `TextLayout::shape_line` instead, which /// can be painted directly. 
- pub fn layout_line( + pub fn layout_line( &self, - text: &str, + text: Text, font_size: Pixels, runs: &[TextRun], - ) -> Result> { + ) -> Result> + where + Text: AsRef, + SharedString: From, + { let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); for run in runs.iter() { let font_id = self.resolve_font(&run.font); diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index b8b698a042..7c18684cbc 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -44,6 +44,21 @@ impl ShapedLine { self.layout.len } + /// Override the len, useful if you're rendering text a + /// as text b (e.g. rendering invisibles). + pub fn with_len(mut self, len: usize) -> Self { + let layout = self.layout.as_ref(); + self.layout = Arc::new(LineLayout { + font_size: layout.font_size, + width: layout.width, + ascent: layout.ascent, + descent: layout.descent, + runs: layout.runs.clone(), + len, + }); + self + } + /// Paint the line of text to the window. pub fn paint( &self, diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index 7e5a43dee8..a78c07fa26 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -1,4 +1,4 @@ -use crate::{point, px, FontId, GlyphId, Pixels, PlatformTextSystem, Point, Size}; +use crate::{point, px, FontId, GlyphId, Pixels, PlatformTextSystem, Point, SharedString, Size}; use collections::FxHashMap; use parking_lot::{Mutex, RwLock, RwLockUpgradableReadGuard}; use smallvec::SmallVec; @@ -29,7 +29,7 @@ pub struct LineLayout { } /// A run of text that has been shaped . 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct ShapedRun { /// The font id for this run pub font_id: FontId, @@ -420,15 +420,19 @@ impl LineLayoutCache { curr_frame.used_wrapped_lines.clear(); } - pub fn layout_wrapped_line( + pub fn layout_wrapped_line( &self, - text: &str, + text: Text, font_size: Pixels, runs: &[FontRun], wrap_width: Option, - ) -> Arc { + ) -> Arc + where + Text: AsRef, + SharedString: From, + { let key = &CacheKeyRef { - text, + text: text.as_ref(), font_size, runs, wrap_width, @@ -449,8 +453,8 @@ impl LineLayoutCache { layout } else { drop(current_frame); - - let unwrapped_layout = self.layout_line(text, font_size, runs); + let text = SharedString::from(text); + let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs); let wrap_boundaries = if let Some(wrap_width) = wrap_width { unwrapped_layout.compute_wrap_boundaries(text.as_ref(), wrap_width) } else { @@ -462,7 +466,7 @@ impl LineLayoutCache { wrap_width, }); let key = Arc::new(CacheKey { - text: text.into(), + text, font_size, runs: SmallVec::from(runs), wrap_width, @@ -478,9 +482,18 @@ impl LineLayoutCache { } } - pub fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> Arc { + pub fn layout_line( + &self, + text: Text, + font_size: Pixels, + runs: &[FontRun], + ) -> Arc + where + Text: AsRef, + SharedString: From, + { let key = &CacheKeyRef { - text, + text: text.as_ref(), font_size, runs, wrap_width: None, @@ -497,9 +510,13 @@ impl LineLayoutCache { current_frame.used_lines.push(key); layout } else { - let layout = Arc::new(self.platform_text_system.layout_line(text, font_size, runs)); + let text = SharedString::from(text); + let layout = Arc::new( + self.platform_text_system + .layout_line(&text, font_size, runs), + ); let key = Arc::new(CacheKey { - text: text.into(), + text, font_size, runs: SmallVec::from(runs), wrap_width: None, @@ -524,7 +541,7 @@ trait AsCacheKeyRef { #[derive(Clone, Debug, Eq)] struct CacheKey { - text: 
String, + text: SharedString, font_size: Pixels, runs: SmallVec<[FontRun; 1]>, wrap_width: Option, diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 3d38ca315c..1b99165eee 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -1,4 +1,4 @@ -use crate::{px, FontId, FontRun, Pixels, PlatformTextSystem, SharedString}; +use crate::{px, FontId, FontRun, Pixels, PlatformTextSystem, SharedString, TextRun}; use collections::HashMap; use std::{iter, sync::Arc}; @@ -104,6 +104,7 @@ impl LineWrapper { line: SharedString, truncate_width: Pixels, ellipsis: Option<&str>, + runs: &mut Vec, ) -> SharedString { let mut width = px(0.); let mut ellipsis_width = px(0.); @@ -124,15 +125,15 @@ impl LineWrapper { width += char_width; if width.floor() > truncate_width { - return SharedString::from(format!( - "{}{}", - &line[..truncate_ix], - ellipsis.unwrap_or("") - )); + let ellipsis = ellipsis.unwrap_or(""); + let result = SharedString::from(format!("{}{}", &line[..truncate_ix], ellipsis)); + update_runs_after_truncation(&result, ellipsis, runs); + + return result; } } - line.clone() + line } pub(crate) fn is_word_char(c: char) -> bool { @@ -195,6 +196,23 @@ impl LineWrapper { } } +fn update_runs_after_truncation(result: &str, ellipsis: &str, runs: &mut Vec) { + let mut truncate_at = result.len() - ellipsis.len(); + let mut run_end = None; + for (run_index, run) in runs.iter_mut().enumerate() { + if run.len <= truncate_at { + truncate_at -= run.len; + } else { + run.len = truncate_at + ellipsis.len(); + run_end = Some(run_index + 1); + break; + } + } + if let Some(run_end) = run_end { + runs.truncate(run_end); + } +} + /// A boundary between two lines of text. 
#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Boundary { @@ -213,7 +231,9 @@ impl Boundary { #[cfg(test)] mod tests { use super::*; - use crate::{font, TestAppContext, TestDispatcher}; + use crate::{ + font, Font, FontFeatures, FontStyle, FontWeight, Hsla, TestAppContext, TestDispatcher, + }; #[cfg(target_os = "macos")] use crate::{TextRun, WindowTextSystem, WrapBoundary}; use rand::prelude::*; @@ -232,6 +252,26 @@ mod tests { LineWrapper::new(id, px(16.), cx.text_system().platform_text_system.clone()) } + fn generate_test_runs(input_run_len: &[usize]) -> Vec { + input_run_len + .iter() + .map(|run_len| TextRun { + len: *run_len, + font: Font { + family: "Dummy".into(), + features: FontFeatures::default(), + fallbacks: None, + weight: FontWeight::default(), + style: FontStyle::Normal, + }, + color: Hsla::default(), + background_color: None, + underline: None, + strikethrough: None, + }) + .collect() + } + #[test] fn test_wrap_line() { let mut wrapper = build_wrapper(); @@ -293,28 +333,135 @@ mod tests { fn test_truncate_line() { let mut wrapper = build_wrapper(); - assert_eq!( - wrapper.truncate_line("aa bbb cccc ddddd eeee ffff gggg".into(), px(220.), None), - "aa bbb cccc ddddd eeee" + fn perform_test( + wrapper: &mut LineWrapper, + text: &'static str, + result: &'static str, + ellipsis: Option<&str>, + ) { + let dummy_run_lens = vec![text.len()]; + let mut dummy_runs = generate_test_runs(&dummy_run_lens); + assert_eq!( + wrapper.truncate_line(text.into(), px(220.), ellipsis, &mut dummy_runs), + result + ); + assert_eq!(dummy_runs.first().unwrap().len, result.len()); + } + + perform_test( + &mut wrapper, + "aa bbb cccc ddddd eeee ffff gggg", + "aa bbb cccc ddddd eeee", + None, ); - assert_eq!( - wrapper.truncate_line( - "aa bbb cccc ddddd eeee ffff gggg".into(), - px(220.), - Some("…") - ), - "aa bbb cccc ddddd eee…" + perform_test( + &mut wrapper, + "aa bbb cccc ddddd eeee ffff gggg", + "aa bbb cccc ddddd eee…", + Some("…"), ); - assert_eq!( - 
wrapper.truncate_line( - "aa bbb cccc ddddd eeee ffff gggg".into(), - px(220.), - Some("......") - ), - "aa bbb cccc dddd......" + perform_test( + &mut wrapper, + "aa bbb cccc ddddd eeee ffff gggg", + "aa bbb cccc dddd......", + Some("......"), ); } + #[test] + fn test_truncate_multiple_runs() { + let mut wrapper = build_wrapper(); + + fn perform_test( + wrapper: &mut LineWrapper, + text: &'static str, + result: &str, + run_lens: &[usize], + result_run_len: &[usize], + line_width: Pixels, + ) { + let mut dummy_runs = generate_test_runs(run_lens); + assert_eq!( + wrapper.truncate_line(text.into(), line_width, Some("…"), &mut dummy_runs), + result + ); + for (run, result_len) in dummy_runs.iter().zip(result_run_len) { + assert_eq!(run.len, *result_len); + } + } + // Case 0: Normal + // Text: abcdefghijkl + // Runs: Run0 { len: 12, ... } + // + // Truncate res: abcd… (truncate_at = 4) + // Run res: Run0 { string: abcd…, len: 7, ... } + perform_test(&mut wrapper, "abcdefghijkl", "abcd…", &[12], &[7], px(50.)); + // Case 1: Drop some runs + // Text: abcdefghijkl + // Runs: Run0 { len: 4, ... }, Run1 { len: 4, ... }, Run2 { len: 4, ... } + // + // Truncate res: abcdef… (truncate_at = 6) + // Runs res: Run0 { string: abcd, len: 4, ... }, Run1 { string: ef…, len: + // 5, ... } + perform_test( + &mut wrapper, + "abcdefghijkl", + "abcdef…", + &[4, 4, 4], + &[4, 5], + px(70.), + ); + // Case 2: Truncate at start of some run + // Text: abcdefghijkl + // Runs: Run0 { len: 4, ... }, Run1 { len: 4, ... }, Run2 { len: 4, ... } + // + // Truncate res: abcdefgh… (truncate_at = 8) + // Runs res: Run0 { string: abcd, len: 4, ... }, Run1 { string: efgh, len: + // 4, ... }, Run2 { string: …, len: 3, ... 
} + perform_test( + &mut wrapper, + "abcdefghijkl", + "abcdefgh…", + &[4, 4, 4], + &[4, 4, 3], + px(90.), + ); + } + + #[test] + fn test_update_run_after_truncation() { + fn perform_test(result: &str, run_lens: &[usize], result_run_lens: &[usize]) { + let mut dummy_runs = generate_test_runs(run_lens); + update_runs_after_truncation(result, "…", &mut dummy_runs); + for (run, result_len) in dummy_runs.iter().zip(result_run_lens) { + assert_eq!(run.len, *result_len); + } + } + // Case 0: Normal + // Text: abcdefghijkl + // Runs: Run0 { len: 12, ... } + // + // Truncate res: abcd… (truncate_at = 4) + // Run res: Run0 { string: abcd…, len: 7, ... } + perform_test("abcd…", &[12], &[7]); + // Case 1: Drop some runs + // Text: abcdefghijkl + // Runs: Run0 { len: 4, ... }, Run1 { len: 4, ... }, Run2 { len: 4, ... } + // + // Truncate res: abcdef… (truncate_at = 6) + // Runs res: Run0 { string: abcd, len: 4, ... }, Run1 { string: ef…, len: + // 5, ... } + perform_test("abcdef…", &[4, 4, 4], &[4, 5]); + // Case 2: Truncate at start of some run + // Text: abcdefghijkl + // Runs: Run0 { len: 4, ... }, Run1 { len: 4, ... }, Run2 { len: 4, ... } + // + // Truncate res: abcdefgh… (truncate_at = 8) + // Runs res: Run0 { string: abcd, len: 4, ... }, Run1 { string: efgh, len: + // 4, ... }, Run2 { string: …, len: 3, ... 
} + perform_test("abcdefgh…", &[4, 4, 4], &[4, 4, 3]); + } + #[test] fn test_is_word_char() { #[track_caller] diff --git a/crates/gpui/src/view.rs b/crates/gpui/src/view.rs index 7f10eb25c3..f28c06044b 100644 --- a/crates/gpui/src/view.rs +++ b/crates/gpui/src/view.rs @@ -7,6 +7,7 @@ use crate::{ }; use anyhow::{Context, Result}; use refineable::Refineable; +use std::mem; use std::{ any::{type_name, TypeId}, fmt, @@ -68,7 +69,7 @@ impl View { pub fn update( &self, cx: &mut C, - f: impl FnOnce(&mut V, &mut ViewContext<'_, V>) -> R, + f: impl FnOnce(&mut V, &mut ViewContext) -> R, ) -> C::Result where C: VisualContext, @@ -182,7 +183,7 @@ impl WeakView { pub fn update( &self, cx: &mut C, - f: impl FnOnce(&mut V, &mut ViewContext<'_, V>) -> R, + f: impl FnOnce(&mut V, &mut ViewContext) -> R, ) -> Result where C: VisualContext, @@ -341,11 +342,13 @@ impl Element for AnyView { } } + let refreshing = mem::replace(&mut cx.window.refreshing, true); let prepaint_start = cx.prepaint_index(); let mut element = (self.render)(self, cx); element.layout_as_root(bounds.size.into(), cx); element.prepaint_at(bounds.origin, cx); let prepaint_end = cx.prepaint_index(); + cx.window.refreshing = refreshing; ( Some(element), @@ -382,7 +385,9 @@ impl Element for AnyView { let paint_start = cx.paint_index(); if let Some(element) = element { + let refreshing = mem::replace(&mut cx.window.refreshing, true); element.paint(cx); + cx.window.refreshing = refreshing; } else { cx.reuse_paint(element_state.paint_range.clone()); } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 0f2be2497a..2a4a664361 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -1,9 +1,9 @@ use crate::{ point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip, - AnyView, AppContext, Arena, Asset, AsyncWindowContext, AvailableSpace, Bounds, BoxShadow, - Context, Corners, CursorStyle, Decorations, DevicePixels, DispatchActionListener, + AnyView, 
AppContext, Arena, Asset, AsyncWindowContext, AvailableSpace, Background, Bounds, + BoxShadow, Context, Corners, CursorStyle, Decorations, DevicePixels, DispatchActionListener, DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter, - FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, InputHandler, + FileDropEvent, Flatten, FontId, Global, GlobalElementId, GlyphId, GpuSpecs, Hsla, InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, KeystrokeObserver, LayoutId, LineLayoutIndex, Model, ModelContext, Modifiers, ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent, @@ -531,7 +531,6 @@ pub struct Window { pub(crate) tooltip_bounds: Option, next_frame_callbacks: Rc>>, pub(crate) dirty_views: FxHashSet, - pub(crate) focus_handles: Arc>>, focus_listeners: SubscriberSet<(), AnyWindowFocusListener>, focus_lost_listeners: SubscriberSet<(), AnyObserver>, default_prevented: bool, @@ -809,7 +808,6 @@ impl Window { next_tooltip_id: TooltipId::default(), tooltip_bounds: None, dirty_views: FxHashSet::default(), - focus_handles: Arc::new(RwLock::new(SlotMap::with_key())), focus_listeners: SubscriberSet::new(), focus_lost_listeners: SubscriberSet::new(), default_prevented: true, @@ -931,17 +929,11 @@ impl<'a> WindowContext<'a> { self.window.removed = true; } - /// Obtain a new [`FocusHandle`], which allows you to track and manipulate the keyboard focus - /// for elements rendered within this window. - pub fn focus_handle(&self) -> FocusHandle { - FocusHandle::new(&self.window.focus_handles) - } - /// Obtain the currently focused [`FocusHandle`]. If no elements are focused, returns `None`. 
pub fn focused(&self) -> Option { self.window .focus - .and_then(|id| FocusHandle::for_id(id, &self.window.focus_handles)) + .and_then(|id| FocusHandle::for_id(id, &self.app.focus_handles)) } /// Move focus to the element associated with the given [`FocusHandle`]. @@ -1007,6 +999,11 @@ impl<'a> WindowContext<'a> { self.window.platform_window.window_bounds() } + /// Return the `WindowBounds` excluding insets (Wayland and X11) + pub fn inner_window_bounds(&self) -> WindowBounds { + self.window.platform_window.inner_window_bounds() + } + /// Dispatch the given action on the currently focused element. pub fn dispatch_action(&mut self, action: Box) { let focus_handle = self.focused(); @@ -1241,7 +1238,11 @@ impl<'a> WindowContext<'a> { /// that currently owns the mouse cursor. /// On mac, this is equivalent to `is_window_active`. pub fn is_window_hovered(&self) -> bool { - if cfg!(any(target_os = "linux", target_os = "freebsd")) { + if cfg!(any( + target_os = "windows", + target_os = "linux", + target_os = "freebsd" + )) { self.window.hovered.get() } else { self.is_window_active() @@ -1585,6 +1586,19 @@ impl<'a> WindowContext<'a> { } } + // Element's parent can get hidden (e.g. via the `visible_on_hover` method), + // and element's `paint` won't be called (ergo, mouse listeners also won't be active) to detect that the tooltip has to be removed. + // Ensure it's not stuck around in such cases. 
+ let invalidate_tooltip = !tooltip_request + .tooltip + .origin_bounds + .contains(&self.mouse_position()) + && (!tooltip_request.tooltip.hoverable + || !tooltip_bounds.contains(&self.mouse_position())); + if invalidate_tooltip { + return None; + } + self.with_absolute_element_offset(tooltip_bounds.origin, |cx| element.prepaint(cx)); self.window.tooltip_bounds = Some(TooltipBounds { @@ -1758,6 +1772,7 @@ impl<'a> WindowContext<'a> { .iter() .map(|(id, type_id)| (GlobalElementId(id.0.clone()), *type_id)), ); + window .text_system .reuse_layouts(range.start.line_layout_index..range.end.line_layout_index); @@ -2271,9 +2286,7 @@ impl<'a> WindowContext<'a> { let content_mask = self.content_mask(); let opacity = self.element_opacity(); for shadow in shadows { - let mut shadow_bounds = bounds; - shadow_bounds.origin += shadow.offset; - shadow_bounds.dilate(shadow.spread_radius); + let shadow_bounds = (bounds + shadow.offset).dilate(shadow.spread_radius); self.window.next_frame.scene.insert_primitive(Shadow { order: 0, blur_radius: shadow.blur_radius.scale(scale_factor), @@ -2315,7 +2328,7 @@ impl<'a> WindowContext<'a> { /// Paint the given `Path` into the scene for the next frame at the current z-index. /// /// This method should only be called as part of the paint phase of element drawing. - pub fn paint_path(&mut self, mut path: Path, color: impl Into) { + pub fn paint_path(&mut self, mut path: Path, color: impl Into) { debug_assert_eq!( self.window.draw_phase, DrawPhase::Paint, @@ -2326,7 +2339,8 @@ impl<'a> WindowContext<'a> { let content_mask = self.content_mask(); let opacity = self.element_opacity(); path.content_mask = content_mask; - path.color = color.into().opacity(opacity); + let color: Background = color.into(); + path.color = color.opacity(opacity); self.window .next_frame .scene @@ -2685,6 +2699,20 @@ impl<'a> WindowContext<'a> { }); } + /// Removes an image from the sprite atlas. 
+ pub fn drop_image(&mut self, data: Arc) -> Result<()> { + for frame_index in 0..data.frame_count() { + let params = RenderImageParams { + image_id: data.id, + frame_index, + }; + + self.window.sprite_atlas.remove(¶ms.clone().into()); + } + + Ok(()) + } + #[must_use] /// Add a node to the layout tree for the current frame. Takes the `Style` of the element for which /// layout is being requested, along with the layout ids of any children. This method is called during @@ -2998,7 +3026,7 @@ impl<'a> WindowContext<'a> { let event = FocusOutEvent { blurred: WeakFocusHandle { id: blurred_id, - handles: Arc::downgrade(&cx.window.focus_handles), + handles: Arc::downgrade(&cx.app.focus_handles), }, }; listener(event, cx) @@ -3038,7 +3066,7 @@ impl<'a> WindowContext<'a> { return true; } - if let Some(input) = keystroke.ime_key { + if let Some(input) = keystroke.key_char { if let Some(mut input_handler) = self.window.platform_window.take_input_handler() { input_handler.dispatch_input(&input, self); self.window.platform_window.set_input_handler(input_handler); @@ -3112,7 +3140,7 @@ impl<'a> WindowContext<'a> { self.window.mouse_position = position; if self.active_drag.is_none() { self.active_drag = Some(AnyDrag { - value: Box::new(paths.clone()), + value: Arc::new(paths.clone()), view: self.new_view(|_| paths).into(), cursor_offset: position, }); @@ -3247,7 +3275,7 @@ impl<'a> WindowContext<'a> { if let Some(key) = key { keystroke = Some(Keystroke { key: key.to_string(), - ime_key: None, + key_char: None, modifiers: Modifiers::default(), }); } @@ -3462,7 +3490,7 @@ impl<'a> WindowContext<'a> { if !self.propagate_event { continue 'replay; } - if let Some(input) = replay.keystroke.ime_key.as_ref().cloned() { + if let Some(input) = replay.keystroke.key_char.as_ref().cloned() { if let Some(mut input_handler) = self.window.platform_window.take_input_handler() { input_handler.dispatch_input(&input, self); self.window.platform_window.set_input_handler(input_handler) @@ -3793,7 
+3821,7 @@ impl<'a> WindowContext<'a> { /// Read information about the GPU backing this window. /// Currently returns None on Mac and Windows. - pub fn gpu_specs(&self) -> Option { + pub fn gpu_specs(&self) -> Option { self.window.platform_window.gpu_specs() } } @@ -4416,7 +4444,7 @@ impl<'a, V: 'static> ViewContext<'a, V> { let event = FocusOutEvent { blurred: WeakFocusHandle { id: blurred_id, - handles: Arc::downgrade(&cx.window.focus_handles), + handles: Arc::downgrade(&cx.app.focus_handles), }, }; listener(view, event, cx) @@ -4852,6 +4880,8 @@ pub enum ElementId { FocusHandle(FocusId), /// A combination of a name and an integer. NamedInteger(SharedString, usize), + /// A path + Path(Arc), } impl Display for ElementId { @@ -4863,6 +4893,7 @@ impl Display for ElementId { ElementId::FocusHandle(_) => write!(f, "FocusHandle")?, ElementId::NamedInteger(s, i) => write!(f, "{}-{}", s, i)?, ElementId::Uuid(uuid) => write!(f, "{}", uuid)?, + ElementId::Path(path) => write!(f, "{}", path.display())?, } Ok(()) @@ -4899,6 +4930,12 @@ impl From for ElementId { } } +impl From> for ElementId { + fn from(path: Arc) -> Self { + ElementId::Path(path) + } +} + impl From<&'static str> for ElementId { fn from(name: &'static str) -> Self { ElementId::Name(name.into()) @@ -4956,7 +4993,7 @@ pub struct PaintQuad { /// The radii of the quad's corners. pub corner_radii: Corners, /// The background color of the quad. - pub background: Hsla, + pub background: Background, /// The widths of the quad's borders. pub border_widths: Edges, /// The color of the quad's borders. @@ -4989,7 +5026,7 @@ impl PaintQuad { } /// Sets the background color of the quad. 
- pub fn background(self, background: impl Into) -> Self { + pub fn background(self, background: impl Into) -> Self { PaintQuad { background: background.into(), ..self @@ -5001,7 +5038,7 @@ impl PaintQuad { pub fn quad( bounds: Bounds, corner_radii: impl Into>, - background: impl Into, + background: impl Into, border_widths: impl Into>, border_color: impl Into, ) -> PaintQuad { @@ -5015,7 +5052,7 @@ pub fn quad( } /// Creates a filled quad with the given bounds and background color. -pub fn fill(bounds: impl Into>, background: impl Into) -> PaintQuad { +pub fn fill(bounds: impl Into>, background: impl Into) -> PaintQuad { PaintQuad { bounds: bounds.into(), corner_radii: (0.).into(), @@ -5030,7 +5067,7 @@ pub fn outline(bounds: impl Into>, border_color: impl Into) PaintQuad { bounds: bounds.into(), corner_radii: (0.).into(), - background: transparent_black(), + background: transparent_black().into(), border_widths: (1.).into(), border_color: border_color.into(), } diff --git a/crates/gpui_macros/src/derive_render.rs b/crates/gpui_macros/src/derive_render.rs index 2b39248f80..e76466fd10 100644 --- a/crates/gpui_macros/src/derive_render.rs +++ b/crates/gpui_macros/src/derive_render.rs @@ -11,7 +11,7 @@ pub fn derive_render(input: TokenStream) -> TokenStream { impl #impl_generics gpui::Render for #type_name #type_generics #where_clause { - fn render(&mut self, _cx: &mut gpui::ViewContext) -> impl gpui::Element { + fn render(&mut self, _cx: &mut ViewContext) -> impl gpui::Element { gpui::Empty } } diff --git a/crates/gpui_macros/src/register_action.rs b/crates/gpui_macros/src/register_action.rs index 7ec1d6dd4b..7fc8158e9b 100644 --- a/crates/gpui_macros/src/register_action.rs +++ b/crates/gpui_macros/src/register_action.rs @@ -32,6 +32,7 @@ pub(crate) fn register_action(type_name: &Ident) -> proc_macro2::TokenStream { fn #action_builder_fn_name() -> gpui::ActionData { gpui::ActionData { name: <#type_name as gpui::Action>::debug_name(), + aliases: <#type_name as 
gpui::Action>::deprecated_aliases(), type_id: ::std::any::TypeId::of::<#type_name>(), build: <#type_name as gpui::Action>::build, } diff --git a/crates/gpui_macros/src/styles.rs b/crates/gpui_macros/src/styles.rs index f261e3643c..b6f8806c05 100644 --- a/crates/gpui_macros/src/styles.rs +++ b/crates/gpui_macros/src/styles.rs @@ -271,6 +271,20 @@ pub fn cursor_style_methods(input: TokenStream) -> TokenStream { self } + /// Sets cursor style when hovering over an element to `nesw-resize`. + /// [Docs](https://tailwindcss.com/docs/cursor) + #visibility fn cursor_nesw_resize(mut self) -> Self { + self.style().mouse_cursor = Some(gpui::CursorStyle::ResizeUpRightDownLeft); + self + } + + /// Sets cursor style when hovering over an element to `nwse-resize`. + /// [Docs](https://tailwindcss.com/docs/cursor) + #visibility fn cursor_nwse_resize(mut self) -> Self { + self.style().mouse_cursor = Some(gpui::CursorStyle::ResizeUpLeftDownRight); + self + } + /// Sets cursor style when hovering over an element to `col-resize`. 
/// [Docs](https://tailwindcss.com/docs/cursor) #visibility fn cursor_col_resize(mut self) -> Self { diff --git a/crates/html_to_markdown/src/markdown_writer.rs b/crates/html_to_markdown/src/markdown_writer.rs index 579e576116..a9caf7afa7 100644 --- a/crates/html_to_markdown/src/markdown_writer.rs +++ b/crates/html_to_markdown/src/markdown_writer.rs @@ -1,7 +1,6 @@ -use std::cell::RefCell; use std::collections::VecDeque; use std::rc::Rc; -use std::sync::OnceLock; +use std::{cell::RefCell, sync::LazyLock}; use anyhow::Result; use markup5ever_rcdom::{Handle, NodeData}; @@ -10,13 +9,14 @@ use regex::Regex; use crate::html_element::HtmlElement; fn empty_line_regex() -> &'static Regex { - static REGEX: OnceLock = OnceLock::new(); - REGEX.get_or_init(|| Regex::new(r"^\s*$").unwrap()) + static REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"^\s*$").expect("Failed to create empty_line_regex")); + ®EX } fn more_than_three_newlines_regex() -> &'static Regex { - static REGEX: OnceLock = OnceLock::new(); - REGEX.get_or_init(|| Regex::new(r"\n{3,}").unwrap()) + static REGEX: LazyLock = LazyLock::new(|| Regex::new(r"\n{3,}").unwrap()); + ®EX } pub enum StartTagOutcome { diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index ac8e254b84..a4f10cff18 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -20,7 +20,7 @@ bytes.workspace = true anyhow.workspace = true derive_more.workspace = true futures.workspace = true -http = "1.1" +http.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/image_viewer/Cargo.toml b/crates/image_viewer/Cargo.toml index 9c431e5edc..f408d31177 100644 --- a/crates/image_viewer/Cargo.toml +++ b/crates/image_viewer/Cargo.toml @@ -15,6 +15,7 @@ doctest = false [dependencies] anyhow.workspace = true db.workspace = true +editor.workspace = true file_icons.workspace = true gpui.workspace = true project.workspace = true @@ -23,3 +24,6 @@ 
theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true + +[features] +test-support = ["gpui/test-support"] diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 5e58cc49fb..b78f1bd085 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -1,6 +1,7 @@ use std::path::PathBuf; use anyhow::Context as _; +use editor::items::entry_git_aware_label_color; use gpui::{ canvas, div, fill, img, opaque_grey, point, size, AnyElement, AppContext, Bounds, EventEmitter, FocusHandle, FocusableView, InteractiveElement, IntoElement, Model, ObjectFit, ParentElement, @@ -16,7 +17,7 @@ use settings::Settings; use util::paths::PathExt; use workspace::{ item::{BreadcrumbText, Item, ProjectItem, SerializableItem, TabContentParams}, - ItemId, ItemSettings, Pane, ToolbarItemLocation, Workspace, WorkspaceId, + ItemId, ItemSettings, ToolbarItemLocation, Workspace, WorkspaceId, }; const IMAGE_VIEWER_KIND: &str = "ImageView"; @@ -78,7 +79,7 @@ impl Item for ImageView { fn for_each_project_item( &self, cx: &AppContext, - f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item), + f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), ) { f(self.image_item.entity_id(), self.image_item.read(cx)) } @@ -94,15 +95,35 @@ impl Item for ImageView { } fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { - let path = self.image_item.read(cx).file.path(); - let title = path - .file_name() - .unwrap_or_else(|| path.as_os_str()) + let project_path = self.image_item.read(cx).project_path(cx); + + let label_color = if ItemSettings::get_global(cx).git_status { + let git_status = self + .project + .read(cx) + .project_path_git_status(&project_path, cx); + + self.project + .read(cx) + .entry_for_path(&project_path, cx) + .map(|entry| { + entry_git_aware_label_color(git_status, entry.is_ignored, params.selected) + }) + .unwrap_or_else(|| 
params.text_color()) + } else { + params.text_color() + }; + + let title = self + .image_item + .read(cx) + .file + .file_name(cx) .to_string_lossy() .to_string(); Label::new(title) .single_line() - .color(params.text_color()) + .color(label_color) .italic(params.preview) .into_any_element() } @@ -116,7 +137,7 @@ impl Item for ImageView { .map(Icon::from_path) } - fn breadcrumb_location(&self) -> ToolbarItemLocation { + fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation { ToolbarItemLocation::PrimaryLeft } @@ -146,7 +167,7 @@ impl Item for ImageView { } fn breadcrumbs_text_for_image(project: &Project, image: &ImageItem, cx: &AppContext) -> String { - let path = image.path(); + let path = image.file.file_name(cx); if project.visible_worktrees(cx).count() <= 1 { return path.to_string_lossy().to_string(); } @@ -172,9 +193,9 @@ impl SerializableItem for ImageView { _workspace: WeakView, workspace_id: WorkspaceId, item_id: ItemId, - cx: &mut ViewContext, + cx: &mut WindowContext, ) -> Task>> { - cx.spawn(|_pane, mut cx| async move { + cx.spawn(|mut cx| async move { let image_path = IMAGE_VIEWER .get_image_path(item_id, workspace_id)? 
.ok_or_else(|| anyhow::anyhow!("No image path found"))?; @@ -301,7 +322,8 @@ impl Render for ImageView { img(image) .object_fit(ObjectFit::ScaleDown) .max_w_full() - .max_h_full(), + .max_h_full() + .id("img"), ), ) } diff --git a/crates/indexed_docs/src/extension_indexed_docs_provider.rs b/crates/indexed_docs/src/extension_indexed_docs_provider.rs index ed006546fe..25b0f16357 100644 --- a/crates/indexed_docs/src/extension_indexed_docs_provider.rs +++ b/crates/indexed_docs/src/extension_indexed_docs_provider.rs @@ -3,9 +3,33 @@ use std::sync::Arc; use anyhow::Result; use async_trait::async_trait; -use extension::Extension; +use extension::{Extension, ExtensionHostProxy, ExtensionIndexedDocsProviderProxy}; +use gpui::AppContext; -use crate::{IndexedDocsDatabase, IndexedDocsProvider, PackageName, ProviderId}; +use crate::{ + IndexedDocsDatabase, IndexedDocsProvider, IndexedDocsRegistry, PackageName, ProviderId, +}; + +pub fn init(cx: &mut AppContext) { + let proxy = ExtensionHostProxy::default_global(cx); + proxy.register_indexed_docs_provider_proxy(IndexedDocsRegistryProxy { + indexed_docs_registry: IndexedDocsRegistry::global(cx), + }); +} + +struct IndexedDocsRegistryProxy { + indexed_docs_registry: Arc, +} + +impl ExtensionIndexedDocsProviderProxy for IndexedDocsRegistryProxy { + fn register_indexed_docs_provider(&self, extension: Arc, provider_id: Arc) { + self.indexed_docs_registry + .register_provider(Box::new(ExtensionIndexedDocsProvider::new( + extension, + ProviderId(provider_id), + ))); + } +} pub struct ExtensionIndexedDocsProvider { extension: Arc, diff --git a/crates/indexed_docs/src/indexed_docs.rs b/crates/indexed_docs/src/indexed_docs.rs index 95e5c62335..42672cd220 100644 --- a/crates/indexed_docs/src/indexed_docs.rs +++ b/crates/indexed_docs/src/indexed_docs.rs @@ -3,7 +3,14 @@ mod providers; mod registry; mod store; +use gpui::AppContext; + pub use crate::extension_indexed_docs_provider::ExtensionIndexedDocsProvider; pub use 
crate::providers::rustdoc::*; pub use crate::registry::*; pub use crate::store::*; + +pub fn init(cx: &mut AppContext) { + IndexedDocsRegistry::init_global(cx); + extension_indexed_docs_provider::init(cx); +} diff --git a/crates/indexed_docs/src/registry.rs b/crates/indexed_docs/src/registry.rs index fa3425466c..6332e6c4b0 100644 --- a/crates/indexed_docs/src/registry.rs +++ b/crates/indexed_docs/src/registry.rs @@ -20,7 +20,7 @@ impl IndexedDocsRegistry { GlobalIndexedDocsRegistry::global(cx).0.clone() } - pub fn init_global(cx: &mut AppContext) { + pub(crate) fn init_global(cx: &mut AppContext) { GlobalIndexedDocsRegistry::set_global( cx, GlobalIndexedDocsRegistry(Arc::new(Self::new(cx.background_executor().clone()))), diff --git a/crates/indexed_docs/src/store.rs b/crates/indexed_docs/src/store.rs index 059ee69dcd..fa80bf527f 100644 --- a/crates/indexed_docs/src/store.rs +++ b/crates/indexed_docs/src/store.rs @@ -208,7 +208,7 @@ impl IndexedDocsStore { let candidates = items .iter() .enumerate() - .map(|(ix, item_path)| StringMatchCandidate::new(ix, item_path.clone())) + .map(|(ix, item_path)| StringMatchCandidate::new(ix, &item_path)) .collect::>(); let matches = fuzzy::match_strings( diff --git a/crates/inline_completion/Cargo.toml b/crates/inline_completion/Cargo.toml new file mode 100644 index 0000000000..cdcf71c230 --- /dev/null +++ b/crates/inline_completion/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "inline_completion" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/inline_completion.rs" + +[dependencies] +gpui.workspace = true +language.workspace = true diff --git a/crates/inline_completion/LICENSE-GPL b/crates/inline_completion/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/inline_completion/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/editor/src/inline_completion_provider.rs 
b/crates/inline_completion/src/inline_completion.rs similarity index 58% rename from crates/editor/src/inline_completion_provider.rs rename to crates/inline_completion/src/inline_completion.rs index 1085a6294e..17b77ca4bf 100644 --- a/crates/editor/src/inline_completion_provider.rs +++ b/crates/inline_completion/src/inline_completion.rs @@ -1,28 +1,34 @@ -use crate::Direction; use gpui::{AppContext, Model, ModelContext}; use language::Buffer; use std::ops::Range; -use text::{Anchor, Rope}; -pub enum InlayProposal { - Hint(Anchor, project::InlayHint), - Suggestion(Anchor, Rope), +// TODO: Find a better home for `Direction`. +// +// This should live in an ancestor crate of `editor` and `inline_completion`, +// but at time of writing there isn't an obvious spot. +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum Direction { + Prev, + Next, } -pub struct CompletionProposal { - pub inlays: Vec, - pub text: Rope, - pub delete_range: Option>, +#[derive(Clone)] +pub struct InlineCompletion { + pub edits: Vec<(Range, String)>, } pub trait InlineCompletionProvider: 'static + Sized { fn name() -> &'static str; + fn display_name() -> &'static str; + fn show_completions_in_menu() -> bool; + fn show_completions_in_normal_mode() -> bool; fn is_enabled( &self, buffer: &Model, cursor_position: language::Anchor, cx: &AppContext, ) -> bool; + fn is_refreshing(&self) -> bool; fn refresh( &mut self, buffer: Model, @@ -38,22 +44,27 @@ pub trait InlineCompletionProvider: 'static + Sized { cx: &mut ModelContext, ); fn accept(&mut self, cx: &mut ModelContext); - fn discard(&mut self, should_report_inline_completion_event: bool, cx: &mut ModelContext); - fn active_completion_text<'a>( - &'a self, + fn discard(&mut self, cx: &mut ModelContext); + fn suggest( + &mut self, buffer: &Model, cursor_position: language::Anchor, - cx: &'a AppContext, - ) -> Option; + cx: &mut ModelContext, + ) -> Option; } pub trait InlineCompletionProviderHandle { + fn name(&self) -> &'static str; + fn 
display_name(&self) -> &'static str; fn is_enabled( &self, buffer: &Model, cursor_position: language::Anchor, cx: &AppContext, ) -> bool; + fn show_completions_in_menu(&self) -> bool; + fn show_completions_in_normal_mode(&self) -> bool; + fn is_refreshing(&self, cx: &AppContext) -> bool; fn refresh( &self, buffer: Model, @@ -69,19 +80,35 @@ pub trait InlineCompletionProviderHandle { cx: &mut AppContext, ); fn accept(&self, cx: &mut AppContext); - fn discard(&self, should_report_inline_completion_event: bool, cx: &mut AppContext); - fn active_completion_text<'a>( - &'a self, + fn discard(&self, cx: &mut AppContext); + fn suggest( + &self, buffer: &Model, cursor_position: language::Anchor, - cx: &'a AppContext, - ) -> Option; + cx: &mut AppContext, + ) -> Option; } impl InlineCompletionProviderHandle for Model where T: InlineCompletionProvider, { + fn name(&self) -> &'static str { + T::name() + } + + fn display_name(&self) -> &'static str { + T::display_name() + } + + fn show_completions_in_menu(&self) -> bool { + T::show_completions_in_menu() + } + + fn show_completions_in_normal_mode(&self) -> bool { + T::show_completions_in_normal_mode() + } + fn is_enabled( &self, buffer: &Model, @@ -91,6 +118,10 @@ where self.read(cx).is_enabled(buffer, cursor_position, cx) } + fn is_refreshing(&self, cx: &AppContext) -> bool { + self.read(cx).is_refreshing() + } + fn refresh( &self, buffer: Model, @@ -119,19 +150,16 @@ where self.update(cx, |this, cx| this.accept(cx)) } - fn discard(&self, should_report_inline_completion_event: bool, cx: &mut AppContext) { - self.update(cx, |this, cx| { - this.discard(should_report_inline_completion_event, cx) - }) + fn discard(&self, cx: &mut AppContext) { + self.update(cx, |this, cx| this.discard(cx)) } - fn active_completion_text<'a>( - &'a self, + fn suggest( + &self, buffer: &Model, cursor_position: language::Anchor, - cx: &'a AppContext, - ) -> Option { - self.read(cx) - .active_completion_text(buffer, cursor_position, cx) + cx: &mut 
AppContext, + ) -> Option { + self.update(cx, |this, cx| this.suggest(buffer, cursor_position, cx)) } } diff --git a/crates/inline_completion_button/Cargo.toml b/crates/inline_completion_button/Cargo.toml index 13b2bfa2ea..2416e42a9c 100644 --- a/crates/inline_completion_button/Cargo.toml +++ b/crates/inline_completion_button/Cargo.toml @@ -16,16 +16,18 @@ doctest = false anyhow.workspace = true copilot.workspace = true editor.workspace = true +feature_flags.workspace = true fs.workspace = true gpui.workspace = true +inline_completion.workspace = true language.workspace = true paths.workspace = true settings.workspace = true supermaven.workspace = true ui.workspace = true -util.workspace = true workspace.workspace = true zed_actions.workspace = true +zeta.workspace = true [dev-dependencies] copilot = { workspace = true, features = ["test-support"] } diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index 8f727fd2fe..dc616833c4 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -1,10 +1,12 @@ use anyhow::Result; -use copilot::{Copilot, CopilotCodeVerification, Status}; +use copilot::{Copilot, Status}; use editor::{scroll::Autoscroll, Editor}; +use feature_flags::{FeatureFlagAppExt, ZetaFeatureFlag}; use fs::Fs; use gpui::{ - div, Action, AnchorCorner, AppContext, AsyncWindowContext, Entity, IntoElement, ParentElement, - Render, Subscription, View, ViewContext, WeakView, WindowContext, + actions, div, pulsating_between, Action, Animation, AnimationExt, AppContext, + AsyncWindowContext, Corner, Entity, IntoElement, ParentElement, Render, Subscription, View, + ViewContext, WeakView, WindowContext, }; use language::{ language_settings::{ @@ -13,9 +15,8 @@ use language::{ File, Language, }; use settings::{update_settings_file, Settings, SettingsStore}; -use std::{path::Path, 
sync::Arc}; +use std::{path::Path, sync::Arc, time::Duration}; use supermaven::{AccountStatus, Supermaven}; -use util::ResultExt; use workspace::{ create_and_open_local_file, item::ItemHandle, @@ -26,11 +27,12 @@ use workspace::{ StatusItemView, Toast, Workspace, }; use zed_actions::OpenBrowser; +use zeta::RateCompletionModal; + +actions!(zeta, [RateCompletions]); const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot"; -struct CopilotStartingToast; - struct CopilotErrorToast; pub struct InlineCompletionButton { @@ -38,7 +40,9 @@ pub struct InlineCompletionButton { editor_enabled: Option, language: Option>, file: Option>, + inline_completion_provider: Option>, fs: Arc, + workspace: WeakView, } enum SupermavenButtonStatus { @@ -121,7 +125,7 @@ impl Render for InlineCompletionButton { _ => this.update(cx, |this, cx| this.build_copilot_start_menu(cx)), }) }) - .anchor(AnchorCorner::BottomRight) + .anchor(Corner::BottomRight) .trigger( IconButton::new("copilot-icon", icon) .tooltip(|cx| Tooltip::text("GitHub Copilot", cx)), @@ -189,19 +193,59 @@ impl Render for InlineCompletionButton { ), _ => None, }) - .anchor(AnchorCorner::BottomRight) + .anchor(Corner::BottomRight) .trigger( IconButton::new("supermaven-icon", icon) .tooltip(move |cx| Tooltip::text(tooltip_text.clone(), cx)), ), ); } + + InlineCompletionProvider::Zeta => { + if !cx.has_flag::() { + return div(); + } + + let this = cx.view().clone(); + let button = IconButton::new("zeta", IconName::ZedPredict) + .tooltip(|cx| Tooltip::text("Zed Predict", cx)); + + let is_refreshing = self + .inline_completion_provider + .as_ref() + .map_or(false, |provider| provider.is_refreshing(cx)); + + let mut popover_menu = PopoverMenu::new("zeta") + .menu(move |cx| { + Some(this.update(cx, |this, cx| this.build_zeta_context_menu(cx))) + }) + .anchor(Corner::BottomRight); + if is_refreshing { + popover_menu = popover_menu.trigger( + button.with_animation( + "pulsating-label", + 
Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.2, 1.0)), + |icon_button, delta| icon_button.alpha(delta), + ), + ); + } else { + popover_menu = popover_menu.trigger(button); + } + + div().child(popover_menu.into_any_element()) + } } } } impl InlineCompletionButton { - pub fn new(fs: Arc, cx: &mut ViewContext) -> Self { + pub fn new( + workspace: WeakView, + fs: Arc, + cx: &mut ViewContext, + ) -> Self { if let Some(copilot) = Copilot::global(cx) { cx.observe(&copilot, |_, _, cx| cx.notify()).detach() } @@ -214,6 +258,8 @@ impl InlineCompletionButton { editor_enabled: None, language: None, file: None, + inline_completion_provider: None, + workspace, fs, } } @@ -221,7 +267,7 @@ impl InlineCompletionButton { pub fn build_copilot_start_menu(&mut self, cx: &mut ViewContext) -> View { let fs = self.fs.clone(); ContextMenu::build(cx, |menu, _| { - menu.entry("Sign In", None, initiate_sign_in) + menu.entry("Sign In", None, copilot::initiate_sign_in) .entry("Disable Copilot", None, { let fs = fs.clone(); move |cx| hide_copilot(fs.clone(), cx) @@ -328,6 +374,25 @@ impl InlineCompletionButton { }) } + fn build_zeta_context_menu(&self, cx: &mut ViewContext) -> View { + let workspace = self.workspace.clone(); + ContextMenu::build(cx, |menu, cx| { + self.build_language_settings_menu(menu, cx) + .separator() + .entry( + "Rate Completions", + Some(RateCompletions.boxed_clone()), + move |cx| { + workspace + .update(cx, |workspace, cx| { + RateCompletionModal::toggle(workspace, cx) + }) + .ok(); + }, + ) + }) + } + pub fn update_enabled(&mut self, editor: View, cx: &mut ViewContext) { let editor = editor.read(cx); let snapshot = editor.buffer().read(cx).snapshot(cx); @@ -345,6 +410,7 @@ impl InlineCompletionButton { ), ) }; + self.inline_completion_provider = editor.inline_completion_provider(); self.language = language.cloned(); self.file = file; @@ -484,68 +550,3 @@ fn hide_copilot(fs: Arc, cx: &mut AppContext) { .inline_completion_provider = 
Some(InlineCompletionProvider::None); }); } - -pub fn initiate_sign_in(cx: &mut WindowContext) { - let Some(copilot) = Copilot::global(cx) else { - return; - }; - let status = copilot.read(cx).status(); - let Some(workspace) = cx.window_handle().downcast::() else { - return; - }; - match status { - Status::Starting { task } => { - let Some(workspace) = cx.window_handle().downcast::() else { - return; - }; - - let Ok(workspace) = workspace.update(cx, |workspace, cx| { - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "Copilot is starting...", - ), - cx, - ); - workspace.weak_handle() - }) else { - return; - }; - - cx.spawn(|mut cx| async move { - task.await; - if let Some(copilot) = cx.update(|cx| Copilot::global(cx)).ok().flatten() { - workspace - .update(&mut cx, |workspace, cx| match copilot.read(cx).status() { - Status::Authorized => workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "Copilot has started!", - ), - cx, - ), - _ => { - workspace.dismiss_toast( - &NotificationId::unique::(), - cx, - ); - copilot - .update(cx, |copilot, cx| copilot.sign_in(cx)) - .detach_and_log_err(cx); - } - }) - .log_err(); - } - }) - .detach(); - } - _ => { - copilot.update(cx, |this, cx| this.sign_in(cx)).detach(); - workspace - .update(cx, |this, cx| { - this.toggle_modal(cx, |cx| CopilotCodeVerification::new(&copilot, cx)); - }) - .ok(); - } - } -} diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 41285d8222..d3cb1cfda2 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -31,9 +31,9 @@ async-watch.workspace = true clock.workspace = true collections.workspace = true ec4rs.workspace = true +fs.workspace = true futures.workspace = true fuzzy.workspace = true -git.workspace = true globset.workspace = true gpui.workspace = true http_client.workspace = true diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 77fc53705f..c5033d2251 100644 --- 
a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -14,13 +14,15 @@ use crate::{ SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint, }, task_context::RunnableRange, - LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, + LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject, + TreeSitterOptions, }; use anyhow::{anyhow, Context, Result}; use async_watch as watch; use clock::Lamport; pub use clock::ReplicaId; use collections::HashMap; +use fs::MTime; use futures::channel::oneshot; use gpui::{ AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext, @@ -51,7 +53,7 @@ use std::{ path::{Path, PathBuf}, str, sync::{Arc, LazyLock}, - time::{Duration, Instant, SystemTime}, + time::{Duration, Instant}, vec, }; use sum_tree::TreeMap; @@ -66,7 +68,7 @@ pub use text::{ use theme::SyntaxTheme; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; -use util::{debug_panic, RangeExt}; +use util::{debug_panic, maybe, RangeExt}; #[cfg(any(test, feature = "test-support"))] pub use {tree_sitter_rust, tree_sitter_typescript}; @@ -88,26 +90,16 @@ pub enum Capability { pub type BufferRow = u32; -#[derive(Clone)] -enum BufferDiffBase { - Git(Rope), - PastBufferVersion { - buffer: Model, - rope: Rope, - merged_operations: Vec, - }, -} - /// An in-memory representation of a source code file, including its text, /// syntax trees, git status, and diagnostics. pub struct Buffer { text: TextBuffer, - diff_base: Option, - git_diff: git::diff::BufferDiff, + branch_state: Option, + /// Filesystem state, `None` when there is no path. file: Option>, /// The mtime of the file when this buffer was last loaded from /// or saved to disk. - saved_mtime: Option, + saved_mtime: Option, /// The version vector when this buffer was last loaded from /// or saved to disk. 
saved_version: clock::Global, @@ -132,7 +124,6 @@ pub struct Buffer { deferred_ops: OperationQueue, capability: Capability, has_conflict: bool, - diff_base_version: usize, /// Memoize calls to has_changes_since(saved_version). /// The contents of a cell are (self.version, has_changes) at the time of a last call. has_unsaved_edits: Cell<(clock::Global, bool)>, @@ -145,11 +136,15 @@ pub enum ParseStatus { Parsing, } +struct BufferBranchState { + base_buffer: Model, + merged_operations: Vec, +} + /// An immutable, cheaply cloneable representation of a fixed /// state of a buffer. pub struct BufferSnapshot { text: text::BufferSnapshot, - git_diff: git::diff::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>, @@ -342,10 +337,6 @@ pub enum BufferEvent { Reloaded, /// The buffer is in need of a reload ReloadNeeded, - /// The buffer's diff_base changed. - DiffBaseChanged, - /// Buffer's excerpts for a certain diff base were recalculated. - DiffUpdated, /// The buffer's language was changed. LanguageChanged, /// The buffer's syntax trees were updated. @@ -371,8 +362,9 @@ pub trait File: Send + Sync { self.as_local().is_some() } - /// Returns the file's mtime. - fn mtime(&self) -> Option; + /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata + /// only available in some states, such as modification time. + fn disk_state(&self) -> DiskState; /// Returns the path of this file relative to the worktree's root directory. fn path(&self) -> &Arc; @@ -390,14 +382,6 @@ pub trait File: Send + Sync { /// This is needed for looking up project-specific settings. fn worktree_id(&self, cx: &AppContext) -> WorktreeId; - /// Returns whether the file has been deleted. - fn is_deleted(&self) -> bool; - - /// Returns whether the file existed on disk at one point - fn is_created(&self) -> bool { - self.mtime().is_some() - } - /// Converts this file into an [`Any`] trait object. 
fn as_any(&self) -> &dyn Any; @@ -408,6 +392,31 @@ pub trait File: Send + Sync { fn is_private(&self) -> bool; } +/// The file's storage status - whether it's stored (`Present`), and if so when it was last +/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the +/// UI these two states are distinguished. For example, the buffer tab does not display a deletion +/// indicator for new files. +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum DiskState { + /// File created in Zed that has not been saved. + New, + /// File present on the filesystem. + Present { mtime: MTime }, + /// Deleted file that was previously present. + Deleted, +} + +impl DiskState { + /// Returns the file's last known modification time on disk. + pub fn mtime(self) -> Option { + match self { + DiskState::New => None, + DiskState::Present { mtime } => Some(mtime), + DiskState::Deleted => None, + } + } +} + /// The file associated with a buffer, in the case where the file is on the local disk. pub trait LocalFile: File { /// Returns the absolute path of this file @@ -418,11 +427,6 @@ pub trait LocalFile: File { /// Loads the file's contents from disk. fn load_bytes(&self, cx: &AppContext) -> Task>>; - - /// Returns true if the file should not be shared with collaborators. - fn is_private(&self, _: &AppContext) -> bool { - false - } } /// The auto-indent behavior associated with an editing operation. 
@@ -447,6 +451,7 @@ struct AutoindentRequest { before_edit: BufferSnapshot, entries: Vec, is_block_mode: bool, + ignore_empty_lines: bool, } #[derive(Debug, Clone)] @@ -553,7 +558,7 @@ impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> { pub struct Diff { pub(crate) base_version: clock::Global, line_ending: LineEnding, - edits: Vec<(Range, Arc)>, + pub edits: Vec<(Range, Arc)>, } #[derive(Clone, Copy)] @@ -604,7 +609,6 @@ impl Buffer { Self::build( TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()), None, - None, Capability::ReadWrite, ) } @@ -623,7 +627,6 @@ impl Buffer { base_text_normalized, ), None, - None, Capability::ReadWrite, ) } @@ -638,7 +641,6 @@ impl Buffer { Self::build( TextBuffer::new(replica_id, remote_id, base_text.into()), None, - None, capability, ) } @@ -654,7 +656,7 @@ impl Buffer { let buffer_id = BufferId::new(message.id) .with_context(|| anyhow!("Could not deserialize buffer_id"))?; let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text); - let mut this = Self::build(buffer, message.diff_base, file, capability); + let mut this = Self::build(buffer, file, capability); this.text.set_line_ending(proto::deserialize_line_ending( rpc::proto::LineEnding::from_i32(message.line_ending) .ok_or_else(|| anyhow!("missing line_ending"))?, @@ -670,7 +672,6 @@ impl Buffer { id: self.remote_id().into(), file: self.file.as_ref().map(|f| f.to_proto(cx)), base_text: self.base_text().to_string(), - diff_base: self.diff_base().as_ref().map(|h| h.to_string()), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, saved_version: proto::serialize_version(&self.saved_version), saved_mtime: self.saved_mtime.map(|time| time.into()), @@ -744,15 +745,9 @@ impl Buffer { } - /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`]. + /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`] and [`Capability`].
- pub fn build( - buffer: TextBuffer, - diff_base: Option, - file: Option>, - capability: Capability, - ) -> Self { - let saved_mtime = file.as_ref().and_then(|file| file.mtime()); + pub fn build(buffer: TextBuffer, file: Option>, capability: Capability) -> Self { + let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime()); let snapshot = buffer.snapshot(); - let git_diff = git::diff::BufferDiff::new(&snapshot); let syntax_map = Mutex::new(SyntaxMap::new(&snapshot)); Self { saved_mtime, @@ -763,12 +758,7 @@ impl Buffer { was_dirty_before_starting_transaction: None, has_unsaved_edits: Cell::new((buffer.version(), false)), text: buffer, - diff_base: diff_base.map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - BufferDiffBase::Git(Rope::from(raw_diff_base)) - }), - diff_base_version: 0, - git_diff, + branch_state: None, file, capability, syntax_map, @@ -802,7 +792,6 @@ impl Buffer { BufferSnapshot { text, syntax, - git_diff: self.git_diff.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -815,21 +804,15 @@ impl Buffer { let this = cx.handle(); cx.new_model(|cx| { let mut branch = Self { - diff_base: Some(BufferDiffBase::PastBufferVersion { - buffer: this.clone(), - rope: self.as_rope().clone(), + branch_state: Some(BufferBranchState { + base_buffer: this.clone(), merged_operations: Default::default(), }), language: self.language.clone(), has_conflict: self.has_conflict, has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()), _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)], - ..Self::build( - self.text.branch(), - None, - self.file.clone(), - self.capability(), - ) + ..Self::build(self.text.branch(), self.file.clone(), self.capability()) }; if let Some(language_registry) = self.language_registry() { branch.set_language_registry(language_registry); @@ -848,7 +831,7 @@ impl Buffer { /// If `ranges` is empty, then all 
changes will be applied. This buffer must /// be a branch buffer to call this method. pub fn merge_into_base(&mut self, ranges: Vec>, cx: &mut ModelContext) { - let Some(base_buffer) = self.diff_base_buffer() else { + let Some(base_buffer) = self.base_buffer() else { debug_panic!("not a branch buffer"); return; }; @@ -884,14 +867,13 @@ } let operation = base_buffer.update(cx, |base_buffer, cx| { - cx.emit(BufferEvent::DiffBaseChanged); base_buffer.edit(edits, None, cx) }); if let Some(operation) = operation { - if let Some(BufferDiffBase::PastBufferVersion { + if let Some(BufferBranchState { merged_operations, .. - }) = &mut self.diff_base + }) = &mut self.branch_state { merged_operations.push(operation); } @@ -907,9 +890,9 @@ let BufferEvent::Operation { operation, .. } = event else { return; }; - let Some(BufferDiffBase::PastBufferVersion { + let Some(BufferBranchState { merged_operations, .. - }) = &mut self.diff_base + }) = &mut self.branch_state else { return; }; @@ -928,8 +911,6 @@ let counts = [(timestamp, u32::MAX)].into_iter().collect(); self.undo_operations(counts, cx); } - - self.diff_base_version += 1; } #[cfg(test)] @@ -954,7 +935,7 @@ } /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk. - pub fn saved_mtime(&self) -> Option { + pub fn saved_mtime(&self) -> Option { self.saved_mtime } @@ -989,7 +970,7 @@ pub fn did_save( &mut self, version: clock::Global, - mtime: Option, + mtime: Option, cx: &mut ModelContext, ) { self.saved_version = version; @@ -1014,7 +995,7 @@ self.reload_task = Some(cx.spawn(|this, mut cx| async move { let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| { let file = this.file.as_ref()?.as_local()?; - Some((file.mtime(), file.load(cx))) + Some((file.disk_state().mtime(), file.load(cx))) })?
else { return Ok(()); @@ -1055,7 +1036,7 @@ impl Buffer { &mut self, version: clock::Global, line_ending: LineEnding, - mtime: Option, + mtime: Option, cx: &mut ModelContext, ) { self.saved_version = version; @@ -1070,6 +1051,7 @@ impl Buffer { /// Updates the [`File`] backing this buffer. This should be called when /// the file has changed or has been deleted. pub fn file_updated(&mut self, new_file: Arc, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); let mut file_changed = false; if let Some(old_file) = self.file.as_ref() { @@ -1077,21 +1059,12 @@ impl Buffer { file_changed = true; } - if new_file.is_deleted() { - if !old_file.is_deleted() { - file_changed = true; - if !self.is_dirty() { - cx.emit(BufferEvent::DirtyChanged); - } - } - } else { - let new_mtime = new_file.mtime(); - if new_mtime != old_file.mtime() { - file_changed = true; - - if !self.is_dirty() { - cx.emit(BufferEvent::ReloadNeeded); - } + let old_state = old_file.disk_state(); + let new_state = new_file.disk_state(); + if old_state != new_state { + file_changed = true; + if !was_dirty && matches!(new_state, DiskState::Present { .. }) { + cx.emit(BufferEvent::ReloadNeeded) } } } else { @@ -1101,79 +1074,16 @@ impl Buffer { self.file = Some(new_file); if file_changed { self.non_text_state_update_count += 1; + if was_dirty != self.is_dirty() { + cx.emit(BufferEvent::DirtyChanged); + } cx.emit(BufferEvent::FileHandleChanged); cx.notify(); } } - /// Returns the current diff base, see [`Buffer::set_diff_base`]. - pub fn diff_base(&self) -> Option<&Rope> { - match self.diff_base.as_ref()? { - BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => { - Some(rope) - } - } - } - - /// Sets the text that will be used to compute a Git diff - /// against the buffer text. 
- pub fn set_diff_base(&mut self, diff_base: Option, cx: &ModelContext) { - self.diff_base = diff_base.map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - BufferDiffBase::Git(Rope::from(raw_diff_base)) - }); - self.diff_base_version += 1; - if let Some(recalc_task) = self.recalculate_diff(cx) { - cx.spawn(|buffer, mut cx| async move { - recalc_task.await; - buffer - .update(&mut cx, |_, cx| { - cx.emit(BufferEvent::DiffBaseChanged); - }) - .ok(); - }) - .detach(); - } - } - - /// Returns a number, unique per diff base set to the buffer. - pub fn diff_base_version(&self) -> usize { - self.diff_base_version - } - - pub fn diff_base_buffer(&self) -> Option> { - match self.diff_base.as_ref()? { - BufferDiffBase::Git(_) => None, - BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()), - } - } - - /// Recomputes the diff. - pub fn recalculate_diff(&self, cx: &ModelContext) -> Option> { - let diff_base_rope = match self.diff_base.as_ref()? { - BufferDiffBase::Git(rope) => rope.clone(), - BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(), - }; - - let snapshot = self.snapshot(); - let mut diff = self.git_diff.clone(); - let diff = cx.background_executor().spawn(async move { - diff.update(&diff_base_rope, &snapshot).await; - (diff, diff_base_rope) - }); - - Some(cx.spawn(|this, mut cx| async move { - let (buffer_diff, diff_base_rope) = diff.await; - this.update(&mut cx, |this, cx| { - this.git_diff = buffer_diff; - this.non_text_state_update_count += 1; - if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base { - *rope = diff_base_rope; - } - cx.emit(BufferEvent::DiffUpdated); - }) - .ok(); - })) + pub fn base_buffer(&self) -> Option> { + Some(self.branch_state.as_ref()?.base_buffer.clone()) } /// Returns the primary [`Language`] assigned to this [`Buffer`]. 
@@ -1366,7 +1276,7 @@ impl Buffer { let autoindent_requests = self.autoindent_requests.clone(); Some(async move { - let mut indent_sizes = BTreeMap::new(); + let mut indent_sizes = BTreeMap::::new(); for request in autoindent_requests { // Resolve each edited range to its row in the current buffer and in the // buffer before this batch of edits. @@ -1460,10 +1370,12 @@ impl Buffer { let suggested_indent = indent_sizes .get(&suggestion.basis_row) .copied() + .map(|e| e.0) .unwrap_or_else(|| { snapshot.indent_size_for_line(suggestion.basis_row) }) .with_delta(suggestion.delta, language_indent_size); + if old_suggestions.get(&new_row).map_or( true, |(old_indentation, was_within_error)| { @@ -1471,7 +1383,10 @@ impl Buffer { && (!suggestion.within_error || *was_within_error) }, ) { - indent_sizes.insert(new_row, suggested_indent); + indent_sizes.insert( + new_row, + (suggested_indent, request.ignore_empty_lines), + ); } } } @@ -1479,10 +1394,12 @@ impl Buffer { if let (true, Some(original_indent_column)) = (request.is_block_mode, original_indent_column) { - let new_indent = indent_sizes - .get(&row_range.start) - .copied() - .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start)); + let new_indent = + if let Some((indent, _)) = indent_sizes.get(&row_range.start) { + *indent + } else { + snapshot.indent_size_for_line(row_range.start) + }; let delta = new_indent.len as i64 - original_indent_column as i64; if delta != 0 { for row in row_range.skip(1) { @@ -1497,7 +1414,7 @@ impl Buffer { Ordering::Equal => {} } } - size + (size, request.ignore_empty_lines) }); } } @@ -1508,6 +1425,15 @@ impl Buffer { } indent_sizes + .into_iter() + .filter_map(|(row, (indent, ignore_empty_lines))| { + if ignore_empty_lines && snapshot.line_len(row) == 0 { + None + } else { + Some((row, indent)) + } + }) + .collect() }) } @@ -1742,15 +1668,10 @@ impl Buffer { pub fn is_dirty(&self) -> bool { self.capability != Capability::ReadOnly && (self.has_conflict - || 
self.has_unsaved_edits() - || self - .file - .as_ref() - .map_or(false, |file| file.is_deleted() || !file.is_created())) - } - - pub fn is_deleted(&self) -> bool { - self.file.as_ref().map_or(false, |file| file.is_deleted()) + || self.file.as_ref().map_or(false, |file| { + matches!(file.disk_state(), DiskState::New | DiskState::Deleted) + }) + || self.has_unsaved_edits()) } /// Checks if the buffer and its file have both changed since the buffer @@ -1762,7 +1683,16 @@ impl Buffer { let Some(file) = self.file.as_ref() else { return false; }; - file.is_deleted() || (file.mtime() > self.saved_mtime && self.has_unsaved_edits()) + match file.disk_state() { + DiskState::New => false, + DiskState::Present { mtime } => match self.saved_mtime { + Some(saved_mtime) => { + mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits() + } + None => true, + }, + DiskState::Deleted => true, + } } /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text. @@ -2048,6 +1978,7 @@ impl Buffer { before_edit, entries, is_block_mode: matches!(mode, AutoindentMode::Block { .. }), + ignore_empty_lines: false, })); } @@ -2075,6 +2006,30 @@ impl Buffer { cx.notify(); } + pub fn autoindent_ranges(&mut self, ranges: I, cx: &mut ModelContext) + where + I: IntoIterator>, + T: ToOffset + Copy, + { + let before_edit = self.snapshot(); + let entries = ranges + .into_iter() + .map(|range| AutoindentRequestEntry { + range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end), + first_line_is_new: true, + indent_size: before_edit.language_indent_size_at(range.start, cx), + original_indent_column: None, + }) + .collect(); + self.autoindent_requests.push(Arc::new(AutoindentRequest { + before_edit, + entries, + is_block_mode: false, + ignore_empty_lines: true, + })); + self.request_autoindent(cx); + } + // Inserts newlines at the given position to create an empty line, returning the start of the new line. 
// You can also request the insertion of empty lines above and below the line starting at the returned point. pub fn insert_empty_line( @@ -2968,10 +2923,13 @@ impl BufferSnapshot { (start..end, word_kind) } - /// Returns the range for the closes syntax node enclosing the given range. - pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + /// Returns the closest syntax node enclosing the given range. + pub fn syntax_ancestor<'a, T: ToOffset>( + &'a self, + range: Range, + ) -> Option> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut result: Option> = None; + let mut result: Option> = None; 'outer: for layer in self .syntax .layers_for_range(range.clone(), &self.text, true) @@ -3001,7 +2959,7 @@ impl BufferSnapshot { } let left_node = cursor.node(); - let mut layer_result = left_node.byte_range(); + let mut layer_result = left_node; // For an empty range, try to find another node immediately to the right of the range. if left_node.end_byte() == range.start { @@ -3024,13 +2982,13 @@ impl BufferSnapshot { // If both nodes are the same in that regard, favor the right one. 
if let Some(right_node) = right_node { if right_node.is_named() || !left_node.is_named() { - layer_result = right_node.byte_range(); + layer_result = right_node; } } } if let Some(previous_result) = &result { - if previous_result.len() < layer_result.len() { + if previous_result.byte_range().len() < layer_result.byte_range().len() { continue; } } @@ -3073,6 +3031,48 @@ impl BufferSnapshot { Some(items) } + pub fn outline_range_containing(&self, range: Range) -> Option> { + let range = range.to_offset(self); + let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| { + grammar.outline_config.as_ref().map(|c| &c.query) + }); + let configs = matches + .grammars() + .iter() + .map(|g| g.outline_config.as_ref().unwrap()) + .collect::>(); + + while let Some(mat) = matches.peek() { + let config = &configs[mat.grammar_index]; + let containing_item_node = maybe!({ + let item_node = mat.captures.iter().find_map(|cap| { + if cap.index == config.item_capture_ix { + Some(cap.node) + } else { + None + } + })?; + + let item_byte_range = item_node.byte_range(); + if item_byte_range.end < range.start || item_byte_range.start > range.end { + None + } else { + Some(item_node) + } + }); + + if let Some(item_node) = containing_item_node { + return Some( + Point::from_ts_point(item_node.start_position()) + ..Point::from_ts_point(item_node.end_position()), + ); + } + + matches.advance(); + } + None + } + pub fn outline_items_containing( &self, range: Range, @@ -3293,6 +3293,14 @@ impl BufferSnapshot { }) } + pub fn function_body_fold_ranges( + &self, + within: Range, + ) -> impl Iterator> + '_ { + self.text_object_ranges(within, TreeSitterOptions::default()) + .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range)) + } + /// For each grammar in the language, runs the provided /// [`tree_sitter::Query`] against the given range. 
pub fn matches( @@ -3351,6 +3359,72 @@ impl BufferSnapshot { }) } + pub fn text_object_ranges( + &self, + range: Range, + options: TreeSitterOptions, + ) -> impl Iterator, TextObject)> + '_ { + let range = range.start.to_offset(self).saturating_sub(1) + ..self.len().min(range.end.to_offset(self) + 1); + + let mut matches = + self.syntax + .matches_with_options(range.clone(), &self.text, options, |grammar| { + grammar.text_object_config.as_ref().map(|c| &c.query) + }); + + let configs = matches + .grammars() + .iter() + .map(|grammar| grammar.text_object_config.as_ref()) + .collect::>(); + + let mut captures = Vec::<(Range, TextObject)>::new(); + + iter::from_fn(move || loop { + while let Some(capture) = captures.pop() { + if capture.0.overlaps(&range) { + return Some(capture); + } + } + + let mat = matches.peek()?; + + let Some(config) = configs[mat.grammar_index].as_ref() else { + matches.advance(); + continue; + }; + + for capture in mat.captures { + let Some(ix) = config + .text_objects_by_capture_ix + .binary_search_by_key(&capture.index, |e| e.0) + .ok() + else { + continue; + }; + let text_object = config.text_objects_by_capture_ix[ix].1; + let byte_range = capture.node.byte_range(); + + let mut found = false; + for (range, existing) in captures.iter_mut() { + if existing == &text_object { + range.start = range.start.min(byte_range.start); + range.end = range.end.max(byte_range.end); + found = true; + break; + } + } + + if !found { + captures.push((byte_range, text_object)); + } + } + + matches.advance(); + }) + } + /// Returns enclosing bracket ranges containing the given range pub fn enclosing_bracket_ranges( &self, @@ -3856,38 +3930,6 @@ impl BufferSnapshot { }) } - /// Whether the buffer contains any Git changes. - pub fn has_git_diff(&self) -> bool { - !self.git_diff.is_empty() - } - - /// Returns all the Git diff hunks intersecting the given row range. 
- #[cfg(any(test, feature = "test-support"))] - pub fn git_diff_hunks_in_row_range( - &self, - range: Range, - ) -> impl '_ + Iterator { - self.git_diff.hunks_in_row_range(range, self) - } - - /// Returns all the Git diff hunks intersecting the given - /// range. - pub fn git_diff_hunks_intersecting_range( - &self, - range: Range, - ) -> impl '_ + Iterator { - self.git_diff.hunks_intersecting_range(range, self) - } - - /// Returns all the Git diff hunks intersecting the given - /// range, in reverse order. - pub fn git_diff_hunks_intersecting_range_rev( - &self, - range: Range, - ) -> impl '_ + Iterator { - self.git_diff.hunks_intersecting_range_rev(range, self) - } - /// Returns if the buffer contains any diagnostics. pub fn has_diagnostics(&self) -> bool { !self.diagnostics.is_empty() @@ -3901,14 +3943,14 @@ impl BufferSnapshot { ) -> impl 'a + Iterator> where T: 'a + Clone + ToOffset, - O: 'a + FromAnchor + Ord, + O: 'a + FromAnchor, { let mut iterators: Vec<_> = self .diagnostics .iter() .map(|(_, collection)| { collection - .range::(search_range.clone(), self, true, reversed) + .range::(search_range.clone(), self, true, reversed) .peekable() }) .collect(); @@ -3922,7 +3964,7 @@ impl BufferSnapshot { let cmp = a .range .start - .cmp(&b.range.start) + .cmp(&b.range.start, self) // when range is equal, sort by diagnostic severity .then(a.diagnostic.severity.cmp(&b.diagnostic.severity)) // and stabilize order with group_id @@ -3933,7 +3975,13 @@ impl BufferSnapshot { cmp } })?; - iterators[next_ix].next() + iterators[next_ix] + .next() + .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry { + diagnostic, + range: FromAnchor::from_anchor(&range.start, self) + ..FromAnchor::from_anchor(&range.end, self), + }) }) } @@ -3971,12 +4019,12 @@ impl BufferSnapshot { } /// Returns an iterator over the diagnostics for the given group. 
- pub fn diagnostic_group<'a, O>( - &'a self, + pub fn diagnostic_group( + &self, group_id: usize, - ) -> impl 'a + Iterator> + ) -> impl Iterator> + '_ where - O: 'a + FromAnchor, + O: FromAnchor + 'static, { self.diagnostics .iter() @@ -4032,7 +4080,6 @@ impl Clone for BufferSnapshot { fn clone(&self) -> Self { Self { text: self.text.clone(), - git_diff: self.git_diff.clone(), syntax: self.syntax.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), @@ -4403,7 +4450,7 @@ impl File for TestFile { None } - fn mtime(&self) -> Option { + fn disk_state(&self) -> DiskState { unimplemented!() } @@ -4415,10 +4462,6 @@ impl File for TestFile { WorktreeId::from_usize(0) } - fn is_deleted(&self) -> bool { - unimplemented!() - } - fn as_any(&self) -> &dyn std::any::Any { unimplemented!() } @@ -4500,7 +4543,7 @@ impl CharClassifier { self.kind(c) == CharKind::Punctuation } - pub fn kind(&self, c: char) -> CharKind { + pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind { if c.is_whitespace() { return CharKind::Whitespace; } else if c.is_alphanumeric() || c == '_' { @@ -4510,7 +4553,7 @@ impl CharClassifier { if let Some(scope) = &self.scope { if let Some(characters) = scope.word_characters() { if characters.contains(&c) { - if c == '-' && !self.for_completion && !self.ignore_punctuation { + if c == '-' && !self.for_completion && !ignore_punctuation { return CharKind::Punctuation; } return CharKind::Word; @@ -4518,12 +4561,16 @@ impl CharClassifier { } } - if self.ignore_punctuation { + if ignore_punctuation { CharKind::Word } else { CharKind::Punctuation } } + + pub fn kind(&self, c: char) -> CharKind { + self.kind_with(c, self.ignore_punctuation) + } } /// Find all of the ranges of whitespace that occur at the ends of lines diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index a33a21cb0f..a90651d0d7 100644 --- a/crates/language/src/buffer_tests.rs +++ 
b/crates/language/src/buffer_tests.rs @@ -6,7 +6,6 @@ use crate::Buffer; use clock::ReplicaId; use collections::BTreeMap; use futures::FutureExt as _; -use git::diff::assert_hunks; use gpui::{AppContext, BorrowAppContext, Model}; use gpui::{Context, TestAppContext}; use indoc::indoc; @@ -20,6 +19,7 @@ use std::{ sync::LazyLock, time::{Duration, Instant}, }; +use syntax_map::TreeSitterOptions; use text::network::Network; use text::{BufferId, LineEnding, LineIndent}; use text::{Point, ToPoint}; @@ -915,6 +915,39 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) { } } +#[gpui::test] +fn test_text_objects(cx: &mut AppContext) { + let (text, ranges) = marked_text_ranges( + indoc! {r#" + impl Hello { + fn say() -> u8 { return /* ˇhi */ 1 } + }"# + }, + false, + ); + + let buffer = + cx.new_model(|cx| Buffer::local(text.clone(), cx).with_language(Arc::new(rust_lang()), cx)); + let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); + + let matches = snapshot + .text_object_ranges(ranges[0].clone(), TreeSitterOptions::default()) + .map(|(range, text_object)| (&text[range], text_object)) + .collect::>(); + + assert_eq!( + matches, + &[ + ("/* hi */", TextObject::AroundComment), + ("return /* hi */ 1", TextObject::InsideFunction), + ( + "fn say() -> u8 { return /* hi */ 1 }", + TextObject::AroundFunction + ), + ], + ) +} + #[gpui::test] fn test_enclosing_bracket_ranges(cx: &mut AppContext) { let mut assert = |selection_text, range_markers| { @@ -1071,20 +1104,32 @@ fn test_range_for_syntax_ancestor(cx: &mut AppContext) { let snapshot = buffer.snapshot(); assert_eq!( - snapshot.range_for_syntax_ancestor(empty_range_at(text, "|")), - Some(range_of(text, "|")) + snapshot + .syntax_ancestor(empty_range_at(text, "|")) + .unwrap() + .byte_range(), + range_of(text, "|") ); assert_eq!( - snapshot.range_for_syntax_ancestor(range_of(text, "|")), - Some(range_of(text, "|c|")) + snapshot + .syntax_ancestor(range_of(text, "|")) + .unwrap() + .byte_range(), + 
range_of(text, "|c|") ); assert_eq!( - snapshot.range_for_syntax_ancestor(range_of(text, "|c|")), - Some(range_of(text, "|c| {}")) + snapshot + .syntax_ancestor(range_of(text, "|c|")) + .unwrap() + .byte_range(), + range_of(text, "|c| {}") ); assert_eq!( - snapshot.range_for_syntax_ancestor(range_of(text, "|c| {}")), - Some(range_of(text, "(|c| {})")) + snapshot + .syntax_ancestor(range_of(text, "|c| {}")) + .unwrap() + .byte_range(), + range_of(text, "(|c| {})") ); buffer @@ -2574,15 +2619,6 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { ); }); - // The branch buffer maintains a diff with respect to its base buffer. - start_recalculating_diff(&branch, cx); - cx.run_until_parked(); - assert_diff_hunks( - &branch, - cx, - &[(1..2, "", "1.5\n"), (3..4, "three\n", "THREE\n")], - ); - // Edits to the base are applied to the branch. base.update(cx, |buffer, cx| { buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx) @@ -2592,21 +2628,6 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { assert_eq!(buffer.text(), "ZERO\none\n1.5\ntwo\nTHREE\n"); }); - // Until the git diff recalculation is complete, the git diff references - // the previous content of the base buffer, so that it stays in sync. - start_recalculating_diff(&branch, cx); - assert_diff_hunks( - &branch, - cx, - &[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")], - ); - cx.run_until_parked(); - assert_diff_hunks( - &branch, - cx, - &[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")], - ); - // Edits to any replica of the base are applied to the branch. 
base_replica.update(cx, |buffer, cx| { buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "2.5\n")], None, cx) @@ -2697,29 +2718,6 @@ fn test_undo_after_merge_into_base(cx: &mut TestAppContext) { branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk")); } -fn start_recalculating_diff(buffer: &Model, cx: &mut TestAppContext) { - buffer - .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) - .detach(); -} - -#[track_caller] -fn assert_diff_hunks( - buffer: &Model, - cx: &mut TestAppContext, - expected_hunks: &[(Range, &str, &str)], -) { - let (snapshot, diff_base) = buffer.read_with(cx, |buffer, _| { - (buffer.snapshot(), buffer.diff_base().unwrap().to_string()) - }); - assert_hunks( - snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), - &snapshot, - &diff_base, - expected_hunks, - ); -} - #[gpui::test(iterations = 100)] fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { let min_peers = env::var("MIN_PEERS") @@ -3117,8 +3115,8 @@ fn html_lang() -> Language { .with_injection_query( r#" (script_element - (raw_text) @content - (#set! "language" "javascript")) + (raw_text) @injection.content + (#set! injection.language "javascript")) "#, ) .unwrap() @@ -3140,15 +3138,15 @@ fn erb_lang() -> Language { .with_injection_query( r#" ( - (code) @content - (#set! "language" "ruby") - (#set! "combined") + (code) @injection.content + (#set! injection.language "ruby") + (#set! injection.combined) ) ( - (content) @content - (#set! "language" "html") - (#set! "combined") + (content) @injection.content + (#set! injection.language "html") + (#set! 
injection.combined) ) "#, ) @@ -3182,6 +3180,20 @@ fn rust_lang() -> Language { "#, ) .unwrap() + .with_text_object_query( + r#" + (function_item + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + + (line_comment)+ @comment.around + + (block_comment) @comment.around + "#, + ) + .unwrap() .with_outline_query( r#" (line_comment) @annotation @@ -3266,11 +3278,11 @@ pub fn markdown_lang() -> Language { r#" (fenced_code_block (info_string - (language) @language) - (code_fence_content) @content) + (language) @injection.language) + (code_fence_content) @injection.content) - ((inline) @content - (#set! "language" "markdown-inline")) + ((inline) @injection.content + (#set! injection.language "markdown-inline")) "#, ) .unwrap() diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 38d4216bee..2319cb1bfb 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -1,4 +1,5 @@ use crate::{range_to_lsp, Diagnostic}; +use anyhow::Result; use collections::HashMap; use lsp::LanguageServerId; use std::{ @@ -52,18 +53,18 @@ pub struct Summary { } impl DiagnosticEntry { - /// Returns a raw LSP diagnostic ssed to provide diagnostic context to LSP + /// Returns a raw LSP diagnostic used to provide diagnostic context to LSP /// codeAction request - pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic { + pub fn to_lsp_diagnostic_stub(&self) -> Result { let code = self .diagnostic .code .clone() .map(lsp::NumberOrString::String); - let range = range_to_lsp(self.range.clone()); + let range = range_to_lsp(self.range.clone())?; - lsp::Diagnostic { + Ok(lsp::Diagnostic { code, range, severity: Some(self.diagnostic.severity), @@ -71,7 +72,7 @@ impl DiagnosticEntry { message: self.diagnostic.message.clone(), data: self.diagnostic.data.clone(), ..Default::default() - } + }) } } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 580955a98b..793513e025 100644 --- 
a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -30,7 +30,10 @@ use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; pub use language_registry::{LanguageName, LoadedLanguage}; -use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName}; +use lsp::{ + CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions, + LanguageServerName, +}; use parking_lot::Mutex; use regex::Regex; use schemars::{ @@ -75,7 +78,7 @@ pub use language_registry::{ }; pub use lsp::LanguageServerId; pub use outline::*; -pub use syntax_map::{OwnedSyntaxLayer, SyntaxLayer}; +pub use syntax_map::{OwnedSyntaxLayer, SyntaxLayer, ToTreeSitterPoint, TreeSitterOptions}; pub use text::{AnchorRangeExt, LineEnding}; pub use tree_sitter::{Node, Parser, Tree, TreeCursor}; @@ -126,6 +129,10 @@ pub static PLAIN_TEXT: LazyLock> = LazyLock::new(|| { LanguageConfig { name: "Plain Text".into(), soft_wrap: Some(SoftWrap::EditorWidth), + matcher: LanguageMatcher { + path_suffixes: vec!["txt".to_owned()], + first_line_pattern: None, + }, ..Default::default() }, None, @@ -201,13 +208,14 @@ impl CachedLspAdapter { pub async fn get_language_server_command( self: Arc, delegate: Arc, + toolchains: Arc, binary_options: LanguageServerBinaryOptions, cx: &mut AsyncAppContext, ) -> Result { let cached_binary = self.cached_binary.lock().await; self.adapter .clone() - .get_language_server_command(delegate, binary_options, cached_binary, cx) + .get_language_server_command(delegate, toolchains, binary_options, cached_binary, cx) .await } @@ -281,6 +289,7 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, delegate: Arc, + toolchains: Arc, binary_options: LanguageServerBinaryOptions, mut cached_binary: futures::lock::MutexGuard<'a, Option>, cx: &'a mut AsyncAppContext, @@ -298,7 +307,7 @@ pub trait LspAdapter: 'static 
+ Send + Sync { // because we don't want to download and overwrite our global one // for each worktree we might have open. if binary_options.allow_path_lookup { - if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { + if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), toolchains, cx).await { log::info!( "found user-installed language server for {}. path: {:?}, arguments: {:?}", self.name().0, @@ -357,6 +366,7 @@ async fn check_if_user_installed( &self, _: &dyn LspAdapterDelegate, + _: Arc, _: &AsyncAppContext, ) -> Option { None @@ -375,6 +385,15 @@ None } + async fn check_if_version_installed( + &self, + _version: &(dyn 'static + Send + Any), + _container_dir: &PathBuf, + _delegate: &dyn LspAdapterDelegate, + ) -> Option { + None + } + async fn fetch_server_binary( &self, latest_version: Box, @@ -481,6 +500,11 @@ fn language_ids(&self) -> HashMap { Default::default() } + + /// Allows the adapter to customize the `InitializeParams` sent to the language server.
+ fn prepare_initialize_params(&self, original: InitializeParams) -> Result { + Ok(original) + } } async fn try_fetch_server_binary( @@ -501,14 +525,23 @@ async fn try_fetch_server_binary .fetch_latest_server_version(delegate.as_ref()) .await?; - log::info!("downloading language server {:?}", name.0); - delegate.update_status(adapter.name(), LanguageServerBinaryStatus::Downloading); - let binary = adapter - .fetch_server_binary(latest_version, container_dir, delegate.as_ref()) - .await; + if let Some(binary) = adapter + .check_if_version_installed(latest_version.as_ref(), &container_dir, delegate.as_ref()) + .await + { + log::info!("language server {:?} is already installed", name.0); + delegate.update_status(name.clone(), LanguageServerBinaryStatus::None); + Ok(binary) + } else { + log::info!("downloading language server {:?}", name.0); + delegate.update_status(adapter.name(), LanguageServerBinaryStatus::Downloading); + let binary = adapter + .fetch_server_binary(latest_version, container_dir, delegate.as_ref()) + .await; - delegate.update_status(name.clone(), LanguageServerBinaryStatus::None); - binary + delegate.update_status(name.clone(), LanguageServerBinaryStatus::None); + binary + } } #[derive(Clone, Debug, Default, PartialEq, Eq)] @@ -833,6 +866,7 @@ pub struct Grammar { pub(crate) runnable_config: Option, pub(crate) indents_config: Option, pub outline_config: Option, + pub text_object_config: Option, pub embedding_config: Option, pub(crate) injection_config: Option, pub(crate) override_config: Option, @@ -858,6 +892,44 @@ pub struct OutlineConfig { pub annotation_capture_ix: Option, } +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum TextObject { + InsideFunction, + AroundFunction, + InsideClass, + AroundClass, + InsideComment, + AroundComment, +} + +impl TextObject { + pub fn from_capture_name(name: &str) -> Option { + match name { + "function.inside" => Some(TextObject::InsideFunction), + "function.around" => Some(TextObject::AroundFunction), + 
"class.inside" => Some(TextObject::InsideClass), + "class.around" => Some(TextObject::AroundClass), + "comment.inside" => Some(TextObject::InsideComment), + "comment.around" => Some(TextObject::AroundComment), + _ => None, + } + } + + pub fn around(&self) -> Option { + match self { + TextObject::InsideFunction => Some(TextObject::AroundFunction), + TextObject::InsideClass => Some(TextObject::AroundClass), + TextObject::InsideComment => Some(TextObject::AroundComment), + _ => None, + } + } +} + +pub struct TextObjectConfig { + pub query: Query, + pub text_objects_by_capture_ix: Vec<(u32, TextObject)>, +} + #[derive(Debug)] pub struct EmbeddingConfig { pub query: Query, @@ -935,6 +1007,7 @@ impl Language { highlights_query: None, brackets_config: None, outline_config: None, + text_object_config: None, embedding_config: None, indents_config: None, injection_config: None, @@ -1005,7 +1078,12 @@ impl Language { if let Some(query) = queries.runnables { self = self .with_runnable_query(query.as_ref()) - .context("Error loading tests query")?; + .context("Error loading runnables query")?; + } + if let Some(query) = queries.text_objects { + self = self + .with_text_object_query(query.as_ref()) + .context("Error loading textobject query")?; } Ok(self) } @@ -1082,6 +1160,26 @@ impl Language { Ok(self) } + pub fn with_text_object_query(mut self, source: &str) -> Result { + let grammar = self + .grammar_mut() + .ok_or_else(|| anyhow!("cannot mutate grammar"))?; + let query = Query::new(&grammar.ts_language, source)?; + + let mut text_objects_by_capture_ix = Vec::new(); + for (ix, name) in query.capture_names().iter().enumerate() { + if let Some(text_object) = TextObject::from_capture_name(name) { + text_objects_by_capture_ix.push((ix as u32, text_object)); + } + } + + grammar.text_object_config = Some(TextObjectConfig { + query, + text_objects_by_capture_ix, + }); + Ok(self) + } + pub fn with_embedding_query(mut self, source: &str) -> Result { let grammar = self .grammar_mut() 
@@ -1175,23 +1273,45 @@ impl Language { .ok_or_else(|| anyhow!("cannot mutate grammar"))?; let query = Query::new(&grammar.ts_language, source)?; let mut language_capture_ix = None; + let mut injection_language_capture_ix = None; let mut content_capture_ix = None; + let mut injection_content_capture_ix = None; get_capture_indices( &query, &mut [ ("language", &mut language_capture_ix), + ("injection.language", &mut injection_language_capture_ix), ("content", &mut content_capture_ix), + ("injection.content", &mut injection_content_capture_ix), ], ); + language_capture_ix = match (language_capture_ix, injection_language_capture_ix) { + (None, Some(ix)) => Some(ix), + (Some(_), Some(_)) => { + return Err(anyhow!( + "both language and injection.language captures are present" + )); + } + _ => language_capture_ix, + }; + content_capture_ix = match (content_capture_ix, injection_content_capture_ix) { + (None, Some(ix)) => Some(ix), + (Some(_), Some(_)) => { + return Err(anyhow!( + "both content and injection.content captures are present" + )); + } + _ => content_capture_ix, + }; let patterns = (0..query.pattern_count()) .map(|ix| { let mut config = InjectionPatternConfig::default(); for setting in query.property_settings(ix) { match setting.key.as_ref() { - "language" => { + "language" | "injection.language" => { config.language.clone_from(&setting.value); } - "combined" => { + "combined" | "injection.combined" => { config.combined = true; } _ => {} @@ -1407,6 +1527,10 @@ impl Language { pub fn prettier_parser_name(&self) -> Option<&str> { self.config.prettier_parser_name.as_deref() } + + pub fn config(&self) -> &LanguageConfig { + &self.config + } } impl LanguageScope { @@ -1594,6 +1718,10 @@ impl CodeLabel { pub fn text(&self) -> &str { self.text.as_str() } + + pub fn filter_text(&self) -> &str { + &self.text[self.filter_range.clone()] + } } impl From for CodeLabel { @@ -1665,6 +1793,7 @@ impl LspAdapter for FakeLspAdapter { async fn check_if_user_installed( &self, _: 
&dyn LspAdapterDelegate, + _: Arc, _: &AsyncAppContext, ) -> Option { Some(self.language_server_binary.clone()) @@ -1673,6 +1802,7 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, _: Arc, + _: Arc, _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, @@ -1741,10 +1871,18 @@ pub fn point_from_lsp(point: lsp::Position) -> Unclipped { Unclipped(PointUtf16::new(point.line, point.character)) } -pub fn range_to_lsp(range: Range) -> lsp::Range { - lsp::Range { - start: point_to_lsp(range.start), - end: point_to_lsp(range.end), +pub fn range_to_lsp(range: Range) -> Result { + if range.start > range.end { + Err(anyhow!( + "Inverted range provided to an LSP request: {:?}-{:?}", + range.start, + range.end + )) + } else { + Ok(lsp::Range { + start: point_to_lsp(range.start), + end: point_to_lsp(range.end), + }) } } @@ -1752,6 +1890,7 @@ pub fn range_from_lsp(range: lsp::Range) -> Range> { let mut start = point_from_lsp(range.start); let mut end = point_from_lsp(range.end); if start > end { + log::warn!("range_from_lsp called with inverted range {start:?}-{end:?}"); mem::swap(&mut start, &mut end); } start..end diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index d8c2b0d510..794ab0784e 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -130,6 +130,7 @@ pub struct AvailableLanguage { name: LanguageName, grammar: Option>, matcher: LanguageMatcher, + hidden: bool, load: Arc Result + 'static + Send + Sync>, loaded: bool, } @@ -142,6 +143,9 @@ impl AvailableLanguage { pub fn matcher(&self) -> &LanguageMatcher { &self.matcher } + pub fn hidden(&self) -> bool { + self.hidden + } } enum AvailableGrammar { @@ -177,6 +181,7 @@ pub const QUERY_FILENAME_PREFIXES: &[( ("overrides", |q| &mut q.overrides), ("redactions", |q| &mut q.redactions), ("runnables", |q| &mut q.runnables), + ("textobjects", |q| 
&mut q.text_objects), ]; /// Tree-sitter language queries for a given language. @@ -191,6 +196,7 @@ pub struct LanguageQueries { pub overrides: Option>, pub redactions: Option>, pub runnables: Option>, + pub text_objects: Option>, } #[derive(Clone, Default)] @@ -288,6 +294,7 @@ impl LanguageRegistry { config.name.clone(), config.grammar.clone(), config.matcher.clone(), + config.hidden, Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), @@ -436,6 +443,7 @@ impl LanguageRegistry { name: LanguageName, grammar_name: Option>, matcher: LanguageMatcher, + hidden: bool, load: Arc Result + 'static + Send + Sync>, ) { let state = &mut *self.state.write(); @@ -455,6 +463,7 @@ impl LanguageRegistry { grammar: grammar_name, matcher, load, + hidden, loaded: false, }); state.version += 1; @@ -522,6 +531,7 @@ impl LanguageRegistry { name: language.name(), grammar: language.config.grammar.clone(), matcher: language.config.matcher.clone(), + hidden: language.config.hidden, load: Arc::new(|| Err(anyhow!("already loaded"))), loaded: true, }); @@ -590,15 +600,12 @@ impl LanguageRegistry { async move { rx.await? } } - pub fn available_language_for_name( - self: &Arc, - name: &LanguageName, - ) -> Option { + pub fn available_language_for_name(self: &Arc, name: &str) -> Option { let state = self.state.read(); state .available_languages .iter() - .find(|l| &l.name == name) + .find(|l| l.name.0.as_ref() == name) .cloned() } diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index a3ac40b714..cee765f9f9 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -138,6 +138,12 @@ pub struct LanguageSettings { pub linked_edits: bool, /// Task configuration for this language. pub tasks: LanguageTaskConfig, + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. 
+ pub show_completions_on_input: bool, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. + pub show_completion_documentation: bool, } impl LanguageSettings { @@ -197,6 +203,7 @@ pub enum InlineCompletionProvider { #[default] Copilot, Supermaven, + Zeta, } /// The settings for inline completions, such as [GitHub Copilot](https://github.com/features/copilot) @@ -381,6 +388,16 @@ pub struct LanguageSettingsContent { /// /// Default: {} pub tasks: Option, + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. + /// + /// Default: true + pub show_completions_on_input: Option, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. + /// + /// Default: true + pub show_completion_documentation: Option, } /// The contents of the inline completion settings. @@ -1185,6 +1202,14 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent src.extend_comment_on_newline, ); merge(&mut settings.inlay_hints, src.inlay_hints); + merge( + &mut settings.show_completions_on_input, + src.show_completions_on_input, + ); + merge( + &mut settings.show_completion_documentation, + src.show_completion_documentation, + ); } /// Allows to enable/disable formatting with Prettier diff --git a/crates/language/src/markdown.rs b/crates/language/src/markdown.rs index b9393a16ab..0221f0f431 100644 --- a/crates/language/src/markdown.rs +++ b/crates/language/src/markdown.rs @@ -239,12 +239,7 @@ pub async fn parse_markdown_block( Event::Start(tag) => match tag { Tag::Paragraph => new_paragraph(text, &mut list_stack), - Tag::Heading { - level: _, - id: _, - classes: _, - attrs: _, - } => { + Tag::Heading { .. 
} => { new_paragraph(text, &mut list_stack); bold_depth += 1; } @@ -267,12 +262,7 @@ pub async fn parse_markdown_block( Tag::Strikethrough => strikethrough_depth += 1, - Tag::Link { - link_type: _, - dest_url, - title: _, - id: _, - } => link_url = Some(dest_url.to_string()), + Tag::Link { dest_url, .. } => link_url = Some(dest_url.to_string()), Tag::List(number) => { list_stack.push((number, false)); diff --git a/crates/language/src/outline.rs b/crates/language/src/outline.rs index cd4e1d7fdb..7f62f221a6 100644 --- a/crates/language/src/outline.rs +++ b/crates/language/src/outline.rs @@ -73,8 +73,8 @@ impl Outline { .map(|range| &item.text[range.start..range.end]) .collect::(); - path_candidates.push(StringMatchCandidate::new(id, path_text.clone())); - candidates.push(StringMatchCandidate::new(id, candidate_text)); + path_candidates.push(StringMatchCandidate::new(id, &path_text)); + candidates.push(StringMatchCandidate::new(id, &candidate_text)); } Self { diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 1208925542..f51eeb9688 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -814,6 +814,23 @@ impl SyntaxSnapshot { buffer.as_rope(), self.layers_for_range(range, buffer, true), query, + TreeSitterOptions::default(), + ) + } + + pub fn matches_with_options<'a>( + &'a self, + range: Range, + buffer: &'a BufferSnapshot, + options: TreeSitterOptions, + query: fn(&Grammar) -> Option<&Query>, + ) -> SyntaxMapMatches<'a> { + SyntaxMapMatches::new( + range.clone(), + buffer.as_rope(), + self.layers_for_range(range, buffer, true), + query, + options, ) } @@ -1001,12 +1018,25 @@ impl<'a> SyntaxMapCaptures<'a> { } } +#[derive(Default)] +pub struct TreeSitterOptions { + max_start_depth: Option, +} +impl TreeSitterOptions { + pub fn max_start_depth(max_start_depth: u32) -> Self { + Self { + max_start_depth: Some(max_start_depth), + } + } +} + impl<'a> SyntaxMapMatches<'a> { fn new( range: Range, text: 
&'a Rope, layers: impl Iterator>, query: fn(&Grammar) -> Option<&Query>, + options: TreeSitterOptions, ) -> Self { let mut result = Self::default(); for layer in layers { @@ -1027,6 +1057,7 @@ impl<'a> SyntaxMapMatches<'a> { query_cursor.deref_mut(), ) }; + cursor.set_max_start_depth(options.max_start_depth); cursor.set_byte_range(range.clone()); let matches = cursor.matches(query, layer.node(), TextProvider(text)); @@ -1814,7 +1845,7 @@ impl Drop for QueryCursorHandle { } } -pub(crate) trait ToTreeSitterPoint { +pub trait ToTreeSitterPoint { fn to_ts_point(self) -> tree_sitter::Point; fn from_ts_point(point: tree_sitter::Point) -> Self; } diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index f6d27bcbd2..8c3517af7a 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -1193,15 +1193,15 @@ fn erb_lang() -> Language { .with_injection_query( r#" ( - (code) @content - (#set! "language" "ruby") - (#set! "combined") + (code) @injection.content + (#set! injection.language "ruby") + (#set! injection.combined) ) ( - (content) @content - (#set! "language" "html") - (#set! "combined") + (content) @injection.content + (#set! injection.language "html") + (#set! injection.combined) ) "#, ) @@ -1230,8 +1230,8 @@ fn rust_lang() -> Language { .with_injection_query( r#" (macro_invocation - (token_tree) @content - (#set! "language" "rust")) + (token_tree) @injection.content + (#set! injection.language "rust")) "#, ) .unwrap() @@ -1277,13 +1277,13 @@ fn heex_lang() -> Language { (partial_expression_value) (expression_value) (ending_expression_value) - ] @content) - (#set! language "elixir") - (#set! combined) + ] @injection.content) + (#set! injection.language "elixir") + (#set! injection.combined) ) - ((expression (expression_value) @content) - (#set! language "elixir")) + ((expression (expression_value) @injection.content) + (#set! 
injection.language "elixir")) "#, ) .unwrap() diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index cd9a3bc403..5b48157f0f 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -14,21 +14,37 @@ use settings::WorktreeId; use crate::LanguageName; /// Represents a single toolchain. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug)] pub struct Toolchain { /// User-facing label pub name: SharedString, pub path: SharedString, pub language_name: LanguageName, + /// Full toolchain data (including language-specific details) + pub as_json: serde_json::Value, } -#[async_trait(?Send)] +impl PartialEq for Toolchain { + fn eq(&self, other: &Self) -> bool { + // Do not use as_json for comparisons; it shouldn't impact equality, as it's not user-surfaced. + // Thus, there could be multiple entries that look the same in the UI. + (&self.name, &self.path, &self.language_name).eq(&( + &other.name, + &other.path, + &other.language_name, + )) + } +} + +#[async_trait] pub trait ToolchainLister: Send + Sync { async fn list( &self, worktree_root: PathBuf, project_env: Option>, ) -> ToolchainList; + // Returns a term which we should use in UI to refer to a toolchain. 
+ fn term(&self) -> SharedString; } #[async_trait(?Send)] diff --git a/crates/language_extension/Cargo.toml b/crates/language_extension/Cargo.toml new file mode 100644 index 0000000000..3d1e4d0a64 --- /dev/null +++ b/crates/language_extension/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "language_extension" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/language_extension.rs" + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +collections.workspace = true +extension.workspace = true +futures.workspace = true +gpui.workspace = true +language.workspace = true +lsp.workspace = true +serde.workspace = true +serde_json.workspace = true +util.workspace = true diff --git a/crates/language_extension/LICENSE-GPL b/crates/language_extension/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/language_extension/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/extension_host/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs similarity index 91% rename from crates/extension_host/src/extension_lsp_adapter.rs rename to crates/language_extension/src/extension_lsp_adapter.rs index 8f83c68e31..3286e09e2d 100644 --- a/crates/extension_host/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -1,22 +1,28 @@ +use std::any::Any; +use std::ops::Range; +use std::path::PathBuf; +use std::pin::Pin; +use std::sync::Arc; + use anyhow::{Context, Result}; use async_trait::async_trait; use collections::HashMap; -use extension::{Extension, WorktreeDelegate}; +use extension::{Extension, ExtensionLanguageServerProxy, WorktreeDelegate}; use futures::{Future, FutureExt}; use gpui::AsyncAppContext; use language::{ - CodeLabel, HighlightId, Language, LanguageName, LanguageToolchainStore, LspAdapter, - LspAdapterDelegate, + CodeLabel, 
HighlightId, Language, LanguageName, LanguageServerBinaryStatus, + LanguageToolchainStore, LspAdapter, LspAdapterDelegate, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName}; use serde::Serialize; use serde_json::Value; -use std::ops::Range; -use std::{any::Any, path::PathBuf, pin::Pin, sync::Arc}; use util::{maybe, ResultExt}; +use crate::LanguageServerRegistryProxy; + /// An adapter that allows an [`LspAdapterDelegate`] to be used as a [`WorktreeDelegate`]. -pub struct WorktreeDelegateAdapter(pub Arc); +struct WorktreeDelegateAdapter(pub Arc); #[async_trait] impl WorktreeDelegate for WorktreeDelegateAdapter { @@ -44,10 +50,60 @@ impl WorktreeDelegate for WorktreeDelegateAdapter { } } -pub struct ExtensionLspAdapter { - pub(crate) extension: Arc, - pub(crate) language_server_id: LanguageServerName, - pub(crate) language_name: LanguageName, +impl ExtensionLanguageServerProxy for LanguageServerRegistryProxy { + fn register_language_server( + &self, + extension: Arc, + language_server_id: LanguageServerName, + language: LanguageName, + ) { + self.language_registry.register_lsp_adapter( + language.clone(), + Arc::new(ExtensionLspAdapter::new( + extension, + language_server_id, + language, + )), + ); + } + + fn remove_language_server( + &self, + language: &LanguageName, + language_server_id: &LanguageServerName, + ) { + self.language_registry + .remove_lsp_adapter(language, language_server_id); + } + + fn update_language_server_status( + &self, + language_server_id: LanguageServerName, + status: LanguageServerBinaryStatus, + ) { + self.language_registry + .update_lsp_status(language_server_id, status); + } +} + +struct ExtensionLspAdapter { + extension: Arc, + language_server_id: LanguageServerName, + language_name: LanguageName, +} + +impl ExtensionLspAdapter { + fn new( + extension: Arc, + language_server_id: LanguageServerName, + language_name: LanguageName, + ) -> Self { + Self { + extension, + language_server_id, + 
language_name, + } + } } #[async_trait(?Send)] @@ -59,6 +115,7 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, delegate: Arc, + _: Arc, _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, diff --git a/crates/language_extension/src/language_extension.rs b/crates/language_extension/src/language_extension.rs new file mode 100644 index 0000000000..59951c87e4 --- /dev/null +++ b/crates/language_extension/src/language_extension.rs @@ -0,0 +1,52 @@ +mod extension_lsp_adapter; + +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::Result; +use extension::{ExtensionGrammarProxy, ExtensionHostProxy, ExtensionLanguageProxy}; +use language::{LanguageMatcher, LanguageName, LanguageRegistry, LoadedLanguage}; + +pub fn init( + extension_host_proxy: Arc, + language_registry: Arc, +) { + let language_server_registry_proxy = LanguageServerRegistryProxy { language_registry }; + extension_host_proxy.register_grammar_proxy(language_server_registry_proxy.clone()); + extension_host_proxy.register_language_proxy(language_server_registry_proxy.clone()); + extension_host_proxy.register_language_server_proxy(language_server_registry_proxy); +} + +#[derive(Clone)] +struct LanguageServerRegistryProxy { + language_registry: Arc, +} + +impl ExtensionGrammarProxy for LanguageServerRegistryProxy { + fn register_grammars(&self, grammars: Vec<(Arc, PathBuf)>) { + self.language_registry.register_wasm_grammars(grammars) + } +} + +impl ExtensionLanguageProxy for LanguageServerRegistryProxy { + fn register_language( + &self, + language: LanguageName, + grammar: Option>, + matcher: LanguageMatcher, + hidden: bool, + load: Arc Result + Send + Sync + 'static>, + ) { + self.language_registry + .register_language(language, grammar, matcher, hidden, load); + } + + fn remove_languages( + &self, + languages_to_remove: &[LanguageName], + grammars_to_remove: &[Arc], + ) { + self.language_registry + 
.remove_languages(&languages_to_remove, &grammars_to_remove); + } +} diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index e88675bbae..0fc54d509d 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -13,57 +13,30 @@ path = "src/language_model.rs" doctest = false [features] -test-support = [ - "editor/test-support", - "language/test-support", - "project/test-support", - "text/test-support", -] +test-support = [] [dependencies] anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true -client.workspace = true +base64.workspace = true collections.workspace = true -copilot = { workspace = true, features = ["schemars"] } -editor.workspace = true -feature_flags.workspace = true futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true -inline_completion_button.workspace = true +image.workspace = true log.workspace = true -menu.workspace = true ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } parking_lot.workspace = true proto.workspace = true -project.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true -settings.workspace = true smol.workspace = true strum.workspace = true -telemetry_events.workspace = true -theme.workspace = true -thiserror.workspace = true -tiktoken-rs.workspace = true ui.workspace = true util.workspace = true -base64.workspace = true -image.workspace = true - [dev-dependencies] -ctor.workspace = true -editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true -language = { workspace = true, features = ["test-support"] } -log.workspace = true -project = { workspace = true, features = ["test-support"] } -proto = { workspace = true, features = ["test-support"] } -rand.workspace = true -text = { workspace = true, features = ["test-support"] } 
-unindent.workspace = true +gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/language_model/src/provider/fake.rs b/crates/language_model/src/fake_provider.rs similarity index 97% rename from crates/language_model/src/provider/fake.rs rename to crates/language_model/src/fake_provider.rs index 2044ae520d..bf240b4bda 100644 --- a/crates/language_model/src/provider/fake.rs +++ b/crates/language_model/src/fake_provider.rs @@ -4,12 +4,11 @@ use crate::{ LanguageModelProviderState, LanguageModelRequest, }; use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt}; -use gpui::{AnyView, AppContext, AsyncAppContext, Task}; +use gpui::{AnyView, AppContext, AsyncAppContext, Model, Task, WindowContext}; use http_client::Result; use parking_lot::Mutex; use serde::Serialize; use std::sync::Arc; -use ui::WindowContext; pub fn language_model_id() -> LanguageModelId { LanguageModelId::from("fake".to_string()) @@ -33,7 +32,7 @@ pub struct FakeLanguageModelProvider; impl LanguageModelProviderState for FakeLanguageModelProvider { type ObservableEntity = (); - fn observable_entity(&self) -> Option> { + fn observable_entity(&self) -> Option> { None } } diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index a2f5a072a9..a10c743b35 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -1,23 +1,19 @@ -pub mod logging; mod model; -pub mod provider; mod rate_limiter; mod registry; mod request; mod role; -pub mod settings; + +#[cfg(any(test, feature = "test-support"))] +pub mod fake_provider; use anyhow::Result; -use client::{Client, UserStore}; use futures::FutureExt; use futures::{future::BoxFuture, stream::BoxStream, StreamExt, TryStreamExt as _}; -use gpui::{ - AnyElement, AnyView, AppContext, AsyncAppContext, Model, SharedString, Task, WindowContext, -}; +use gpui::{AnyElement, AnyView, AppContext, AsyncAppContext, 
SharedString, Task, WindowContext}; pub use model::*; -use project::Fs; use proto::Plan; -pub(crate) use rate_limiter::*; +pub use rate_limiter::*; pub use registry::*; pub use request::*; pub use role::*; @@ -27,14 +23,10 @@ use std::fmt; use std::{future::Future, sync::Arc}; use ui::IconName; -pub fn init( - user_store: Model, - client: Arc, - fs: Arc, - cx: &mut AppContext, -) { - settings::init(fs, cx); - registry::init(user_store, client, cx); +pub const ZED_CLOUD_PROVIDER_ID: &str = "zed.dev"; + +pub fn init(cx: &mut AppContext) { + registry::init(cx); } /// The availability of a [`LanguageModel`]. @@ -63,7 +55,7 @@ pub enum LanguageModelCompletionEvent { StartMessage { message_id: String }, } -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum StopReason { EndTurn, @@ -71,9 +63,27 @@ pub enum StopReason { ToolUse, } +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] +pub struct LanguageModelToolUseId(Arc); + +impl fmt::Display for LanguageModelToolUseId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From for LanguageModelToolUseId +where + T: Into>, +{ + fn from(value: T) -> Self { + Self(value.into()) + } +} + #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] pub struct LanguageModelToolUse { - pub id: String, + pub id: LanguageModelToolUseId, pub name: String, pub input: serde_json::Value, } @@ -137,7 +147,7 @@ pub trait LanguageModel: Send + Sync { let events = self.stream_completion(request, cx); async move { - let mut events = events.await?; + let mut events = events.await?.fuse(); let mut message_id = None; let mut first_item_text = None; @@ -184,7 +194,7 @@ pub trait LanguageModel: Send + Sync { } #[cfg(any(test, feature = "test-support"))] - fn as_fake(&self) -> &provider::fake::FakeLanguageModel { + fn as_fake(&self) -> 
&fake_provider::FakeLanguageModel { unimplemented!() } } diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 9242f80e6e..57ed28d625 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -68,6 +68,7 @@ impl CloudModel { anthropic::Model::Claude3Opus | anthropic::Model::Claude3Sonnet | anthropic::Model::Claude3Haiku + | anthropic::Model::Claude3_5Haiku | anthropic::Model::Custom { .. } => { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } @@ -87,6 +88,7 @@ impl CloudModel { Self::Google(model) => match model { google_ai::Model::Gemini15Pro | google_ai::Model::Gemini15Flash + | google_ai::Model::Gemini20Flash | google_ai::Model::Custom { .. } => { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 72dfd998d4..94999731a6 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -1,76 +1,16 @@ -use crate::provider::cloud::RefreshLlmTokenListener; use crate::{ - provider::{ - anthropic::AnthropicLanguageModelProvider, cloud::CloudLanguageModelProvider, - copilot_chat::CopilotChatLanguageModelProvider, google::GoogleLanguageModelProvider, - ollama::OllamaLanguageModelProvider, open_ai::OpenAiLanguageModelProvider, - }, LanguageModel, LanguageModelId, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderState, }; -use client::{Client, UserStore}; use collections::BTreeMap; -use gpui::{AppContext, EventEmitter, Global, Model, ModelContext}; +use gpui::{prelude::*, AppContext, EventEmitter, Global, Model, ModelContext}; use std::sync::Arc; -use ui::Context; -pub fn init(user_store: Model, client: Arc, cx: &mut AppContext) { - let registry = cx.new_model(|cx| { - let mut registry = LanguageModelRegistry::default(); - register_language_model_providers(&mut registry, user_store, client, cx); - 
registry - }); +pub fn init(cx: &mut AppContext) { + let registry = cx.new_model(|_cx| LanguageModelRegistry::default()); cx.set_global(GlobalLanguageModelRegistry(registry)); } -fn register_language_model_providers( - registry: &mut LanguageModelRegistry, - user_store: Model, - client: Arc, - cx: &mut ModelContext, -) { - use feature_flags::FeatureFlagAppExt; - - RefreshLlmTokenListener::register(client.clone(), cx); - - registry.register_provider( - AnthropicLanguageModelProvider::new(client.http_client(), cx), - cx, - ); - registry.register_provider( - OpenAiLanguageModelProvider::new(client.http_client(), cx), - cx, - ); - registry.register_provider( - OllamaLanguageModelProvider::new(client.http_client(), cx), - cx, - ); - registry.register_provider( - GoogleLanguageModelProvider::new(client.http_client(), cx), - cx, - ); - registry.register_provider(CopilotChatLanguageModelProvider::new(cx), cx); - - cx.observe_flag::(move |enabled, cx| { - let user_store = user_store.clone(); - let client = client.clone(); - LanguageModelRegistry::global(cx).update(cx, move |registry, cx| { - if enabled { - registry.register_provider( - CloudLanguageModelProvider::new(user_store.clone(), client.clone(), cx), - cx, - ); - } else { - registry.unregister_provider( - LanguageModelProviderId::from(crate::provider::cloud::PROVIDER_ID.to_string()), - cx, - ); - } - }); - }) - .detach(); -} - struct GlobalLanguageModelRegistry(Model); impl Global for GlobalLanguageModelRegistry {} @@ -106,8 +46,8 @@ impl LanguageModelRegistry { } #[cfg(any(test, feature = "test-support"))] - pub fn test(cx: &mut AppContext) -> crate::provider::fake::FakeLanguageModelProvider { - let fake_provider = crate::provider::fake::FakeLanguageModelProvider; + pub fn test(cx: &mut AppContext) -> crate::fake_provider::FakeLanguageModelProvider { + let fake_provider = crate::fake_provider::FakeLanguageModelProvider; let registry = cx.new_model(|cx| { let mut registry = Self::default(); 
registry.register_provider(fake_provider.clone(), cx); @@ -148,7 +88,7 @@ impl LanguageModelRegistry { } pub fn providers(&self) -> Vec> { - let zed_provider_id = LanguageModelProviderId(crate::provider::cloud::PROVIDER_ID.into()); + let zed_provider_id = LanguageModelProviderId("zed.dev".into()); let mut providers = Vec::with_capacity(self.providers.len()); if let Some(provider) = self.providers.get(&zed_provider_id) { providers.push(provider.clone()); @@ -269,7 +209,7 @@ impl LanguageModelRegistry { #[cfg(test)] mod tests { use super::*; - use crate::provider::fake::FakeLanguageModelProvider; + use crate::fake_provider::FakeLanguageModelProvider; #[gpui::test] fn test_register_providers(cx: &mut AppContext) { @@ -281,10 +221,10 @@ mod tests { let providers = registry.read(cx).providers(); assert_eq!(providers.len(), 1); - assert_eq!(providers[0].id(), crate::provider::fake::provider_id()); + assert_eq!(providers[0].id(), crate::fake_provider::provider_id()); registry.update(cx, |registry, cx| { - registry.unregister_provider(crate::provider::fake::provider_id(), cx); + registry.unregister_provider(crate::fake_provider::provider_id(), cx); }); let providers = registry.read(cx).providers(); diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 06dde1862a..e6f7f210c7 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -347,7 +347,7 @@ impl LanguageModelRequest { } MessageContent::ToolUse(tool_use) => { Some(anthropic::RequestContent::ToolUse { - id: tool_use.id, + id: tool_use.id.to_string(), name: tool_use.name, input: tool_use.input, cache_control, diff --git a/crates/language_model_selector/Cargo.toml b/crates/language_model_selector/Cargo.toml new file mode 100644 index 0000000000..cd00af50c0 --- /dev/null +++ b/crates/language_model_selector/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "language_model_selector" +version = "0.1.0" +edition = "2021" +publish = false +license = 
"GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/language_model_selector.rs" + +[dependencies] +feature_flags.workspace = true +gpui.workspace = true +language_model.workspace = true +picker.workspace = true +proto.workspace = true +ui.workspace = true +workspace.workspace = true +zed_actions.workspace = true diff --git a/crates/language_model_selector/LICENSE-GPL b/crates/language_model_selector/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/language_model_selector/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/assistant/src/model_selector.rs b/crates/language_model_selector/src/language_model_selector.rs similarity index 67% rename from crates/assistant/src/model_selector.rs rename to crates/language_model_selector/src/language_model_selector.rs index 1b26b8b5ad..cf8260c73b 100644 --- a/crates/assistant/src/model_selector.rs +++ b/crates/language_model_selector/src/language_model_selector.rs @@ -1,33 +1,150 @@ -use feature_flags::ZedPro; - -use language_model::{LanguageModel, LanguageModelAvailability, LanguageModelRegistry}; -use proto::Plan; -use workspace::ShowConfiguration; - use std::sync::Arc; -use crate::assistant_settings::AssistantSettings; -use fs::Fs; -use gpui::{Action, AnyElement, DismissEvent, SharedString, Task}; +use feature_flags::ZedPro; +use gpui::{ + Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model, + Subscription, Task, View, WeakView, +}; +use language_model::{LanguageModel, LanguageModelAvailability, LanguageModelRegistry}; use picker::{Picker, PickerDelegate}; -use settings::update_settings_file; +use proto::Plan; use ui::{prelude::*, ListItem, ListItemSpacing, PopoverMenu, PopoverMenuHandle, PopoverTrigger}; +use workspace::ShowConfiguration; const TRY_ZED_PRO_URL: &str = "https://zed.dev/pro"; -#[derive(IntoElement)] -pub struct ModelSelector { - handle: Option>>, - fs: Arc, - trigger: 
T, - info_text: Option, +type OnModelChanged = Arc, &AppContext) + 'static>; + +pub struct LanguageModelSelector { + picker: View>, + /// The task used to update the picker's matches when there is a change to + /// the language model registry. + update_matches_task: Option>, + _subscriptions: Vec, } -pub struct ModelPickerDelegate { - fs: Arc, - all_models: Vec, - filtered_models: Vec, - selected_index: usize, +impl LanguageModelSelector { + pub fn new( + on_model_changed: impl Fn(Arc, &AppContext) + 'static, + cx: &mut ViewContext, + ) -> Self { + let on_model_changed = Arc::new(on_model_changed); + + let all_models = Self::all_models(cx); + let delegate = LanguageModelPickerDelegate { + language_model_selector: cx.view().downgrade(), + on_model_changed: on_model_changed.clone(), + all_models: all_models.clone(), + filtered_models: all_models, + selected_index: 0, + }; + + let picker = + cx.new_view(|cx| Picker::uniform_list(delegate, cx).max_height(Some(rems(20.).into()))); + + LanguageModelSelector { + picker, + update_matches_task: None, + _subscriptions: vec![cx.subscribe( + &LanguageModelRegistry::global(cx), + Self::handle_language_model_registry_event, + )], + } + } + + fn handle_language_model_registry_event( + &mut self, + _registry: Model, + event: &language_model::Event, + cx: &mut ViewContext, + ) { + match event { + language_model::Event::ProviderStateChanged + | language_model::Event::AddedProvider(_) + | language_model::Event::RemovedProvider(_) => { + let task = self.picker.update(cx, |this, cx| { + let query = this.query(cx); + this.delegate.all_models = Self::all_models(cx); + this.delegate.update_matches(query, cx) + }); + self.update_matches_task = Some(task); + } + _ => {} + } + } + + fn all_models(cx: &AppContext) -> Vec { + LanguageModelRegistry::global(cx) + .read(cx) + .providers() + .iter() + .flat_map(|provider| { + let icon = provider.icon(); + + provider.provided_models(cx).into_iter().map(move |model| { + let model = model.clone(); + 
let icon = model.icon().unwrap_or(icon); + + ModelInfo { + model: model.clone(), + icon, + availability: model.availability(), + } + }) + }) + .collect::>() + } +} + +impl EventEmitter for LanguageModelSelector {} + +impl FocusableView for LanguageModelSelector { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for LanguageModelSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + self.picker.clone() + } +} + +#[derive(IntoElement)] +pub struct LanguageModelSelectorPopoverMenu +where + T: PopoverTrigger, +{ + language_model_selector: View, + trigger: T, + handle: Option>, +} + +impl LanguageModelSelectorPopoverMenu { + pub fn new(language_model_selector: View, trigger: T) -> Self { + Self { + language_model_selector, + trigger, + handle: None, + } + } + + pub fn with_handle(mut self, handle: PopoverMenuHandle) -> Self { + self.handle = Some(handle); + self + } +} + +impl RenderOnce for LanguageModelSelectorPopoverMenu { + fn render(self, _cx: &mut WindowContext) -> impl IntoElement { + let language_model_selector = self.language_model_selector.clone(); + + PopoverMenu::new("model-switcher") + .menu(move |_cx| Some(language_model_selector.clone())) + .trigger(self.trigger) + .attach(gpui::Corner::BottomLeft) + .when_some(self.handle.clone(), |menu, handle| menu.with_handle(handle)) + } } #[derive(Clone)] @@ -35,31 +152,17 @@ struct ModelInfo { model: Arc, icon: IconName, availability: LanguageModelAvailability, - is_selected: bool, } -impl ModelSelector { - pub fn new(fs: Arc, trigger: T) -> Self { - ModelSelector { - handle: None, - fs, - trigger, - info_text: None, - } - } - - pub fn with_handle(mut self, handle: PopoverMenuHandle>) -> Self { - self.handle = Some(handle); - self - } - - pub fn with_info_text(mut self, text: impl Into) -> Self { - self.info_text = Some(text.into()); - self - } +pub struct LanguageModelPickerDelegate { + language_model_selector: WeakView, + 
on_model_changed: OnModelChanged, + all_models: Vec, + filtered_models: Vec, + selected_index: usize, } -impl PickerDelegate for ModelPickerDelegate { +impl PickerDelegate for LanguageModelPickerDelegate { type ListItem = ListItem; fn match_count(&self) -> usize { @@ -84,25 +187,25 @@ impl PickerDelegate for ModelPickerDelegate { let llm_registry = LanguageModelRegistry::global(cx); - let configured_models: Vec<_> = llm_registry + let configured_providers = llm_registry .read(cx) .providers() .iter() .filter(|provider| provider.is_authenticated(cx)) .map(|provider| provider.id()) - .collect(); + .collect::>(); cx.spawn(|this, mut cx| async move { let filtered_models = cx .background_executor() .spawn(async move { - let displayed_models = if configured_models.is_empty() { + let displayed_models = if configured_providers.is_empty() { all_models } else { all_models .into_iter() .filter(|model_info| { - configured_models.contains(&model_info.model.provider_id()) + configured_providers.contains(&model_info.model.provider_id()) }) .collect::>() }; @@ -137,27 +240,17 @@ impl PickerDelegate for ModelPickerDelegate { fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext>) { if let Some(model_info) = self.filtered_models.get(self.selected_index) { let model = model_info.model.clone(); - update_settings_file::(self.fs.clone(), cx, move |settings, _| { - settings.set_model(model.clone()) - }); - - // Update the selection status - let selected_model_id = model_info.model.id(); - let selected_provider_id = model_info.model.provider_id(); - for model in &mut self.all_models { - model.is_selected = model.model.id() == selected_model_id - && model.model.provider_id() == selected_provider_id; - } - for model in &mut self.filtered_models { - model.is_selected = model.model.id() == selected_model_id - && model.model.provider_id() == selected_provider_id; - } + (self.on_model_changed)(model.clone(), cx); cx.emit(DismissEvent); } } - fn dismissed(&mut self, _cx: &mut 
ViewContext>) {} + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.language_model_selector + .update(cx, |_this, cx| cx.emit(DismissEvent)) + .ok(); + } fn render_header(&self, cx: &mut ViewContext>) -> Option { let configured_models_count = LanguageModelRegistry::global(cx) @@ -194,11 +287,22 @@ impl PickerDelegate for ModelPickerDelegate { let model_info = self.filtered_models.get(ix)?; let provider_name: String = model_info.model.provider_name().0.clone().into(); + let active_provider_id = LanguageModelRegistry::read_global(cx) + .active_provider() + .map(|m| m.id()); + + let active_model_id = LanguageModelRegistry::read_global(cx) + .active_model() + .map(|m| m.id()); + + let is_selected = Some(model_info.model.provider_id()) == active_provider_id + && Some(model_info.model.id()) == active_model_id; + Some( ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .start_slot( div().pr_0p5().child( Icon::new(model_info.icon) @@ -234,7 +338,7 @@ impl PickerDelegate for ModelPickerDelegate { }), ), ) - .end_slot(div().when(model_info.is_selected, |this| { + .end_slot(div().when(is_selected, |this| { this.child( Icon::new(IconName::Check) .color(Color::Accent) @@ -295,58 +399,3 @@ impl PickerDelegate for ModelPickerDelegate { ) } } - -impl RenderOnce for ModelSelector { - fn render(self, cx: &mut WindowContext) -> impl IntoElement { - let selected_provider = LanguageModelRegistry::read_global(cx) - .active_provider() - .map(|m| m.id()); - - let selected_model = LanguageModelRegistry::read_global(cx) - .active_model() - .map(|m| m.id()); - - let all_models = LanguageModelRegistry::global(cx) - .read(cx) - .providers() - .iter() - .flat_map(|provider| { - let provider_id = provider.id(); - let icon = provider.icon(); - let selected_model = selected_model.clone(); - let selected_provider = selected_provider.clone(); - - provider.provided_models(cx).into_iter().map(move |model| { - let model = 
model.clone(); - let icon = model.icon().unwrap_or(icon); - - ModelInfo { - model: model.clone(), - icon, - availability: model.availability(), - is_selected: selected_model.as_ref() == Some(&model.id()) - && selected_provider.as_ref() == Some(&provider_id), - } - }) - }) - .collect::>(); - - let delegate = ModelPickerDelegate { - fs: self.fs.clone(), - all_models: all_models.clone(), - filtered_models: all_models, - selected_index: 0, - }; - - let picker_view = cx.new_view(|cx| { - let picker = Picker::uniform_list(delegate, cx).max_height(Some(rems(20.).into())); - picker - }); - - PopoverMenu::new("model-switcher") - .menu(move |_cx| Some(picker_view.clone())) - .trigger(self.trigger) - .attach(gpui::AnchorCorner::BottomLeft) - .when_some(self.handle, |menu, handle| menu.with_handle(handle)) - } -} diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml new file mode 100644 index 0000000000..00d948bd2d --- /dev/null +++ b/crates/language_models/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "language_models" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/language_models.rs" + +[dependencies] +anthropic = { workspace = true, features = ["schemars"] } +anyhow.workspace = true +client.workspace = true +collections.workspace = true +copilot = { workspace = true, features = ["schemars"] } +editor.workspace = true +feature_flags.workspace = true +fs.workspace = true +futures.workspace = true +google_ai = { workspace = true, features = ["schemars"] } +gpui.workspace = true +http_client.workspace = true +language_model.workspace = true +menu.workspace = true +ollama = { workspace = true, features = ["schemars"] } +open_ai = { workspace = true, features = ["schemars"] } +project.workspace = true +proto.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +smol.workspace = true 
+strum.workspace = true +telemetry_events.workspace = true +theme.workspace = true +thiserror.workspace = true +tiktoken-rs.workspace = true +ui.workspace = true +util.workspace = true + +[dev-dependencies] +editor = { workspace = true, features = ["test-support"] } +language_model = { workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } diff --git a/crates/language_models/LICENSE-GPL b/crates/language_models/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/language_models/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs new file mode 100644 index 0000000000..6d618d1ec5 --- /dev/null +++ b/crates/language_models/src/language_models.rs @@ -0,0 +1,82 @@ +use std::sync::Arc; + +use client::{Client, UserStore}; +use fs::Fs; +use gpui::{AppContext, Model, ModelContext}; +use language_model::{LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID}; + +mod logging; +pub mod provider; +mod settings; + +use crate::provider::anthropic::AnthropicLanguageModelProvider; +use crate::provider::cloud::CloudLanguageModelProvider; +pub use crate::provider::cloud::LlmApiToken; +pub use crate::provider::cloud::RefreshLlmTokenListener; +use crate::provider::copilot_chat::CopilotChatLanguageModelProvider; +use crate::provider::google::GoogleLanguageModelProvider; +use crate::provider::ollama::OllamaLanguageModelProvider; +use crate::provider::open_ai::OpenAiLanguageModelProvider; +pub use crate::settings::*; +pub use logging::report_assistant_event; + +pub fn init( + user_store: Model, + client: Arc, + fs: Arc, + cx: &mut AppContext, +) { + crate::settings::init(fs, cx); + let registry = LanguageModelRegistry::global(cx); + registry.update(cx, |registry, cx| { + register_language_model_providers(registry, user_store, client, cx); + }); +} + +fn 
register_language_model_providers( + registry: &mut LanguageModelRegistry, + user_store: Model, + client: Arc, + cx: &mut ModelContext, +) { + use feature_flags::FeatureFlagAppExt; + + RefreshLlmTokenListener::register(client.clone(), cx); + + registry.register_provider( + AnthropicLanguageModelProvider::new(client.http_client(), cx), + cx, + ); + registry.register_provider( + OpenAiLanguageModelProvider::new(client.http_client(), cx), + cx, + ); + registry.register_provider( + OllamaLanguageModelProvider::new(client.http_client(), cx), + cx, + ); + registry.register_provider( + GoogleLanguageModelProvider::new(client.http_client(), cx), + cx, + ); + registry.register_provider(CopilotChatLanguageModelProvider::new(cx), cx); + + cx.observe_flag::(move |enabled, cx| { + let user_store = user_store.clone(); + let client = client.clone(); + LanguageModelRegistry::global(cx).update(cx, move |registry, cx| { + if enabled { + registry.register_provider( + CloudLanguageModelProvider::new(user_store.clone(), client.clone(), cx), + cx, + ); + } else { + registry.unregister_provider( + LanguageModelProviderId::from(ZED_CLOUD_PROVIDER_ID.to_string()), + cx, + ); + } + }); + }) + .detach(); +} diff --git a/crates/language_model/src/logging.rs b/crates/language_models/src/logging.rs similarity index 100% rename from crates/language_model/src/logging.rs rename to crates/language_models/src/logging.rs diff --git a/crates/language_model/src/provider.rs b/crates/language_models/src/provider.rs similarity index 64% rename from crates/language_model/src/provider.rs rename to crates/language_models/src/provider.rs index d2d162b75e..fb79b12e4d 100644 --- a/crates/language_model/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -1,8 +1,6 @@ pub mod anthropic; pub mod cloud; pub mod copilot_chat; -#[cfg(any(test, feature = "test-support"))] -pub mod fake; pub mod google; pub mod ollama; pub mod open_ai; diff --git a/crates/language_model/src/provider/anthropic.rs 
b/crates/language_models/src/provider/anthropic.rs similarity index 97% rename from crates/language_model/src/provider/anthropic.rs rename to crates/language_models/src/provider/anthropic.rs index 60e238b369..1404a3428e 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -1,9 +1,4 @@ -use crate::{ - settings::AllLanguageModelSettings, LanguageModel, LanguageModelCacheConfiguration, - LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, - LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, -}; -use crate::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason}; +use crate::AllLanguageModelSettings; use anthropic::{AnthropicError, ContentDelta, Event, ResponseContent}; use anyhow::{anyhow, Context as _, Result}; use collections::{BTreeMap, HashMap}; @@ -15,6 +10,12 @@ use gpui::{ View, WhiteSpace, }; use http_client::HttpClient; +use language_model::{ + LanguageModel, LanguageModelCacheConfiguration, LanguageModelId, LanguageModelName, + LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, +}; +use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; @@ -51,6 +52,8 @@ pub struct AvailableModel { pub cache_configuration: Option, pub max_output_tokens: Option, pub default_temperature: Option, + #[serde(default)] + pub extra_beta_headers: Vec, } pub struct AnthropicLanguageModelProvider { @@ -201,6 +204,7 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { }), max_output_tokens: model.max_output_tokens, default_temperature: model.default_temperature, + extra_beta_headers: model.extra_beta_headers.clone(), }, ); } @@ -256,7 +260,7 @@ pub fn count_anthropic_tokens( let mut 
string_messages = Vec::with_capacity(messages.len()); for message in messages { - use crate::MessageContent; + use language_model::MessageContent; let mut string_contents = String::new(); @@ -497,7 +501,7 @@ pub fn map_to_language_model_completion_events( Some(maybe!({ Ok(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { - id: tool_use.id, + id: tool_use.id.into(), name: tool_use.name, input: if tool_use.input_json.is_empty() { serde_json::Value::Null diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs similarity index 97% rename from crates/language_model/src/provider/cloud.rs rename to crates/language_models/src/provider/cloud.rs index 41e23b56e3..4621236785 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -1,10 +1,4 @@ use super::open_ai::count_open_ai_tokens; -use crate::provider::anthropic::map_to_language_model_completion_events; -use crate::{ - settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration, - LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, -}; use anthropic::AnthropicError; use anyhow::{anyhow, Result}; use client::{ @@ -22,6 +16,14 @@ use gpui::{ ModelContext, ReadGlobal, Subscription, Task, }; use http_client::{AsyncBody, HttpClient, Method, Response, StatusCode}; +use language_model::{ + CloudModel, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId, LanguageModelName, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, RateLimiter, ZED_CLOUD_PROVIDER_ID, +}; +use language_model::{ + LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider, +}; use proto::TypedEnvelope; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; @@ -40,11 +42,11 @@ use 
strum::IntoEnumIterator; use thiserror::Error; use ui::{prelude::*, TintColor}; -use crate::{LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider}; +use crate::provider::anthropic::map_to_language_model_completion_events; +use crate::AllLanguageModelSettings; use super::anthropic::count_anthropic_tokens; -pub const PROVIDER_ID: &str = "zed.dev"; pub const PROVIDER_NAME: &str = "Zed"; const ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: Option<&str> = @@ -92,6 +94,9 @@ pub struct AvailableModel { pub cache_configuration: Option, /// The default temperature to use for this model. pub default_temperature: Option, + /// Any extra beta headers to provide when using the model. + #[serde(default)] + pub extra_beta_headers: Vec, } struct GlobalRefreshLlmTokenListener(Model); @@ -255,7 +260,7 @@ impl LanguageModelProviderState for CloudLanguageModelProvider { impl LanguageModelProvider for CloudLanguageModelProvider { fn id(&self) -> LanguageModelProviderId { - LanguageModelProviderId(PROVIDER_ID.into()) + LanguageModelProviderId(ZED_CLOUD_PROVIDER_ID.into()) } fn name(&self) -> LanguageModelProviderName { @@ -321,6 +326,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { }), default_temperature: model.default_temperature, max_output_tokens: model.max_output_tokens, + extra_beta_headers: model.extra_beta_headers.clone(), }), AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom { name: model.name.clone(), @@ -442,7 +448,7 @@ pub struct CloudLanguageModel { } #[derive(Clone, Default)] -struct LlmApiToken(Arc>>); +pub struct LlmApiToken(Arc>>); #[derive(Error, Debug)] pub struct PaymentRequiredError; @@ -535,7 +541,7 @@ impl LanguageModel for CloudLanguageModel { } fn provider_id(&self) -> LanguageModelProviderId { - LanguageModelProviderId(PROVIDER_ID.into()) + LanguageModelProviderId(ZED_CLOUD_PROVIDER_ID.into()) } fn provider_name(&self) -> LanguageModelProviderName { @@ -812,7 +818,7 @@ fn response_lines( } impl 
LlmApiToken { - async fn acquire(&self, client: &Arc) -> Result { + pub async fn acquire(&self, client: &Arc) -> Result { let lock = self.0.upgradable_read().await; if let Some(token) = lock.as_ref() { Ok(token.to_string()) @@ -821,7 +827,7 @@ impl LlmApiToken { } } - async fn refresh(&self, client: &Arc) -> Result { + pub async fn refresh(&self, client: &Arc) -> Result { Self::fetch(self.0.write().await, client).await } diff --git a/crates/language_model/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs similarity index 95% rename from crates/language_model/src/provider/copilot_chat.rs rename to crates/language_models/src/provider/copilot_chat.rs index a991e81fbc..e35322d755 100644 --- a/crates/language_model/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -14,20 +14,15 @@ use gpui::{ percentage, svg, Animation, AnimationExt, AnyView, AppContext, AsyncAppContext, Model, Render, Subscription, Task, Transformation, }; +use language_model::{ + LanguageModel, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, + LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, +}; use settings::SettingsStore; use std::time::Duration; use strum::IntoEnumIterator; -use ui::{ - div, h_flex, v_flex, Button, ButtonCommon, Clickable, Color, Context, FixedWidth, Icon, - IconName, IconPosition, IconSize, IntoElement, Label, LabelCommon, ParentElement, Styled, - ViewContext, VisualContext, WindowContext, -}; - -use crate::{ - LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelRequest, RateLimiter, Role, -}; -use crate::{LanguageModelCompletionEvent, LanguageModelProviderState}; +use ui::prelude::*; use super::anthropic::count_anthropic_tokens; use super::open_ai::count_open_ai_tokens; @@ -383,9 +378,7 @@ impl Render for 
ConfigurationView { .icon_size(IconSize::Medium) .style(ui::ButtonStyle::Filled) .full_width() - .on_click(|_, cx| { - inline_completion_button::initiate_sign_in(cx) - }), + .on_click(|_, cx| copilot::initiate_sign_in(cx)), ) .child( div().flex().w_full().items_center().child( diff --git a/crates/language_model/src/provider/google.rs b/crates/language_models/src/provider/google.rs similarity index 98% rename from crates/language_model/src/provider/google.rs rename to crates/language_models/src/provider/google.rs index 94d5ffca7d..59589605ee 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -8,6 +8,12 @@ use gpui::{ View, WhiteSpace, }; use http_client::HttpClient; +use language_model::LanguageModelCompletionEvent; +use language_model::{ + LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, RateLimiter, +}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; @@ -17,12 +23,7 @@ use theme::ThemeSettings; use ui::{prelude::*, Icon, IconName, Tooltip}; use util::ResultExt; -use crate::LanguageModelCompletionEvent; -use crate::{ - settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName, - LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, -}; +use crate::AllLanguageModelSettings; const PROVIDER_ID: &str = "google"; const PROVIDER_NAME: &str = "Google AI"; diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs similarity index 98% rename from crates/language_model/src/provider/ollama.rs rename to crates/language_models/src/provider/ollama.rs index 3485982781..2eb2b4fffb 100644 --- a/crates/language_model/src/provider/ollama.rs +++ 
b/crates/language_models/src/provider/ollama.rs @@ -2,6 +2,12 @@ use anyhow::{anyhow, bail, Result}; use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt}; use gpui::{AnyView, AppContext, AsyncAppContext, ModelContext, Subscription, Task}; use http_client::HttpClient; +use language_model::LanguageModelCompletionEvent; +use language_model::{ + LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, RateLimiter, Role, +}; use ollama::{ get_models, preload_model, stream_chat_completion, ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaToolCall, @@ -13,12 +19,7 @@ use std::{collections::BTreeMap, sync::Arc}; use ui::{prelude::*, ButtonLike, Indicator}; use util::ResultExt; -use crate::LanguageModelCompletionEvent; -use crate::{ - settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName, - LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, -}; +use crate::AllLanguageModelSettings; const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download"; const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library"; @@ -446,7 +447,7 @@ impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let is_authenticated = self.state.read(cx).is_authenticated(); - let ollama_intro = "Get up and running with Llama 3.2, Mistral, Gemma 2, and other large language models with Ollama."; + let ollama_intro = "Get up and running with Llama 3.3, Mistral, Gemma 2, and other large language models with Ollama."; let ollama_reqs = "Ollama must be running with at least one model installed to use it in the assistant."; diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs similarity index 99% rename from 
crates/language_model/src/provider/open_ai.rs rename to crates/language_models/src/provider/open_ai.rs index 2a51b9a648..5c740f93e6 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -7,6 +7,11 @@ use gpui::{ View, WhiteSpace, }; use http_client::HttpClient; +use language_model::{ + LanguageModel, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, + LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, +}; use open_ai::{ stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition, }; @@ -19,12 +24,7 @@ use theme::ThemeSettings; use ui::{prelude::*, Icon, IconName, Tooltip}; use util::ResultExt; -use crate::LanguageModelCompletionEvent; -use crate::{ - settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName, - LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, -}; +use crate::AllLanguageModelSettings; const PROVIDER_ID: &str = "openai"; const PROVIDER_NAME: &str = "OpenAI"; diff --git a/crates/language_model/src/settings.rs b/crates/language_models/src/settings.rs similarity index 96% rename from crates/language_model/src/settings.rs rename to crates/language_models/src/settings.rs index 275fcf0417..c8ec9f7369 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -2,22 +2,20 @@ use std::sync::Arc; use anyhow::Result; use gpui::AppContext; +use language_model::LanguageModelCacheConfiguration; use project::Fs; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsSources}; -use crate::{ - provider::{ - self, - anthropic::AnthropicSettings, - cloud::{self, ZedDotDevSettings}, - copilot_chat::CopilotChatSettings, - google::GoogleSettings, - 
ollama::OllamaSettings, - open_ai::OpenAiSettings, - }, - LanguageModelCacheConfiguration, +use crate::provider::{ + self, + anthropic::AnthropicSettings, + cloud::{self, ZedDotDevSettings}, + copilot_chat::CopilotChatSettings, + google::GoogleSettings, + ollama::OllamaSettings, + open_ai::OpenAiSettings, }; /// Initializes the language model settings. @@ -99,6 +97,7 @@ impl AnthropicSettingsContent { cache_configuration, max_output_tokens, default_temperature, + extra_beta_headers, } => Some(provider::anthropic::AvailableModel { name, display_name, @@ -113,6 +112,7 @@ impl AnthropicSettingsContent { ), max_output_tokens, default_temperature, + extra_beta_headers, }), _ => None, }) diff --git a/crates/language_selector/Cargo.toml b/crates/language_selector/Cargo.toml index b864ffc31f..276e9b0d42 100644 --- a/crates/language_selector/Cargo.toml +++ b/crates/language_selector/Cargo.toml @@ -15,11 +15,14 @@ doctest = false [dependencies] anyhow.workspace = true editor.workspace = true +file_finder.workspace = true +file_icons.workspace = true fuzzy.workspace = true gpui.workspace = true language.workspace = true picker.workspace = true project.workspace = true +settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index 1d5f82d285..eeaa403e20 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -4,7 +4,7 @@ use language::LanguageName; use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; use workspace::{item::ItemHandle, StatusItemView, Workspace}; -use crate::LanguageSelector; +use crate::{LanguageSelector, Toggle}; pub struct ActiveBufferLanguage { active_language: Option>, @@ -54,7 +54,7 @@ impl Render for ActiveBufferLanguage { }); } })) - .tooltip(|cx| Tooltip::text("Select Language", cx)), + 
.tooltip(|cx| Tooltip::for_action("Select Language", &Toggle, cx)), ) }) } diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 489f6fd141..7e935dbdcb 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -3,15 +3,18 @@ mod active_buffer_language; pub use active_buffer_language::ActiveBufferLanguage; use anyhow::anyhow; use editor::Editor; +use file_finder::file_finder_settings::FileFinderSettings; +use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ actions, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model, ParentElement, Render, Styled, View, ViewContext, VisualContext, WeakView, }; -use language::{Buffer, LanguageRegistry}; +use language::{Buffer, LanguageMatcher, LanguageName, LanguageRegistry}; use picker::{Picker, PickerDelegate}; use project::Project; -use std::sync::Arc; +use settings::Settings; +use std::{ops::Not as _, path::Path, sync::Arc}; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::ResultExt; use workspace::{ModalView, Workspace}; @@ -101,8 +104,15 @@ impl LanguageSelectorDelegate { let candidates = language_registry .language_names() .into_iter() + .filter_map(|name| { + language_registry + .available_language_for_name(&name)? 
+ .hidden() + .not() + .then_some(name) + }) .enumerate() - .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name)) + .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, &name)) .collect::>(); Self { @@ -115,13 +125,64 @@ impl LanguageSelectorDelegate { selected_index: 0, } } + + fn language_data_for_match( + &self, + mat: &StringMatch, + cx: &AppContext, + ) -> (String, Option) { + let mut label = mat.string.clone(); + let buffer_language = self.buffer.read(cx).language(); + let need_icon = FileFinderSettings::get_global(cx).file_icons; + if let Some(buffer_language) = buffer_language { + let buffer_language_name = buffer_language.name(); + if buffer_language_name.0.as_ref() == mat.string.as_str() { + label.push_str(" (current)"); + let icon = need_icon + .then(|| self.language_icon(&buffer_language.config().matcher, cx)) + .flatten(); + return (label, icon); + } + } + + if need_icon { + let language_name = LanguageName::new(mat.string.as_str()); + match self + .language_registry + .available_language_for_name(&language_name.0) + { + Some(available_language) => { + let icon = self.language_icon(available_language.matcher(), cx); + (label, icon) + } + None => (label, None), + } + } else { + (label, None) + } + } + + fn language_icon(&self, matcher: &LanguageMatcher, cx: &AppContext) -> Option { + matcher + .path_suffixes + .iter() + .find_map(|extension| { + if extension.contains('.') { + None + } else { + FileIcons::get_icon(Path::new(&format!("file.{extension}")), cx) + } + }) + .map(Icon::from_path) + .map(|icon| icon.color(Color::Muted)) + } } impl PickerDelegate for LanguageSelectorDelegate { type ListItem = ListItem; fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { - "Select a language...".into() + "Select a language…".into() } fn match_count(&self) -> usize { @@ -215,17 +276,13 @@ impl PickerDelegate for LanguageSelectorDelegate { cx: &mut ViewContext>, ) -> Option { let mat = &self.matches[ix]; - let 
buffer_language_name = self.buffer.read(cx).language().map(|l| l.name()); - let mut label = mat.string.clone(); - if buffer_language_name.map(|n| n.0).as_deref() == Some(mat.string.as_str()) { - label.push_str(" (current)"); - } - + let (label, language_icon) = self.language_data_for_match(mat, cx); Some( ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) + .start_slot::(language_icon) .child(HighlightedLabel::new(label, mat.positions.clone())), ) } diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 2e2c0caf40..b488394333 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -3,18 +3,18 @@ use copilot::Copilot; use editor::{actions::MoveToEnd, Editor, EditorEvent}; use futures::{channel::mpsc, StreamExt}; use gpui::{ - actions, div, AnchorCorner, AppContext, Context, EventEmitter, FocusHandle, FocusableView, + actions, div, AppContext, Context, Corner, EventEmitter, FocusHandle, FocusableView, IntoElement, Model, ModelContext, ParentElement, Render, Styled, Subscription, View, ViewContext, VisualContext, WeakModel, WindowContext, }; use language::LanguageServerId; use lsp::{ notification::SetTrace, IoKind, LanguageServer, LanguageServerName, MessageType, - ServerCapabilities, SetTraceParams, TraceValue, + SetTraceParams, TraceValue, }; use project::{search::SearchQuery, Project, WorktreeId}; use std::{borrow::Cow, sync::Arc}; -use ui::{prelude::*, Button, Checkbox, ContextMenu, Label, PopoverMenu, Selection}; +use ui::{prelude::*, Button, Checkbox, ContextMenu, Label, PopoverMenu, ToggleState}; use workspace::{ item::{Item, ItemHandle}, searchable::{SearchEvent, SearchableItem, SearchableItemHandle}, @@ -108,7 +108,6 @@ struct LanguageServerState { rpc_state: Option, trace_level: TraceValue, log_level: MessageType, - capabilities: ServerCapabilities, io_logs_subscription: Option, } @@ -178,7 +177,7 @@ pub enum LogKind { 
Trace, #[default] Logs, - Capabilities, + ServerInfo, } impl LogKind { @@ -187,7 +186,7 @@ impl LogKind { LogKind::Rpc => RPC_MESSAGES, LogKind::Trace => SERVER_TRACE, LogKind::Logs => SERVER_LOGS, - LogKind::Capabilities => SERVER_CAPABILITIES, + LogKind::ServerInfo => SERVER_INFO, } } } @@ -324,7 +323,11 @@ impl LogStore { *id, Some(name.clone()), *worktree_id, - project.read(cx).language_server_for_id(*id, cx), + project + .read(cx) + .lsp_store() + .read(cx) + .language_server_for_id(*id), cx, ); } @@ -378,7 +381,6 @@ impl LogStore { trace_level: TraceValue::Off, log_level: MessageType::LOG, io_logs_subscription: None, - capabilities: ServerCapabilities::default(), } }); @@ -402,10 +404,6 @@ impl LogStore { })); } - if let Some(server) = server { - server_state.capabilities = server.capabilities(); - } - Some(server_state) } @@ -490,10 +488,6 @@ impl LogStore { Some(&self.language_servers.get(&server_id)?.trace_messages) } - fn server_capabilities(&self, server_id: LanguageServerId) -> Option<&ServerCapabilities> { - Some(&self.language_servers.get(&server_id)?.capabilities) - } - fn server_ids_for_project<'a>( &'a self, lookup_project: &'a WeakModel, @@ -619,9 +613,7 @@ impl LspLogView { LogKind::Rpc => this.show_rpc_trace_for_server(server_id, cx), LogKind::Trace => this.show_trace_for_server(server_id, cx), LogKind::Logs => this.show_logs_for_server(server_id, cx), - LogKind::Capabilities => { - this.show_capabilities_for_server(server_id, cx) - } + LogKind::ServerInfo => this.show_server_info(server_id, cx), } } else { this.current_server_id = None; @@ -638,7 +630,7 @@ impl LspLogView { LogKind::Rpc => this.show_rpc_trace_for_server(server_id, cx), LogKind::Trace => this.show_trace_for_server(server_id, cx), LogKind::Logs => this.show_logs_for_server(server_id, cx), - LogKind::Capabilities => this.show_capabilities_for_server(server_id, cx), + LogKind::ServerInfo => this.show_server_info(server_id, cx), } } @@ -703,42 +695,48 @@ impl LspLogView { }); let 
editor_subscription = cx.subscribe( &editor, - |_, _, event: &EditorEvent, cx: &mut ViewContext<'_, LspLogView>| { - cx.emit(event.clone()) - }, + |_, _, event: &EditorEvent, cx: &mut ViewContext| cx.emit(event.clone()), ); let search_subscription = cx.subscribe( &editor, - |_, _, event: &SearchEvent, cx: &mut ViewContext<'_, LspLogView>| { - cx.emit(event.clone()) - }, + |_, _, event: &SearchEvent, cx: &mut ViewContext| cx.emit(event.clone()), ); (editor, vec![editor_subscription, search_subscription]) } - fn editor_for_capabilities( - capabilities: ServerCapabilities, + fn editor_for_server_info( + server: &LanguageServer, cx: &mut ViewContext, ) -> (View, Vec) { let editor = cx.new_view(|cx| { let mut editor = Editor::multi_line(cx); - editor.set_text(serde_json::to_string_pretty(&capabilities).unwrap(), cx); - editor.move_to_end(&MoveToEnd, cx); + let server_info = format!( + "* Server: {NAME} (id {ID}) + +* Binary: {BINARY:#?} + +* Running in project: {PATH:?} + +* Capabilities: {CAPABILITIES}", + NAME = server.name(), + ID = server.server_id(), + BINARY = server.binary(), + PATH = server.root_path(), + CAPABILITIES = serde_json::to_string_pretty(&server.capabilities()) + .unwrap_or_else(|e| format!("Failed to serialize capabilities: {e}")), + ); + editor.set_text(server_info, cx); editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); editor }); let editor_subscription = cx.subscribe( &editor, - |_, _, event: &EditorEvent, cx: &mut ViewContext<'_, LspLogView>| { - cx.emit(event.clone()) - }, + |_, _, event: &EditorEvent, cx: &mut ViewContext| cx.emit(event.clone()), ); let search_subscription = cx.subscribe( &editor, - |_, _, event: &SearchEvent, cx: &mut ViewContext<'_, LspLogView>| { - cx.emit(event.clone()) - }, + |_, _, event: &SearchEvent, cx: &mut ViewContext| cx.emit(event.clone()), ); (editor, vec![editor_subscription, search_subscription]) } @@ -935,7 +933,13 @@ impl LspLogView { level: TraceValue, cx: &mut ViewContext, ) { 
- if let Some(server) = self.project.read(cx).language_server_for_id(server_id, cx) { + if let Some(server) = self + .project + .read(cx) + .lsp_store() + .read(cx) + .language_server_for_id(server_id) + { self.log_store.update(cx, |this, _| { if let Some(state) = this.get_language_server_state(server_id) { state.trace_level = level; @@ -948,22 +952,17 @@ impl LspLogView { } } - fn show_capabilities_for_server( - &mut self, - server_id: LanguageServerId, - cx: &mut ViewContext, - ) { - let capabilities = self.log_store.read(cx).server_capabilities(server_id); - - if let Some(capabilities) = capabilities { - self.current_server_id = Some(server_id); - self.active_entry_kind = LogKind::Capabilities; - let (editor, editor_subscriptions) = - Self::editor_for_capabilities(capabilities.clone(), cx); - self.editor = editor; - self.editor_subscriptions = editor_subscriptions; - cx.notify(); - } + fn show_server_info(&mut self, server_id: LanguageServerId, cx: &mut ViewContext) { + let lsp_store = self.project.read(cx).lsp_store(); + let Some(server) = lsp_store.read(cx).language_server_for_id(server_id) else { + return; + }; + self.current_server_id = Some(server_id); + self.active_entry_kind = LogKind::ServerInfo; + let (editor, editor_subscriptions) = Self::editor_for_server_info(&server, cx); + self.editor = editor; + self.editor_subscriptions = editor_subscriptions; + cx.notify(); cx.focus(&self.focus_handle); } } @@ -1034,7 +1033,7 @@ impl Item for LspLogView { LogKind::Rpc => new_view.show_rpc_trace_for_server(server_id, cx), LogKind::Trace => new_view.show_trace_for_server(server_id, cx), LogKind::Logs => new_view.show_logs_for_server(server_id, cx), - LogKind::Capabilities => new_view.show_capabilities_for_server(server_id, cx), + LogKind::ServerInfo => new_view.show_server_info(server_id, cx), } } new_view @@ -1145,19 +1144,28 @@ impl Render for LspLogToolbarItemView { None } }); - + let available_language_servers: Vec<_> = menu_rows + .iter() + .map(|row| { + ( + 
row.server_id, + row.server_name.clone(), + row.worktree_root_name.clone(), + row.selected_entry, + ) + }) + .collect(); let log_toolbar_view = cx.view().clone(); let lsp_menu = PopoverMenu::new("LspLogView") - .anchor(AnchorCorner::TopLeft) + .anchor(Corner::TopLeft) .trigger(Button::new( "language_server_menu_header", current_server + .as_ref() .map(|row| { Cow::Owned(format!( - "{} ({}) - {}", - row.server_name.0, - row.worktree_root_name, - row.selected_entry.label() + "{} ({})", + row.server_name.0, row.worktree_root_name, )) }) .unwrap_or_else(|| "No server selected".into()), @@ -1165,36 +1173,69 @@ impl Render for LspLogToolbarItemView { .menu({ let log_view = log_view.clone(); move |cx| { - let menu_rows = menu_rows.clone(); let log_view = log_view.clone(); - let log_toolbar_view = log_toolbar_view.clone(); - ContextMenu::build(cx, move |mut menu, cx| { - for (ix, row) in menu_rows.into_iter().enumerate() { - let server_selected = Some(row.server_id) == current_server_id; - menu = menu - .header(format!( - "{} ({})", - row.server_name.0, row.worktree_root_name - )) - .entry( - SERVER_LOGS, - None, - cx.handler_for(&log_view, move |view, cx| { - view.show_logs_for_server(row.server_id, cx); - }), - ); - // We do not support tracing for remote language servers right now - if row.server_kind.is_remote() { - continue; - } + ContextMenu::build(cx, |mut menu, cx| { + for (server_id, name, worktree_root, active_entry_kind) in + available_language_servers.iter() + { + let label = format!("{} ({})", name, worktree_root); + let server_id = *server_id; + let active_entry_kind = *active_entry_kind; menu = menu.entry( + label, + None, + cx.handler_for(&log_view, move |view, cx| { + view.current_server_id = Some(server_id); + view.active_entry_kind = active_entry_kind; + match view.active_entry_kind { + LogKind::Rpc => { + view.toggle_rpc_trace_for_server(server_id, true, cx); + view.show_rpc_trace_for_server(server_id, cx); + } + LogKind::Trace => 
view.show_trace_for_server(server_id, cx), + LogKind::Logs => view.show_logs_for_server(server_id, cx), + LogKind::ServerInfo => view.show_server_info(server_id, cx), + } + cx.notify(); + }), + ); + } + menu + }) + .into() + } + }); + let view_selector = current_server.map(|server| { + let server_id = server.server_id; + let is_remote = server.server_kind.is_remote(); + let rpc_trace_enabled = server.rpc_trace_enabled; + let log_view = log_view.clone(); + PopoverMenu::new("LspViewSelector") + .anchor(Corner::TopLeft) + .trigger(Button::new( + "language_server_menu_header", + server.selected_entry.label(), + )) + .menu(move |cx| { + let log_toolbar_view = log_toolbar_view.clone(); + let log_view = log_view.clone(); + Some(ContextMenu::build(cx, move |this, cx| { + this.entry( + SERVER_LOGS, + None, + cx.handler_for(&log_view, move |view, cx| { + view.show_logs_for_server(server_id, cx); + }), + ) + .when(!is_remote, |this| { + this.entry( SERVER_TRACE, None, cx.handler_for(&log_view, move |view, cx| { - view.show_trace_for_server(row.server_id, cx); + view.show_trace_for_server(server_id, cx); }), - ); - menu = menu.custom_entry( + ) + .custom_entry( { let log_toolbar_view = log_toolbar_view.clone(); move |cx| { @@ -1205,11 +1246,11 @@ impl Render for LspLogToolbarItemView { .child( div().child( Checkbox::new( - ix, - if row.rpc_trace_enabled { - Selection::Selected + "LspLogEnableRpcTrace", + if rpc_trace_enabled { + ToggleState::Selected } else { - Selection::Unselected + ToggleState::Unselected }, ) .on_click(cx.listener_for( @@ -1217,12 +1258,10 @@ impl Render for LspLogToolbarItemView { move |view, selection, cx| { let enabled = matches!( selection, - Selection::Selected + ToggleState::Selected ); view.toggle_rpc_logging_for_server( - row.server_id, - enabled, - cx, + server_id, enabled, cx, ); cx.stop_propagation(); }, @@ -1233,42 +1272,148 @@ impl Render for LspLogToolbarItemView { } }, cx.handler_for(&log_view, move |view, cx| { - 
view.show_rpc_trace_for_server(row.server_id, cx); + view.show_rpc_trace_for_server(server_id, cx); }), - ); - if server_selected && row.selected_entry == LogKind::Rpc { - let selected_ix = menu.select_last(); - // Each language server has: - // 1. A title. - // 2. Server logs. - // 3. Server trace. - // 4. RPC messages. - // 5. Server capabilities - // Thus, if nth server's RPC is selected, the index of selected entry should match this formula - let _expected_index = ix * 5 + 3; - debug_assert_eq!( - Some(_expected_index), - selected_ix, - "Could not scroll to a just added LSP menu item" - ); - } - menu = menu.entry( - SERVER_CAPABILITIES, - None, - cx.handler_for(&log_view, move |view, cx| { - view.show_capabilities_for_server(row.server_id, cx); - }), - ); - } - menu - }) - .into() - } - }); - + ) + }) + .entry( + SERVER_INFO, + None, + cx.handler_for(&log_view, move |view, cx| { + view.show_server_info(server_id, cx); + }), + ) + })) + }) + }); h_flex() .size_full() - .child(lsp_menu) + .justify_between() + .child( + h_flex() + .child(lsp_menu) + .children(view_selector) + .child(log_view.update(cx, |this, _| match this.active_entry_kind { + LogKind::Trace => { + let log_view = log_view.clone(); + div().child( + PopoverMenu::new("lsp-trace-level-menu") + .anchor(Corner::TopLeft) + .trigger(Button::new( + "language_server_trace_level_selector", + "Trace level", + )) + .menu({ + let log_view = log_view.clone(); + + move |cx| { + let id = log_view.read(cx).current_server_id?; + + let trace_level = log_view.update(cx, |this, cx| { + this.log_store.update(cx, |this, _| { + Some( + this.get_language_server_state(id)? 
+ .trace_level, + ) + }) + })?; + + ContextMenu::build(cx, |mut menu, _| { + let log_view = log_view.clone(); + + for (option, label) in [ + (TraceValue::Off, "Off"), + (TraceValue::Messages, "Messages"), + (TraceValue::Verbose, "Verbose"), + ] { + menu = menu.entry(label, None, { + let log_view = log_view.clone(); + move |cx| { + log_view.update(cx, |this, cx| { + if let Some(id) = + this.current_server_id + { + this.update_trace_level( + id, option, cx, + ); + } + }); + } + }); + if option == trace_level { + menu.select_last(); + } + } + + menu + }) + .into() + } + }), + ) + } + LogKind::Logs => { + let log_view = log_view.clone(); + div().child( + PopoverMenu::new("lsp-log-level-menu") + .anchor(Corner::TopLeft) + .trigger(Button::new( + "language_server_log_level_selector", + "Log level", + )) + .menu({ + let log_view = log_view.clone(); + + move |cx| { + let id = log_view.read(cx).current_server_id?; + + let log_level = log_view.update(cx, |this, cx| { + this.log_store.update(cx, |this, _| { + Some( + this.get_language_server_state(id)? 
+ .log_level, + ) + }) + })?; + + ContextMenu::build(cx, |mut menu, _| { + let log_view = log_view.clone(); + + for (option, label) in [ + (MessageType::LOG, "Log"), + (MessageType::INFO, "Info"), + (MessageType::WARNING, "Warning"), + (MessageType::ERROR, "Error"), + ] { + menu = menu.entry(label, None, { + let log_view = log_view.clone(); + move |cx| { + log_view.update(cx, |this, cx| { + if let Some(id) = + this.current_server_id + { + this.update_log_level( + id, option, cx, + ); + } + }); + } + }); + if option == log_level { + menu.select_last(); + } + } + + menu + }) + .into() + } + }), + ) + } + _ => div(), + })), + ) .child( div() .child( @@ -1288,119 +1433,13 @@ impl Render for LspLogToolbarItemView { ) .ml_2(), ) - .child(log_view.update(cx, |this, _| match this.active_entry_kind { - LogKind::Trace => { - let log_view = log_view.clone(); - div().child( - PopoverMenu::new("lsp-trace-level-menu") - .anchor(AnchorCorner::TopLeft) - .trigger(Button::new( - "language_server_trace_level_selector", - "Trace level", - )) - .menu({ - let log_view = log_view.clone(); - - move |cx| { - let id = log_view.read(cx).current_server_id?; - - let trace_level = log_view.update(cx, |this, cx| { - this.log_store.update(cx, |this, _| { - Some(this.get_language_server_state(id)?.trace_level) - }) - })?; - - ContextMenu::build(cx, |mut menu, _| { - let log_view = log_view.clone(); - - for (option, label) in [ - (TraceValue::Off, "Off"), - (TraceValue::Messages, "Messages"), - (TraceValue::Verbose, "Verbose"), - ] { - menu = menu.entry(label, None, { - let log_view = log_view.clone(); - move |cx| { - log_view.update(cx, |this, cx| { - if let Some(id) = this.current_server_id { - this.update_trace_level(id, option, cx); - } - }); - } - }); - if option == trace_level { - menu.select_last(); - } - } - - menu - }) - .into() - } - }), - ) - } - LogKind::Logs => { - let log_view = log_view.clone(); - div().child( - PopoverMenu::new("lsp-log-level-menu") - .anchor(AnchorCorner::TopLeft) 
- .trigger(Button::new( - "language_server_log_level_selector", - "Log level", - )) - .menu({ - let log_view = log_view.clone(); - - move |cx| { - let id = log_view.read(cx).current_server_id?; - - let log_level = log_view.update(cx, |this, cx| { - this.log_store.update(cx, |this, _| { - Some(this.get_language_server_state(id)?.log_level) - }) - })?; - - ContextMenu::build(cx, |mut menu, _| { - let log_view = log_view.clone(); - - for (option, label) in [ - (MessageType::LOG, "Log"), - (MessageType::INFO, "Info"), - (MessageType::WARNING, "Warning"), - (MessageType::ERROR, "Error"), - ] { - menu = menu.entry(label, None, { - let log_view = log_view.clone(); - move |cx| { - log_view.update(cx, |this, cx| { - if let Some(id) = this.current_server_id { - this.update_log_level(id, option, cx); - } - }); - } - }); - if option == log_level { - menu.select_last(); - } - } - - menu - }) - .into() - } - }), - ) - } - _ => div(), - })) } } const RPC_MESSAGES: &str = "RPC Messages"; const SERVER_LOGS: &str = "Server Logs"; const SERVER_TRACE: &str = "Server Trace"; -const SERVER_CAPABILITIES: &str = "Server Capabilities"; +const SERVER_INFO: &str = "Server Info"; impl Default for LspLogToolbarItemView { fn default() -> Self { diff --git a/crates/language_tools/src/lsp_log_tests.rs b/crates/language_tools/src/lsp_log_tests.rs index 79308b6e10..ad3cc87f2d 100644 --- a/crates/language_tools/src/lsp_log_tests.rs +++ b/crates/language_tools/src/lsp_log_tests.rs @@ -57,7 +57,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) { let _rust_buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/the-root/test.rs", cx) + project.open_local_buffer_with_lsp("/the-root/test.rs", cx) }) .await .unwrap(); diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index a0eb479e6d..97c29b8615 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -128,13 +128,18 @@ 
impl SyntaxTreeView { fn editor_updated(&mut self, did_reparse: bool, cx: &mut ViewContext) -> Option<()> { // Find which excerpt the cursor is in, and the position within that excerpted buffer. let editor_state = self.editor.as_mut()?; - let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| { + let snapshot = editor_state + .editor + .update(cx, |editor, cx| editor.snapshot(cx)); + let (excerpt, buffer, range) = editor_state.editor.update(cx, |editor, cx| { let selection_range = editor.selections.last::(cx).range(); - editor - .buffer() - .read(cx) - .range_to_buffer_ranges(selection_range, cx) - .pop() + let multi_buffer = editor.buffer().read(cx); + let (excerpt, range) = snapshot + .buffer_snapshot + .range_to_buffer_ranges(selection_range) + .pop()?; + let buffer = multi_buffer.buffer(excerpt.buffer_id()).unwrap().clone(); + Some((excerpt, buffer, range)) })?; // If the cursor has moved into a different excerpt, retrieve a new syntax layer @@ -143,16 +148,16 @@ impl SyntaxTreeView { .active_buffer .get_or_insert_with(|| BufferState { buffer: buffer.clone(), - excerpt_id, + excerpt_id: excerpt.id(), active_layer: None, }); let mut prev_layer = None; if did_reparse { prev_layer = buffer_state.active_layer.take(); } - if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id { + if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt.id() { buffer_state.buffer = buffer.clone(); - buffer_state.excerpt_id = excerpt_id; + buffer_state.excerpt_id = excerpt.id(); buffer_state.active_layer = None; } @@ -273,7 +278,7 @@ impl SyntaxTreeView { } impl Render for SyntaxTreeView { - fn render(&mut self, cx: &mut gpui::ViewContext<'_, Self>) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let mut rendered = div().flex_1(); if let Some(layer) = self @@ -422,7 +427,7 @@ impl SyntaxTreeToolbarItemView { } } - fn render_menu(&mut self, cx: &mut ViewContext<'_, Self>) -> Option> { + fn 
render_menu(&mut self, cx: &mut ViewContext) -> Option> { let tree_view = self.tree_view.as_ref()?; let tree_view = tree_view.read(cx); @@ -492,7 +497,7 @@ fn format_node_range(node: Node) -> String { } impl Render for SyntaxTreeToolbarItemView { - fn render(&mut self, cx: &mut ViewContext<'_, Self>) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { self.render_menu(cx) .unwrap_or_else(|| PopoverMenu::new("Empty Syntax Tree")) } diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 96a44403bc..951423056e 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -29,7 +29,7 @@ load-grammars = [ "tree-sitter-rust", "tree-sitter-typescript", "tree-sitter-yaml", - "tree-sitter" + "tree-sitter", ] [dependencies] diff --git a/crates/languages/src/bash/textobjects.scm b/crates/languages/src/bash/textobjects.scm new file mode 100644 index 0000000000..cca2f7d9e9 --- /dev/null +++ b/crates/languages/src/bash/textobjects.scm @@ -0,0 +1,7 @@ +(function_definition + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + +(comment) @comment.around diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index a0e0f6dadb..c50a16b3e4 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -4,10 +4,11 @@ use futures::StreamExt; use gpui::AsyncAppContext; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; -use lsp::{LanguageServerBinary, LanguageServerName}; +use lsp::{InitializeParams, LanguageServerBinary, LanguageServerName}; +use serde_json::json; use smol::fs::{self, File}; use std::{any::Any, env::consts, path::PathBuf, sync::Arc}; -use util::{fs::remove_matching, maybe, ResultExt}; +use util::{fs::remove_matching, maybe, merge_json_value_into, ResultExt}; pub struct CLspAdapter; @@ -24,6 +25,7 @@ impl super::LspAdapter for CLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, + 
_: Arc, _: &AsyncAppContext, ) -> Option { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; @@ -85,7 +87,7 @@ impl super::LspAdapter for CLspAdapter { } futures::io::copy(response.body_mut(), &mut file).await?; - let unzip_status = smol::process::Command::new("unzip") + let unzip_status = util::command::new_smol_command("unzip") .current_dir(&container_dir) .arg(&zip_path) .output() @@ -256,6 +258,26 @@ impl super::LspAdapter for CLspAdapter { filter_range, }) } + + fn prepare_initialize_params( + &self, + mut original: InitializeParams, + ) -> Result { + // enable clangd's dot-to-arrow feature. + let experimental = json!({ + "textDocument": { + "completion" : { + "editsNearCursor": true + } + } + }); + if let Some(ref mut original_experimental) = original.capabilities.experimental { + merge_json_value_into(experimental, original_experimental); + } else { + original.capabilities.experimental = Some(experimental); + } + Ok(original) + } } async fn get_cached_server_binary(container_dir: PathBuf) -> Option { diff --git a/crates/languages/src/c/injections.scm b/crates/languages/src/c/injections.scm index 2696594af2..73d2628225 100644 --- a/crates/languages/src/c/injections.scm +++ b/crates/languages/src/c/injections.scm @@ -1,7 +1,7 @@ (preproc_def - value: (preproc_arg) @content - (#set! "language" "c")) + value: (preproc_arg) @injection.content + (#set! injection.language "c")) (preproc_function_def - value: (preproc_arg) @content - (#set! "language" "c")) + value: (preproc_arg) @injection.content + (#set! 
injection.language "c")) diff --git a/crates/languages/src/c/textobjects.scm b/crates/languages/src/c/textobjects.scm new file mode 100644 index 0000000000..832dd62288 --- /dev/null +++ b/crates/languages/src/c/textobjects.scm @@ -0,0 +1,25 @@ +(declaration + declarator: (function_declarator)) @function.around + +(function_definition + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + +(preproc_function_def + value: (_) @function.inside) @function.around + +(comment) @comment.around + +(struct_specifier + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(enum_specifier + body: (_ + "{" + [(_) ","?]* @class.inside + "}")) @class.around diff --git a/crates/languages/src/cpp/config.toml b/crates/languages/src/cpp/config.toml index e78bc8ea6c..6aba8727f3 100644 --- a/crates/languages/src/cpp/config.toml +++ b/crates/languages/src/cpp/config.toml @@ -1,6 +1,6 @@ name = "C++" grammar = "cpp" -path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "ipp", "inl", "cu", "cuh"] +path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "ipp", "inl", "cu", "cuh", "C", "H"] line_comments = ["// ", "/// ", "//! "] autoclose_before = ";:.,=}])>" brackets = [ diff --git a/crates/languages/src/cpp/injections.scm b/crates/languages/src/cpp/injections.scm index 57cd3ac7f3..e903e1affd 100644 --- a/crates/languages/src/cpp/injections.scm +++ b/crates/languages/src/cpp/injections.scm @@ -1,11 +1,11 @@ (preproc_def - value: (preproc_arg) @content - (#set! "language" "c++")) + value: (preproc_arg) @injection.content + (#set! injection.language "c++")) (preproc_function_def - value: (preproc_arg) @content - (#set! "language" "c++")) + value: (preproc_arg) @injection.content + (#set! 
injection.language "c++")) (raw_string_literal - delimiter: (raw_string_delimiter) @language - (raw_string_content) @content) + delimiter: (raw_string_delimiter) @injection.language + (raw_string_content) @injection.content) diff --git a/crates/languages/src/cpp/textobjects.scm b/crates/languages/src/cpp/textobjects.scm new file mode 100644 index 0000000000..11a27b8d58 --- /dev/null +++ b/crates/languages/src/cpp/textobjects.scm @@ -0,0 +1,31 @@ +(declaration + declarator: (function_declarator)) @function.around + +(function_definition + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + +(preproc_function_def + value: (_) @function.inside) @function.around + +(comment) @comment.around + +(struct_specifier + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(enum_specifier + body: (_ + "{" + [(_) ","?]* @class.inside + "}")) @class.around + +(class_specifier + body: (_ + "{" + [(_) ":"? ";"?]* @class.inside + "}"?)) @class.around diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 536f339664..148f6acced 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -26,6 +26,7 @@ pub struct CssLspAdapter { } impl CssLspAdapter { + const PACKAGE_NAME: &str = "vscode-langservers-extracted"; pub fn new(node: NodeRuntime) -> Self { CssLspAdapter { node } } @@ -56,18 +57,13 @@ impl LspAdapter for CssLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = "vscode-langservers-extracted"; - let should_install_language_server = self - .node - .should_install_npm_package(package_name, &server_path, &container_dir, &latest_version) - .await; - - if should_install_language_server { - self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) - .await?; - } + self.node + .npm_install_packages( + &container_dir, + &[(Self::PACKAGE_NAME, latest_version.as_str())], + ) + .await?; 
Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -76,6 +72,31 @@ impl LspAdapter for CssLspAdapter { }) } + async fn check_if_version_installed( + &self, + version: &(dyn 'static + Send + Any), + container_dir: &PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + let version = version.downcast_ref::().unwrap(); + let server_path = container_dir.join(SERVER_PATH); + + let should_install_language_server = self + .node + .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .await; + + if should_install_language_server { + None + } else { + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: server_binary_arguments(&server_path), + }) + } + } + async fn cached_server_binary( &self, container_dir: PathBuf, diff --git a/crates/languages/src/css/config.toml b/crates/languages/src/css/config.toml index 9b0c9c703c..d6ea2f9c7f 100644 --- a/crates/languages/src/css/config.toml +++ b/crates/languages/src/css/config.toml @@ -1,6 +1,6 @@ name = "CSS" grammar = "css" -path_suffixes = ["css", "postcss"] +path_suffixes = ["css", "postcss", "pcss"] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, diff --git a/crates/languages/src/css/textobjects.scm b/crates/languages/src/css/textobjects.scm new file mode 100644 index 0000000000..c9c6207b85 --- /dev/null +++ b/crates/languages/src/css/textobjects.scm @@ -0,0 +1,30 @@ +(comment) @comment.around + +(rule_set + (block ( + "{" + (_)* @function.inside + "}" ))) @function.around +(keyframe_block + (block ( + "{" + (_)* @function.inside + "}" ))) @function.around + +(media_statement + (block ( + "{" + (_)* @class.inside + "}" ))) @class.around + +(supports_statement + (block ( + "{" + (_)* @class.inside + "}" ))) @class.around + +(keyframes_statement + (keyframe_block_list ( + "{" + (_)* @class.inside + "}" ))) @class.around diff --git a/crates/languages/src/diff/highlights.scm 
b/crates/languages/src/diff/highlights.scm index d8e600882c..70ec01aa11 100644 --- a/crates/languages/src/diff/highlights.scm +++ b/crates/languages/src/diff/highlights.scm @@ -1,3 +1,5 @@ +(comment) @comment + [ (addition) (new_file) @@ -12,4 +14,35 @@ (location) @attribute -(command) @function +(command + "diff" @function + (argument) @variable.parameter) + +(filename) @string.special.path + +(mode) @number + +([ + ".." + "+" + "++" + "+++" + "++++" + "-" + "--" + "---" + "----" +] @punctuation.special) + +[ + (binary_change) + (similarity) + (file_change) +] @label + +(index + "index" @keyword) + +(similarity + (score) @number + "%" @number) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 669f6918a9..d56c720eb0 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -8,13 +8,14 @@ pub use language::*; use lsp::{LanguageServerBinary, LanguageServerName}; use regex::Regex; use serde_json::json; -use smol::{fs, process}; +use smol::fs; use std::{ any::Any, borrow::Cow, ffi::{OsStr, OsString}, ops::Range, path::PathBuf, + process::Output, str, sync::{ atomic::{AtomicBool, Ordering::SeqCst}, @@ -35,13 +36,19 @@ impl GoLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("gopls"); } -static GOPLS_VERSION_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"\d+\.\d+\.\d+").expect("Failed to create GOPLS_VERSION_REGEX")); +static VERSION_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\d+\.\d+\.\d+").expect("Failed to create VERSION_REGEX")); static GO_ESCAPE_SUBTEST_NAME_REGEX: LazyLock = LazyLock::new(|| { Regex::new(r#"[.*+?^${}()|\[\]\\]"#).expect("Failed to create GO_ESCAPE_SUBTEST_NAME_REGEX") }); +const BINARY: &str = if cfg!(target_os = "windows") { + "gopls.exe" +} else { + "gopls" +}; + #[async_trait(?Send)] impl super::LspAdapter for GoLspAdapter { fn name(&self) -> LanguageServerName { @@ -67,6 +74,7 @@ impl super::LspAdapter for GoLspAdapter { async fn check_if_user_installed( 
&self, delegate: &dyn LspAdapterDelegate, + _: Arc<dyn LanguageToolchainStore>, + _: &AsyncAppContext, ) -> Option<LanguageServerBinary> { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; @@ -110,11 +118,18 @@ impl super::LspAdapter for GoLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result<LanguageServerBinary> { + let go = delegate.which("go".as_ref()).await.unwrap_or("go".into()); + let go_version_output = util::command::new_smol_command(&go) + .args(["version"]) + .output() + .await + .context("failed to get go version via `go version` command")?; + let go_version = parse_version_output(&go_version_output)?; let version = version.downcast::<Option<String>>().unwrap(); let this = *self; if let Some(version) = *version { - let binary_path = container_dir.join(format!("gopls_{version}")); + let binary_path = container_dir.join(format!("gopls_{version}_go_{go_version}")); if let Ok(metadata) = fs::metadata(&binary_path).await { if metadata.is_file() { remove_matching(&container_dir, |entry| { @@ -138,8 +153,7 @@ impl super::LspAdapter for GoLspAdapter { let gobin_dir = container_dir.join("gobin"); fs::create_dir_all(&gobin_dir).await?; - let go = delegate.which("go".as_ref()).await.unwrap_or("go".into()); - let install_output = process::Command::new(go) + let install_output = util::command::new_smol_command(go) .env("GO111MODULE", "on") .env("GOBIN", &gobin_dir) .args(["install", "golang.org/x/tools/gopls@latest"]) @@ -156,19 +170,14 @@ impl super::LspAdapter for GoLspAdapter { return Err(anyhow!("failed to install gopls with `go install`. Is `go` installed and in the PATH? 
Check logs for more information.")); } - let installed_binary_path = gobin_dir.join("gopls"); - let version_output = process::Command::new(&installed_binary_path) + let installed_binary_path = gobin_dir.join(BINARY); + let version_output = util::command::new_smol_command(&installed_binary_path) .arg("version") .output() .await .context("failed to run installed gopls binary")?; - let version_stdout = str::from_utf8(&version_output.stdout) - .context("gopls version produced invalid utf8 output")?; - let version = GOPLS_VERSION_REGEX - .find(version_stdout) - .with_context(|| format!("failed to parse golps version output '{version_stdout}'"))? - .as_str(); - let binary_path = container_dir.join(format!("gopls_{version}")); + let gopls_version = parse_version_output(&version_output)?; + let binary_path = container_dir.join(format!("gopls_{gopls_version}_go_{go_version}")); fs::rename(&installed_binary_path, &binary_path).await?; Ok(LanguageServerBinary { @@ -364,6 +373,18 @@ impl super::LspAdapter for GoLspAdapter { } } +fn parse_version_output(output: &Output) -> Result<&str> { + let version_stdout = + str::from_utf8(&output.stdout).context("version command produced invalid utf8 output")?; + + let version = VERSION_REGEX + .find(version_stdout) + .with_context(|| format!("failed to parse version output '{version_stdout}'"))? + .as_str(); + + Ok(version) +} + async fn get_cached_server_binary(container_dir: PathBuf) -> Option { maybe!(async { let mut last_binary_path = None; diff --git a/crates/languages/src/go/injections.scm b/crates/languages/src/go/injections.scm index 7744d98679..2be0844d97 100644 --- a/crates/languages/src/go/injections.scm +++ b/crates/languages/src/go/injections.scm @@ -9,5 +9,5 @@ [ (raw_string_literal) (interpreted_string_literal) - ] @content - (#set! "language" "regex"))) + ] @injection.content + (#set! 
injection.language "regex"))) diff --git a/crates/languages/src/go/textobjects.scm b/crates/languages/src/go/textobjects.scm new file mode 100644 index 0000000000..eb4f3a0050 --- /dev/null +++ b/crates/languages/src/go/textobjects.scm @@ -0,0 +1,25 @@ +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(type_declaration + (type_spec (struct_type (field_declaration_list ( + "{" + (_)* @class.inside + "}")?)))) @class.around + +(type_declaration + (type_spec (interface_type + (_)* @class.inside))) @class.around + +(type_declaration) @class.around + +(comment)+ @comment.around diff --git a/crates/languages/src/javascript/highlights.scm b/crates/languages/src/javascript/highlights.scm index e5d4cb2068..8ae208d4cd 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/languages/src/javascript/highlights.scm @@ -194,6 +194,7 @@ "throw" "try" "typeof" + "using" "var" "void" "while" diff --git a/crates/languages/src/javascript/injections.scm b/crates/languages/src/javascript/injections.scm index 180608944a..7baba5f227 100644 --- a/crates/languages/src/javascript/injections.scm +++ b/crates/languages/src/javascript/injections.scm @@ -1,60 +1,60 @@ (((comment) @_jsdoc_comment - (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @content - (#set! "language" "jsdoc")) + (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content + (#set! injection.language "jsdoc")) -((regex) @content - (#set! "language" "regex")) +((regex) @injection.content + (#set! injection.language "regex")) (call_expression function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @content - (#set! "language" "css")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) ) (call_expression function: (identifier) @_name (#eq? 
@_name "html") - arguments: (template_string) @content - (#set! "language" "html") + arguments: (template_string) @injection.content + (#set! injection.language "html") ) (call_expression function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @content - (#set! "language" "javascript")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "javascript")) ) (call_expression function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @content - (#set! "language" "json")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "json")) ) (call_expression function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @content - (#set! "language" "sql")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "sql")) ) (call_expression function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @content - (#set! "language" "typescript")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "typescript")) ) (call_expression function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @content - (#set! "language" "yaml")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "yaml")) ) (call_expression function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @content - (#set! "language" "graphql")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "graphql")) ) (call_expression function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @content - (#set! 
"language" "graphql"))) + arguments: (arguments (template_string (string_fragment) @injection.content + (#set! injection.language "graphql"))) ) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm index c5ec3d36dd..d70d8bb597 100644 --- a/crates/languages/src/javascript/outline.scm +++ b/crates/languages/src/javascript/outline.scm @@ -62,12 +62,20 @@ name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -(call_expression - function: (_) @context - (#any-of? @context "it" "test" "describe") - arguments: ( - arguments . (string - (string_fragment) @name +( + (call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression object: (identifier) @_name) + ] + ) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: ( + arguments . (string (string_fragment) @name) ) ) ) @item diff --git a/crates/languages/src/javascript/runnables.scm b/crates/languages/src/javascript/runnables.scm index 37f48e1df8..1b68b9a41e 100644 --- a/crates/languages/src/javascript/runnables.scm +++ b/crates/languages/src/javascript/runnables.scm @@ -2,13 +2,20 @@ ; Function expression that has `it`, `test` or `describe` as the function name ( (call_expression - function: (_) @_name - (#any-of? @_name "it" "test" "describe") - arguments: ( - arguments . (string - (string_fragment) @run + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression object: (identifier) @_name) + ] ) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: ( + arguments . (string (string_fragment) @run) ) ) @_js-test + (#set! 
tag js-test) ) diff --git a/crates/languages/src/javascript/textobjects.scm b/crates/languages/src/javascript/textobjects.scm new file mode 100644 index 0000000000..1a273ddb50 --- /dev/null +++ b/crates/languages/src/javascript/textobjects.scm @@ -0,0 +1,51 @@ +(comment)+ @comment.around + +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(function_expression + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + +(arrow_function) @function.around + +(generator_function + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(generator_function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(class_declaration + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(class + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around diff --git a/crates/languages/src/jsdoc/config.toml b/crates/languages/src/jsdoc/config.toml index 444e657a38..0aa0d361bd 100644 --- a/crates/languages/src/jsdoc/config.toml +++ b/crates/languages/src/jsdoc/config.toml @@ -5,3 +5,4 @@ brackets = [ { start = "{", end = "}", close = true, newline = false }, { start = "[", end = "]", close = true, newline = false }, ] +hidden = true diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index c0c7e6f453..7172f96f74 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -9,7 +9,7 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; use language::{LanguageRegistry, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{LanguageServerBinary, LanguageServerName}; use node_runtime::NodeRuntime; -use project::ContextProviderWithTasks; +use project::{lsp_store::language_server_settings, ContextProviderWithTasks}; 
use serde_json::{json, Value}; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; use smol::{ @@ -25,7 +25,7 @@ use std::{ sync::{Arc, OnceLock}, }; use task::{TaskTemplate, TaskTemplates, VariableName}; -use util::{fs::remove_matching, maybe, ResultExt}; +use util::{fs::remove_matching, maybe, merge_json_value_into, ResultExt}; const SERVER_PATH: &str = "node_modules/vscode-langservers-extracted/bin/vscode-json-language-server"; @@ -64,6 +64,8 @@ pub struct JsonLspAdapter { } impl JsonLspAdapter { + const PACKAGE_NAME: &str = "vscode-langservers-extracted"; + pub fn new(node: NodeRuntime, languages: Arc) -> Self { Self { node, @@ -74,6 +76,7 @@ impl JsonLspAdapter { fn get_workspace_config(language_names: Vec, cx: &mut AppContext) -> Value { let action_names = cx.all_action_names(); + let deprecations = cx.action_deprecations(); let font_names = &cx.text_system().all_font_names(); let settings_schema = cx.global::().json_schema( @@ -114,7 +117,7 @@ impl JsonLspAdapter { }, { "fileMatch": [schema_file_match(paths::keymap_file())], - "schema": KeymapFile::generate_json_schema(action_names), + "schema": KeymapFile::generate_json_schema(action_names, deprecations), }, { "fileMatch": [ @@ -142,11 +145,36 @@ impl LspAdapter for JsonLspAdapter { ) -> Result> { Ok(Box::new( self.node - .npm_package_latest_version("vscode-langservers-extracted") + .npm_package_latest_version(Self::PACKAGE_NAME) .await?, ) as Box<_>) } + async fn check_if_version_installed( + &self, + version: &(dyn 'static + Send + Any), + container_dir: &PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + let version = version.downcast_ref::().unwrap(); + let server_path = container_dir.join(SERVER_PATH); + + let should_install_language_server = self + .node + .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .await; + + if should_install_language_server { + None + } else { + Some(LanguageServerBinary { + path: 
self.node.binary_path().await.ok()?, + env: None, + arguments: server_binary_arguments(&server_path), + }) + } + } + async fn fetch_server_binary( &self, latest_version: Box, @@ -155,18 +183,13 @@ impl LspAdapter for JsonLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = "vscode-langservers-extracted"; - let should_install_language_server = self - .node - .should_install_npm_package(package_name, &server_path, &container_dir, &latest_version) - .await; - - if should_install_language_server { - self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) - .await?; - } + self.node + .npm_install_packages( + &container_dir, + &[(Self::PACKAGE_NAME, latest_version.as_str())], + ) + .await?; Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -194,15 +217,26 @@ impl LspAdapter for JsonLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + delegate: &Arc, _: Arc, cx: &mut AsyncAppContext, ) -> Result { - cx.update(|cx| { + let mut config = cx.update(|cx| { self.workspace_config .get_or_init(|| Self::get_workspace_config(self.languages.language_names(), cx)) .clone() - }) + })?; + + let project_options = cx.update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|s| s.settings.clone()) + })?; + + if let Some(override_options) = project_options { + merge_json_value_into(override_options, &mut config); + } + + Ok(config) } fn language_ids(&self) -> HashMap { diff --git a/crates/languages/src/json/config.toml b/crates/languages/src/json/config.toml index c4a91c20b0..dc49f4f36e 100644 --- a/crates/languages/src/json/config.toml +++ b/crates/languages/src/json/config.toml @@ -1,6 +1,6 @@ name = "JSON" grammar = "json" -path_suffixes = ["json"] +path_suffixes = ["json", "flake.lock"] line_comments = ["// "] autoclose_before = ",]}" brackets = [ diff --git 
a/crates/languages/src/json/schemas/package.json b/crates/languages/src/json/schemas/package.json index 42c8f3c114..79d2457276 100644 --- a/crates/languages/src/json/schemas/package.json +++ b/crates/languages/src/json/schemas/package.json @@ -139,7 +139,7 @@ } }, "patternProperties": { - "^(?![\\.0-9]).": { + "^[^.0-9]+$": { "$ref": "#/definitions/packageExportsEntryOrFallback", "description": "The module path that is resolved when this environment matches the property name." } @@ -616,7 +616,7 @@ } } }, - "bundledDependencies": { + "bundleDependencies": { "description": "Array of package names that will be bundled when publishing the package.", "oneOf": [ { @@ -630,8 +630,8 @@ } ] }, - "bundleDependencies": { - "description": "DEPRECATED: This field is honored, but \"bundledDependencies\" is the correct field name.", + "bundledDependencies": { + "description": "DEPRECATED: This field is honored, but \"bundleDependencies\" is the correct field name.", "oneOf": [ { "type": "array", @@ -734,6 +734,9 @@ "registry": { "type": "string", "format": "uri" + }, + "provenance": { + "type": "boolean" } }, "additionalProperties": true diff --git a/crates/languages/src/json/schemas/tsconfig.json b/crates/languages/src/json/schemas/tsconfig.json index 808fc6f966..9174a58537 100644 --- a/crates/languages/src/json/schemas/tsconfig.json +++ b/crates/languages/src/json/schemas/tsconfig.json @@ -232,7 +232,7 @@ "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable importing files with any extension, provided a declaration file is present.", "type": ["boolean", "null"], - "markdownDescription": "Enable importing files with any extension, provided a declaration file is present.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowImportingTsExtensions" + "markdownDescription": "Enable importing files with any extension, provided a declaration file is present.\n\nSee more: 
https://www.typescriptlang.org/tsconfig#allowArbitraryExtensions" }, "allowImportingTsExtensions": { "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", @@ -426,17 +426,17 @@ "anyOf": [ { "enum": [ - "Classic", - "Node", - "Node10", - "Node16", - "NodeNext", - "Bundler" + "classic", + "node", + "node10", + "node16", + "nodenext", + "bundler" ], "markdownEnumDescriptions": [ - "It’s recommended to use `\"Node16\"` instead", - "Deprecated, use `\"Node10\"` in TypeScript 5.0+ instead", - "It’s recommended to use `\"Node16\"` instead", + "It’s recommended to use `\"node16\"` instead", + "Deprecated, use `\"node10\"` in TypeScript 5.0+ instead", + "It’s recommended to use `\"node16\"` instead", "This is the recommended setting for libraries and Node.js applications", "This is the recommended setting for libraries and Node.js applications", "This is the recommended setting in TypeScript 5.0+ for applications that use a bundler" @@ -497,10 +497,10 @@ }, "noUnusedLocals": { "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", - "description": "Enable error reporting when a local variables aren't read.", + "description": "Enable error reporting when a local variable isn't read.", "type": ["boolean", "null"], "default": false, - "markdownDescription": "Enable error reporting when a local variables aren't read.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedLocals" + "markdownDescription": "Enable error reporting when a local variable isn't read.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedLocals" }, "noUnusedParameters": { "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", @@ -949,14 +949,19 @@ "ESNext.Array", "ESNext.AsyncIterable", "ESNext.BigInt", + "ESNext.Collection", "ESNext.Intl", + "ESNext.Object", "ESNext.Promise", + "ESNext.Regexp", "ESNext.String", "ESNext.Symbol", "DOM", + 
"DOM.AsyncIterable", "DOM.Iterable", "ScriptHost", "WebWorker", + "WebWorker.AsyncIterable", "WebWorker.ImportScripts", "Webworker.Iterable", "ES7", @@ -1022,13 +1027,13 @@ "pattern": "^[Ee][Ss][Nn][Ee][Xx][Tt](\\.([Aa][Rr][Rr][Aa][Yy]|[Aa][Ss][Yy][Nn][Cc][Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]|[Bb][Ii][Gg][Ii][Nn][Tt]|[Ii][Nn][Tt][Ll]|[Pp][Rr][Oo][Mm][Ii][Ss][Ee]|[Ss][Tt][Rr][Ii][Nn][Gg]|[Ss][Yy][Mm][Bb][Oo][Ll]|[Ww][Ee][Aa][Kk][Rr][Ee][Ff]|[Dd][Ee][Cc][Oo][Rr][Aa][Tt][Oo][Rr][Ss]|[Dd][Ii][Ss][Pp][Oo][Ss][Aa][Bb][Ll][Ee]))?$" }, { - "pattern": "^[Dd][Oo][Mm](\\.[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee])?$" + "pattern": "^[Dd][Oo][Mm](\\.([Aa][Ss][Yy][Nn][Cc])?[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee])?$" }, { "pattern": "^[Ss][Cc][Rr][Ii][Pp][Tt][Hh][Oo][Ss][Tt]$" }, { - "pattern": "^[Ww][Ee][Bb][Ww][Oo][Rr][Kk][Ee][Rr](\\.([Ii][Mm][Pp][Oo][Rr][Tt][Ss][Cc][Rr][Ii][Pp][Tt][Ss]|[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]))?$" + "pattern": "^[Ww][Ee][Bb][Ww][Oo][Rr][Kk][Ee][Rr](\\.([Ii][Mm][Pp][Oo][Rr][Tt][Ss][Cc][Rr][Ii][Pp][Tt][Ss]|([Aa][Ss][Yy][Nn][Cc])?[Ii][Tt][Ee][Rr][Aa][Bb][Ll][Ee]))?$" }, { "pattern": "^[Dd][Ee][Cc][Oo][Rr][Aa][Tt][Oo][Rr][Ss](\\.([Ll][Ee][Gg][Aa][Cc][Yy]))?$" @@ -1203,6 +1208,34 @@ "description": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.", "type": ["boolean", "null"], "markdownDescription": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.\n\nSee more: https://www.typescriptlang.org/tsconfig#verbatimModuleSyntax" + }, + "noCheck": { + "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", + "description": "Disable full type checking (only critical parse and emit errors will be reported)", + "type": ["boolean", "null"], + "default": false, + "markdownDescription": "Disable full type checking (only 
critical parse and emit errors will be reported)\n\nSee more: https://www.typescriptlang.org/tsconfig#noCheck" + }, + "isolatedDeclarations": { + "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", + "description": "Require sufficient annotation on exports so other tools can trivially generate declaration files.", + "type": ["boolean", "null"], + "default": false, + "markdownDescription": "Require sufficient annotation on exports so other tools can trivially generate declaration files.\n\nSee more: https://www.typescriptlang.org/tsconfig#isolatedDeclarations" + }, + "noUncheckedSideEffectImports": { + "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", + "description": "Check side effect imports.", + "type": ["boolean", "null"], + "default": false, + "markdownDescription": "Check side effect imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUncheckedSideEffectImports" + }, + "strictBuiltinIteratorReturn": { + "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", + "description": "Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'.", + "type": ["boolean", "null"], + "default": false, + "markdownDescription": "Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictBuiltinIteratorReturn" } } } @@ -1423,4 +1456,3 @@ "title": "JSON schema for the TypeScript compiler's configuration file", "type": "object" } - diff --git a/crates/languages/src/json/textobjects.scm b/crates/languages/src/json/textobjects.scm new file mode 100644 index 0000000000..81fd20245b --- /dev/null +++ b/crates/languages/src/json/textobjects.scm @@ -0,0 +1 @@ +(comment)+ @comment.around diff --git a/crates/languages/src/jsonc/config.toml b/crates/languages/src/jsonc/config.toml index 
fe62764b27..226ae92912 100644 --- a/crates/languages/src/jsonc/config.toml +++ b/crates/languages/src/jsonc/config.toml @@ -1,6 +1,6 @@ name = "JSONC" grammar = "jsonc" -path_suffixes = ["jsonc"] +path_suffixes = ["jsonc", "tsconfig.json", "pyrightconfig.json"] line_comments = ["// "] autoclose_before = ",]}" brackets = [ diff --git a/crates/languages/src/jsonc/textobjects.scm b/crates/languages/src/jsonc/textobjects.scm new file mode 100644 index 0000000000..81fd20245b --- /dev/null +++ b/crates/languages/src/jsonc/textobjects.scm @@ -0,0 +1 @@ +(comment)+ @comment.around diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 776d47a5f7..5ba6f5c034 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -62,6 +62,7 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), + config.hidden, Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), @@ -83,6 +84,7 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), + config.hidden, Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), @@ -104,6 +106,7 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), + config.hidden, Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), @@ -125,6 +128,7 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), + config.hidden, Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 94230d7128..af20aa49ba 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -2,6 +2,7 @@ name = "Markdown" grammar = "markdown" path_suffixes = ["md", "mdx", 
"mdwn", "markdown", "MD"] word_characters = ["-"] +block_comment = [""] brackets = [ { start = "{", end = "}", close = true, newline = true }, { start = "[", end = "]", close = true, newline = true }, diff --git a/crates/languages/src/markdown/injections.scm b/crates/languages/src/markdown/injections.scm index 4b2493d4ce..f2b959dfda 100644 --- a/crates/languages/src/markdown/injections.scm +++ b/crates/languages/src/markdown/injections.scm @@ -1,7 +1,14 @@ (fenced_code_block (info_string - (language) @language) - (code_fence_content) @content) + (language) @injection.language) + (code_fence_content) @injection.content) -((inline) @content - (#set! "language" "markdown-inline")) +((inline) @injection.content + (#set! injection.language "markdown-inline")) + +((html_block) @injection.content + (#set! injection.language "html")) + +((minus_metadata) @injection.content (#set! injection.language "yaml")) + +((plus_metadata) @injection.content (#set! injection.language "toml")) diff --git a/crates/languages/src/markdown/textobjects.scm b/crates/languages/src/markdown/textobjects.scm new file mode 100644 index 0000000000..e0f76c5365 --- /dev/null +++ b/crates/languages/src/markdown/textobjects.scm @@ -0,0 +1,3 @@ +(section + (atx_heading) + (_)* @class.inside) @class.around diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 1e855777b2..be9cef8651 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -2,8 +2,9 @@ use anyhow::ensure; use anyhow::{anyhow, Result}; use async_trait::async_trait; use collections::HashMap; -use gpui::AsyncAppContext; use gpui::{AppContext, Task}; +use gpui::{AsyncAppContext, SharedString}; +use language::language_settings::language_settings; use language::LanguageName; use language::LanguageToolchainStore; use language::Toolchain; @@ -18,9 +19,10 @@ use pet_core::python_environment::PythonEnvironmentKind; use pet_core::Configuration; use project::lsp_store::language_server_settings; 
use serde_json::{json, Value}; -use smol::{lock::OnceCell, process::Command}; +use smol::lock::OnceCell; use std::cmp::Ordering; +use std::str::FromStr; use std::sync::Mutex; use std::{ any::Any, @@ -35,6 +37,23 @@ use util::ResultExt; const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js"; const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js"; +enum TestRunner { + UNITTEST, + PYTEST, +} + +impl FromStr for TestRunner { + type Err = (); + + fn from_str(s: &str) -> std::result::Result { + match s { + "unittest" => Ok(Self::UNITTEST), + "pytest" => Ok(Self::PYTEST), + _ => Err(()), + } + } +} + fn server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] } @@ -60,6 +79,7 @@ impl LspAdapter for PythonLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, + _: Arc, _: &AsyncAppContext, ) -> Option { let node = delegate.which("node".as_ref()).await?; @@ -97,24 +117,12 @@ impl LspAdapter for PythonLspAdapter { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let should_install_language_server = self - .node - .should_install_npm_package( - Self::SERVER_NAME.as_ref(), - &server_path, + self.node + .npm_install_packages( &container_dir, - &latest_version, + &[(Self::SERVER_NAME.as_ref(), latest_version.as_str())], ) - .await; - - if should_install_language_server { - self.node - .npm_install_packages( - &container_dir, - &[(Self::SERVER_NAME.as_ref(), latest_version.as_str())], - ) - .await?; - } + .await?; Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -123,6 +131,36 @@ impl LspAdapter for PythonLspAdapter { }) } + async fn check_if_version_installed( + &self, + version: &(dyn 'static + Send + Any), + container_dir: &PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + let version = version.downcast_ref::().unwrap(); + let server_path = container_dir.join(SERVER_PATH); + + let 
should_install_language_server = self + .node + .should_install_npm_package( + Self::SERVER_NAME.as_ref(), + &server_path, + &container_dir, + &version, + ) + .await; + + if should_install_language_server { + None + } else { + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: server_binary_arguments(&server_path), + }) + } + } + async fn cached_server_binary( &self, container_dir: PathBuf, @@ -265,8 +303,8 @@ async fn get_cached_server_binary( pub(crate) struct PythonContextProvider; -const PYTHON_UNITTEST_TARGET_TASK_VARIABLE: VariableName = - VariableName::Custom(Cow::Borrowed("PYTHON_UNITTEST_TARGET")); +const PYTHON_TEST_TARGET_TASK_VARIABLE: VariableName = + VariableName::Custom(Cow::Borrowed("PYTHON_TEST_TARGET")); const PYTHON_ACTIVE_TOOLCHAIN_PATH: VariableName = VariableName::Custom(Cow::Borrowed("PYTHON_ACTIVE_ZED_TOOLCHAIN")); @@ -279,11 +317,150 @@ impl ContextProvider for PythonContextProvider { toolchains: Arc, cx: &mut gpui::AppContext, ) -> Task> { + let test_target = { + let test_runner = selected_test_runner(location.buffer.read(cx).file(), cx); + + let runner = match test_runner { + TestRunner::UNITTEST => self.build_unittest_target(variables), + TestRunner::PYTEST => self.build_pytest_target(variables), + }; + runner + }; + + let worktree_id = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)); + cx.spawn(move |mut cx| async move { + let active_toolchain = if let Some(worktree_id) = worktree_id { + toolchains + .active_toolchain(worktree_id, "Python".into(), &mut cx) + .await + .map_or_else( + || "python3".to_owned(), + |toolchain| format!("\"{}\"", toolchain.path), + ) + } else { + String::from("python3") + }; + let toolchain = (PYTHON_ACTIVE_TOOLCHAIN_PATH, active_toolchain); + Ok(task::TaskVariables::from_iter([test_target?, toolchain])) + }) + } + + fn associated_tasks( + &self, + file: Option>, + cx: &AppContext, + ) -> Option { + let test_runner = selected_test_runner(file.as_ref(), 
cx); + + let mut tasks = vec![ + // Execute a selection + TaskTemplate { + label: "execute selection".to_owned(), + command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), + args: vec![ + "-c".to_owned(), + VariableName::SelectedText.template_value_with_whitespace(), + ], + ..TaskTemplate::default() + }, + // Execute an entire file + TaskTemplate { + label: format!("run '{}'", VariableName::File.template_value()), + command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), + args: vec![VariableName::File.template_value_with_whitespace()], + ..TaskTemplate::default() + }, + ]; + + tasks.extend(match test_runner { + TestRunner::UNITTEST => { + [ + // Run tests for an entire file + TaskTemplate { + label: format!("unittest '{}'", VariableName::File.template_value()), + command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), + args: vec![ + "-m".to_owned(), + "unittest".to_owned(), + VariableName::File.template_value_with_whitespace(), + ], + ..TaskTemplate::default() + }, + // Run test(s) for a specific target within a file + TaskTemplate { + label: "unittest $ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(), + command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), + args: vec![ + "-m".to_owned(), + "unittest".to_owned(), + PYTHON_TEST_TARGET_TASK_VARIABLE.template_value_with_whitespace(), + ], + tags: vec![ + "python-unittest-class".to_owned(), + "python-unittest-method".to_owned(), + ], + ..TaskTemplate::default() + }, + ] + } + TestRunner::PYTEST => { + [ + // Run tests for an entire file + TaskTemplate { + label: format!("pytest '{}'", VariableName::File.template_value()), + command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), + args: vec![ + "-m".to_owned(), + "pytest".to_owned(), + VariableName::File.template_value_with_whitespace(), + ], + ..TaskTemplate::default() + }, + // Run test(s) for a specific target within a file + TaskTemplate { + label: "pytest $ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(), + command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), + args: 
vec![ + "-m".to_owned(), + "pytest".to_owned(), + PYTHON_TEST_TARGET_TASK_VARIABLE.template_value_with_whitespace(), + ], + tags: vec![ + "python-pytest-class".to_owned(), + "python-pytest-method".to_owned(), + ], + ..TaskTemplate::default() + }, + ] + } + }); + + Some(TaskTemplates(tasks)) + } +} + +fn selected_test_runner(location: Option<&Arc>, cx: &AppContext) -> TestRunner { + const TEST_RUNNER_VARIABLE: &str = "TEST_RUNNER"; + language_settings(Some(LanguageName::new("Python")), location, cx) + .tasks + .variables + .get(TEST_RUNNER_VARIABLE) + .and_then(|val| TestRunner::from_str(val).ok()) + .unwrap_or(TestRunner::PYTEST) +} + +impl PythonContextProvider { + fn build_unittest_target( + &self, + variables: &task::TaskVariables, + ) -> Result<(VariableName, String)> { let python_module_name = python_module_name_from_relative_path( variables.get(&VariableName::RelativeFile).unwrap_or(""), ); + let unittest_class_name = variables.get(&VariableName::Custom(Cow::Borrowed("_unittest_class_name"))); + let unittest_method_name = variables.get(&VariableName::Custom(Cow::Borrowed( "_unittest_method_name", ))); @@ -294,71 +471,47 @@ impl ContextProvider for PythonContextProvider { } (Some(class_name), None) => format!("{}.{}", python_module_name, class_name), (None, None) => python_module_name, - (None, Some(_)) => return Task::ready(Ok(task::TaskVariables::default())), // should never happen, a TestCase class is the unit of testing + (None, Some(_)) => return Ok((VariableName::Custom(Cow::Borrowed("")), String::new())), // should never happen, a TestCase class is the unit of testing }; let unittest_target = ( - PYTHON_UNITTEST_TARGET_TASK_VARIABLE.clone(), + PYTHON_TEST_TARGET_TASK_VARIABLE.clone(), unittest_target_str, ); - let worktree_id = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)); - cx.spawn(move |mut cx| async move { - let active_toolchain = if let Some(worktree_id) = worktree_id { - toolchains - .active_toolchain(worktree_id, "Python".into(), 
&mut cx) - .await - .map_or_else(|| "python3".to_owned(), |toolchain| toolchain.path.into()) - } else { - String::from("python3") - }; - let toolchain = (PYTHON_ACTIVE_TOOLCHAIN_PATH, active_toolchain); - Ok(task::TaskVariables::from_iter([unittest_target, toolchain])) - }) + + Ok(unittest_target) } - fn associated_tasks( + fn build_pytest_target( &self, - _: Option>, - _: &AppContext, - ) -> Option { - Some(TaskTemplates(vec![ - TaskTemplate { - label: "execute selection".to_owned(), - command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), - args: vec!["-c".to_owned(), VariableName::SelectedText.template_value()], - ..TaskTemplate::default() - }, - TaskTemplate { - label: format!("run '{}'", VariableName::File.template_value()), - command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), - args: vec![VariableName::File.template_value()], - ..TaskTemplate::default() - }, - TaskTemplate { - label: format!("unittest '{}'", VariableName::File.template_value()), - command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), - args: vec![ - "-m".to_owned(), - "unittest".to_owned(), - VariableName::File.template_value(), - ], - ..TaskTemplate::default() - }, - TaskTemplate { - label: "unittest $ZED_CUSTOM_PYTHON_UNITTEST_TARGET".to_owned(), - command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(), - args: vec![ - "-m".to_owned(), - "unittest".to_owned(), - "$ZED_CUSTOM_PYTHON_UNITTEST_TARGET".to_owned(), - ], - tags: vec![ - "python-unittest-class".to_owned(), - "python-unittest-method".to_owned(), - ], - ..TaskTemplate::default() - }, - ])) + variables: &task::TaskVariables, + ) -> Result<(VariableName, String)> { + let file_path = variables + .get(&VariableName::RelativeFile) + .ok_or_else(|| anyhow!("No file path given"))?; + + let pytest_class_name = + variables.get(&VariableName::Custom(Cow::Borrowed("_pytest_class_name"))); + + let pytest_method_name = + variables.get(&VariableName::Custom(Cow::Borrowed("_pytest_method_name"))); + + let pytest_target_str = match 
(pytest_class_name, pytest_method_name) { + (Some(class_name), Some(method_name)) => { + format!("{}::{}::{}", file_path, class_name, method_name) + } + (Some(class_name), None) => { + format!("{}::{}", file_path, class_name) + } + (None, Some(method_name)) => { + format!("{}::{}", file_path, method_name) + } + (None, None) => file_path.to_string(), + }; + + let pytest_target = (PYTHON_TEST_TARGET_TASK_VARIABLE.clone(), pytest_target_str); + + Ok(pytest_target) } } @@ -370,8 +523,17 @@ fn python_module_name_from_relative_path(relative_path: &str) -> String { .to_string() } -#[derive(Default)] -pub(crate) struct PythonToolchainProvider {} +pub(crate) struct PythonToolchainProvider { + term: SharedString, +} + +impl Default for PythonToolchainProvider { + fn default() -> Self { + Self { + term: SharedString::new_static("Virtual Environment"), + } + } +} static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[ // Prioritize non-Conda environments. @@ -380,6 +542,7 @@ static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[ PythonEnvironmentKind::VirtualEnvWrapper, PythonEnvironmentKind::Venv, PythonEnvironmentKind::VirtualEnv, + PythonEnvironmentKind::Pixi, PythonEnvironmentKind::Conda, PythonEnvironmentKind::Pyenv, PythonEnvironmentKind::GlobalPaths, @@ -398,7 +561,7 @@ fn env_priority(kind: Option) -> usize { } } -#[async_trait(?Send)] +#[async_trait] impl ToolchainLister for PythonToolchainProvider { async fn list( &self, @@ -463,8 +626,9 @@ impl ToolchainLister for PythonToolchainProvider { .into(); Some(Toolchain { name, - path: toolchain.executable?.to_str()?.to_owned().into(), + path: toolchain.executable.as_ref()?.to_str()?.to_owned().into(), language_name: LanguageName::new("Python"), + as_json: serde_json::to_value(toolchain).ok()?, }) }) .collect(); @@ -475,6 +639,9 @@ impl ToolchainLister for PythonToolchainProvider { groups: Default::default(), } } + fn term(&self) -> SharedString { + self.term.clone() + } } pub struct EnvironmentApi<'a> { 
@@ -570,7 +737,7 @@ impl PyLspAdapter { let mut path = PathBuf::from(work_dir.as_ref()); path.push("pylsp-venv"); if !path.exists() { - Command::new(python_path) + util::command::new_smol_command(python_path) .arg("-m") .arg("venv") .arg("pylsp-venv") @@ -604,6 +771,12 @@ impl PyLspAdapter { } } +const BINARY_DIR: &str = if cfg!(target_os = "windows") { + "Scripts" +} else { + "bin" +}; + #[async_trait(?Send)] impl LspAdapter for PyLspAdapter { fn name(&self) -> LanguageServerName { @@ -612,33 +785,29 @@ impl LspAdapter for PyLspAdapter { async fn check_if_user_installed( &self, - _: &dyn LspAdapterDelegate, - _: &AsyncAppContext, + delegate: &dyn LspAdapterDelegate, + toolchains: Arc, + cx: &AsyncAppContext, ) -> Option { - // We don't support user-provided pylsp, as global packages are discouraged in Python ecosystem. - None + let venv = toolchains + .active_toolchain( + delegate.worktree_id(), + LanguageName::new("Python"), + &mut cx.clone(), + ) + .await?; + let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp"); + pylsp_path.exists().then(|| LanguageServerBinary { + path: venv.path.to_string().into(), + arguments: vec![pylsp_path.into()], + env: None, + }) } async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, ) -> Result> { - // let uri = "https://pypi.org/pypi/python-lsp-server/json"; - // let mut root_manifest = delegate - // .http_client() - // .get(&uri, Default::default(), true) - // .await?; - // let mut body = Vec::new(); - // root_manifest.body_mut().read_to_end(&mut body).await?; - // let as_str = String::from_utf8(body)?; - // let json = serde_json::Value::from_str(&as_str)?; - // let latest_version = json - // .get("info") - // .and_then(|info| info.get("version")) - // .and_then(|version| version.as_str().map(ToOwned::to_owned)) - // .ok_or_else(|| { - // anyhow!("PyPI response did not contain version info for python-language-server") - // })?; Ok(Box::new(()) as Box<_>) } @@ -649,9 +818,9 @@ impl LspAdapter for 
PyLspAdapter { delegate: &dyn LspAdapterDelegate, ) -> Result { let venv = self.base_venv(delegate).await.map_err(|e| anyhow!(e))?; - let pip_path = venv.join("bin").join("pip3"); + let pip_path = venv.join(BINARY_DIR).join("pip3"); ensure!( - Command::new(pip_path.as_path()) + util::command::new_smol_command(pip_path.as_path()) .arg("install") .arg("python-lsp-server") .output() @@ -661,7 +830,7 @@ impl LspAdapter for PyLspAdapter { "python-lsp-server installation failed" ); ensure!( - Command::new(pip_path.as_path()) + util::command::new_smol_command(pip_path.as_path()) .arg("install") .arg("python-lsp-server[all]") .output() @@ -671,7 +840,7 @@ impl LspAdapter for PyLspAdapter { "python-lsp-server[all] installation failed" ); ensure!( - Command::new(pip_path) + util::command::new_smol_command(pip_path) .arg("install") .arg("pylsp-mypy") .output() @@ -680,7 +849,7 @@ impl LspAdapter for PyLspAdapter { .success(), "pylsp-mypy installation failed" ); - let pylsp = venv.join("bin").join("pylsp"); + let pylsp = venv.join(BINARY_DIR).join("pylsp"); Ok(LanguageServerBinary { path: pylsp, env: None, @@ -694,7 +863,7 @@ impl LspAdapter for PyLspAdapter { delegate: &dyn LspAdapterDelegate, ) -> Option { let venv = self.base_venv(delegate).await.ok()?; - let pylsp = venv.join("bin").join("pylsp"); + let pylsp = venv.join(BINARY_DIR).join("pylsp"); Some(LanguageServerBinary { path: pylsp, env: None, @@ -776,13 +945,17 @@ impl LspAdapter for PyLspAdapter { .unwrap_or_else(|| { json!({ "plugins": { - "rope_autoimport": {"enabled": true}, - "mypy": {"enabled": true} - } + "pycodestyle": {"enabled": false}, + "rope_autoimport": {"enabled": true, "memory": true}, + "pylsp_mypy": {"enabled": false} + }, + "rope": { + "ropeFolder": null + }, }) }); - // If python.pythonPath is not set in user config, do so using our toolchain picker. + // If user did not explicitly modify their python venv, use one from picker. 
if let Some(toolchain) = toolchain { if user_settings.is_null() { user_settings = Value::Object(serde_json::Map::default()); @@ -798,23 +971,22 @@ impl LspAdapter for PyLspAdapter { .or_insert(Value::Object(serde_json::Map::default())) .as_object_mut() { - jedi.insert( - "environment".to_string(), - Value::String(toolchain.path.clone().into()), - ); + jedi.entry("environment".to_string()) + .or_insert_with(|| Value::String(toolchain.path.clone().into())); } if let Some(pylint) = python - .entry("mypy") + .entry("pylsp_mypy") .or_insert(Value::Object(serde_json::Map::default())) .as_object_mut() { - pylint.insert( - "overrides".to_string(), + pylint.entry("overrides".to_string()).or_insert_with(|| { Value::Array(vec![ Value::String("--python-executable".into()), Value::String(toolchain.path.into()), - ]), - ); + Value::String("--cache-dir=/dev/null".into()), + Value::Bool(true), + ]) + }); } } } diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index e5f1b4d423..9d08237e3c 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,6 +1,19 @@ +; Identifier naming conventions; these "soft conventions" should stay at the top of the file as they're often overridden + +; CamelCase for classes +((identifier) @type.class + (#match? @type.class "^_*[A-Z][A-Za-z0-9_]*$")) + +; ALL_CAPS for constants: +((identifier) @constant + (#match? 
@constant "^_*[A-Z][A-Z0-9_]*$")) + (attribute attribute: (identifier) @property) (type (identifier) @type) (generic_type (identifier) @type) +(comment) @comment +(string) @string +(escape_sequence) @string.escape ; Type alias (type_alias_statement "type" @keyword) @@ -10,29 +23,43 @@ (tuple (identifier) @type) ) +; Forward references +(type + (string) @type +) + + ; Function calls -(decorator) @function - (call - function: (attribute attribute: (identifier) @function.method)) + function: (attribute attribute: (identifier) @function.method.call)) (call - function: (identifier) @function) + function: (identifier) @function.call) -; Function definitions +(decorator + "@" @punctuation.special + [ + (identifier) @function.decorator + (attribute attribute: (identifier) @function.decorator) + (call function: (identifier) @function.decorator.call) + (call (attribute attribute: (identifier) @function.decorator.call)) + ]) + +; Function and class definitions (function_definition - name: (identifier) @function) + name: (identifier) @function.definition) -; Identifier naming conventions +; Class definitions and calling: needs to come after the regex matching above -((identifier) @type - (#match? @type "^[A-Z]")) +(class_definition + name: (identifier) @type.class.definition) -((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) +(call + function: (identifier) @type.class.call + (#match? 
@type.class.call "^_*[A-Z][A-Za-z0-9_]*$")) -; Builtin functions +; Builtins ((call function: (identifier) @function.builtin) @@ -40,12 +67,16 @@ @function.builtin "^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$")) +((identifier) @type.builtin + (#any-of? @type.builtin "int" "float" "complex" "bool" "list" "tuple" "range" "str" "bytes" "bytearray" "memoryview" "set" "frozenset" "dict")) + ; Literals [ (none) (true) (false) + (ellipsis) ] @constant.builtin [ @@ -58,13 +89,9 @@ [ (parameters (identifier) @variable.special) (attribute (identifier) @variable.special) - (#match? @variable.special "^self$") + (#match? @variable.special "^self|cls$") ] -(comment) @comment -(string) @string -(escape_sequence) @string.escape - [ "(" ")" @@ -84,7 +111,38 @@ "def" name: (_) (parameters)? - body: (block (expression_statement (string) @string.doc))) + body: (block . (expression_statement (string) @string.doc))) + +(class_definition + body: (block + . (comment) @comment* + . (expression_statement (string) @string.doc))) + +(module + . (comment) @comment* + . (expression_statement (string) @string.doc)) + +(module + [ + (expression_statement (assignment)) + (type_alias_statement) + ] + . (expression_statement (string) @string.doc)) + +(class_definition + body: (block + (expression_statement (assignment)) + . (expression_statement (string) @string.doc))) + +(class_definition + body: (block + (function_definition + name: (identifier) @function.method.constructor + (#eq? @function.method.constructor "__init__") + body: (block + (expression_statement (assignment)) + . 
(expression_statement (string) @string.doc))))) + [ "-" @@ -117,6 +175,9 @@ ">>" "|" "~" +] @operator + +[ "and" "in" "is" @@ -124,7 +185,7 @@ "or" "is not" "not in" -] @operator +] @keyword.operator [ "as" @@ -139,6 +200,7 @@ "elif" "else" "except" + "except*" "exec" "finally" "for" diff --git a/crates/languages/src/python/runnables.scm b/crates/languages/src/python/runnables.scm index b9bc5e9bf2..8cdb0d77eb 100644 --- a/crates/languages/src/python/runnables.scm +++ b/crates/languages/src/python/runnables.scm @@ -29,3 +29,56 @@ ) ) ) + +; pytest functions +( + (module + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_") + ) @_python-pytest-method + ) + (#set! tag python-pytest-method) +) + +; decorated pytest functions +( + (module + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_") + ) + ) @_python-pytest-method + ) + (#set! tag python-pytest-method) +) + +; pytest classes +( + (module + (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test") + ) + (#set! tag python-pytest-class) + ) +) + +; pytest class methods +( + (module + (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test") + ) @_python-pytest-method + (#set! 
tag python-pytest-method) + ) + ) + ) +) diff --git a/crates/languages/src/python/textobjects.scm b/crates/languages/src/python/textobjects.scm new file mode 100644 index 0000000000..abd28ab75a --- /dev/null +++ b/crates/languages/src/python/textobjects.scm @@ -0,0 +1,7 @@ +(comment)+ @comment.around + +(function_definition + body: (_) @function.inside) @function.around + +(class_definition + body: (_) @class.inside) @class.around diff --git a/crates/languages/src/regex/config.toml b/crates/languages/src/regex/config.toml index d0938024d6..85f2e370d6 100644 --- a/crates/languages/src/regex/config.toml +++ b/crates/languages/src/regex/config.toml @@ -6,3 +6,4 @@ brackets = [ { start = "{", end = "}", close = true, newline = false }, { start = "[", end = "]", close = true, newline = false }, ] +hidden = true diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 730f20b134..3ef2747642 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -10,12 +10,12 @@ pub use language::*; use lsp::{LanguageServerBinary, LanguageServerName}; use regex::Regex; use smol::fs::{self}; +use std::fmt::Display; use std::{ any::Any, borrow::Cow, path::{Path, PathBuf}, - sync::Arc, - sync::LazyLock, + sync::{Arc, LazyLock}, }; use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; use util::{fs::remove_matching, maybe, ResultExt}; @@ -77,6 +77,7 @@ impl LspAdapter for RustLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, + _: Arc, _: &AsyncAppContext, ) -> Option { let path = delegate.which("rust-analyzer".as_ref()).await?; @@ -252,49 +253,51 @@ impl LspAdapter for RustLspAdapter { .as_ref() .and_then(|detail| detail.detail.as_ref()) .or(completion.detail.as_ref()) - .map(ToOwned::to_owned); + .map(|detail| detail.trim()); let function_signature = completion .label_details .as_ref() - .and_then(|detail| detail.description.as_ref()) - .or(completion.detail.as_ref()) - 
.map(ToOwned::to_owned); - match completion.kind { - Some(lsp::CompletionItemKind::FIELD) if detail.is_some() => { + .and_then(|detail| detail.description.as_deref()) + .or(completion.detail.as_deref()); + match (detail, completion.kind) { + (Some(detail), Some(lsp::CompletionItemKind::FIELD)) => { let name = &completion.label; - let text = format!("{}: {}", name, detail.unwrap()); - let source = Rope::from(format!("struct S {{ {} }}", text).as_str()); - let runs = language.highlight_text(&source, 11..11 + text.len()); + let text = format!("{name}: {detail}"); + let prefix = "struct S { "; + let source = Rope::from(format!("{prefix}{text} }}")); + let runs = + language.highlight_text(&source, prefix.len()..prefix.len() + text.len()); return Some(CodeLabel { text, runs, filter_range: 0..name.len(), }); } - Some(lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE) - if detail.is_some() - && completion.insert_text_format != Some(lsp::InsertTextFormat::SNIPPET) => - { + ( + Some(detail), + Some(lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE), + ) if completion.insert_text_format != Some(lsp::InsertTextFormat::SNIPPET) => { let name = &completion.label; let text = format!( "{}: {}", name, - completion.detail.as_ref().or(detail.as_ref()).unwrap() + completion.detail.as_deref().unwrap_or(detail) ); - let source = Rope::from(format!("let {} = ();", text).as_str()); - let runs = language.highlight_text(&source, 4..4 + text.len()); + let prefix = "let "; + let source = Rope::from(format!("{prefix}{text} = ();")); + let runs = + language.highlight_text(&source, prefix.len()..prefix.len() + text.len()); return Some(CodeLabel { text, runs, filter_range: 0..name.len(), }); } - Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD) - if detail.is_some() => - { + ( + Some(detail), + Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD), + ) => { static REGEX: LazyLock = LazyLock::new(|| 
Regex::new("\\(…?\\)").unwrap()); - - let detail = detail.unwrap(); const FUNCTION_PREFIXES: [&str; 6] = [ "async fn", "async unsafe fn", @@ -314,10 +317,11 @@ impl LspAdapter for RustLspAdapter { // fn keyword should be followed by opening parenthesis. if let Some((prefix, suffix)) = fn_keyword { let mut text = REGEX.replace(&completion.label, suffix).to_string(); - let source = Rope::from(format!("{prefix} {} {{}}", text).as_str()); + let source = Rope::from(format!("{prefix} {text} {{}}")); let run_start = prefix.len() + 1; let runs = language.highlight_text(&source, run_start..run_start + text.len()); - if detail.starts_with(" (") { + if detail.starts_with("(") { + text.push(' '); text.push_str(&detail); } @@ -341,7 +345,7 @@ impl LspAdapter for RustLspAdapter { }); } } - Some(kind) => { + (_, Some(kind)) => { let highlight_name = match kind { lsp::CompletionItemKind::STRUCT | lsp::CompletionItemKind::INTERFACE @@ -355,9 +359,9 @@ impl LspAdapter for RustLspAdapter { }; let mut label = completion.label.clone(); - if let Some(detail) = detail.filter(|detail| detail.starts_with(" (")) { - use std::fmt::Write; - write!(label, "{detail}").ok()?; + if let Some(detail) = detail.filter(|detail| detail.starts_with("(")) { + label.push(' '); + label.push_str(detail); } let mut label = CodeLabel::plain(label, None); if let Some(highlight_name) = highlight_name { @@ -444,6 +448,10 @@ const RUST_PACKAGE_TASK_VARIABLE: VariableName = const RUST_BIN_NAME_TASK_VARIABLE: VariableName = VariableName::Custom(Cow::Borrowed("RUST_BIN_NAME")); +/// The bin kind (bin/example) corresponding to the current file in Cargo.toml +const RUST_BIN_KIND_TASK_VARIABLE: VariableName = + VariableName::Custom(Cow::Borrowed("RUST_BIN_KIND")); + const RUST_MAIN_FUNCTION_TASK_VARIABLE: VariableName = VariableName::Custom(Cow::Borrowed("_rust_main_function_end")); @@ -469,12 +477,16 @@ impl ContextProvider for RustContextProvider { .is_some(); if is_main_function { - if let Some((package_name, 
bin_name)) = local_abs_path.and_then(|path| { + if let Some(target) = local_abs_path.and_then(|path| { package_name_and_bin_name_from_abs_path(path, project_env.as_ref()) }) { return Task::ready(Ok(TaskVariables::from_iter([ - (RUST_PACKAGE_TASK_VARIABLE.clone(), package_name), - (RUST_BIN_NAME_TASK_VARIABLE.clone(), bin_name), + (RUST_PACKAGE_TASK_VARIABLE.clone(), target.package_name), + (RUST_BIN_NAME_TASK_VARIABLE.clone(), target.target_name), + ( + RUST_BIN_KIND_TASK_VARIABLE.clone(), + target.target_kind.to_string(), + ), ]))); } } @@ -568,8 +580,9 @@ impl ContextProvider for RustContextProvider { }, TaskTemplate { label: format!( - "cargo run -p {} --bin {}", + "cargo run -p {} --{} {}", RUST_PACKAGE_TASK_VARIABLE.template_value(), + RUST_BIN_KIND_TASK_VARIABLE.template_value(), RUST_BIN_NAME_TASK_VARIABLE.template_value(), ), command: "cargo".into(), @@ -577,7 +590,7 @@ impl ContextProvider for RustContextProvider { "run".into(), "-p".into(), RUST_PACKAGE_TASK_VARIABLE.template_value(), - "--bin".into(), + format!("--{}", RUST_BIN_KIND_TASK_VARIABLE.template_value()), RUST_BIN_NAME_TASK_VARIABLE.template_value(), ], cwd: Some("$ZED_DIRNAME".to_owned()), @@ -635,11 +648,43 @@ struct CargoTarget { src_path: String, } +#[derive(Debug, PartialEq)] +enum TargetKind { + Bin, + Example, +} + +impl Display for TargetKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TargetKind::Bin => write!(f, "bin"), + TargetKind::Example => write!(f, "example"), + } + } +} + +impl TryFrom<&str> for TargetKind { + type Error = (); + fn try_from(value: &str) -> Result { + match value { + "bin" => Ok(Self::Bin), + "example" => Ok(Self::Example), + _ => Err(()), + } + } +} +/// Which package and binary target are we in? 
+struct TargetInfo { + package_name: String, + target_name: String, + target_kind: TargetKind, +} + fn package_name_and_bin_name_from_abs_path( abs_path: &Path, project_env: Option<&HashMap>, -) -> Option<(String, String)> { - let mut command = std::process::Command::new("cargo"); +) -> Option { + let mut command = util::command::new_std_command("cargo"); if let Some(envs) = project_env { command.envs(envs); } @@ -656,10 +701,14 @@ fn package_name_and_bin_name_from_abs_path( let metadata: CargoMetadata = serde_json::from_slice(&output).log_err()?; retrieve_package_id_and_bin_name_from_metadata(metadata, abs_path).and_then( - |(package_id, bin_name)| { + |(package_id, bin_name, target_kind)| { let package_name = package_name_from_pkgid(&package_id); - package_name.map(|package_name| (package_name.to_owned(), bin_name)) + package_name.map(|package_name| TargetInfo { + package_name: package_name.to_owned(), + target_name: bin_name, + target_kind, + }) }, ) } @@ -667,13 +716,19 @@ fn package_name_and_bin_name_from_abs_path( fn retrieve_package_id_and_bin_name_from_metadata( metadata: CargoMetadata, abs_path: &Path, -) -> Option<(String, String)> { +) -> Option<(String, String, TargetKind)> { for package in metadata.packages { for target in package.targets { - let is_bin = target.kind.iter().any(|kind| kind == "bin"); + let Some(bin_kind) = target + .kind + .iter() + .find_map(|kind| TargetKind::try_from(kind.as_ref()).ok()) + else { + continue; + }; let target_path = PathBuf::from(target.src_path); - if target_path == abs_path && is_bin { - return Some((package.id, target.name)); + if target_path == abs_path { + return Some((package.id, target.name, bin_kind)); } } } @@ -685,11 +740,10 @@ fn human_readable_package_name( package_directory: &Path, project_env: Option<&HashMap>, ) -> Option { - let mut command = std::process::Command::new("cargo"); + let mut command = util::command::new_std_command("cargo"); if let Some(envs) = project_env { command.envs(envs); } - let 
pkgid = String::from_utf8( command .current_dir(package_directory) @@ -832,7 +886,7 @@ mod tests { kind: Some(lsp::CompletionItemKind::FUNCTION), label: "hello(…)".to_string(), label_details: Some(CompletionItemLabelDetails { - detail: Some(" (use crate::foo)".into()), + detail: Some("(use crate::foo)".into()), description: Some("fn(&mut Option) -> Vec".to_string()) }), ..Default::default() @@ -1067,7 +1121,11 @@ mod tests { ( r#"{"packages":[{"id":"path+file:///path/to/zed/crates/zed#0.131.0","targets":[{"name":"zed","kind":["bin"],"src_path":"/path/to/zed/src/main.rs"}]}]}"#, "/path/to/zed/src/main.rs", - Some(("path+file:///path/to/zed/crates/zed#0.131.0", "zed")), + Some(( + "path+file:///path/to/zed/crates/zed#0.131.0", + "zed", + TargetKind::Bin, + )), ), ( r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["bin"],"src_path":"/path/to/custom-package/src/main.rs"}]}]}"#, @@ -1075,6 +1133,16 @@ mod tests { Some(( "path+file:///path/to/custom-package#my-custom-package@0.1.0", "my-custom-bin", + TargetKind::Bin, + )), + ), + ( + r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs"}]}]}"#, + "/path/to/custom-package/src/main.rs", + Some(( + "path+file:///path/to/custom-package#my-custom-package@0.1.0", + "my-custom-bin", + TargetKind::Example, )), ), ( @@ -1089,7 +1157,7 @@ mod tests { assert_eq!( retrieve_package_id_and_bin_name_from_metadata(metadata, absolute_path), - expected.map(|(pkgid, bin)| (pkgid.to_owned(), bin.to_owned())) + expected.map(|(pkgid, name, kind)| (pkgid.to_owned(), name.to_owned(), kind)) ); } } diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm index 0ce91f2287..0c6094ec19 100644 --- a/crates/languages/src/rust/injections.scm +++ b/crates/languages/src/rust/injections.scm @@ -1,7 +1,7 @@ 
(macro_invocation - (token_tree) @content - (#set! "language" "rust")) + (token_tree) @injection.content + (#set! injection.language "rust")) (macro_rule - (token_tree) @content - (#set! "language" "rust")) + (token_tree) @injection.content + (#set! injection.language "rust")) diff --git a/crates/languages/src/rust/runnables.scm b/crates/languages/src/rust/runnables.scm index 0552a3d798..6d8dee4445 100644 --- a/crates/languages/src/rust/runnables.scm +++ b/crates/languages/src/rust/runnables.scm @@ -18,6 +18,8 @@ . (attribute_item) * . + [(line_comment) (block_comment)] * + . (function_item name: (_) @run body: _ diff --git a/crates/languages/src/rust/textobjects.scm b/crates/languages/src/rust/textobjects.scm new file mode 100644 index 0000000000..4e7e7fa0cd --- /dev/null +++ b/crates/languages/src/rust/textobjects.scm @@ -0,0 +1,51 @@ +; functions +(function_signature_item) @function.around + +(function_item + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + +; classes +(struct_item + body: (_ + ["{" "("]? + [(_) ","?]* @class.inside + ["}" ")"]? 
)) @class.around + +(enum_item + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +(union_item + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +(trait_item + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +(impl_item + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +(mod_item + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +; comments + +(line_comment)+ @comment.around + +(block_comment) @comment.around diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index e2ced0f67f..02c3dbefc2 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -34,6 +34,7 @@ pub struct TailwindLspAdapter { impl TailwindLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("tailwindcss-language-server"); + const PACKAGE_NAME: &str = "@tailwindcss/language-server"; pub fn new(node: NodeRuntime) -> Self { TailwindLspAdapter { node } @@ -52,7 +53,7 @@ impl LspAdapter for TailwindLspAdapter { ) -> Result> { Ok(Box::new( self.node - .npm_package_latest_version("@tailwindcss/language-server") + .npm_package_latest_version(Self::PACKAGE_NAME) .await?, ) as Box<_>) } @@ -65,18 +66,13 @@ impl LspAdapter for TailwindLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = "@tailwindcss/language-server"; - let should_install_language_server = self - .node - .should_install_npm_package(package_name, &server_path, &container_dir, &latest_version) - .await; - - if should_install_language_server { - self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) - .await?; - } + self.node + .npm_install_packages( + &container_dir, + &[(Self::PACKAGE_NAME, latest_version.as_str())], + ) + .await?; Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -85,6 +81,31 @@ impl 
LspAdapter for TailwindLspAdapter { }) } + async fn check_if_version_installed( + &self, + version: &(dyn 'static + Send + Any), + container_dir: &PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + let version = version.downcast_ref::().unwrap(); + let server_path = container_dir.join(SERVER_PATH); + + let should_install_language_server = self + .node + .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .await; + + if should_install_language_server { + None + } else { + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: server_binary_arguments(&server_path), + }) + } + } + async fn cached_server_binary( &self, container_dir: PathBuf, diff --git a/crates/languages/src/tsx/injections.scm b/crates/languages/src/tsx/injections.scm index 3aa6bfd745..48da80995b 100644 --- a/crates/languages/src/tsx/injections.scm +++ b/crates/languages/src/tsx/injections.scm @@ -1,60 +1,60 @@ (((comment) @_jsdoc_comment - (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @content - (#set! "language" "jsdoc")) + (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content + (#set! injection.language "jsdoc")) -((regex) @content - (#set! "language" "regex")) +((regex) @injection.content + (#set! injection.language "regex")) (call_expression function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @content - (#set! "language" "css")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) ) (call_expression function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string (string_fragment) @content - (#set! "language" "html")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "html")) ) (call_expression function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @content - (#set! 
"language" "javascript")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "javascript")) ) (call_expression function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @content - (#set! "language" "json")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "json")) ) (call_expression function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @content - (#set! "language" "sql")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "sql")) ) (call_expression function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @content - (#set! "language" "typescript")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "typescript")) ) (call_expression function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @content - (#set! "language" "yaml")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "yaml")) ) (call_expression function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @content - (#set! "language" "graphql")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "graphql")) ) (call_expression function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @content - (#set! "language" "graphql"))) + arguments: (arguments (template_string (string_fragment) @injection.content + (#set! 
injection.language "graphql"))) ) diff --git a/crates/languages/src/tsx/outline.scm b/crates/languages/src/tsx/outline.scm index 0c3589071d..c0c5c735e2 100644 --- a/crates/languages/src/tsx/outline.scm +++ b/crates/languages/src/tsx/outline.scm @@ -70,12 +70,20 @@ name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -(call_expression - function: (_) @context - (#any-of? @context "it" "test" "describe") - arguments: ( - arguments . (string - (string_fragment) @name +( + (call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression object: (identifier) @_name) + ] + ) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: ( + arguments . (string (string_fragment) @name) ) ) ) @item diff --git a/crates/languages/src/tsx/runnables.scm b/crates/languages/src/tsx/runnables.scm index 68c81d04c7..1b68b9a41e 100644 --- a/crates/languages/src/tsx/runnables.scm +++ b/crates/languages/src/tsx/runnables.scm @@ -2,13 +2,20 @@ ; Function expression that has `it`, `test` or `describe` as the function name ( (call_expression - function: (_) @_name - (#any-of? @_name "it" "test" "describe") - arguments: ( - arguments . (string - (string_fragment) @run + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression object: (identifier) @_name) + ] ) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: ( + arguments . (string (string_fragment) @run) ) - ) @_tsx-test - (#set! tag tsx-test) + ) @_js-test + + (#set! 
tag js-test) ) diff --git a/crates/languages/src/tsx/textobjects.scm b/crates/languages/src/tsx/textobjects.scm new file mode 100644 index 0000000000..836fed35ba --- /dev/null +++ b/crates/languages/src/tsx/textobjects.scm @@ -0,0 +1,79 @@ +(comment)+ @comment.around + +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(function_expression + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + +(arrow_function) @function.around +(function_signature) @function.around + +(generator_function + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(generator_function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(class_declaration + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(class + body: (_ + "{" + (_)* @class.inside + "}" )) @class.around + +(interface_declaration + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(enum_declaration + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +(ambient_declaration + (module + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" ))) @class.around + +(internal_module + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(type_alias_declaration) @class.around diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index c580575a1e..3a0291feb5 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -73,6 +73,7 @@ impl TypeScriptLspAdapter { const NEW_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs"; const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("typescript-language-server"); + const PACKAGE_NAME: &str = "typescript"; pub fn new(node: NodeRuntime) -> 
Self { TypeScriptLspAdapter { node } } @@ -114,6 +115,36 @@ impl LspAdapter for TypeScriptLspAdapter { }) as Box<_>) } + async fn check_if_version_installed( + &self, + version: &(dyn 'static + Send + Any), + container_dir: &PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + let version = version.downcast_ref::().unwrap(); + let server_path = container_dir.join(Self::NEW_SERVER_PATH); + + let should_install_language_server = self + .node + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + version.typescript_version.as_str(), + ) + .await; + + if should_install_language_server { + None + } else { + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: typescript_server_binary_arguments(&server_path), + }) + } + } + async fn fetch_server_binary( &self, latest_version: Box, @@ -122,32 +153,22 @@ impl LspAdapter for TypeScriptLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(Self::NEW_SERVER_PATH); - let package_name = "typescript"; - let should_install_language_server = self - .node - .should_install_npm_package( - package_name, - &server_path, + self.node + .npm_install_packages( &container_dir, - latest_version.typescript_version.as_str(), + &[ + ( + Self::PACKAGE_NAME, + latest_version.typescript_version.as_str(), + ), + ( + "typescript-language-server", + latest_version.server_version.as_str(), + ), + ], ) - .await; - - if should_install_language_server { - self.node - .npm_install_packages( - &container_dir, - &[ - (package_name, latest_version.typescript_version.as_str()), - ( - "typescript-language-server", - latest_version.server_version.as_str(), - ), - ], - ) - .await?; - } + .await?; Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -412,7 +433,7 @@ impl LspAdapter for EsLintLspAdapter { _delegate: &dyn LspAdapterDelegate, ) -> Result> { let url = build_asset_url( - 
"microsoft/vscode-eslint", + "zed-industries/vscode-eslint", Self::CURRENT_VERSION_TAG_NAME, Self::GITHUB_ASSET_KIND, )?; diff --git a/crates/languages/src/typescript/injections.scm b/crates/languages/src/typescript/injections.scm index db8d7a9b59..c8bb72bdc5 100644 --- a/crates/languages/src/typescript/injections.scm +++ b/crates/languages/src/typescript/injections.scm @@ -1,64 +1,64 @@ (((comment) @_jsdoc_comment - (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @content - (#set! "language" "jsdoc")) + (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content + (#set! injection.language "jsdoc")) (((comment) @reference - (#match? @reference "^///\\s+\\s*$")) @content - (#set! "language" "html")) + (#match? @reference "^///\\s+\\s*$")) @injection.content + (#set! injection.language "html")) -((regex) @content - (#set! "language" "regex")) +((regex) @injection.content + (#set! injection.language "regex")) (call_expression function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @content - (#set! "language" "css")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) ) (call_expression function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string) @content - (#set! "language" "html") + arguments: (template_string) @injection.content + (#set! injection.language "html") ) (call_expression function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @content - (#set! "language" "javascript")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "javascript")) ) (call_expression function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @content - (#set! "language" "json")) + arguments: (template_string (string_fragment) @injection.content + (#set! 
injection.language "json")) ) (call_expression function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @content - (#set! "language" "sql")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "sql")) ) (call_expression function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @content - (#set! "language" "typescript")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "typescript")) ) (call_expression function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @content - (#set! "language" "yaml")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "yaml")) ) (call_expression function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @content - (#set! "language" "graphql")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "graphql")) ) (call_expression function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @content - (#set! "language" "graphql"))) + arguments: (arguments (template_string (string_fragment) @injection.content + (#set! injection.language "graphql"))) ) diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm index 0c3589071d..c0c5c735e2 100644 --- a/crates/languages/src/typescript/outline.scm +++ b/crates/languages/src/typescript/outline.scm @@ -70,12 +70,20 @@ name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -(call_expression - function: (_) @context - (#any-of? @context "it" "test" "describe") - arguments: ( - arguments . 
(string - (string_fragment) @name +( + (call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression object: (identifier) @_name) + ] + ) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: ( + arguments . (string (string_fragment) @name) ) ) ) @item diff --git a/crates/languages/src/typescript/runnables.scm b/crates/languages/src/typescript/runnables.scm index 21a965fd31..1b68b9a41e 100644 --- a/crates/languages/src/typescript/runnables.scm +++ b/crates/languages/src/typescript/runnables.scm @@ -2,13 +2,20 @@ ; Function expression that has `it`, `test` or `describe` as the function name ( (call_expression - function: (_) @_name - (#any-of? @_name "it" "test" "describe") - arguments: ( - arguments . (string - (string_fragment) @run + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression object: (identifier) @_name) + ] ) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: ( + arguments . (string (string_fragment) @run) ) - ) @_ts-test - (#set! tag ts-test) + ) @_js-test + + (#set! 
tag js-test) ) diff --git a/crates/languages/src/typescript/textobjects.scm b/crates/languages/src/typescript/textobjects.scm new file mode 100644 index 0000000000..836fed35ba --- /dev/null +++ b/crates/languages/src/typescript/textobjects.scm @@ -0,0 +1,79 @@ +(comment)+ @comment.around + +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(function_expression + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + +(arrow_function) @function.around +(function_signature) @function.around + +(generator_function + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(generator_function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(class_declaration + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(class + body: (_ + "{" + (_)* @class.inside + "}" )) @class.around + +(interface_declaration + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(enum_declaration + body: (_ + "{" + [(_) ","?]* @class.inside + "}" )) @class.around + +(ambient_declaration + (module + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" ))) @class.around + +(internal_module + body: (_ + "{" + [(_) ";"?]* @class.inside + "}" )) @class.around + +(type_alias_declaration) @class.around diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 0ad9158003..e44e4e295f 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -77,6 +77,7 @@ impl LspAdapter for VtslsLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, + _: Arc, _: &AsyncAppContext, ) -> Option { let env = delegate.shell_env().await; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 6d34d9816c..2f412d3102 
100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -31,6 +31,7 @@ pub struct YamlLspAdapter { impl YamlLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server"); + const PACKAGE_NAME: &str = "yaml-language-server"; pub fn new(node: NodeRuntime) -> Self { YamlLspAdapter { node } } @@ -61,18 +62,13 @@ impl LspAdapter for YamlLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = "yaml-language-server"; - let should_install_language_server = self - .node - .should_install_npm_package(package_name, &server_path, &container_dir, &latest_version) - .await; - - if should_install_language_server { - self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) - .await?; - } + self.node + .npm_install_packages( + &container_dir, + &[(Self::PACKAGE_NAME, latest_version.as_str())], + ) + .await?; Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -81,6 +77,31 @@ impl LspAdapter for YamlLspAdapter { }) } + async fn check_if_version_installed( + &self, + version: &(dyn 'static + Send + Any), + container_dir: &PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + let version = version.downcast_ref::().unwrap(); + let server_path = container_dir.join(SERVER_PATH); + + let should_install_language_server = self + .node + .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .await; + + if should_install_language_server { + None + } else { + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: server_binary_arguments(&server_path), + }) + } + } + async fn cached_server_binary( &self, container_dir: PathBuf, diff --git a/crates/languages/src/yaml/textobjects.scm b/crates/languages/src/yaml/textobjects.scm new file mode 100644 index 0000000000..5262b7e232 --- /dev/null +++ 
b/crates/languages/src/yaml/textobjects.scm @@ -0,0 +1 @@ +(comment)+ @comment diff --git a/crates/live_kit_client/.cargo/config.toml b/crates/livekit_client/.cargo/config.toml similarity index 62% rename from crates/live_kit_client/.cargo/config.toml rename to crates/livekit_client/.cargo/config.toml index b33fe211bd..77f7c9dd6c 100644 --- a/crates/live_kit_client/.cargo/config.toml +++ b/crates/livekit_client/.cargo/config.toml @@ -1,2 +1,2 @@ -[live_kit_client_test] +[livekit_client_test] rustflags = ["-C", "link-args=-ObjC"] diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml new file mode 100644 index 0000000000..ac0c3b5740 --- /dev/null +++ b/crates/livekit_client/Cargo.toml @@ -0,0 +1,65 @@ +[package] +name = "livekit_client" +version = "0.1.0" +edition = "2021" +description = "Logic for using LiveKit with GPUI" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/livekit_client.rs" +doctest = false + +[[example]] +name = "test_app" + +[features] +no-webrtc = [] +test-support = [ + "collections/test-support", + "gpui/test-support", + "nanoid", +] + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +collections.workspace = true +cpal = "0.15" +futures.workspace = true +gpui.workspace = true +http_2 = { package = "http", version = "0.2.1" } +livekit_server.workspace = true +log.workspace = true +media.workspace = true +nanoid = { workspace = true, optional = true} +parking_lot.workspace = true +postage.workspace = true +util.workspace = true +http_client.workspace = true +smallvec.workspace = true +image.workspace = true + +[target.'cfg(not(target_os = "windows"))'.dependencies] +livekit.workspace = true + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation.workspace = true +coreaudio-rs = "0.12.1" + +[dev-dependencies] +collections = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } 
+nanoid.workspace = true +sha2.workspace = true +simplelog.workspace = true + +[build-dependencies] +serde.workspace = true +serde_json.workspace = true + +[package.metadata.cargo-machete] +ignored = ["serde_json"] diff --git a/crates/livekit_client/LICENSE-GPL b/crates/livekit_client/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/livekit_client/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/livekit_client/examples/test_app.rs b/crates/livekit_client/examples/test_app.rs new file mode 100644 index 0000000000..ef7fc91d31 --- /dev/null +++ b/crates/livekit_client/examples/test_app.rs @@ -0,0 +1,442 @@ +#![cfg_attr(windows, allow(unused))] +// TODO: For some reason mac build complains about import of postage::stream::Stream, but removal of +// it causes compile errors. +#![cfg_attr(target_os = "macos", allow(unused_imports))] + +use gpui::{ + actions, bounds, div, point, + prelude::{FluentBuilder as _, IntoElement}, + px, rgb, size, AsyncAppContext, Bounds, InteractiveElement, KeyBinding, Menu, MenuItem, + ParentElement, Pixels, Render, ScreenCaptureStream, SharedString, + StatefulInteractiveElement as _, Styled, Task, View, ViewContext, VisualContext, WindowBounds, + WindowHandle, WindowOptions, +}; +#[cfg(not(target_os = "windows"))] +use livekit_client::{ + capture_local_audio_track, capture_local_video_track, + id::ParticipantIdentity, + options::{TrackPublishOptions, VideoCodec}, + participant::{Participant, RemoteParticipant}, + play_remote_audio_track, + publication::{LocalTrackPublication, RemoteTrackPublication}, + track::{LocalTrack, RemoteTrack, RemoteVideoTrack, TrackSource}, + AudioStream, RemoteVideoTrackView, Room, RoomEvent, RoomOptions, +}; +#[cfg(not(target_os = "windows"))] +use postage::stream::Stream; + +#[cfg(target_os = "windows")] +use livekit_client::{ + participant::{Participant, RemoteParticipant}, + publication::{LocalTrackPublication, 
RemoteTrackPublication}, + track::{LocalTrack, RemoteTrack, RemoteVideoTrack}, + AudioStream, RemoteVideoTrackView, Room, RoomEvent, +}; + +use livekit_server::token::{self, VideoGrant}; +use log::LevelFilter; +use simplelog::SimpleLogger; + +actions!(livekit_client, [Quit]); + +#[cfg(windows)] +fn main() {} + +#[cfg(not(windows))] +fn main() { + SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger"); + + gpui::App::new().run(|cx| { + livekit_client::init( + cx.background_executor().dispatcher.clone(), + cx.http_client(), + ); + + #[cfg(any(test, feature = "test-support"))] + println!("USING TEST LIVEKIT"); + + #[cfg(not(any(test, feature = "test-support")))] + println!("USING REAL LIVEKIT"); + + cx.activate(true); + cx.on_action(quit); + cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); + cx.set_menus(vec![Menu { + name: "Zed".into(), + items: vec![MenuItem::Action { + name: "Quit".into(), + action: Box::new(Quit), + os_action: None, + }], + }]); + + let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into()); + let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into()); + let livekit_secret = std::env::var("LIVEKIT_SECRET").unwrap_or("secret".into()); + let height = px(800.); + let width = px(800.); + + cx.spawn(|cx| async move { + let mut windows = Vec::new(); + for i in 0..2 { + let token = token::create( + &livekit_key, + &livekit_secret, + Some(&format!("test-participant-{i}")), + VideoGrant::to_join("test-room"), + ) + .unwrap(); + + let bounds = bounds(point(width * i, px(0.0)), size(width, height)); + let window = + LivekitWindow::new(livekit_url.as_str(), token.as_str(), bounds, cx.clone()) + .await; + windows.push(window); + } + }) + .detach(); + }); +} + +fn quit(_: &Quit, cx: &mut gpui::AppContext) { + cx.quit(); +} + +struct LivekitWindow { + room: Room, + microphone_track: Option, + screen_share_track: Option, + microphone_stream: Option, + screen_share_stream: 
Option>, + #[cfg(not(target_os = "windows"))] + remote_participants: Vec<(ParticipantIdentity, ParticipantState)>, + _events_task: Task<()>, +} + +#[derive(Default)] +struct ParticipantState { + audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>, + muted: bool, + screen_share_output_view: Option<(RemoteVideoTrack, View)>, + speaking: bool, +} + +#[cfg(not(windows))] +impl LivekitWindow { + async fn new( + url: &str, + token: &str, + bounds: Bounds, + cx: AsyncAppContext, + ) -> WindowHandle { + let (room, mut events) = Room::connect(url, token, RoomOptions::default()) + .await + .unwrap(); + + cx.update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |cx| { + cx.new_view(|cx| { + let _events_task = cx.spawn(|this, mut cx| async move { + while let Some(event) = events.recv().await { + this.update(&mut cx, |this: &mut LivekitWindow, cx| { + this.handle_room_event(event, cx) + }) + .ok(); + } + }); + + Self { + room, + microphone_track: None, + microphone_stream: None, + screen_share_track: None, + screen_share_stream: None, + remote_participants: Vec::new(), + _events_task, + } + }) + }, + ) + .unwrap() + }) + .unwrap() + } + + fn handle_room_event(&mut self, event: RoomEvent, cx: &mut ViewContext) { + eprintln!("event: {event:?}"); + + match event { + RoomEvent::TrackUnpublished { + publication, + participant, + } => { + let output = self.remote_participant(participant); + let unpublish_sid = publication.sid(); + if output + .audio_output_stream + .as_ref() + .map_or(false, |(track, _)| track.sid() == unpublish_sid) + { + output.audio_output_stream.take(); + } + if output + .screen_share_output_view + .as_ref() + .map_or(false, |(track, _)| track.sid() == unpublish_sid) + { + output.screen_share_output_view.take(); + } + cx.notify(); + } + + RoomEvent::TrackSubscribed { + publication, + participant, + track, + } => { + let output = self.remote_participant(participant); + 
match track { + RemoteTrack::Audio(track) => { + output.audio_output_stream = Some(( + publication.clone(), + play_remote_audio_track(&track, cx.background_executor()).unwrap(), + )); + } + RemoteTrack::Video(track) => { + output.screen_share_output_view = Some(( + track.clone(), + cx.new_view(|cx| RemoteVideoTrackView::new(track, cx)), + )); + } + } + cx.notify(); + } + + RoomEvent::TrackMuted { participant, .. } => { + if let Participant::Remote(participant) = participant { + self.remote_participant(participant).muted = true; + cx.notify(); + } + } + + RoomEvent::TrackUnmuted { participant, .. } => { + if let Participant::Remote(participant) = participant { + self.remote_participant(participant).muted = false; + cx.notify(); + } + } + + RoomEvent::ActiveSpeakersChanged { speakers } => { + for (identity, output) in &mut self.remote_participants { + output.speaking = speakers.iter().any(|speaker| { + if let Participant::Remote(speaker) = speaker { + speaker.identity() == *identity + } else { + false + } + }); + } + cx.notify(); + } + + _ => {} + } + + cx.notify(); + } + + fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState { + match self + .remote_participants + .binary_search_by_key(&&participant.identity(), |row| &row.0) + { + Ok(ix) => &mut self.remote_participants[ix].1, + Err(ix) => { + self.remote_participants + .insert(ix, (participant.identity(), ParticipantState::default())); + &mut self.remote_participants[ix].1 + } + } + } + + fn toggle_mute(&mut self, cx: &mut ViewContext) { + if let Some(track) = &self.microphone_track { + if track.is_muted() { + track.unmute(); + } else { + track.mute(); + } + cx.notify(); + } else { + let participant = self.room.local_participant(); + cx.spawn(|this, mut cx| async move { + let (track, stream) = capture_local_audio_track(cx.background_executor())?.await; + let publication = participant + .publish_track( + LocalTrack::Audio(track), + TrackPublishOptions { + source: 
TrackSource::Microphone, + ..Default::default() + }, + ) + .await + .unwrap(); + this.update(&mut cx, |this, cx| { + this.microphone_track = Some(publication); + this.microphone_stream = Some(stream); + cx.notify(); + }) + }) + .detach(); + } + } + + fn toggle_screen_share(&mut self, cx: &mut ViewContext) { + if let Some(track) = self.screen_share_track.take() { + self.screen_share_stream.take(); + let participant = self.room.local_participant(); + cx.background_executor() + .spawn(async move { + participant.unpublish_track(&track.sid()).await.unwrap(); + }) + .detach(); + cx.notify(); + } else { + let participant = self.room.local_participant(); + let sources = cx.screen_capture_sources(); + cx.spawn(|this, mut cx| async move { + let sources = sources.await.unwrap()?; + let source = sources.into_iter().next().unwrap(); + let (track, stream) = capture_local_video_track(&*source).await?; + let publication = participant + .publish_track( + LocalTrack::Video(track), + TrackPublishOptions { + source: TrackSource::Screenshare, + video_codec: VideoCodec::H264, + ..Default::default() + }, + ) + .await + .unwrap(); + this.update(&mut cx, |this, cx| { + this.screen_share_track = Some(publication); + this.screen_share_stream = Some(stream); + cx.notify(); + }) + }) + .detach(); + } + } + + fn toggle_remote_audio_for_participant( + &mut self, + identity: &ParticipantIdentity, + cx: &mut ViewContext, + ) -> Option<()> { + let participant = self.remote_participants.iter().find_map(|(id, state)| { + if id == identity { + Some(state) + } else { + None + } + })?; + let publication = &participant.audio_output_stream.as_ref()?.0; + publication.set_enabled(!publication.is_enabled()); + cx.notify(); + Some(()) + } +} + +#[cfg(not(windows))] +impl Render for LivekitWindow { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + fn button() -> gpui::Div { + div() + .w(px(180.0)) + .h(px(30.0)) + .px_2() + .m_2() + .bg(rgb(0x8888ff)) + } + + div() + .bg(rgb(0xffffff)) + 
.size_full() + .flex() + .flex_col() + .child( + div().bg(rgb(0xffd4a8)).flex().flex_row().children([ + button() + .id("toggle-mute") + .child(if let Some(track) = &self.microphone_track { + if track.is_muted() { + "Unmute" + } else { + "Mute" + } + } else { + "Publish mic" + }) + .on_click(cx.listener(|this, _, cx| this.toggle_mute(cx))), + button() + .id("toggle-screen-share") + .child(if self.screen_share_track.is_none() { + "Share screen" + } else { + "Unshare screen" + }) + .on_click(cx.listener(|this, _, cx| this.toggle_screen_share(cx))), + ]), + ) + .child( + div() + .id("remote-participants") + .overflow_y_scroll() + .flex() + .flex_col() + .flex_grow() + .children(self.remote_participants.iter().map(|(identity, state)| { + div() + .h(px(300.0)) + .flex() + .flex_col() + .m_2() + .px_2() + .bg(rgb(0x8888ff)) + .child(SharedString::from(if state.speaking { + format!("{} (speaking)", &identity.0) + } else if state.muted { + format!("{} (muted)", &identity.0) + } else { + identity.0.clone() + })) + .when_some(state.audio_output_stream.as_ref(), |el, state| { + el.child( + button() + .id(SharedString::from(identity.0.clone())) + .child(if state.0.is_enabled() { + "Deafen" + } else { + "Undeafen" + }) + .on_click(cx.listener({ + let identity = identity.clone(); + move |this, _, cx| { + this.toggle_remote_audio_for_participant( + &identity, cx, + ); + } + })), + ) + }) + .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone())) + })), + ) + } +} diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs new file mode 100644 index 0000000000..5031dfdb33 --- /dev/null +++ b/crates/livekit_client/src/livekit_client.rs @@ -0,0 +1,661 @@ +#![cfg_attr(target_os = "windows", allow(unused))] + +mod remote_video_track_view; +#[cfg(any(test, feature = "test-support", target_os = "windows"))] +pub mod test; + +use anyhow::{anyhow, Context as _, Result}; +use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _}; 
+use futures::{io, Stream, StreamExt as _}; +use gpui::{ + BackgroundExecutor, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, Task, +}; +use parking_lot::Mutex; +use std::{borrow::Cow, collections::VecDeque, future::Future, pin::Pin, sync::Arc, thread}; +use util::{debug_panic, ResultExt as _}; +#[cfg(not(target_os = "windows"))] +use webrtc::{ + audio_frame::AudioFrame, + audio_source::{native::NativeAudioSource, AudioSourceOptions, RtcAudioSource}, + audio_stream::native::NativeAudioStream, + video_frame::{VideoBuffer, VideoFrame, VideoRotation}, + video_source::{native::NativeVideoSource, RtcVideoSource, VideoResolution}, + video_stream::native::NativeVideoStream, +}; + +#[cfg(all(not(any(test, feature = "test-support")), not(target_os = "windows")))] +use livekit::track::RemoteAudioTrack; +#[cfg(all(not(any(test, feature = "test-support")), not(target_os = "windows")))] +pub use livekit::*; +#[cfg(any(test, feature = "test-support", target_os = "windows"))] +use test::track::RemoteAudioTrack; +#[cfg(any(test, feature = "test-support", target_os = "windows"))] +pub use test::*; + +pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent}; + +pub enum AudioStream { + Input { + _thread_handle: std::sync::mpsc::Sender<()>, + _transmit_task: Task<()>, + }, + Output { + _task: Task<()>, + }, +} + +struct Dispatcher(Arc); + +#[cfg(not(target_os = "windows"))] +impl livekit::dispatcher::Dispatcher for Dispatcher { + fn dispatch(&self, runnable: livekit::dispatcher::Runnable) { + self.0.dispatch(runnable, None); + } + + fn dispatch_after( + &self, + duration: std::time::Duration, + runnable: livekit::dispatcher::Runnable, + ) { + self.0.dispatch_after(duration, runnable); + } +} + +struct HttpClientAdapter(Arc); + +fn http_2_status(status: http_client::http::StatusCode) -> http_2::StatusCode { + http_2::StatusCode::from_u16(status.as_u16()) + .expect("valid status code to status code conversion") +} + +#[cfg(not(target_os = 
"windows"))] +impl livekit::dispatcher::HttpClient for HttpClientAdapter { + fn get( + &self, + url: &str, + ) -> Pin> + Send>> { + let http_client = self.0.clone(); + let url = url.to_string(); + Box::pin(async move { + let response = http_client + .get(&url, http_client::AsyncBody::empty(), false) + .await + .map_err(io::Error::other)?; + Ok(livekit::dispatcher::Response { + status: http_2_status(response.status()), + body: Box::pin(response.into_body()), + }) + }) + } + + fn send_async( + &self, + request: http_2::Request>, + ) -> Pin> + Send>> { + let http_client = self.0.clone(); + let mut builder = http_client::http::Request::builder() + .method(request.method().as_str()) + .uri(request.uri().to_string()); + + for (key, value) in request.headers().iter() { + builder = builder.header(key.as_str(), value.as_bytes()); + } + + if !request.extensions().is_empty() { + debug_panic!( + "Livekit sent an HTTP request with a protocol extension that Zed doesn't support!" + ); + } + + let request = builder + .body(http_client::AsyncBody::from_bytes( + request.into_body().into(), + )) + .unwrap(); + + Box::pin(async move { + let response = http_client.send(request).await.map_err(io::Error::other)?; + Ok(livekit::dispatcher::Response { + status: http_2_status(response.status()), + body: Box::pin(response.into_body()), + }) + }) + } +} + +#[cfg(target_os = "windows")] +pub fn init( + dispatcher: Arc, + http_client: Arc, +) { +} + +#[cfg(not(target_os = "windows"))] +pub fn init( + dispatcher: Arc, + http_client: Arc, +) { + livekit::dispatcher::set_dispatcher(Dispatcher(dispatcher)); + livekit::dispatcher::set_http_client(HttpClientAdapter(http_client)); +} + +#[cfg(not(target_os = "windows"))] +pub async fn capture_local_video_track( + capture_source: &dyn ScreenCaptureSource, +) -> Result<(track::LocalVideoTrack, Box)> { + let resolution = capture_source.resolution()?; + let track_source = NativeVideoSource::new(VideoResolution { + width: resolution.width.0 as u32, + 
height: resolution.height.0 as u32, + }); + + let capture_stream = capture_source + .stream({ + let track_source = track_source.clone(); + Box::new(move |frame| { + if let Some(buffer) = video_frame_buffer_to_webrtc(frame) { + track_source.capture_frame(&VideoFrame { + rotation: VideoRotation::VideoRotation0, + timestamp_us: 0, + buffer, + }); + } + }) + }) + .await??; + + Ok(( + track::LocalVideoTrack::create_video_track( + "screen share", + RtcVideoSource::Native(track_source), + ), + capture_stream, + )) +} + +#[cfg(not(target_os = "windows"))] +pub fn capture_local_audio_track( + background_executor: &BackgroundExecutor, +) -> Result> { + use util::maybe; + + let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded(); + let (thread_handle, thread_kill_rx) = std::sync::mpsc::channel::<()>(); + let sample_rate; + let channels; + + if cfg!(any(test, feature = "test-support")) { + sample_rate = 2; + channels = 1; + } else { + let (device, config) = default_device(true)?; + sample_rate = config.sample_rate().0; + channels = config.channels() as u32; + thread::spawn(move || { + maybe!({ + if let Some(name) = device.name().ok() { + log::info!("Using microphone: {}", name) + } else { + log::info!("Using microphone: "); + } + + let stream = device + .build_input_stream_raw( + &config.config(), + cpal::SampleFormat::I16, + move |data, _: &_| { + frame_tx + .unbounded_send(AudioFrame { + data: Cow::Owned(data.as_slice::().unwrap().to_vec()), + sample_rate, + num_channels: channels, + samples_per_channel: data.len() as u32 / channels, + }) + .ok(); + }, + |err| log::error!("error capturing audio track: {:?}", err), + None, + ) + .context("failed to build input stream")?; + + stream.play()?; + // Keep the thread alive and holding onto the `stream` + thread_kill_rx.recv().ok(); + anyhow::Ok(Some(())) + }) + .log_err(); + }); + } + + Ok(background_executor.spawn({ + let background_executor = background_executor.clone(); + async move { + let source = 
NativeAudioSource::new( + AudioSourceOptions { + echo_cancellation: true, + noise_suppression: true, + auto_gain_control: true, + }, + sample_rate, + channels, + 100, + ); + let transmit_task = background_executor.spawn({ + let source = source.clone(); + async move { + while let Some(frame) = frame_rx.next().await { + source.capture_frame(&frame).await.log_err(); + } + } + }); + + let track = track::LocalAudioTrack::create_audio_track( + "microphone", + RtcAudioSource::Native(source), + ); + + ( + track, + AudioStream::Input { + _thread_handle: thread_handle, + _transmit_task: transmit_task, + }, + ) + } + })) +} + +#[cfg(not(target_os = "windows"))] +pub fn play_remote_audio_track( + track: &RemoteAudioTrack, + background_executor: &BackgroundExecutor, +) -> Result { + let track = track.clone(); + // We track device changes in our output because Livekit has a resampler built in, + // and it's easy to create a new native audio stream when the device changes. + if cfg!(any(test, feature = "test-support")) { + Ok(AudioStream::Output { + _task: background_executor.spawn(async {}), + }) + } else { + let mut default_change_listener = DeviceChangeListener::new(false)?; + let (output_device, output_config) = default_device(false)?; + + let _task = background_executor.spawn({ + let background_executor = background_executor.clone(); + async move { + let (mut _receive_task, mut _thread) = + start_output_stream(output_config, output_device, &track, &background_executor); + + while let Some(_) = default_change_listener.next().await { + let Some((output_device, output_config)) = get_default_output().log_err() + else { + continue; + }; + + if let Ok(name) = output_device.name() { + log::info!("Using speaker: {}", name) + } else { + log::info!("Using speaker: ") + } + + (_receive_task, _thread) = start_output_stream( + output_config, + output_device, + &track, + &background_executor, + ); + } + + futures::future::pending::<()>().await; + } + }); + + Ok(AudioStream::Output { _task 
}) + } +} + +fn default_device(input: bool) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> { + let device; + let config; + if input { + device = cpal::default_host() + .default_input_device() + .ok_or_else(|| anyhow!("no audio input device available"))?; + config = device + .default_input_config() + .context("failed to get default input config")?; + } else { + device = cpal::default_host() + .default_output_device() + .ok_or_else(|| anyhow!("no audio output device available"))?; + config = device + .default_output_config() + .context("failed to get default output config")?; + } + Ok((device, config)) +} + +#[cfg(not(target_os = "windows"))] +fn get_default_output() -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> { + let host = cpal::default_host(); + let output_device = host + .default_output_device() + .context("failed to read default output device")?; + let output_config = output_device.default_output_config()?; + Ok((output_device, output_config)) +} + +#[cfg(not(target_os = "windows"))] +fn start_output_stream( + output_config: cpal::SupportedStreamConfig, + output_device: cpal::Device, + track: &track::RemoteAudioTrack, + background_executor: &BackgroundExecutor, +) -> (Task<()>, std::sync::mpsc::Sender<()>) { + let buffer = Arc::new(Mutex::new(VecDeque::::new())); + let sample_rate = output_config.sample_rate(); + + let mut stream = NativeAudioStream::new( + track.rtc_track(), + sample_rate.0 as i32, + output_config.channels() as i32, + ); + + let receive_task = background_executor.spawn({ + let buffer = buffer.clone(); + async move { + const MS_OF_BUFFER: u32 = 100; + const MS_IN_SEC: u32 = 1000; + while let Some(frame) = stream.next().await { + let frame_size = frame.samples_per_channel * frame.num_channels; + debug_assert!(frame.data.len() == frame_size as usize); + + let buffer_size = + ((frame.sample_rate * frame.num_channels) / MS_IN_SEC * MS_OF_BUFFER) as usize; + + let mut buffer = buffer.lock(); + let new_size = 
buffer.len() + frame.data.len(); + if new_size > buffer_size { + let overflow = new_size - buffer_size; + buffer.drain(0..overflow); + } + + buffer.extend(frame.data.iter()); + } + } + }); + + // The _output_stream needs to be on it's own thread because it's !Send + // and we experienced a deadlock when it's created on the main thread. + let (thread, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); + thread::spawn(move || { + if cfg!(any(test, feature = "test-support")) { + // Can't play audio in tests + return; + } + + let output_stream = output_device.build_output_stream( + &output_config.config(), + { + let buffer = buffer.clone(); + move |data, _info| { + let mut buffer = buffer.lock(); + if buffer.len() < data.len() { + // Instead of partially filling a buffer, output silence. If a partial + // buffer was outputted then this could lead to a perpetual state of + // outputting partial buffers as it never gets filled enough for a full + // frame. + data.fill(0); + } else { + // SAFETY: We know that buffer has at least data.len() values in it. 
+ // because we just checked + let mut drain = buffer.drain(..data.len()); + data.fill_with(|| unsafe { drain.next().unwrap_unchecked() }); + } + } + }, + |error| log::error!("error playing audio track: {:?}", error), + None, + ); + + let Some(output_stream) = output_stream.log_err() else { + return; + }; + + output_stream.play().log_err(); + // Block forever to keep the output stream alive + end_on_drop_rx.recv().ok(); + }); + + (receive_task, thread) +} + +#[cfg(target_os = "windows")] +pub fn play_remote_video_track( + track: &track::RemoteVideoTrack, +) -> impl Stream { + futures::stream::empty() +} + +#[cfg(not(target_os = "windows"))] +pub fn play_remote_video_track( + track: &track::RemoteVideoTrack, +) -> impl Stream { + NativeVideoStream::new(track.rtc_track()) + .filter_map(|frame| async move { video_frame_buffer_from_webrtc(frame.buffer) }) +} + +#[cfg(target_os = "macos")] +pub type RemoteVideoFrame = media::core_video::CVImageBuffer; + +#[cfg(target_os = "macos")] +fn video_frame_buffer_from_webrtc(buffer: Box) -> Option { + use core_foundation::base::TCFType as _; + use media::core_video::CVImageBuffer; + + let buffer = buffer.as_native()?; + let pixel_buffer = buffer.get_cv_pixel_buffer(); + if pixel_buffer.is_null() { + return None; + } + + unsafe { Some(CVImageBuffer::wrap_under_get_rule(pixel_buffer as _)) } +} + +#[cfg(not(target_os = "macos"))] +pub type RemoteVideoFrame = Arc; + +#[cfg(not(any(target_os = "macos", target_os = "windows")))] +fn video_frame_buffer_from_webrtc(buffer: Box) -> Option { + use gpui::RenderImage; + use image::{Frame, RgbaImage}; + use livekit::webrtc::prelude::VideoFormatType; + use smallvec::SmallVec; + use std::alloc::{alloc, Layout}; + + let width = buffer.width(); + let height = buffer.height(); + let stride = width * 4; + let byte_len = (stride * height) as usize; + let argb_image = unsafe { + // Motivation for this unsafe code is to avoid initializing the frame data, since to_argb + // will write all bytes 
anyway. + let start_ptr = alloc(Layout::array::(byte_len).log_err()?); + if start_ptr.is_null() { + return None; + } + let bgra_frame_slice = std::slice::from_raw_parts_mut(start_ptr, byte_len); + buffer.to_argb( + VideoFormatType::ARGB, // For some reason, this displays correctly while RGBA (the correct format) does not + bgra_frame_slice, + stride, + width as i32, + height as i32, + ); + Vec::from_raw_parts(start_ptr, byte_len, byte_len) + }; + + Some(Arc::new(RenderImage::new(SmallVec::from_elem( + Frame::new( + RgbaImage::from_raw(width, height, argb_image) + .with_context(|| "Bug: not enough bytes allocated for image.") + .log_err()?, + ), + 1, + )))) +} + +#[cfg(target_os = "macos")] +fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option> { + use core_foundation::base::TCFType as _; + + let pixel_buffer = frame.0.as_concrete_TypeRef(); + std::mem::forget(frame.0); + unsafe { + Some(webrtc::video_frame::native::NativeBuffer::from_cv_pixel_buffer(pixel_buffer as _)) + } +} + +#[cfg(not(any(target_os = "macos", target_os = "windows")))] +fn video_frame_buffer_to_webrtc(_frame: ScreenCaptureFrame) -> Option> { + None as Option> +} + +trait DeviceChangeListenerApi: Stream + Sized { + fn new(input: bool) -> Result; +} + +#[cfg(target_os = "macos")] +mod macos { + + use coreaudio::sys::{ + kAudioHardwarePropertyDefaultInputDevice, kAudioHardwarePropertyDefaultOutputDevice, + kAudioObjectPropertyElementMaster, kAudioObjectPropertyScopeGlobal, + kAudioObjectSystemObject, AudioObjectAddPropertyListener, AudioObjectID, + AudioObjectPropertyAddress, AudioObjectRemovePropertyListener, OSStatus, + }; + use futures::{channel::mpsc::UnboundedReceiver, StreamExt}; + + use crate::DeviceChangeListenerApi; + + /// Implementation from: https://github.com/zed-industries/cpal/blob/fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50/src/host/coreaudio/macos/property_listener.rs#L15 + pub struct CoreAudioDefaultDeviceChangeListener { + rx: UnboundedReceiver<()>, + callback: Box, 
+ input: bool, + } + + trait _AssertSend: Send {} + impl _AssertSend for CoreAudioDefaultDeviceChangeListener {} + + struct PropertyListenerCallbackWrapper(Box); + + unsafe extern "C" fn property_listener_handler_shim( + _: AudioObjectID, + _: u32, + _: *const AudioObjectPropertyAddress, + callback: *mut ::std::os::raw::c_void, + ) -> OSStatus { + let wrapper = callback as *mut PropertyListenerCallbackWrapper; + (*wrapper).0(); + 0 + } + + impl DeviceChangeListenerApi for CoreAudioDefaultDeviceChangeListener { + fn new(input: bool) -> gpui::Result { + let (tx, rx) = futures::channel::mpsc::unbounded(); + + let callback = Box::new(PropertyListenerCallbackWrapper(Box::new(move || { + tx.unbounded_send(()).ok(); + }))); + + unsafe { + coreaudio::Error::from_os_status(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, + &AudioObjectPropertyAddress { + mSelector: if input { + kAudioHardwarePropertyDefaultInputDevice + } else { + kAudioHardwarePropertyDefaultOutputDevice + }, + mScope: kAudioObjectPropertyScopeGlobal, + mElement: kAudioObjectPropertyElementMaster, + }, + Some(property_listener_handler_shim), + &*callback as *const _ as *mut _, + ))?; + } + + Ok(Self { + rx, + callback, + input, + }) + } + } + + impl Drop for CoreAudioDefaultDeviceChangeListener { + fn drop(&mut self) { + unsafe { + AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, + &AudioObjectPropertyAddress { + mSelector: if self.input { + kAudioHardwarePropertyDefaultInputDevice + } else { + kAudioHardwarePropertyDefaultOutputDevice + }, + mScope: kAudioObjectPropertyScopeGlobal, + mElement: kAudioObjectPropertyElementMaster, + }, + Some(property_listener_handler_shim), + &*self.callback as *const _ as *mut _, + ); + } + } + } + + impl futures::Stream for CoreAudioDefaultDeviceChangeListener { + type Item = (); + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.rx.poll_next_unpin(cx) + } + } +} + 
+#[cfg(target_os = "macos")] +type DeviceChangeListener = macos::CoreAudioDefaultDeviceChangeListener; + +#[cfg(not(target_os = "macos"))] +mod noop_change_listener { + use std::task::Poll; + + use crate::DeviceChangeListenerApi; + + pub struct NoopOutputDeviceChangelistener {} + + impl DeviceChangeListenerApi for NoopOutputDeviceChangelistener { + fn new(_input: bool) -> anyhow::Result { + Ok(NoopOutputDeviceChangelistener {}) + } + } + + impl futures::Stream for NoopOutputDeviceChangelistener { + type Item = (); + + fn poll_next( + self: std::pin::Pin<&mut Self>, + _cx: &mut std::task::Context<'_>, + ) -> Poll> { + Poll::Pending + } + } +} + +#[cfg(not(target_os = "macos"))] +type DeviceChangeListener = noop_change_listener::NoopOutputDeviceChangelistener; diff --git a/crates/livekit_client/src/remote_video_track_view.rs b/crates/livekit_client/src/remote_video_track_view.rs new file mode 100644 index 0000000000..d7618391d6 --- /dev/null +++ b/crates/livekit_client/src/remote_video_track_view.rs @@ -0,0 +1,99 @@ +use crate::track::RemoteVideoTrack; +use anyhow::Result; +use futures::StreamExt as _; +use gpui::{Empty, EventEmitter, IntoElement, Render, Task, View, ViewContext, VisualContext as _}; + +pub struct RemoteVideoTrackView { + track: RemoteVideoTrack, + latest_frame: Option, + #[cfg(not(target_os = "macos"))] + current_rendered_frame: Option, + #[cfg(not(target_os = "macos"))] + previous_rendered_frame: Option, + _maintain_frame: Task>, +} + +#[derive(Debug)] +pub enum RemoteVideoTrackViewEvent { + Close, +} + +impl RemoteVideoTrackView { + pub fn new(track: RemoteVideoTrack, cx: &mut ViewContext) -> Self { + cx.focus_handle(); + let frames = super::play_remote_video_track(&track); + + Self { + track, + latest_frame: None, + _maintain_frame: cx.spawn(|this, mut cx| async move { + futures::pin_mut!(frames); + while let Some(frame) = frames.next().await { + this.update(&mut cx, |this, cx| { + this.latest_frame = Some(frame); + cx.notify(); + })?; + } + 
this.update(&mut cx, |_this, cx| { + #[cfg(not(target_os = "macos"))] + { + use util::ResultExt as _; + if let Some(frame) = _this.previous_rendered_frame.take() { + cx.window_context().drop_image(frame).log_err(); + } + // TODO(mgsloan): This might leak the last image of the screenshare if + // render is called after the screenshare ends. + if let Some(frame) = _this.current_rendered_frame.take() { + cx.window_context().drop_image(frame).log_err(); + } + } + cx.emit(RemoteVideoTrackViewEvent::Close) + })?; + Ok(()) + }), + #[cfg(not(target_os = "macos"))] + current_rendered_frame: None, + #[cfg(not(target_os = "macos"))] + previous_rendered_frame: None, + } + } + + pub fn clone(&self, cx: &mut ViewContext) -> View { + cx.new_view(|cx| Self::new(self.track.clone(), cx)) + } +} + +impl EventEmitter for RemoteVideoTrackView {} + +impl Render for RemoteVideoTrackView { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + #[cfg(target_os = "macos")] + if let Some(latest_frame) = &self.latest_frame { + use gpui::Styled as _; + return gpui::surface(latest_frame.clone()) + .size_full() + .into_any_element(); + } + + #[cfg(not(target_os = "macos"))] + if let Some(latest_frame) = &self.latest_frame { + use gpui::Styled as _; + if let Some(current_rendered_frame) = self.current_rendered_frame.take() { + if let Some(frame) = self.previous_rendered_frame.take() { + // Only drop the frame if it's not also the current frame. 
+ if frame.id != current_rendered_frame.id { + use util::ResultExt as _; + _cx.window_context().drop_image(frame).log_err(); + } + } + self.previous_rendered_frame = Some(current_rendered_frame) + } + self.current_rendered_frame = Some(latest_frame.clone()); + return gpui::img(latest_frame.clone()) + .size_full() + .into_any_element(); + } + + Empty.into_any_element() + } +} diff --git a/crates/livekit_client/src/test.rs b/crates/livekit_client/src/test.rs new file mode 100644 index 0000000000..e67189c09c --- /dev/null +++ b/crates/livekit_client/src/test.rs @@ -0,0 +1,825 @@ +pub mod participant; +pub mod publication; +pub mod track; + +#[cfg(not(windows))] +pub mod webrtc; + +#[cfg(not(windows))] +use self::id::*; +use self::{participant::*, publication::*, track::*}; +use anyhow::{anyhow, Context, Result}; +use async_trait::async_trait; +use collections::{btree_map::Entry as BTreeEntry, hash_map::Entry, BTreeMap, HashMap, HashSet}; +use gpui::BackgroundExecutor; +#[cfg(not(windows))] +use livekit::options::TrackPublishOptions; +use livekit_server::{proto, token}; +use parking_lot::Mutex; +use postage::{mpsc, sink::Sink}; +use std::sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, Weak, +}; + +#[cfg(not(windows))] +pub use livekit::{id, options, ConnectionState, DisconnectReason, RoomOptions}; + +static SERVERS: Mutex>> = Mutex::new(BTreeMap::new()); + +pub struct TestServer { + pub url: String, + pub api_key: String, + pub secret_key: String, + #[cfg(not(target_os = "windows"))] + rooms: Mutex>, + executor: BackgroundExecutor, +} + +#[cfg(not(target_os = "windows"))] +impl TestServer { + pub fn create( + url: String, + api_key: String, + secret_key: String, + executor: BackgroundExecutor, + ) -> Result> { + let mut servers = SERVERS.lock(); + if let BTreeEntry::Vacant(e) = servers.entry(url.clone()) { + let server = Arc::new(TestServer { + url, + api_key, + secret_key, + rooms: Default::default(), + executor, + }); + e.insert(server.clone()); + Ok(server) 
+ } else { + Err(anyhow!("a server with url {:?} already exists", url)) + } + } + + fn get(url: &str) -> Result> { + Ok(SERVERS + .lock() + .get(url) + .ok_or_else(|| anyhow!("no server found for url"))? + .clone()) + } + + pub fn teardown(&self) -> Result<()> { + SERVERS + .lock() + .remove(&self.url) + .ok_or_else(|| anyhow!("server with url {:?} does not exist", self.url))?; + Ok(()) + } + + pub fn create_api_client(&self) -> TestApiClient { + TestApiClient { + url: self.url.clone(), + } + } + + pub async fn create_room(&self, room: String) -> Result<()> { + self.executor.simulate_random_delay().await; + + let mut server_rooms = self.rooms.lock(); + if let Entry::Vacant(e) = server_rooms.entry(room.clone()) { + e.insert(Default::default()); + Ok(()) + } else { + Err(anyhow!("room {:?} already exists", room)) + } + } + + async fn delete_room(&self, room: String) -> Result<()> { + self.executor.simulate_random_delay().await; + + let mut server_rooms = self.rooms.lock(); + server_rooms + .remove(&room) + .ok_or_else(|| anyhow!("room {:?} does not exist", room))?; + Ok(()) + } + + async fn join_room(&self, token: String, client_room: Room) -> Result { + self.executor.simulate_random_delay().await; + + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + let room_name = claims.video.room.unwrap(); + let mut server_rooms = self.rooms.lock(); + let room = (*server_rooms).entry(room_name.to_string()).or_default(); + + if let Entry::Vacant(e) = room.client_rooms.entry(identity.clone()) { + for server_track in &room.video_tracks { + let track = RemoteTrack::Video(RemoteVideoTrack { + server_track: server_track.clone(), + _room: client_room.downgrade(), + }); + client_room + .0 + .lock() + .updates_tx + .blocking_send(RoomEvent::TrackSubscribed { + track: track.clone(), + publication: RemoteTrackPublication { + sid: server_track.sid.clone(), + room: client_room.downgrade(), + 
track, + }, + participant: RemoteParticipant { + room: client_room.downgrade(), + identity: server_track.publisher_id.clone(), + }, + }) + .unwrap(); + } + for server_track in &room.audio_tracks { + let track = RemoteTrack::Audio(RemoteAudioTrack { + server_track: server_track.clone(), + room: client_room.downgrade(), + }); + client_room + .0 + .lock() + .updates_tx + .blocking_send(RoomEvent::TrackSubscribed { + track: track.clone(), + publication: RemoteTrackPublication { + sid: server_track.sid.clone(), + room: client_room.downgrade(), + track, + }, + participant: RemoteParticipant { + room: client_room.downgrade(), + identity: server_track.publisher_id.clone(), + }, + }) + .unwrap(); + } + e.insert(client_room); + Ok(identity) + } else { + Err(anyhow!( + "{:?} attempted to join room {:?} twice", + identity, + room_name + )) + } + } + + async fn leave_room(&self, token: String) -> Result<()> { + self.executor.simulate_random_delay().await; + + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + let room_name = claims.video.room.unwrap(); + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + room.client_rooms.remove(&identity).ok_or_else(|| { + anyhow!( + "{:?} attempted to leave room {:?} before joining it", + identity, + room_name + ) + })?; + Ok(()) + } + + fn remote_participants( + &self, + token: String, + ) -> Result> { + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let local_identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + let room_name = claims.video.room.unwrap().to_string(); + + if let Some(server_room) = self.rooms.lock().get(&room_name) { + let room = server_room + .client_rooms + .get(&local_identity) + .unwrap() + .downgrade(); + Ok(server_room + .client_rooms + .iter() + .filter(|(identity, _)| *identity 
!= &local_identity) + .map(|(identity, _)| { + ( + identity.clone(), + RemoteParticipant { + room: room.clone(), + identity: identity.clone(), + }, + ) + }) + .collect()) + } else { + Ok(Default::default()) + } + } + + async fn remove_participant( + &self, + room_name: String, + identity: ParticipantIdentity, + ) -> Result<()> { + self.executor.simulate_random_delay().await; + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + room.client_rooms.remove(&identity).ok_or_else(|| { + anyhow!( + "participant {:?} did not join room {:?}", + identity, + room_name + ) + })?; + Ok(()) + } + + async fn update_participant( + &self, + room_name: String, + identity: String, + permission: proto::ParticipantPermission, + ) -> Result<()> { + self.executor.simulate_random_delay().await; + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + room.participant_permissions + .insert(ParticipantIdentity(identity), permission); + Ok(()) + } + + pub async fn disconnect_client(&self, client_identity: String) { + let client_identity = ParticipantIdentity(client_identity); + + self.executor.simulate_random_delay().await; + + let mut server_rooms = self.rooms.lock(); + for room in server_rooms.values_mut() { + if let Some(room) = room.client_rooms.remove(&client_identity) { + let mut room = room.0.lock(); + room.connection_state = ConnectionState::Disconnected; + room.updates_tx + .blocking_send(RoomEvent::Disconnected { + reason: DisconnectReason::SignalClose, + }) + .ok(); + } + } + } + + async fn publish_video_track( + &self, + token: String, + _local_track: LocalVideoTrack, + ) -> Result { + self.executor.simulate_random_delay().await; + + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let identity = 
ParticipantIdentity(claims.sub.unwrap().to_string()); + let room_name = claims.video.room.unwrap(); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + + let can_publish = room + .participant_permissions + .get(&identity) + .map(|permission| permission.can_publish) + .or(claims.video.can_publish) + .unwrap_or(true); + + if !can_publish { + return Err(anyhow!("user is not allowed to publish")); + } + + let sid: TrackSid = format!("TR_{}", nanoid::nanoid!(17)).try_into().unwrap(); + let server_track = Arc::new(TestServerVideoTrack { + sid: sid.clone(), + publisher_id: identity.clone(), + }); + + room.video_tracks.push(server_track.clone()); + + for (room_identity, client_room) in &room.client_rooms { + if *room_identity != identity { + let track = RemoteTrack::Video(RemoteVideoTrack { + server_track: server_track.clone(), + _room: client_room.downgrade(), + }); + let publication = RemoteTrackPublication { + sid: sid.clone(), + room: client_room.downgrade(), + track: track.clone(), + }; + let participant = RemoteParticipant { + identity: identity.clone(), + room: client_room.downgrade(), + }; + client_room + .0 + .lock() + .updates_tx + .blocking_send(RoomEvent::TrackSubscribed { + track, + publication, + participant, + }) + .unwrap(); + } + } + + Ok(sid) + } + + async fn publish_audio_track( + &self, + token: String, + _local_track: &LocalAudioTrack, + ) -> Result { + self.executor.simulate_random_delay().await; + + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + let room_name = claims.video.room.unwrap(); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + + let can_publish = room + .participant_permissions + .get(&identity) + .map(|permission| 
permission.can_publish) + .or(claims.video.can_publish) + .unwrap_or(true); + + if !can_publish { + return Err(anyhow!("user is not allowed to publish")); + } + + let sid: TrackSid = format!("TR_{}", nanoid::nanoid!(17)).try_into().unwrap(); + let server_track = Arc::new(TestServerAudioTrack { + sid: sid.clone(), + publisher_id: identity.clone(), + muted: AtomicBool::new(false), + }); + + room.audio_tracks.push(server_track.clone()); + + for (room_identity, client_room) in &room.client_rooms { + if *room_identity != identity { + let track = RemoteTrack::Audio(RemoteAudioTrack { + server_track: server_track.clone(), + room: client_room.downgrade(), + }); + let publication = RemoteTrackPublication { + sid: sid.clone(), + room: client_room.downgrade(), + track: track.clone(), + }; + let participant = RemoteParticipant { + identity: identity.clone(), + room: client_room.downgrade(), + }; + client_room + .0 + .lock() + .updates_tx + .blocking_send(RoomEvent::TrackSubscribed { + track, + publication, + participant, + }) + .ok(); + } + } + + Ok(sid) + } + + async fn unpublish_track(&self, _token: String, _track: &TrackSid) -> Result<()> { + Ok(()) + } + + fn set_track_muted(&self, token: &str, track_sid: &TrackSid, muted: bool) -> Result<()> { + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let room_name = claims.video.room.unwrap(); + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + if let Some(track) = room + .audio_tracks + .iter_mut() + .find(|track| track.sid == *track_sid) + { + track.muted.store(muted, SeqCst); + for (id, client_room) in room.client_rooms.iter() { + if *id != identity { + let participant = Participant::Remote(RemoteParticipant { + identity: identity.clone(), + room: client_room.downgrade(), + }); + let track = 
RemoteTrack::Audio(RemoteAudioTrack { + server_track: track.clone(), + room: client_room.downgrade(), + }); + let publication = TrackPublication::Remote(RemoteTrackPublication { + sid: track_sid.clone(), + room: client_room.downgrade(), + track, + }); + + let event = if muted { + RoomEvent::TrackMuted { + participant, + publication, + } + } else { + RoomEvent::TrackUnmuted { + participant, + publication, + } + }; + + client_room + .0 + .lock() + .updates_tx + .blocking_send(event) + .unwrap(); + } + } + } + Ok(()) + } + + fn is_track_muted(&self, token: &str, track_sid: &TrackSid) -> Option { + let claims = livekit_server::token::validate(&token, &self.secret_key).ok()?; + let room_name = claims.video.room.unwrap(); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms.get_mut(&*room_name)?; + room.audio_tracks.iter().find_map(|track| { + if track.sid == *track_sid { + Some(track.muted.load(SeqCst)) + } else { + None + } + }) + } + + fn video_tracks(&self, token: String) -> Result> { + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let room_name = claims.video.room.unwrap(); + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + let client_room = room + .client_rooms + .get(&identity) + .ok_or_else(|| anyhow!("not a participant in room"))?; + Ok(room + .video_tracks + .iter() + .map(|track| RemoteVideoTrack { + server_track: track.clone(), + _room: client_room.downgrade(), + }) + .collect()) + } + + fn audio_tracks(&self, token: String) -> Result> { + let claims = livekit_server::token::validate(&token, &self.secret_key)?; + let room_name = claims.video.room.unwrap(); + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + 
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + let client_room = room + .client_rooms + .get(&identity) + .ok_or_else(|| anyhow!("not a participant in room"))?; + Ok(room + .audio_tracks + .iter() + .map(|track| RemoteAudioTrack { + server_track: track.clone(), + room: client_room.downgrade(), + }) + .collect()) + } +} + +#[cfg(not(target_os = "windows"))] +#[derive(Default, Debug)] +struct TestServerRoom { + client_rooms: HashMap, + video_tracks: Vec>, + audio_tracks: Vec>, + participant_permissions: HashMap, +} + +#[cfg(not(target_os = "windows"))] +#[derive(Debug)] +struct TestServerVideoTrack { + sid: TrackSid, + publisher_id: ParticipantIdentity, + // frames_rx: async_broadcast::Receiver, +} + +#[cfg(not(target_os = "windows"))] +#[derive(Debug)] +struct TestServerAudioTrack { + sid: TrackSid, + publisher_id: ParticipantIdentity, + muted: AtomicBool, +} + +pub struct TestApiClient { + url: String, +} + +#[derive(Clone, Debug)] +#[non_exhaustive] +pub enum RoomEvent { + ParticipantConnected(RemoteParticipant), + ParticipantDisconnected(RemoteParticipant), + LocalTrackPublished { + publication: LocalTrackPublication, + track: LocalTrack, + participant: LocalParticipant, + }, + LocalTrackUnpublished { + publication: LocalTrackPublication, + participant: LocalParticipant, + }, + TrackSubscribed { + track: RemoteTrack, + publication: RemoteTrackPublication, + participant: RemoteParticipant, + }, + TrackUnsubscribed { + track: RemoteTrack, + publication: RemoteTrackPublication, + participant: RemoteParticipant, + }, + TrackSubscriptionFailed { + participant: RemoteParticipant, + error: String, + #[cfg(not(target_os = "windows"))] + track_sid: TrackSid, + }, + TrackPublished { + publication: RemoteTrackPublication, + participant: RemoteParticipant, + }, + TrackUnpublished { + publication: RemoteTrackPublication, + participant: RemoteParticipant, + }, + TrackMuted { + participant: Participant, + publication: TrackPublication, + }, + TrackUnmuted { + 
participant: Participant, + publication: TrackPublication, + }, + RoomMetadataChanged { + old_metadata: String, + metadata: String, + }, + ParticipantMetadataChanged { + participant: Participant, + old_metadata: String, + metadata: String, + }, + ParticipantNameChanged { + participant: Participant, + old_name: String, + name: String, + }, + ActiveSpeakersChanged { + speakers: Vec, + }, + #[cfg(not(target_os = "windows"))] + ConnectionStateChanged(ConnectionState), + Connected { + participants_with_tracks: Vec<(RemoteParticipant, Vec)>, + }, + #[cfg(not(target_os = "windows"))] + Disconnected { + reason: DisconnectReason, + }, + Reconnecting, + Reconnected, +} + +#[cfg(not(target_os = "windows"))] +#[async_trait] +impl livekit_server::api::Client for TestApiClient { + fn url(&self) -> &str { + &self.url + } + + async fn create_room(&self, name: String) -> Result<()> { + let server = TestServer::get(&self.url)?; + server.create_room(name).await?; + Ok(()) + } + + async fn delete_room(&self, name: String) -> Result<()> { + let server = TestServer::get(&self.url)?; + server.delete_room(name).await?; + Ok(()) + } + + async fn remove_participant(&self, room: String, identity: String) -> Result<()> { + let server = TestServer::get(&self.url)?; + server + .remove_participant(room, ParticipantIdentity(identity)) + .await?; + Ok(()) + } + + async fn update_participant( + &self, + room: String, + identity: String, + permission: livekit_server::proto::ParticipantPermission, + ) -> Result<()> { + let server = TestServer::get(&self.url)?; + server + .update_participant(room, identity, permission) + .await?; + Ok(()) + } + + fn room_token(&self, room: &str, identity: &str) -> Result { + let server = TestServer::get(&self.url)?; + token::create( + &server.api_key, + &server.secret_key, + Some(identity), + token::VideoGrant::to_join(room), + ) + } + + fn guest_token(&self, room: &str, identity: &str) -> Result { + let server = TestServer::get(&self.url)?; + token::create( + 
&server.api_key, + &server.secret_key, + Some(identity), + token::VideoGrant::for_guest(room), + ) + } +} + +struct RoomState { + url: String, + token: String, + #[cfg(not(target_os = "windows"))] + local_identity: ParticipantIdentity, + #[cfg(not(target_os = "windows"))] + connection_state: ConnectionState, + #[cfg(not(target_os = "windows"))] + paused_audio_tracks: HashSet, + updates_tx: mpsc::Sender, +} + +#[derive(Clone, Debug)] +pub struct Room(Arc>); + +#[derive(Clone, Debug)] +pub(crate) struct WeakRoom(Weak>); + +#[cfg(not(target_os = "windows"))] +impl std::fmt::Debug for RoomState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Room") + .field("url", &self.url) + .field("token", &self.token) + .field("local_identity", &self.local_identity) + .field("connection_state", &self.connection_state) + .field("paused_audio_tracks", &self.paused_audio_tracks) + .finish() + } +} + +#[cfg(target_os = "windows")] +impl std::fmt::Debug for RoomState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Room") + .field("url", &self.url) + .field("token", &self.token) + .finish() + } +} + +#[cfg(not(target_os = "windows"))] +impl Room { + fn downgrade(&self) -> WeakRoom { + WeakRoom(Arc::downgrade(&self.0)) + } + + pub fn connection_state(&self) -> ConnectionState { + self.0.lock().connection_state + } + + pub fn local_participant(&self) -> LocalParticipant { + let identity = self.0.lock().local_identity.clone(); + LocalParticipant { + identity, + room: self.clone(), + } + } + + pub async fn connect( + url: &str, + token: &str, + _options: RoomOptions, + ) -> Result<(Self, mpsc::Receiver)> { + let server = TestServer::get(&url)?; + let (updates_tx, updates_rx) = mpsc::channel(1024); + let this = Self(Arc::new(Mutex::new(RoomState { + local_identity: ParticipantIdentity(String::new()), + url: url.to_string(), + token: token.to_string(), + connection_state: ConnectionState::Disconnected, + 
paused_audio_tracks: Default::default(), + updates_tx, + }))); + + let identity = server + .join_room(token.to_string(), this.clone()) + .await + .context("room join")?; + { + let mut state = this.0.lock(); + state.local_identity = identity; + state.connection_state = ConnectionState::Connected; + } + + Ok((this, updates_rx)) + } + + pub fn remote_participants(&self) -> HashMap { + self.test_server() + .remote_participants(self.0.lock().token.clone()) + .unwrap() + } + + fn test_server(&self) -> Arc { + TestServer::get(&self.0.lock().url).unwrap() + } + + fn token(&self) -> String { + self.0.lock().token.clone() + } +} + +#[cfg(not(target_os = "windows"))] +impl Drop for RoomState { + fn drop(&mut self) { + if self.connection_state == ConnectionState::Connected { + if let Ok(server) = TestServer::get(&self.url) { + let executor = server.executor.clone(); + let token = self.token.clone(); + executor + .spawn(async move { server.leave_room(token).await.ok() }) + .detach(); + } + } + } +} + +impl WeakRoom { + fn upgrade(&self) -> Option { + self.0.upgrade().map(Room) + } +} diff --git a/crates/livekit_client/src/test/participant.rs b/crates/livekit_client/src/test/participant.rs new file mode 100644 index 0000000000..8d476b1537 --- /dev/null +++ b/crates/livekit_client/src/test/participant.rs @@ -0,0 +1,111 @@ +use super::*; + +#[derive(Clone, Debug)] +pub enum Participant { + Local(LocalParticipant), + Remote(RemoteParticipant), +} + +#[derive(Clone, Debug)] +pub struct LocalParticipant { + #[cfg(not(target_os = "windows"))] + pub(super) identity: ParticipantIdentity, + pub(super) room: Room, +} + +#[derive(Clone, Debug)] +pub struct RemoteParticipant { + #[cfg(not(target_os = "windows"))] + pub(super) identity: ParticipantIdentity, + pub(super) room: WeakRoom, +} + +#[cfg(not(target_os = "windows"))] +impl Participant { + pub fn identity(&self) -> ParticipantIdentity { + match self { + Participant::Local(participant) => participant.identity.clone(), + 
Participant::Remote(participant) => participant.identity.clone(), + } + } +} + +#[cfg(not(target_os = "windows"))] +impl LocalParticipant { + pub async fn unpublish_track(&self, track: &TrackSid) -> Result<()> { + self.room + .test_server() + .unpublish_track(self.room.token(), track) + .await + } + + pub async fn publish_track( + &self, + track: LocalTrack, + _options: TrackPublishOptions, + ) -> Result { + let this = self.clone(); + let track = track.clone(); + let server = this.room.test_server(); + let sid = match track { + LocalTrack::Video(track) => { + server.publish_video_track(this.room.token(), track).await? + } + LocalTrack::Audio(track) => { + server + .publish_audio_track(this.room.token(), &track) + .await? + } + }; + Ok(LocalTrackPublication { + room: self.room.downgrade(), + sid, + }) + } +} + +#[cfg(not(target_os = "windows"))] +impl RemoteParticipant { + pub fn track_publications(&self) -> HashMap { + if let Some(room) = self.room.upgrade() { + let server = room.test_server(); + let audio = server + .audio_tracks(room.token()) + .unwrap() + .into_iter() + .filter(|track| track.publisher_id() == self.identity) + .map(|track| { + ( + track.sid(), + RemoteTrackPublication { + sid: track.sid(), + room: self.room.clone(), + track: RemoteTrack::Audio(track), + }, + ) + }); + let video = server + .video_tracks(room.token()) + .unwrap() + .into_iter() + .filter(|track| track.publisher_id() == self.identity) + .map(|track| { + ( + track.sid(), + RemoteTrackPublication { + sid: track.sid(), + room: self.room.clone(), + track: RemoteTrack::Video(track), + }, + ) + }); + audio.chain(video).collect() + } else { + HashMap::default() + } + } + + pub fn identity(&self) -> ParticipantIdentity { + self.identity.clone() + } +} diff --git a/crates/livekit_client/src/test/publication.rs b/crates/livekit_client/src/test/publication.rs new file mode 100644 index 0000000000..6a3dfa0a51 --- /dev/null +++ b/crates/livekit_client/src/test/publication.rs @@ -0,0 +1,116 @@ 
+use super::*; + +#[derive(Clone, Debug)] +pub enum TrackPublication { + Local(LocalTrackPublication), + Remote(RemoteTrackPublication), +} + +#[derive(Clone, Debug)] +pub struct LocalTrackPublication { + #[cfg(not(target_os = "windows"))] + pub(crate) sid: TrackSid, + pub(crate) room: WeakRoom, +} + +#[derive(Clone, Debug)] +pub struct RemoteTrackPublication { + #[cfg(not(target_os = "windows"))] + pub(crate) sid: TrackSid, + pub(crate) room: WeakRoom, + pub(crate) track: RemoteTrack, +} + +#[cfg(not(target_os = "windows"))] +impl TrackPublication { + pub fn sid(&self) -> TrackSid { + match self { + TrackPublication::Local(track) => track.sid(), + TrackPublication::Remote(track) => track.sid(), + } + } + + pub fn is_muted(&self) -> bool { + match self { + TrackPublication::Local(track) => track.is_muted(), + TrackPublication::Remote(track) => track.is_muted(), + } + } +} + +#[cfg(not(target_os = "windows"))] +impl LocalTrackPublication { + pub fn sid(&self) -> TrackSid { + self.sid.clone() + } + + pub fn mute(&self) { + self.set_mute(true) + } + + pub fn unmute(&self) { + self.set_mute(false) + } + + fn set_mute(&self, mute: bool) { + if let Some(room) = self.room.upgrade() { + room.test_server() + .set_track_muted(&room.token(), &self.sid, mute) + .ok(); + } + } + + pub fn is_muted(&self) -> bool { + if let Some(room) = self.room.upgrade() { + room.test_server() + .is_track_muted(&room.token(), &self.sid) + .unwrap_or(false) + } else { + false + } + } +} + +#[cfg(not(target_os = "windows"))] +impl RemoteTrackPublication { + pub fn sid(&self) -> TrackSid { + self.sid.clone() + } + + pub fn track(&self) -> Option { + Some(self.track.clone()) + } + + pub fn kind(&self) -> TrackKind { + self.track.kind() + } + + pub fn is_muted(&self) -> bool { + if let Some(room) = self.room.upgrade() { + room.test_server() + .is_track_muted(&room.token(), &self.sid) + .unwrap_or(false) + } else { + false + } + } + + pub fn is_enabled(&self) -> bool { + if let Some(room) = 
self.room.upgrade() { + !room.0.lock().paused_audio_tracks.contains(&self.sid) + } else { + false + } + } + + pub fn set_enabled(&self, enabled: bool) { + if let Some(room) = self.room.upgrade() { + let paused_audio_tracks = &mut room.0.lock().paused_audio_tracks; + if enabled { + paused_audio_tracks.remove(&self.sid); + } else { + paused_audio_tracks.insert(self.sid.clone()); + } + } + } +} diff --git a/crates/livekit_client/src/test/track.rs b/crates/livekit_client/src/test/track.rs new file mode 100644 index 0000000000..302177a10a --- /dev/null +++ b/crates/livekit_client/src/test/track.rs @@ -0,0 +1,201 @@ +use super::*; +#[cfg(not(windows))] +use webrtc::{audio_source::RtcAudioSource, video_source::RtcVideoSource}; + +#[cfg(not(windows))] +pub use livekit::track::{TrackKind, TrackSource}; + +#[derive(Clone, Debug)] +pub enum LocalTrack { + Audio(LocalAudioTrack), + Video(LocalVideoTrack), +} + +#[derive(Clone, Debug)] +pub enum RemoteTrack { + Audio(RemoteAudioTrack), + Video(RemoteVideoTrack), +} + +#[derive(Clone, Debug)] +pub struct LocalVideoTrack {} + +#[derive(Clone, Debug)] +pub struct LocalAudioTrack {} + +#[derive(Clone, Debug)] +pub struct RemoteVideoTrack { + #[cfg(not(target_os = "windows"))] + pub(super) server_track: Arc, + pub(super) _room: WeakRoom, +} + +#[derive(Clone, Debug)] +pub struct RemoteAudioTrack { + #[cfg(not(target_os = "windows"))] + pub(super) server_track: Arc, + pub(super) room: WeakRoom, +} + +pub enum RtcTrack { + Audio(RtcAudioTrack), + Video(RtcVideoTrack), +} + +pub struct RtcAudioTrack { + #[cfg(not(target_os = "windows"))] + pub(super) server_track: Arc, + pub(super) room: WeakRoom, +} + +pub struct RtcVideoTrack { + #[cfg(not(target_os = "windows"))] + pub(super) _server_track: Arc, +} + +#[cfg(not(target_os = "windows"))] +impl RemoteTrack { + pub fn sid(&self) -> TrackSid { + match self { + RemoteTrack::Audio(track) => track.sid(), + RemoteTrack::Video(track) => track.sid(), + } + } + + pub fn kind(&self) -> TrackKind 
{ + match self { + RemoteTrack::Audio(_) => TrackKind::Audio, + RemoteTrack::Video(_) => TrackKind::Video, + } + } + + pub fn publisher_id(&self) -> ParticipantIdentity { + match self { + RemoteTrack::Audio(track) => track.publisher_id(), + RemoteTrack::Video(track) => track.publisher_id(), + } + } + + pub fn rtc_track(&self) -> RtcTrack { + match self { + RemoteTrack::Audio(track) => RtcTrack::Audio(track.rtc_track()), + RemoteTrack::Video(track) => RtcTrack::Video(track.rtc_track()), + } + } +} + +#[cfg(not(windows))] +impl LocalVideoTrack { + pub fn create_video_track(_name: &str, _source: RtcVideoSource) -> Self { + Self {} + } +} + +#[cfg(not(windows))] +impl LocalAudioTrack { + pub fn create_audio_track(_name: &str, _source: RtcAudioSource) -> Self { + Self {} + } +} + +#[cfg(not(target_os = "windows"))] +impl RemoteAudioTrack { + pub fn sid(&self) -> TrackSid { + self.server_track.sid.clone() + } + + pub fn publisher_id(&self) -> ParticipantIdentity { + self.server_track.publisher_id.clone() + } + + pub fn start(&self) { + if let Some(room) = self.room.upgrade() { + room.0 + .lock() + .paused_audio_tracks + .remove(&self.server_track.sid); + } + } + + pub fn stop(&self) { + if let Some(room) = self.room.upgrade() { + room.0 + .lock() + .paused_audio_tracks + .insert(self.server_track.sid.clone()); + } + } + + pub fn rtc_track(&self) -> RtcAudioTrack { + RtcAudioTrack { + server_track: self.server_track.clone(), + room: self.room.clone(), + } + } +} + +#[cfg(not(target_os = "windows"))] +impl RemoteVideoTrack { + pub fn sid(&self) -> TrackSid { + self.server_track.sid.clone() + } + + pub fn publisher_id(&self) -> ParticipantIdentity { + self.server_track.publisher_id.clone() + } + + pub fn rtc_track(&self) -> RtcVideoTrack { + RtcVideoTrack { + _server_track: self.server_track.clone(), + } + } +} + +#[cfg(not(target_os = "windows"))] +impl RtcTrack { + pub fn enabled(&self) -> bool { + match self { + RtcTrack::Audio(track) => track.enabled(), + 
RtcTrack::Video(track) => track.enabled(), + } + } + + pub fn set_enabled(&self, enabled: bool) { + match self { + RtcTrack::Audio(track) => track.set_enabled(enabled), + RtcTrack::Video(_) => {} + } + } +} + +#[cfg(not(target_os = "windows"))] +impl RtcAudioTrack { + pub fn set_enabled(&self, enabled: bool) { + if let Some(room) = self.room.upgrade() { + let paused_audio_tracks = &mut room.0.lock().paused_audio_tracks; + if enabled { + paused_audio_tracks.remove(&self.server_track.sid); + } else { + paused_audio_tracks.insert(self.server_track.sid.clone()); + } + } + } + + pub fn enabled(&self) -> bool { + if let Some(room) = self.room.upgrade() { + !room + .0 + .lock() + .paused_audio_tracks + .contains(&self.server_track.sid) + } else { + false + } + } +} + +impl RtcVideoTrack { + pub fn enabled(&self) -> bool { + true + } +} diff --git a/crates/livekit_client/src/test/webrtc.rs b/crates/livekit_client/src/test/webrtc.rs new file mode 100644 index 0000000000..6ac06e0484 --- /dev/null +++ b/crates/livekit_client/src/test/webrtc.rs @@ -0,0 +1,136 @@ +use super::track::{RtcAudioTrack, RtcVideoTrack}; +use futures::Stream; +use livekit::webrtc as real; +use std::{ + pin::Pin, + task::{Context, Poll}, +}; + +pub mod video_stream { + use super::*; + + pub mod native { + use super::*; + use real::video_frame::BoxVideoFrame; + + pub struct NativeVideoStream { + pub track: RtcVideoTrack, + } + + impl NativeVideoStream { + pub fn new(track: RtcVideoTrack) -> Self { + Self { track } + } + } + + impl Stream for NativeVideoStream { + type Item = BoxVideoFrame; + + fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll> { + Poll::Pending + } + } + } +} + +pub mod audio_stream { + use super::*; + + pub mod native { + use super::*; + use real::audio_frame::AudioFrame; + + pub struct NativeAudioStream { + pub track: RtcAudioTrack, + } + + impl NativeAudioStream { + pub fn new(track: RtcAudioTrack, _sample_rate: i32, _num_channels: i32) -> Self { + Self { track } + } + } + 
+ impl Stream for NativeAudioStream { + type Item = AudioFrame<'static>; + + fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll> { + Poll::Pending + } + } + } +} + +pub mod audio_source { + use super::*; + + pub use real::audio_source::AudioSourceOptions; + + pub mod native { + use std::sync::Arc; + + use super::*; + use real::{audio_frame::AudioFrame, RtcError}; + + #[derive(Clone)] + pub struct NativeAudioSource { + pub options: Arc, + pub sample_rate: u32, + pub num_channels: u32, + } + + impl NativeAudioSource { + pub fn new( + options: AudioSourceOptions, + sample_rate: u32, + num_channels: u32, + _queue_size_ms: u32, + ) -> Self { + Self { + options: Arc::new(options), + sample_rate, + num_channels, + } + } + + pub async fn capture_frame(&self, _frame: &AudioFrame<'_>) -> Result<(), RtcError> { + Ok(()) + } + } + } + + pub enum RtcAudioSource { + Native(native::NativeAudioSource), + } +} + +pub use livekit::webrtc::audio_frame; +pub use livekit::webrtc::video_frame; + +pub mod video_source { + use super::*; + pub use real::video_source::VideoResolution; + + pub struct RTCVideoSource; + + pub mod native { + use super::*; + use real::video_frame::{VideoBuffer, VideoFrame}; + + #[derive(Clone)] + pub struct NativeVideoSource { + pub resolution: VideoResolution, + } + + impl NativeVideoSource { + pub fn new(resolution: super::VideoResolution) -> Self { + Self { resolution } + } + + pub fn capture_frame>(&self, _frame: &VideoFrame) {} + } + } + + pub enum RtcVideoSource { + Native(native::NativeVideoSource), + } +} diff --git a/crates/livekit_client_macos/.cargo/config.toml b/crates/livekit_client_macos/.cargo/config.toml new file mode 100644 index 0000000000..77f7c9dd6c --- /dev/null +++ b/crates/livekit_client_macos/.cargo/config.toml @@ -0,0 +1,2 @@ +[livekit_client_test] +rustflags = ["-C", "link-args=-ObjC"] diff --git a/crates/live_kit_client/Cargo.toml b/crates/livekit_client_macos/Cargo.toml similarity index 85% rename from 
crates/live_kit_client/Cargo.toml rename to crates/livekit_client_macos/Cargo.toml index e23c63453e..88565c0348 100644 --- a/crates/live_kit_client/Cargo.toml +++ b/crates/livekit_client_macos/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "live_kit_client" +name = "livekit_client_macos" version = "0.1.0" edition = "2021" description = "Bindings to LiveKit Swift client SDK" @@ -10,11 +10,11 @@ license = "GPL-3.0-or-later" workspace = true [lib] -path = "src/live_kit_client.rs" +path = "src/livekit_client.rs" doctest = false [[example]] -name = "test_app" +name = "test_app_macos" [features] no-webrtc = [] @@ -22,7 +22,7 @@ test-support = [ "async-trait", "collections/test-support", "gpui/test-support", - "live_kit_server", + "livekit_server", "nanoid", ] @@ -33,7 +33,7 @@ async-trait = { workspace = true, optional = true } collections = { workspace = true, optional = true } futures.workspace = true gpui = { workspace = true, optional = true } -live_kit_server = { workspace = true, optional = true } +livekit_server = { workspace = true, optional = true } log.workspace = true media.workspace = true nanoid = { workspace = true, optional = true} @@ -47,14 +47,14 @@ core-foundation.workspace = true async-trait = { workspace = true } collections = { workspace = true } gpui = { workspace = true } -live_kit_server.workspace = true +livekit_server.workspace = true nanoid.workspace = true [dev-dependencies] async-trait.workspace = true collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -live_kit_server.workspace = true +livekit_server.workspace = true nanoid.workspace = true sha2.workspace = true simplelog.workspace = true diff --git a/crates/livekit_client_macos/LICENSE-GPL b/crates/livekit_client_macos/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/livekit_client_macos/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git 
a/crates/live_kit_client/LiveKitBridge/Package.resolved b/crates/livekit_client_macos/LiveKitBridge/Package.resolved similarity index 86% rename from crates/live_kit_client/LiveKitBridge/Package.resolved rename to crates/livekit_client_macos/LiveKitBridge/Package.resolved index b925bc8f0d..c84933e5c1 100644 --- a/crates/live_kit_client/LiveKitBridge/Package.resolved +++ b/crates/livekit_client_macos/LiveKitBridge/Package.resolved @@ -6,8 +6,8 @@ "repositoryURL": "https://github.com/livekit/client-sdk-swift.git", "state": { "branch": null, - "revision": "7331b813a5ab8a95cfb81fb2b4ed10519428b9ff", - "version": "1.0.12" + "revision": "8cde9e66ce9b470c3a743f5c72784f57c5a6d0c3", + "version": "1.1.6" } }, { @@ -24,8 +24,8 @@ "repositoryURL": "https://github.com/webrtc-sdk/Specs.git", "state": { "branch": null, - "revision": "2f6bab30c8df0fe59ab3e58bc99097f757f85f65", - "version": "104.5112.17" + "revision": "4fa8d6d647fc759cdd0265fd413d2f28ea2e0e08", + "version": "114.5735.8" } }, { diff --git a/crates/live_kit_client/LiveKitBridge/Package.swift b/crates/livekit_client_macos/LiveKitBridge/Package.swift similarity index 90% rename from crates/live_kit_client/LiveKitBridge/Package.swift rename to crates/livekit_client_macos/LiveKitBridge/Package.swift index d7b5c271b9..a2a5b3eb75 100644 --- a/crates/live_kit_client/LiveKitBridge/Package.swift +++ b/crates/livekit_client_macos/LiveKitBridge/Package.swift @@ -12,16 +12,16 @@ let package = Package( .library( name: "LiveKitBridge", type: .static, - targets: ["LiveKitBridge"]), + targets: ["LiveKitBridge"]) ], dependencies: [ - .package(url: "https://github.com/livekit/client-sdk-swift.git", .exact("1.0.12")), + .package(url: "https://github.com/livekit/client-sdk-swift.git", .exact("1.1.6")) ], targets: [ // Targets are the basic building blocks of a package. A target can define a module or a test suite. // Targets can depend on other targets in this package, and on products in packages this package depends on. 
.target( name: "LiveKitBridge", - dependencies: [.product(name: "LiveKit", package: "client-sdk-swift")]), + dependencies: [.product(name: "LiveKit", package: "client-sdk-swift")]) ] ) diff --git a/crates/live_kit_client/LiveKitBridge/README.md b/crates/livekit_client_macos/LiveKitBridge/README.md similarity index 100% rename from crates/live_kit_client/LiveKitBridge/README.md rename to crates/livekit_client_macos/LiveKitBridge/README.md diff --git a/crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift b/crates/livekit_client_macos/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift similarity index 100% rename from crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift rename to crates/livekit_client_macos/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift diff --git a/crates/live_kit_client/build.rs b/crates/livekit_client_macos/build.rs similarity index 100% rename from crates/live_kit_client/build.rs rename to crates/livekit_client_macos/build.rs diff --git a/crates/live_kit_client/examples/test_app.rs b/crates/livekit_client_macos/examples/test_app_macos.rs similarity index 97% rename from crates/live_kit_client/examples/test_app.rs rename to crates/livekit_client_macos/examples/test_app_macos.rs index de8be97e86..c6ae2cc478 100644 --- a/crates/live_kit_client/examples/test_app.rs +++ b/crates/livekit_client_macos/examples/test_app_macos.rs @@ -2,12 +2,12 @@ use std::time::Duration; use futures::StreamExt; use gpui::{actions, KeyBinding, Menu, MenuItem}; -use live_kit_client::{LocalAudioTrack, LocalVideoTrack, Room, RoomUpdate}; -use live_kit_server::token::{self, VideoGrant}; +use livekit_client_macos::{LocalAudioTrack, LocalVideoTrack, Room, RoomUpdate}; +use livekit_server::token::{self, VideoGrant}; use log::LevelFilter; use simplelog::SimpleLogger; -actions!(live_kit_client, [Quit]); +actions!(livekit_client_macos, [Quit]); fn main() { SimpleLogger::init(LevelFilter::Info, 
Default::default()).expect("could not initialize logger"); diff --git a/crates/live_kit_client/src/live_kit_client.rs b/crates/livekit_client_macos/src/livekit_client.rs similarity index 100% rename from crates/live_kit_client/src/live_kit_client.rs rename to crates/livekit_client_macos/src/livekit_client.rs diff --git a/crates/live_kit_client/src/prod.rs b/crates/livekit_client_macos/src/prod.rs similarity index 100% rename from crates/live_kit_client/src/prod.rs rename to crates/livekit_client_macos/src/prod.rs diff --git a/crates/live_kit_client/src/test.rs b/crates/livekit_client_macos/src/test.rs similarity index 97% rename from crates/live_kit_client/src/test.rs rename to crates/livekit_client_macos/src/test.rs index 2c26c88f72..6db24174ff 100644 --- a/crates/live_kit_client/src/test.rs +++ b/crates/livekit_client_macos/src/test.rs @@ -4,7 +4,7 @@ use async_trait::async_trait; use collections::{btree_map::Entry as BTreeEntry, hash_map::Entry, BTreeMap, HashMap, HashSet}; use futures::Stream; use gpui::{BackgroundExecutor, SurfaceSource}; -use live_kit_server::{proto, token}; +use livekit_server::{proto, token}; use parking_lot::Mutex; use postage::watch; @@ -102,7 +102,7 @@ impl TestServer { #[cfg(any(test, feature = "test-support"))] self.executor.simulate_random_delay().await; - let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let claims = livekit_server::token::validate(&token, &self.secret_key)?; let identity = claims.sub.unwrap().to_string(); let room_name = claims.video.room.unwrap(); let mut server_rooms = self.rooms.lock(); @@ -150,7 +150,7 @@ impl TestServer { // todo(linux): Remove this once the cross-platform LiveKit implementation is merged #[cfg(any(test, feature = "test-support"))] self.executor.simulate_random_delay().await; - let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let claims = livekit_server::token::validate(&token, &self.secret_key)?; let identity = 
claims.sub.unwrap().to_string(); let room_name = claims.video.room.unwrap(); let mut server_rooms = self.rooms.lock(); @@ -224,7 +224,7 @@ impl TestServer { // todo(linux): Remove this once the cross-platform LiveKit implementation is merged #[cfg(any(test, feature = "test-support"))] self.executor.simulate_random_delay().await; - let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let claims = livekit_server::token::validate(&token, &self.secret_key)?; let identity = claims.sub.unwrap().to_string(); let room_name = claims.video.room.unwrap(); @@ -280,7 +280,7 @@ impl TestServer { #[cfg(any(test, feature = "test-support"))] self.executor.simulate_random_delay().await; - let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let claims = livekit_server::token::validate(&token, &self.secret_key)?; let identity = claims.sub.unwrap().to_string(); let room_name = claims.video.room.unwrap(); @@ -332,7 +332,7 @@ impl TestServer { } fn set_track_muted(&self, token: &str, track_sid: &str, muted: bool) -> Result<()> { - let claims = live_kit_server::token::validate(token, &self.secret_key)?; + let claims = livekit_server::token::validate(token, &self.secret_key)?; let room_name = claims.video.room.unwrap(); let identity = claims.sub.unwrap(); let mut server_rooms = self.rooms.lock(); @@ -363,7 +363,7 @@ impl TestServer { } fn is_track_muted(&self, token: &str, track_sid: &str) -> Option { - let claims = live_kit_server::token::validate(token, &self.secret_key).ok()?; + let claims = livekit_server::token::validate(token, &self.secret_key).ok()?; let room_name = claims.video.room.unwrap(); let mut server_rooms = self.rooms.lock(); @@ -378,7 +378,7 @@ impl TestServer { } fn video_tracks(&self, token: String) -> Result>> { - let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let claims = livekit_server::token::validate(&token, &self.secret_key)?; let room_name = claims.video.room.unwrap(); let identity = 
claims.sub.unwrap(); @@ -401,7 +401,7 @@ impl TestServer { } fn audio_tracks(&self, token: String) -> Result>> { - let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let claims = livekit_server::token::validate(&token, &self.secret_key)?; let room_name = claims.video.room.unwrap(); let identity = claims.sub.unwrap(); @@ -455,7 +455,7 @@ pub struct TestApiClient { } #[async_trait] -impl live_kit_server::api::Client for TestApiClient { +impl livekit_server::api::Client for TestApiClient { fn url(&self) -> &str { &self.url } @@ -482,7 +482,7 @@ impl live_kit_server::api::Client for TestApiClient { &self, room: String, identity: String, - permission: live_kit_server::proto::ParticipantPermission, + permission: livekit_server::proto::ParticipantPermission, ) -> Result<()> { let server = TestServer::get(&self.url)?; server diff --git a/crates/live_kit_server/Cargo.toml b/crates/livekit_server/Cargo.toml similarity index 90% rename from crates/live_kit_server/Cargo.toml rename to crates/livekit_server/Cargo.toml index 4b4b5e13da..c76cb1580c 100644 --- a/crates/live_kit_server/Cargo.toml +++ b/crates/livekit_server/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "live_kit_server" +name = "livekit_server" version = "0.1.0" edition = "2021" description = "SDK for the LiveKit server API" @@ -10,7 +10,7 @@ license = "AGPL-3.0-or-later" workspace = true [lib] -path = "src/live_kit_server.rs" +path = "src/livekit_server.rs" doctest = false [dependencies] diff --git a/crates/live_kit_server/LICENSE-AGPL b/crates/livekit_server/LICENSE-AGPL similarity index 100% rename from crates/live_kit_server/LICENSE-AGPL rename to crates/livekit_server/LICENSE-AGPL diff --git a/crates/live_kit_server/build.rs b/crates/livekit_server/build.rs similarity index 100% rename from crates/live_kit_server/build.rs rename to crates/livekit_server/build.rs diff --git a/crates/live_kit_server/src/api.rs b/crates/livekit_server/src/api.rs similarity index 100% rename from 
crates/live_kit_server/src/api.rs rename to crates/livekit_server/src/api.rs diff --git a/crates/live_kit_server/src/live_kit_server.rs b/crates/livekit_server/src/livekit_server.rs similarity index 100% rename from crates/live_kit_server/src/live_kit_server.rs rename to crates/livekit_server/src/livekit_server.rs diff --git a/crates/live_kit_server/src/proto.rs b/crates/livekit_server/src/proto.rs similarity index 100% rename from crates/live_kit_server/src/proto.rs rename to crates/livekit_server/src/proto.rs diff --git a/crates/live_kit_server/src/token.rs b/crates/livekit_server/src/token.rs similarity index 100% rename from crates/live_kit_server/src/token.rs rename to crates/livekit_server/src/token.rs diff --git a/crates/live_kit_server/vendored/protocol/README.md b/crates/livekit_server/vendored/protocol/README.md similarity index 100% rename from crates/live_kit_server/vendored/protocol/README.md rename to crates/livekit_server/vendored/protocol/README.md diff --git a/crates/live_kit_server/vendored/protocol/livekit_analytics.proto b/crates/livekit_server/vendored/protocol/livekit_analytics.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_analytics.proto rename to crates/livekit_server/vendored/protocol/livekit_analytics.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_egress.proto b/crates/livekit_server/vendored/protocol/livekit_egress.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_egress.proto rename to crates/livekit_server/vendored/protocol/livekit_egress.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_ingress.proto b/crates/livekit_server/vendored/protocol/livekit_ingress.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_ingress.proto rename to crates/livekit_server/vendored/protocol/livekit_ingress.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_internal.proto 
b/crates/livekit_server/vendored/protocol/livekit_internal.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_internal.proto rename to crates/livekit_server/vendored/protocol/livekit_internal.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_models.proto b/crates/livekit_server/vendored/protocol/livekit_models.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_models.proto rename to crates/livekit_server/vendored/protocol/livekit_models.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_room.proto b/crates/livekit_server/vendored/protocol/livekit_room.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_room.proto rename to crates/livekit_server/vendored/protocol/livekit_room.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_rpc_internal.proto b/crates/livekit_server/vendored/protocol/livekit_rpc_internal.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_rpc_internal.proto rename to crates/livekit_server/vendored/protocol/livekit_rpc_internal.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_rtc.proto b/crates/livekit_server/vendored/protocol/livekit_rtc.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_rtc.proto rename to crates/livekit_server/vendored/protocol/livekit_rtc.proto diff --git a/crates/live_kit_server/vendored/protocol/livekit_webhook.proto b/crates/livekit_server/vendored/protocol/livekit_webhook.proto similarity index 100% rename from crates/live_kit_server/vendored/protocol/livekit_webhook.proto rename to crates/livekit_server/vendored/protocol/livekit_webhook.proto diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 3460bf34dd..f06173ac1b 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -32,9 +32,6 @@ smol.workspace = true util.workspace = true 
release_channel.workspace = true -[target.'cfg(windows)'.dependencies] -windows.workspace = true - [dev-dependencies] async-pipe.workspace = true ctor.workspace = true diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 5f0186e61e..63a900e49b 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -19,12 +19,9 @@ use serde_json::{json, value::RawValue, Value}; use smol::{ channel, io::{AsyncBufReadExt, AsyncWriteExt, BufReader}, - process::{self, Child}, + process::Child, }; -#[cfg(target_os = "windows")] -use smol::process::windows::CommandExt; - use std::{ ffi::{OsStr, OsString}, fmt, @@ -85,6 +82,7 @@ pub struct LanguageServer { outbound_tx: channel::Sender, name: LanguageServerName, process_name: Arc, + binary: LanguageServerBinary, capabilities: RwLock, code_action_kinds: Option>, notification_handlers: Arc>>, @@ -287,6 +285,7 @@ impl LspRequestFuture for LspRequest { } /// Combined capabilities of the server and the adapter. +#[derive(Debug)] pub struct AdapterServerCapabilities { // Reported capabilities by the server pub server_capabilities: ServerCapabilities, @@ -346,28 +345,26 @@ impl LanguageServer { &binary.arguments ); - let mut command = process::Command::new(&binary.path); - command + let mut server = util::command::new_smol_command(&binary.path) .current_dir(working_dir) .args(&binary.arguments) - .envs(binary.env.unwrap_or_default()) + .envs(binary.env.clone().unwrap_or_default()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .kill_on_drop(true); - #[cfg(windows)] - command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - let mut server = command.spawn().with_context(|| { - format!( - "failed to spawn command. path: {:?}, working directory: {:?}, args: {:?}", - binary.path, working_dir, &binary.arguments - ) - })?; + .kill_on_drop(true) + .spawn() + .with_context(|| { + format!( + "failed to spawn command. 
path: {:?}, working directory: {:?}, args: {:?}", + binary.path, working_dir, &binary.arguments + ) + })?; let stdin = server.stdin.take().unwrap(); let stdout = server.stdout.take().unwrap(); let stderr = server.stderr.take().unwrap(); - let mut server = Self::new_internal( + let server = Self::new_internal( server_id, server_name, stdin, @@ -378,6 +375,7 @@ impl LanguageServer { root_path, working_dir, code_action_kinds, + binary, cx, move |notification| { log::info!( @@ -389,10 +387,6 @@ impl LanguageServer { }, ); - if let Some(name) = binary.path.file_name() { - server.process_name = name.to_string_lossy().into(); - } - Ok(server) } @@ -408,6 +402,7 @@ impl LanguageServer { root_path: &Path, working_dir: &Path, code_action_kinds: Option>, + binary: LanguageServerBinary, cx: AsyncAppContext, on_unhandled_notification: F, ) -> Self @@ -448,7 +443,7 @@ impl LanguageServer { let stderr_captures = stderr_capture.clone(); cx.spawn(|_| Self::handle_stderr(stderr, io_handlers, stderr_captures).log_err()) }) - .unwrap_or_else(|| Task::Ready(Some(None))); + .unwrap_or_else(|| Task::ready(None)); let input_task = cx.spawn(|_| async move { let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task); stdout.or(stderr) @@ -470,7 +465,12 @@ impl LanguageServer { response_handlers, io_handlers, name: server_name, - process_name: Arc::default(), + process_name: binary + .path + .file_name() + .map(|name| Arc::from(name.to_string_lossy())) + .unwrap_or_default(), + binary, capabilities: Default::default(), code_action_kinds, next_id: Default::default(), @@ -604,23 +604,19 @@ impl LanguageServer { Ok(()) } - /// Initializes a language server by sending the `Initialize` request. - /// Note that `options` is used directly to construct [`InitializeParams`], which is why it is owned. 
- /// - /// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize) - pub fn initialize( - mut self, - options: Option, - cx: &AppContext, - ) -> Task>> { + pub fn default_initialize_params(&self, cx: &AppContext) -> InitializeParams { let root_uri = Url::from_file_path(&self.working_dir).unwrap(); #[allow(deprecated)] - let params = InitializeParams { + InitializeParams { process_id: None, root_path: None, root_uri: Some(root_uri.clone()), - initialization_options: options, + initialization_options: None, capabilities: ClientCapabilities { + general: Some(GeneralClientCapabilities { + position_encodings: Some(vec![PositionEncodingKind::UTF16]), + ..Default::default() + }), workspace: Some(WorkspaceClientCapabilities { configuration: Some(true), did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { @@ -651,6 +647,13 @@ impl LanguageServer { snippet_edit_support: Some(true), ..WorkspaceEditClientCapabilities::default() }), + file_operations: Some(WorkspaceFileOperationsClientCapabilities { + dynamic_registration: Some(false), + did_rename: Some(true), + will_rename: Some(true), + ..Default::default() + }), + apply_edit: Some(true), ..Default::default() }), text_document: Some(TextDocumentClientCapabilities { @@ -702,6 +705,7 @@ impl LanguageServer { "commitCharacters".to_owned(), "editRange".to_owned(), "insertTextMode".to_owned(), + "insertTextFormat".to_owned(), "data".to_owned(), ]), }), @@ -766,9 +770,11 @@ impl LanguageServer { })), window: Some(WindowClientCapabilities { work_done_progress: Some(true), + show_message: Some(ShowMessageRequestClientCapabilities { + message_action_item: None, + }), ..Default::default() }), - general: None, }, trace: None, workspace_folders: Some(vec![WorkspaceFolder { @@ -782,7 +788,24 @@ impl LanguageServer { } }), locale: None, + ..Default::default() + } + } + + /// Initializes a language server by sending the `Initialize` request. 
+ /// Note that `options` is used directly to construct [`InitializeParams`], which is why it is owned. + /// + /// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize) + pub fn initialize( + mut self, + initialize_params: Option, + cx: &AppContext, + ) -> Task>> { + let params = if let Some(params) = initialize_params { + params + } else { + self.default_initialize_params(cx) }; cx.spawn(|_| async move { @@ -1036,6 +1059,11 @@ impl LanguageServer { &self.root_path } + /// Language server's binary information. + pub fn binary(&self) -> &LanguageServerBinary { + &self.binary + } + /// Sends a RPC request to the language server. /// /// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage) @@ -1259,12 +1287,13 @@ impl FakeLanguageServer { root, root, None, + binary.clone(), cx.clone(), |_| {}, ); server.process_name = process_name; let fake = FakeLanguageServer { - binary, + binary: binary.clone(), server: Arc::new({ let mut server = LanguageServer::new_internal( server_id, @@ -1277,7 +1306,8 @@ impl FakeLanguageServer { root, root, None, - cx, + binary, + cx.clone(), move |msg| { notifications_tx .try_send(( diff --git a/crates/markdown/examples/markdown.rs b/crates/markdown/examples/markdown.rs index 0514ebcf4e..643a54b54e 100644 --- a/crates/markdown/examples/markdown.rs +++ b/crates/markdown/examples/markdown.rs @@ -51,10 +51,16 @@ Links are created using the format [http://zed.dev](https://zed.dev). They can also be detected automatically, for example https://zed.dev/blog. +They may contain dollar signs: + +[https://svelte.dev/docs/svelte/$state](https://svelte.dev/docs/svelte/$state) + +https://svelte.dev/docs/svelte/$state + ## Images Images are like links, but with an exclamation mark `!` in front. -```todo! 
+```markdown ![This is an image](/images/logo.png) ``` @@ -178,7 +184,7 @@ impl MarkdownExample { cx: &mut WindowContext, ) -> Self { let markdown = - cx.new_view(|cx| Markdown::new(text, style, Some(language_registry), cx, None)); + cx.new_view(|cx| Markdown::new(text, style, Some(language_registry), None, cx)); Self { markdown } } } diff --git a/crates/markdown/examples/markdown_as_child.rs b/crates/markdown/examples/markdown_as_child.rs index 3700e64364..1fc4541645 100644 --- a/crates/markdown/examples/markdown_as_child.rs +++ b/crates/markdown/examples/markdown_as_child.rs @@ -1,5 +1,5 @@ use assets::Assets; -use gpui::*; +use gpui::{rgb, App, KeyBinding, Length, StyleRefinement, View, WindowOptions}; use language::{language_settings::AllLanguageSettings, LanguageRegistry}; use markdown::{Markdown, MarkdownStyle}; use node_runtime::NodeRuntime; @@ -87,7 +87,7 @@ pub fn main() { heading: Default::default(), }; let markdown = cx.new_view(|cx| { - Markdown::new(MARKDOWN_EXAMPLE.into(), markdown_style, None, cx, None) + Markdown::new(MARKDOWN_EXAMPLE.into(), markdown_style, None, None, cx) }); HelloWorld { markdown } diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index ff67c01a0e..0b80c77c9c 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -6,15 +6,15 @@ use gpui::{ actions, point, quad, AnyElement, AppContext, Bounds, ClipboardItem, CursorStyle, DispatchPhase, Edges, FocusHandle, FocusableView, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, KeyContext, Length, MouseDownEvent, MouseEvent, MouseMoveEvent, MouseUpEvent, - Point, Render, StrikethroughStyle, StyleRefinement, StyledText, Task, TextLayout, TextRun, - TextStyle, TextStyleRefinement, View, + Point, Render, Stateful, StrikethroughStyle, StyleRefinement, StyledText, Task, TextLayout, + TextRun, TextStyle, TextStyleRefinement, View, }; use language::{Language, LanguageRegistry, Rope}; use parser::{parse_links_only, parse_markdown, 
MarkdownEvent, MarkdownTag, MarkdownTagEnd}; use std::{iter, mem, ops::Range, rc::Rc, sync::Arc}; use theme::SyntaxTheme; -use ui::prelude::*; +use ui::{prelude::*, Tooltip}; use util::{ResultExt, TryFutureExt}; #[derive(Clone)] @@ -71,8 +71,8 @@ impl Markdown { source: String, style: MarkdownStyle, language_registry: Option>, - cx: &ViewContext, fallback_code_block_language: Option, + cx: &ViewContext, ) -> Self { let focus_handle = cx.focus_handle(); let mut this = Self { @@ -97,8 +97,8 @@ impl Markdown { source: String, style: MarkdownStyle, language_registry: Option>, - cx: &ViewContext, fallback_code_block_language: Option, + cx: &ViewContext, ) -> Self { let focus_handle = cx.focus_handle(); let mut this = Self { @@ -614,11 +614,11 @@ impl Element for MarkdownElement { }; builder.push_div( div() + .mb_1() .h_flex() - .mb_2() - .line_height(rems(1.3)) .items_start() .gap_1() + .line_height(rems(1.3)) .child(bullet), range, markdown_end, @@ -667,6 +667,31 @@ impl Element for MarkdownElement { } MarkdownTagEnd::CodeBlock => { builder.trim_trailing_newline(); + builder.flush_text(); + builder.modify_current_div(|el| { + let id = + ElementId::NamedInteger("copy-markdown-code".into(), range.end); + let copy_button = div().absolute().top_1().right_1().w_5().child( + IconButton::new(id, IconName::Copy) + .icon_color(Color::Muted) + .shape(ui::IconButtonShape::Square) + .tooltip(|cx| Tooltip::text("Copy Code Block", cx)) + .on_click({ + let code = without_fences( + parsed_markdown.source()[range.clone()].trim(), + ) + .to_string(); + + move |_, cx| { + cx.write_to_clipboard(ClipboardItem::new_string( + code.clone(), + )) + } + }), + ); + + el.child(copy_button) + }); builder.pop_div(); builder.pop_code_block(); if self.style.code_block.text.is_some() { @@ -785,8 +810,52 @@ impl IntoElement for MarkdownElement { } } +enum AnyDiv { + Div(Div), + Stateful(Stateful

), +} + +impl AnyDiv { + fn into_any_element(self) -> AnyElement { + match self { + Self::Div(div) => div.into_any_element(), + Self::Stateful(div) => div.into_any_element(), + } + } +} + +impl From
for AnyDiv { + fn from(value: Div) -> Self { + Self::Div(value) + } +} + +impl From> for AnyDiv { + fn from(value: Stateful
) -> Self { + Self::Stateful(value) + } +} + +impl Styled for AnyDiv { + fn style(&mut self) -> &mut StyleRefinement { + match self { + Self::Div(div) => div.style(), + Self::Stateful(div) => div.style(), + } + } +} + +impl ParentElement for AnyDiv { + fn extend(&mut self, elements: impl IntoIterator) { + match self { + Self::Div(div) => div.extend(elements), + Self::Stateful(div) => div.extend(elements), + } + } +} + struct MarkdownElementBuilder { - div_stack: Vec
, + div_stack: Vec, rendered_lines: Vec, pending_line: PendingLine, rendered_links: Vec, @@ -812,7 +881,7 @@ struct ListStackEntry { impl MarkdownElementBuilder { fn new(base_text_style: TextStyle, syntax_theme: Arc) -> Self { Self { - div_stack: vec![div().debug_selector(|| "inner".into())], + div_stack: vec![div().debug_selector(|| "inner".into()).into()], rendered_lines: Vec::new(), pending_line: PendingLine::default(), rendered_links: Vec::new(), @@ -841,11 +910,12 @@ impl MarkdownElementBuilder { self.text_style_stack.pop(); } - fn push_div(&mut self, mut div: Div, range: &Range, markdown_end: usize) { + fn push_div(&mut self, div: impl Into, range: &Range, markdown_end: usize) { + let mut div = div.into(); self.flush_text(); if range.start == 0 { - //first element, remove top margin + // Remove the top margin on the first element. div.style().refine(&StyleRefinement { margin: gpui::EdgesRefinement { top: Some(Length::Definite(px(0.).into())), @@ -856,6 +926,7 @@ impl MarkdownElementBuilder { ..Default::default() }); } + if range.end == markdown_end { div.style().refine(&StyleRefinement { margin: gpui::EdgesRefinement { @@ -867,12 +938,20 @@ impl MarkdownElementBuilder { ..Default::default() }); } + self.div_stack.push(div); } + fn modify_current_div(&mut self, f: impl FnOnce(AnyDiv) -> AnyDiv) { + self.flush_text(); + if let Some(div) = self.div_stack.pop() { + self.div_stack.push(f(div)); + } + } + fn pop_div(&mut self) { self.flush_text(); - let div = self.div_stack.pop().unwrap().into_any(); + let div = self.div_stack.pop().unwrap().into_any_element(); self.div_stack.last_mut().unwrap().extend(iter::once(div)); } @@ -954,7 +1033,7 @@ impl MarkdownElementBuilder { } } - fn flush_text(&mut self) { + pub fn flush_text(&mut self) { let line = mem::take(&mut self.pending_line); if line.text.is_empty() { return; @@ -973,7 +1052,7 @@ impl MarkdownElementBuilder { debug_assert_eq!(self.div_stack.len(), 1); self.flush_text(); RenderedMarkdown { - element: 
self.div_stack.pop().unwrap().into_any(), + element: self.div_stack.pop().unwrap().into_any_element(), text: RenderedText { lines: self.rendered_lines.into(), links: self.rendered_links.into(), @@ -1173,3 +1252,43 @@ impl RenderedText { .find(|link| link.source_range.contains(&source_index)) } } + +/// Some markdown blocks are indented, and others have e.g. ```rust … ``` around them. +/// If this block is fenced with backticks, strip them off (and the language name). +/// We use this when copying code blocks to the clipboard. +fn without_fences(mut markdown: &str) -> &str { + if let Some(opening_backticks) = markdown.find("```") { + markdown = &markdown[opening_backticks..]; + + // Trim off the next newline. This also trims off a language name if it's there. + if let Some(newline) = markdown.find('\n') { + markdown = &markdown[newline + 1..]; + } + }; + + if let Some(closing_backticks) = markdown.rfind("```") { + markdown = &markdown[..closing_backticks]; + }; + + markdown +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_without_fences() { + let input = "```rust\nlet x = 5;\n```"; + assert_eq!(without_fences(input), "let x = 5;\n"); + + let input = " ```\nno language\n``` "; + assert_eq!(without_fences(input), "no language\n"); + + let input = "plain text"; + assert_eq!(without_fences(input), "plain text"); + + let input = "```python\nprint('hello')\nprint('world')\n```"; + assert_eq!(without_fences(input), "print('hello')\nprint('world')\n"); + } +} diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index d21892b7de..b605a35af5 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -8,6 +8,7 @@ pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { let mut options = Options::all(); options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); options.remove(pulldown_cmark::Options::ENABLE_YAML_STYLE_METADATA_BLOCKS); + options.remove(pulldown_cmark::Options::ENABLE_MATH); let 
mut events = Vec::new(); let mut within_link = false; diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 46a33966f2..f1409c23a4 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -28,6 +28,7 @@ pulldown-cmark.workspace = true settings.workspace = true theme.workspace = true ui.workspace = true +util.workspace = true workspace.workspace = true [dev-dependencies] diff --git a/crates/markdown_preview/src/markdown_elements.rs b/crates/markdown_preview/src/markdown_elements.rs index 8423e4ec82..256ce6ee4a 100644 --- a/crates/markdown_preview/src/markdown_elements.rs +++ b/crates/markdown_preview/src/markdown_elements.rs @@ -13,21 +13,24 @@ pub enum ParsedMarkdownElement { BlockQuote(ParsedMarkdownBlockQuote), CodeBlock(ParsedMarkdownCodeBlock), /// A paragraph of text and other inline elements. - Paragraph(ParsedMarkdownText), + Paragraph(MarkdownParagraph), HorizontalRule(Range), } impl ParsedMarkdownElement { - pub fn source_range(&self) -> Range { - match self { + pub fn source_range(&self) -> Option> { + Some(match self { Self::Heading(heading) => heading.source_range.clone(), Self::ListItem(list_item) => list_item.source_range.clone(), Self::Table(table) => table.source_range.clone(), Self::BlockQuote(block_quote) => block_quote.source_range.clone(), Self::CodeBlock(code_block) => code_block.source_range.clone(), - Self::Paragraph(text) => text.source_range.clone(), + Self::Paragraph(text) => match text.get(0)? 
{ + MarkdownParagraphChunk::Text(t) => t.source_range.clone(), + MarkdownParagraphChunk::Image(image) => image.source_range.clone(), + }, Self::HorizontalRule(range) => range.clone(), - } + }) } pub fn is_list_item(&self) -> bool { @@ -35,6 +38,15 @@ impl ParsedMarkdownElement { } } +pub type MarkdownParagraph = Vec; + +#[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] +pub enum MarkdownParagraphChunk { + Text(ParsedMarkdownText), + Image(Image), +} + #[derive(Debug)] #[cfg_attr(test, derive(PartialEq))] pub struct ParsedMarkdown { @@ -73,7 +85,7 @@ pub struct ParsedMarkdownCodeBlock { pub struct ParsedMarkdownHeading { pub source_range: Range, pub level: HeadingLevel, - pub contents: ParsedMarkdownText, + pub contents: MarkdownParagraph, } #[derive(Debug, PartialEq)] @@ -107,7 +119,7 @@ pub enum ParsedMarkdownTableAlignment { #[derive(Debug)] #[cfg_attr(test, derive(PartialEq))] pub struct ParsedMarkdownTableRow { - pub children: Vec, + pub children: Vec, } impl Default for ParsedMarkdownTableRow { @@ -123,7 +135,7 @@ impl ParsedMarkdownTableRow { } } - pub fn with_children(children: Vec) -> Self { + pub fn with_children(children: Vec) -> Self { Self { children } } } @@ -135,7 +147,7 @@ pub struct ParsedMarkdownBlockQuote { pub children: Vec, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct ParsedMarkdownText { /// Where the text is located in the source Markdown document. pub source_range: Range, @@ -266,10 +278,35 @@ impl Display for Link { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Link::Web { url } => write!(f, "{}", url), - Link::Path { - display_path, - path: _, - } => write!(f, "{}", display_path.display()), + Link::Path { display_path, .. 
} => write!(f, "{}", display_path.display()), } } } + +/// A Markdown Image +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub struct Image { + pub link: Link, + pub source_range: Range, + pub alt_text: Option, +} + +impl Image { + pub fn identify( + text: String, + source_range: Range, + file_location_directory: Option, + ) -> Option { + let link = Link::identify(file_location_directory, text)?; + Some(Self { + source_range, + link, + alt_text: None, + }) + } + + pub fn set_alt_text(&mut self, alt_text: SharedString) { + self.alt_text = Some(alt_text); + } +} diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index d514b89e52..f433edf8b3 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -4,7 +4,7 @@ use collections::FxHashMap; use gpui::FontWeight; use language::LanguageRegistry; use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd}; -use std::{ops::Range, path::PathBuf, sync::Arc}; +use std::{ops::Range, path::PathBuf, sync::Arc, vec}; pub async fn parse_markdown( markdown_input: &str, @@ -101,11 +101,11 @@ impl<'a> MarkdownParser<'a> { | Event::Code(_) | Event::Html(_) | Event::FootnoteReference(_) - | Event::Start(Tag::Link { link_type: _, dest_url: _, title: _, id: _ }) + | Event::Start(Tag::Link { .. }) | Event::Start(Tag::Emphasis) | Event::Start(Tag::Strong) | Event::Start(Tag::Strikethrough) - | Event::Start(Tag::Image { link_type: _, dest_url: _, title: _, id: _ }) => { + | Event::Start(Tag::Image { .. }) => { true } _ => false, @@ -134,12 +134,7 @@ impl<'a> MarkdownParser<'a> { let text = self.parse_text(false, Some(source_range)); Some(vec![ParsedMarkdownElement::Paragraph(text)]) } - Tag::Heading { - level, - id: _, - classes: _, - attrs: _, - } => { + Tag::Heading { level, .. 
} => { let level = *level; self.cursor += 1; let heading = self.parse_heading(level); @@ -194,22 +189,23 @@ impl<'a> MarkdownParser<'a> { &mut self, should_complete_on_soft_break: bool, source_range: Option>, - ) -> ParsedMarkdownText { + ) -> MarkdownParagraph { let source_range = source_range.unwrap_or_else(|| { self.current() .map(|(_, range)| range.clone()) .unwrap_or_default() }); + let mut markdown_text_like = Vec::new(); let mut text = String::new(); let mut bold_depth = 0; let mut italic_depth = 0; let mut strikethrough_depth = 0; let mut link: Option = None; + let mut image: Option = None; let mut region_ranges: Vec> = vec![]; let mut regions: Vec = vec![]; let mut highlights: Vec<(Range, MarkdownHighlight)> = vec![]; - let mut link_urls: Vec = vec![]; let mut link_ranges: Vec> = vec![]; @@ -218,15 +214,13 @@ impl<'a> MarkdownParser<'a> { break; } - let (current, _source_range) = self.current().unwrap(); + let (current, _) = self.current().unwrap(); let prev_len = text.len(); match current { Event::SoftBreak => { if should_complete_on_soft_break { break; } - - // `Some text\nSome more text` should be treated as a single line. 
text.push(' '); } @@ -240,7 +234,6 @@ impl<'a> MarkdownParser<'a> { Event::Text(t) => { text.push_str(t.as_ref()); - let mut style = MarkdownHighlightStyle::default(); if bold_depth > 0 { @@ -299,7 +292,6 @@ impl<'a> MarkdownParser<'a> { url: link.as_str().to_string(), }), }); - last_link_len = end; } last_link_len @@ -316,13 +308,36 @@ impl<'a> MarkdownParser<'a> { } } if new_highlight { - highlights - .push((last_run_len..text.len(), MarkdownHighlight::Style(style))); + highlights.push(( + last_run_len..text.len(), + MarkdownHighlight::Style(style.clone()), + )); } } - } + if let Some(image) = image.as_mut() { + text.truncate(text.len() - t.len()); + image.set_alt_text(t.to_string().into()); + if !text.is_empty() { + let parsed_regions = MarkdownParagraphChunk::Text(ParsedMarkdownText { + source_range: source_range.clone(), + contents: text.clone(), + highlights: highlights.clone(), + region_ranges: region_ranges.clone(), + regions: regions.clone(), + }); + text = String::new(); + highlights = vec![]; + region_ranges = vec![]; + regions = vec![]; + markdown_text_like.push(parsed_regions); + } - // Note: This event means "inline code" and not "code block" + let parsed_image = MarkdownParagraphChunk::Image(image.clone()); + markdown_text_like.push(parsed_image); + style = MarkdownHighlightStyle::default(); + style.underline = true; + } + } Event::Code(t) => { text.push_str(t.as_ref()); region_ranges.push(prev_len..text.len()); @@ -336,46 +351,43 @@ impl<'a> MarkdownParser<'a> { }), )); } - regions.push(ParsedRegion { code: true, link: link.clone(), }); } - Event::Start(tag) => match tag { Tag::Emphasis => italic_depth += 1, Tag::Strong => bold_depth += 1, Tag::Strikethrough => strikethrough_depth += 1, - Tag::Link { - link_type: _, - dest_url, - title: _, - id: _, - } => { + Tag::Link { dest_url, .. } => { link = Link::identify( self.file_location_directory.clone(), dest_url.to_string(), ); } + Tag::Image { dest_url, .. 
} => { + image = Image::identify( + dest_url.to_string(), + source_range.clone(), + self.file_location_directory.clone(), + ); + } _ => { break; } }, Event::End(tag) => match tag { - TagEnd::Emphasis => { - italic_depth -= 1; - } - TagEnd::Strong => { - bold_depth -= 1; - } - TagEnd::Strikethrough => { - strikethrough_depth -= 1; - } + TagEnd::Emphasis => italic_depth -= 1, + TagEnd::Strong => bold_depth -= 1, + TagEnd::Strikethrough => strikethrough_depth -= 1, TagEnd::Link => { link = None; } + TagEnd::Image => { + image = None; + } TagEnd::Paragraph => { self.cursor += 1; break; @@ -384,7 +396,6 @@ impl<'a> MarkdownParser<'a> { break; } }, - _ => { break; } @@ -392,14 +403,16 @@ impl<'a> MarkdownParser<'a> { self.cursor += 1; } - - ParsedMarkdownText { - source_range, - contents: text, - highlights, - regions, - region_ranges, + if !text.is_empty() { + markdown_text_like.push(MarkdownParagraphChunk::Text(ParsedMarkdownText { + source_range: source_range.clone(), + contents: text, + highlights, + regions, + region_ranges, + })); } + markdown_text_like } fn parse_heading(&mut self, level: pulldown_cmark::HeadingLevel) -> ParsedMarkdownHeading { @@ -708,7 +721,6 @@ impl<'a> MarkdownParser<'a> { } } } - let highlights = if let Some(language) = &language { if let Some(registry) = &self.language_registry { let rope: language::Rope = code.as_str().into(); @@ -735,10 +747,14 @@ impl<'a> MarkdownParser<'a> { #[cfg(test)] mod tests { + use core::panic; + use super::*; use gpui::BackgroundExecutor; - use language::{tree_sitter_rust, HighlightId, Language, LanguageConfig, LanguageMatcher}; + use language::{ + tree_sitter_rust, HighlightId, Language, LanguageConfig, LanguageMatcher, LanguageRegistry, + }; use pretty_assertions::assert_eq; use ParsedMarkdownListItemType::*; @@ -810,20 +826,29 @@ mod tests { assert_eq!(parsed.children.len(), 1); assert_eq!( parsed.children[0], - ParsedMarkdownElement::Paragraph(ParsedMarkdownText { - source_range: 0..35, - contents: "Some 
bostrikethroughld text".to_string(), - highlights: Vec::new(), - region_ranges: Vec::new(), - regions: Vec::new(), - }) + ParsedMarkdownElement::Paragraph(vec![MarkdownParagraphChunk::Text( + ParsedMarkdownText { + source_range: 0..35, + contents: "Some bostrikethroughld text".to_string(), + highlights: Vec::new(), + region_ranges: Vec::new(), + regions: Vec::new(), + } + )]) ); - let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { + let new_text = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { text } else { panic!("Expected a paragraph"); }; + + let paragraph = if let MarkdownParagraphChunk::Text(text) = &new_text[0] { + text + } else { + panic!("Expected a text"); + }; + assert_eq!( paragraph.highlights, vec![ @@ -871,6 +896,23 @@ mod tests { parsed.children, vec![p("Checkout this https://zed.dev link", 0..34)] ); + } + + #[gpui::test] + async fn test_empty_image() { + let parsed = parse("![]()").await; + + let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { + text + } else { + panic!("Expected a paragraph"); + }; + assert_eq!(paragraph.len(), 0); + } + + #[gpui::test] + async fn test_image_links_detection() { + let parsed = parse("![test](https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png)").await; let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { text @@ -878,25 +920,15 @@ mod tests { panic!("Expected a paragraph"); }; assert_eq!( - paragraph.highlights, - vec![( - 14..29, - MarkdownHighlight::Style(MarkdownHighlightStyle { - underline: true, - ..Default::default() - }), - )] + paragraph[0], + MarkdownParagraphChunk::Image(Image { + source_range: 0..111, + link: Link::Web { + url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(), + }, + alt_text: Some("test".into()), + },) ); - assert_eq!( - paragraph.regions, - 
vec![ParsedRegion { - code: false, - link: Some(Link::Web { - url: "https://zed.dev".to_string() - }), - }] - ); - assert_eq!(paragraph.region_ranges, vec![14..29]); } #[gpui::test] @@ -1169,7 +1201,7 @@ Some other content vec![ list_item(0..8, 1, Unordered, vec![p("code", 2..8)]), list_item(9..19, 1, Unordered, vec![p("bold", 11..19)]), - list_item(20..49, 1, Unordered, vec![p("link", 22..49)],) + list_item(20..49, 1, Unordered, vec![p("link", 22..49)],), ], ); } @@ -1312,7 +1344,7 @@ fn main() { )) } - fn h1(contents: ParsedMarkdownText, source_range: Range) -> ParsedMarkdownElement { + fn h1(contents: MarkdownParagraph, source_range: Range) -> ParsedMarkdownElement { ParsedMarkdownElement::Heading(ParsedMarkdownHeading { source_range, level: HeadingLevel::H1, @@ -1320,7 +1352,7 @@ fn main() { }) } - fn h2(contents: ParsedMarkdownText, source_range: Range) -> ParsedMarkdownElement { + fn h2(contents: MarkdownParagraph, source_range: Range) -> ParsedMarkdownElement { ParsedMarkdownElement::Heading(ParsedMarkdownHeading { source_range, level: HeadingLevel::H2, @@ -1328,7 +1360,7 @@ fn main() { }) } - fn h3(contents: ParsedMarkdownText, source_range: Range) -> ParsedMarkdownElement { + fn h3(contents: MarkdownParagraph, source_range: Range) -> ParsedMarkdownElement { ParsedMarkdownElement::Heading(ParsedMarkdownHeading { source_range, level: HeadingLevel::H3, @@ -1340,14 +1372,14 @@ fn main() { ParsedMarkdownElement::Paragraph(text(contents, source_range)) } - fn text(contents: &str, source_range: Range) -> ParsedMarkdownText { - ParsedMarkdownText { + fn text(contents: &str, source_range: Range) -> MarkdownParagraph { + vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { highlights: Vec::new(), region_ranges: Vec::new(), regions: Vec::new(), source_range, contents: contents.to_string(), - } + })] } fn block_quote( @@ -1401,7 +1433,7 @@ fn main() { } } - fn row(children: Vec) -> ParsedMarkdownTableRow { + fn row(children: Vec) -> ParsedMarkdownTableRow { 
ParsedMarkdownTableRow { children } } diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 07fbd94b29..8d9c7e4145 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -192,11 +192,16 @@ impl MarkdownPreviewView { .group("markdown-block") .on_click(cx.listener(move |this, event: &ClickEvent, cx| { if event.down.click_count == 2 { - if let Some(block) = - this.contents.as_ref().and_then(|c| c.children.get(ix)) + if let Some(source_range) = this + .contents + .as_ref() + .and_then(|c| c.children.get(ix)) + .and_then(|block| block.source_range()) { - let start = block.source_range().start; - this.move_cursor_to_block(cx, start..start); + this.move_cursor_to_block( + cx, + source_range.start..source_range.start, + ); } } })) @@ -410,7 +415,9 @@ impl MarkdownPreviewView { let mut last_end = 0; if let Some(content) = &self.contents { for (i, block) in content.children.iter().enumerate() { - let Range { start, end } = block.source_range(); + let Some(Range { start, end }) = block.source_range() else { + continue; + }; // Check if the cursor is between the last block and the current block if last_end <= cursor && cursor < start { diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index f38e1c49b5..b6b19fab91 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -1,29 +1,33 @@ use crate::markdown_elements::{ - HeadingLevel, Link, ParsedMarkdown, ParsedMarkdownBlockQuote, ParsedMarkdownCodeBlock, - ParsedMarkdownElement, ParsedMarkdownHeading, ParsedMarkdownListItem, - ParsedMarkdownListItemType, ParsedMarkdownTable, ParsedMarkdownTableAlignment, - ParsedMarkdownTableRow, ParsedMarkdownText, + HeadingLevel, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown, + ParsedMarkdownBlockQuote, 
ParsedMarkdownCodeBlock, ParsedMarkdownElement, + ParsedMarkdownHeading, ParsedMarkdownListItem, ParsedMarkdownListItemType, ParsedMarkdownTable, + ParsedMarkdownTableAlignment, ParsedMarkdownTableRow, }; use gpui::{ - div, px, rems, AbsoluteLength, AnyElement, ClipboardItem, DefiniteLength, Div, Element, - ElementId, HighlightStyle, Hsla, InteractiveText, IntoElement, Keystroke, Length, Modifiers, - ParentElement, SharedString, Styled, StyledText, TextStyle, WeakView, WindowContext, + div, img, px, rems, AbsoluteLength, AnyElement, ClipboardItem, DefiniteLength, Div, Element, + ElementId, HighlightStyle, Hsla, ImageSource, InteractiveText, IntoElement, Keystroke, Length, + Modifiers, ParentElement, Render, Resource, SharedString, Styled, StyledText, TextStyle, View, + WeakView, WindowContext, }; use settings::Settings; use std::{ ops::{Mul, Range}, sync::Arc, + vec, }; use theme::{ActiveTheme, SyntaxTheme, ThemeSettings}; use ui::{ - h_flex, relative, v_flex, Checkbox, Clickable, FluentBuilder, IconButton, IconName, IconSize, - InteractiveElement, LinkPreview, Selection, StatefulInteractiveElement, StyledExt, Tooltip, - VisibleOnHover, + h_flex, relative, tooltip_container, v_flex, ButtonCommon, Checkbox, Clickable, Color, + FluentBuilder, IconButton, IconName, IconSize, InteractiveElement, Label, LabelCommon, + LabelSize, LinkPreview, StatefulInteractiveElement, StyledExt, StyledImage, ToggleState, + Tooltip, ViewContext, VisibleOnHover, VisualContext as _, }; use workspace::Workspace; type CheckboxClickedCallback = Arc, &mut WindowContext)>>; +#[derive(Clone)] pub struct RenderContext { workspace: Option>, next_id: usize, @@ -153,7 +157,7 @@ fn render_markdown_heading(parsed: &ParsedMarkdownHeading, cx: &mut RenderContex .text_color(color) .pt(rems(0.15)) .pb_1() - .child(render_markdown_text(&parsed.contents, cx)) + .children(render_markdown_text(&parsed.contents, cx)) .whitespace_normal() .into_any() } @@ -176,9 +180,9 @@ fn render_markdown_list_item( 
Checkbox::new( "checkbox", if *checked { - Selection::Selected + ToggleState::Selected } else { - Selection::Unselected + ToggleState::Unselected }, ) .when_some( @@ -188,8 +192,8 @@ fn render_markdown_list_item( let range = range.clone(); move |selection, cx| { let checked = match selection { - Selection::Selected => true, - Selection::Unselected => false, + ToggleState::Selected => true, + ToggleState::Unselected => false, _ => return, }; @@ -203,15 +207,7 @@ fn render_markdown_list_item( ) .hover(|s| s.cursor_pointer()) .tooltip(|cx| { - let secondary_modifier = Keystroke { - key: "".to_string(), - modifiers: Modifiers::secondary_key(), - ime_key: None, - }; - Tooltip::text( - format!("{}-click to toggle the checkbox", secondary_modifier), - cx, - ) + InteractiveMarkdownElementTooltip::new(None, "toggle checkbox", cx).into() }) .into_any_element(), }; @@ -231,17 +227,29 @@ fn render_markdown_list_item( cx.with_common_p(item).into_any() } +fn paragraph_len(paragraphs: &MarkdownParagraph) -> usize { + paragraphs + .iter() + .map(|paragraph| match paragraph { + MarkdownParagraphChunk::Text(text) => text.contents.len(), + // TODO: Scale column width based on image size + MarkdownParagraphChunk::Image(_) => 1, + }) + .sum() +} + fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -> AnyElement { let mut max_lengths: Vec = vec![0; parsed.header.children.len()]; for (index, cell) in parsed.header.children.iter().enumerate() { - let length = cell.contents.len(); + let length = paragraph_len(&cell); max_lengths[index] = length; } for row in &parsed.body { for (index, cell) in row.children.iter().enumerate() { - let length = cell.contents.len(); + let length = paragraph_len(&cell); + if length > max_lengths[index] { max_lengths[index] = length; } @@ -307,11 +315,10 @@ fn render_markdown_table_row( }; let max_width = max_column_widths.get(index).unwrap_or(&0.0); - let mut cell = container .w(Length::Definite(relative(*max_width))) .h_full() - 
.child(contents) + .children(contents) .px_2() .py_1() .border_color(cx.border_color); @@ -378,6 +385,7 @@ fn render_markdown_code_block( cx.write_to_clipboard(ClipboardItem::new_string(contents.to_string())); } }) + .tooltip(|cx| Tooltip::text("Copy code block", cx)) .visible_on_hover("markdown-block"); cx.with_common_p(div()) @@ -398,86 +406,202 @@ fn render_markdown_code_block( .into_any() } -fn render_markdown_paragraph(parsed: &ParsedMarkdownText, cx: &mut RenderContext) -> AnyElement { +fn render_markdown_paragraph(parsed: &MarkdownParagraph, cx: &mut RenderContext) -> AnyElement { cx.with_common_p(div()) - .child(render_markdown_text(parsed, cx)) + .children(render_markdown_text(parsed, cx)) + .flex() + .flex_col() .into_any_element() } -fn render_markdown_text(parsed: &ParsedMarkdownText, cx: &mut RenderContext) -> AnyElement { - let element_id = cx.next_id(&parsed.source_range); +fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext) -> Vec { + let mut any_element = vec![]; + // these values are cloned in-order satisfy borrow checker + let syntax_theme = cx.syntax_theme.clone(); + let workspace_clone = cx.workspace.clone(); + let code_span_bg_color = cx.code_span_background_color; + let text_style = cx.text_style.clone(); - let highlights = gpui::combine_highlights( - parsed.highlights.iter().filter_map(|(range, highlight)| { - let highlight = highlight.to_highlight_style(&cx.syntax_theme)?; - Some((range.clone(), highlight)) - }), - parsed - .regions - .iter() - .zip(&parsed.region_ranges) - .filter_map(|(region, range)| { - if region.code { - Some(( - range.clone(), - HighlightStyle { - background_color: Some(cx.code_span_background_color), - ..Default::default() + for parsed_region in parsed_new { + match parsed_region { + MarkdownParagraphChunk::Text(parsed) => { + let element_id = cx.next_id(&parsed.source_range); + + let highlights = gpui::combine_highlights( + parsed.highlights.iter().filter_map(|(range, highlight)| { + 
highlight + .to_highlight_style(&syntax_theme) + .map(|style| (range.clone(), style)) + }), + parsed.regions.iter().zip(&parsed.region_ranges).filter_map( + |(region, range)| { + if region.code { + Some(( + range.clone(), + HighlightStyle { + background_color: Some(code_span_bg_color), + ..Default::default() + }, + )) + } else { + None + } }, - )) - } else { - None + ), + ); + let mut links = Vec::new(); + let mut link_ranges = Vec::new(); + for (range, region) in parsed.region_ranges.iter().zip(&parsed.regions) { + if let Some(link) = region.link.clone() { + links.push(link); + link_ranges.push(range.clone()); + } } - }), - ); + let workspace = workspace_clone.clone(); + let element = div() + .child( + InteractiveText::new( + element_id, + StyledText::new(parsed.contents.clone()) + .with_highlights(&text_style, highlights), + ) + .tooltip({ + let links = links.clone(); + let link_ranges = link_ranges.clone(); + move |idx, cx| { + for (ix, range) in link_ranges.iter().enumerate() { + if range.contains(&idx) { + return Some(LinkPreview::new(&links[ix].to_string(), cx)); + } + } + None + } + }) + .on_click( + link_ranges, + move |clicked_range_ix, window_cx| match &links[clicked_range_ix] { + Link::Web { url } => window_cx.open_url(url), + Link::Path { path, .. } => { + if let Some(workspace) = &workspace { + _ = workspace.update(window_cx, |workspace, cx| { + workspace + .open_abs_path(path.clone(), false, cx) + .detach(); + }); + } + } + }, + ), + ) + .into_any(); + any_element.push(element); + } - let mut links = Vec::new(); - let mut link_ranges = Vec::new(); - for (range, region) in parsed.region_ranges.iter().zip(&parsed.regions) { - if let Some(link) = region.link.clone() { - links.push(link); - link_ranges.push(range.clone()); + MarkdownParagraphChunk::Image(image) => { + let image_resource = match image.link.clone() { + Link::Web { url } => Resource::Uri(url.into()), + Link::Path { path, .. 
} => Resource::Path(Arc::from(path)), + }; + + let element_id = cx.next_id(&image.source_range); + + let image_element = div() + .id(element_id) + .cursor_pointer() + .child(img(ImageSource::Resource(image_resource)).with_fallback({ + let alt_text = image.alt_text.clone(); + { + move || div().children(alt_text.clone()).into_any_element() + } + })) + .tooltip({ + let link = image.link.clone(); + move |cx| { + InteractiveMarkdownElementTooltip::new( + Some(link.to_string()), + "open image", + cx, + ) + .into() + } + }) + .on_click({ + let workspace = workspace_clone.clone(); + let link = image.link.clone(); + move |_, cx| { + if cx.modifiers().secondary() { + match &link { + Link::Web { url } => cx.open_url(url), + Link::Path { path, .. } => { + if let Some(workspace) = &workspace { + _ = workspace.update(cx, |workspace, cx| { + workspace + .open_abs_path(path.clone(), false, cx) + .detach(); + }); + } + } + } + } + } + }) + .into_any(); + any_element.push(image_element); + } } } - let workspace = cx.workspace.clone(); - - InteractiveText::new( - element_id, - StyledText::new(parsed.contents.clone()).with_highlights(&cx.text_style, highlights), - ) - .tooltip({ - let links = links.clone(); - let link_ranges = link_ranges.clone(); - move |idx, cx| { - for (ix, range) in link_ranges.iter().enumerate() { - if range.contains(&idx) { - return Some(LinkPreview::new(&links[ix].to_string(), cx)); - } - } - None - } - }) - .on_click( - link_ranges, - move |clicked_range_ix, window_cx| match &links[clicked_range_ix] { - Link::Web { url } => window_cx.open_url(url), - Link::Path { - path, - display_path: _, - } => { - if let Some(workspace) = &workspace { - _ = workspace.update(window_cx, |workspace, cx| { - workspace.open_abs_path(path.clone(), false, cx).detach(); - }); - } - } - }, - ) - .into_any_element() + any_element } fn render_markdown_rule(cx: &mut RenderContext) -> AnyElement { let rule = div().w_full().h(px(2.)).bg(cx.border_color); 
div().pt_3().pb_3().child(rule).into_any() } + +struct InteractiveMarkdownElementTooltip { + tooltip_text: Option, + action_text: String, +} + +impl InteractiveMarkdownElementTooltip { + pub fn new( + tooltip_text: Option, + action_text: &str, + cx: &mut WindowContext, + ) -> View { + let tooltip_text = tooltip_text.map(|t| util::truncate_and_trailoff(&t, 50).into()); + + cx.new_view(|_| Self { + tooltip_text, + action_text: action_text.to_string(), + }) + } +} + +impl Render for InteractiveMarkdownElementTooltip { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + tooltip_container(cx, |el, _| { + let secondary_modifier = Keystroke { + modifiers: Modifiers::secondary_key(), + ..Default::default() + }; + + el.child( + v_flex() + .gap_1() + .when_some(self.tooltip_text.clone(), |this, text| { + this.child(Label::new(text).size(LabelSize::Small)) + }) + .child( + Label::new(format!( + "{}-click to {}", + secondary_modifier, self.action_text + )) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + }) + } +} diff --git a/crates/media/Cargo.toml b/crates/media/Cargo.toml index 92940d1c52..1f2cfb0854 100644 --- a/crates/media/Cargo.toml +++ b/crates/media/Cargo.toml @@ -17,8 +17,9 @@ anyhow.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true +ctor.workspace = true foreign-types = "0.5" -metal = "0.29" +metal.workspace = true objc = "0.2" [build-dependencies] diff --git a/crates/media/src/media.rs b/crates/media/src/media.rs index 8757249c31..3f55475589 100644 --- a/crates/media/src/media.rs +++ b/crates/media/src/media.rs @@ -253,11 +253,14 @@ pub mod core_media { } } - pub fn image_buffer(&self) -> CVImageBuffer { + pub fn image_buffer(&self) -> Option { unsafe { - CVImageBuffer::wrap_under_get_rule(CMSampleBufferGetImageBuffer( - self.as_concrete_TypeRef(), - )) + let ptr = CMSampleBufferGetImageBuffer(self.as_concrete_TypeRef()); + if ptr.is_null() { + None + } else { + 
Some(CVImageBuffer::wrap_under_get_rule(ptr)) + } } } diff --git a/crates/menu/src/menu.rs b/crates/menu/src/menu.rs index 0818a6e6ff..3c5dc2521f 100644 --- a/crates/menu/src/menu.rs +++ b/crates/menu/src/menu.rs @@ -19,5 +19,6 @@ actions!( SelectNext, SelectFirst, SelectLast, + Restart ] ); diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index 444fe3c75c..42cacc34b1 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -39,6 +39,7 @@ smallvec.workspace = true sum_tree.workspace = true text.workspace = true theme.workspace = true +tree-sitter.workspace = true util.workspace = true [dev-dependencies] diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 5d111d81ce..3e6e651870 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1,4 +1,6 @@ mod anchor; +#[cfg(test)] +mod multi_buffer_tests; pub use anchor::{Anchor, AnchorRangeExt, Offset}; use anyhow::{anyhow, Result}; @@ -10,9 +12,9 @@ use itertools::Itertools; use language::{ language_settings::{language_settings, LanguageSettings}, AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, - CharKind, Chunk, CursorShape, DiagnosticEntry, File, IndentGuide, IndentSize, Language, - LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point, PointUtf16, Selection, - TextDimension, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, + CharKind, Chunk, CursorShape, DiagnosticEntry, DiskState, File, IndentGuide, IndentSize, + Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point, PointUtf16, + Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, TransactionId, Unclipped, }; use smallvec::SmallVec; @@ -89,16 +91,14 @@ pub enum Event { }, Edited { singleton_buffer_edited: bool, + edited_buffer: Option>, }, TransactionUndone { 
transaction_id: TransactionId, }, Reloaded, ReloadNeeded, - DiffBaseChanged, - DiffUpdated { - buffer: Model, - }, + LanguageChanged(BufferId), CapabilityChanged, Reparsed(BufferId), @@ -197,6 +197,7 @@ pub struct ExcerptInfo { pub buffer: BufferSnapshot, pub buffer_id: BufferId, pub range: ExcerptRange, + pub text_summary: TextSummary, } impl std::fmt::Debug for ExcerptInfo { @@ -257,6 +258,7 @@ struct Excerpt { pub struct MultiBufferExcerpt<'a> { excerpt: &'a Excerpt, excerpt_offset: usize, + excerpt_position: Point, } #[derive(Clone, Debug)] @@ -267,7 +269,7 @@ struct ExcerptIdMapping { /// A range of text from a single [`Buffer`], to be shown as an [`Excerpt`]. /// These ranges are relative to the buffer itself -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct ExcerptRange { /// The full range of text to be shown in the excerpt. pub context: Range, @@ -281,8 +283,7 @@ pub struct ExcerptSummary { excerpt_id: ExcerptId, /// The location of the last [`Excerpt`] being summarized excerpt_locator: Locator, - /// The maximum row of the [`Excerpt`]s being summarized - max_buffer_row: MultiBufferRow, + widest_line_number: u32, text: TextSummary, } @@ -325,6 +326,13 @@ struct ExcerptBytes<'a> { reversed: bool, } +struct BufferEdit { + range: Range, + new_text: Arc, + is_insertion: bool, + original_indent_column: u32, +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ExpandExcerptDirection { Up, @@ -525,57 +533,146 @@ impl MultiBuffer { pub fn edit( &self, edits: I, - mut autoindent_mode: Option, + autoindent_mode: Option, cx: &mut ModelContext, ) where I: IntoIterator, T)>, S: ToOffset, T: Into>, { - if self.read_only() { - return; - } - if self.buffers.borrow().is_empty() { - return; - } - let snapshot = self.read(cx); - let edits = edits.into_iter().map(|(range, new_text)| { - let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); - if range.start > range.end { - mem::swap(&mut 
range.start, &mut range.end); + let edits = edits + .into_iter() + .map(|(range, new_text)| { + let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + if range.start > range.end { + mem::swap(&mut range.start, &mut range.end); + } + (range, new_text.into()) + }) + .collect::>(); + + return edit_internal(self, snapshot, edits, autoindent_mode, cx); + + // Non-generic part of edit, hoisted out to avoid blowing up LLVM IR. + fn edit_internal( + this: &MultiBuffer, + snapshot: Ref, + edits: Vec<(Range, Arc)>, + mut autoindent_mode: Option, + cx: &mut ModelContext, + ) { + if this.read_only() || this.buffers.borrow().is_empty() { + return; + } + + if let Some(buffer) = this.as_singleton() { + buffer.update(cx, |buffer, cx| { + buffer.edit(edits, autoindent_mode, cx); + }); + cx.emit(Event::ExcerptsEdited { + ids: this.excerpt_ids(), + }); + return; + } + + let original_indent_columns = match &mut autoindent_mode { + Some(AutoindentMode::Block { + original_indent_columns, + }) => mem::take(original_indent_columns), + _ => Default::default(), + }; + + let (buffer_edits, edited_excerpt_ids) = + this.convert_edits_to_buffer_edits(edits, &snapshot, &original_indent_columns); + drop(snapshot); + + for (buffer_id, mut edits) in buffer_edits { + edits.sort_unstable_by_key(|edit| edit.range.start); + this.buffers.borrow()[&buffer_id] + .buffer + .update(cx, |buffer, cx| { + let mut edits = edits.into_iter().peekable(); + let mut insertions = Vec::new(); + let mut original_indent_columns = Vec::new(); + let mut deletions = Vec::new(); + let empty_str: Arc = Arc::default(); + while let Some(BufferEdit { + mut range, + new_text, + mut is_insertion, + original_indent_column, + }) = edits.next() + { + while let Some(BufferEdit { + range: next_range, + is_insertion: next_is_insertion, + .. 
+ }) = edits.peek() + { + if range.end >= next_range.start { + range.end = cmp::max(next_range.end, range.end); + is_insertion |= *next_is_insertion; + edits.next(); + } else { + break; + } + } + + if is_insertion { + original_indent_columns.push(original_indent_column); + insertions.push(( + buffer.anchor_before(range.start) + ..buffer.anchor_before(range.end), + new_text.clone(), + )); + } else if !range.is_empty() { + deletions.push(( + buffer.anchor_before(range.start) + ..buffer.anchor_before(range.end), + empty_str.clone(), + )); + } + } + + let deletion_autoindent_mode = + if let Some(AutoindentMode::Block { .. }) = autoindent_mode { + Some(AutoindentMode::Block { + original_indent_columns: Default::default(), + }) + } else { + autoindent_mode.clone() + }; + let insertion_autoindent_mode = + if let Some(AutoindentMode::Block { .. }) = autoindent_mode { + Some(AutoindentMode::Block { + original_indent_columns, + }) + } else { + autoindent_mode.clone() + }; + + buffer.edit(deletions, deletion_autoindent_mode, cx); + buffer.edit(insertions, insertion_autoindent_mode, cx); + }) } - (range, new_text) - }); - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, cx| { - buffer.edit(edits, autoindent_mode, cx); - }); cx.emit(Event::ExcerptsEdited { - ids: self.excerpt_ids(), + ids: edited_excerpt_ids, }); - return; } + } - let original_indent_columns = match &mut autoindent_mode { - Some(AutoindentMode::Block { - original_indent_columns, - }) => mem::take(original_indent_columns), - _ => Default::default(), - }; - - struct BufferEdit { - range: Range, - new_text: Arc, - is_insertion: bool, - original_indent_column: u32, - } + fn convert_edits_to_buffer_edits( + &self, + edits: Vec<(Range, Arc)>, + snapshot: &MultiBufferSnapshot, + original_indent_columns: &[u32], + ) -> (HashMap>, Vec) { let mut buffer_edits: HashMap> = Default::default(); let mut edited_excerpt_ids = Vec::new(); let mut cursor = snapshot.excerpts.cursor::(&()); - for (ix, 
(range, new_text)) in edits.enumerate() { - let new_text: Arc = new_text.into(); + for (ix, (range, new_text)) in edits.into_iter().enumerate() { let original_indent_column = original_indent_columns.get(ix).copied().unwrap_or(0); cursor.seek(&range.start, Bias::Right, &()); if cursor.item().is_none() && range.start == *cursor.start() { @@ -667,84 +764,71 @@ impl MultiBuffer { } } } + (buffer_edits, edited_excerpt_ids) + } - drop(cursor); - drop(snapshot); - // Non-generic part of edit, hoisted out to avoid blowing up LLVM IR. - fn tail( + pub fn autoindent_ranges(&self, ranges: I, cx: &mut ModelContext) + where + I: IntoIterator>, + S: ToOffset, + { + let snapshot = self.read(cx); + let empty = Arc::::from(""); + let edits = ranges + .into_iter() + .map(|range| { + let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + if range.start > range.end { + mem::swap(&mut range.start, &mut range.end); + } + (range, empty.clone()) + }) + .collect::>(); + + return autoindent_ranges_internal(self, snapshot, edits, cx); + + fn autoindent_ranges_internal( this: &MultiBuffer, - buffer_edits: HashMap>, - autoindent_mode: Option, - edited_excerpt_ids: Vec, + snapshot: Ref, + edits: Vec<(Range, Arc)>, cx: &mut ModelContext, ) { + if this.read_only() || this.buffers.borrow().is_empty() { + return; + } + + if let Some(buffer) = this.as_singleton() { + buffer.update(cx, |buffer, cx| { + buffer.autoindent_ranges(edits.into_iter().map(|e| e.0), cx); + }); + cx.emit(Event::ExcerptsEdited { + ids: this.excerpt_ids(), + }); + return; + } + + let (buffer_edits, edited_excerpt_ids) = + this.convert_edits_to_buffer_edits(edits, &snapshot, &[]); + drop(snapshot); + for (buffer_id, mut edits) in buffer_edits { edits.sort_unstable_by_key(|edit| edit.range.start); + + let mut ranges: Vec> = Vec::new(); + for edit in edits { + if let Some(last_range) = ranges.last_mut() { + if edit.range.start <= last_range.end { + last_range.end = last_range.end.max(edit.range.end); + 
continue; + } + } + ranges.push(edit.range); + } + this.buffers.borrow()[&buffer_id] .buffer .update(cx, |buffer, cx| { - let mut edits = edits.into_iter().peekable(); - let mut insertions = Vec::new(); - let mut original_indent_columns = Vec::new(); - let mut deletions = Vec::new(); - let empty_str: Arc = Arc::default(); - while let Some(BufferEdit { - mut range, - new_text, - mut is_insertion, - original_indent_column, - }) = edits.next() - { - while let Some(BufferEdit { - range: next_range, - is_insertion: next_is_insertion, - .. - }) = edits.peek() - { - if range.end >= next_range.start { - range.end = cmp::max(next_range.end, range.end); - is_insertion |= *next_is_insertion; - edits.next(); - } else { - break; - } - } - - if is_insertion { - original_indent_columns.push(original_indent_column); - insertions.push(( - buffer.anchor_before(range.start) - ..buffer.anchor_before(range.end), - new_text.clone(), - )); - } else if !range.is_empty() { - deletions.push(( - buffer.anchor_before(range.start) - ..buffer.anchor_before(range.end), - empty_str.clone(), - )); - } - } - - let deletion_autoindent_mode = - if let Some(AutoindentMode::Block { .. }) = autoindent_mode { - Some(AutoindentMode::Block { - original_indent_columns: Default::default(), - }) - } else { - autoindent_mode.clone() - }; - let insertion_autoindent_mode = - if let Some(AutoindentMode::Block { .. }) = autoindent_mode { - Some(AutoindentMode::Block { - original_indent_columns, - }) - } else { - autoindent_mode.clone() - }; - - buffer.edit(deletions, deletion_autoindent_mode, cx); - buffer.edit(insertions, insertion_autoindent_mode, cx); + buffer.autoindent_ranges(ranges, cx); }) } @@ -752,7 +836,6 @@ impl MultiBuffer { ids: edited_excerpt_ids, }); } - tail(self, buffer_edits, autoindent_mode, edited_excerpt_ids, cx); } // Inserts newlines at the given position to create an empty line, returning the start of the new line. 
@@ -1406,6 +1489,7 @@ impl MultiBuffer { }]); cx.emit(Event::Edited { singleton_buffer_edited: false, + edited_buffer: None, }); cx.emit(Event::ExcerptsAdded { buffer, @@ -1433,6 +1517,7 @@ impl MultiBuffer { }]); cx.emit(Event::Edited { singleton_buffer_edited: false, + edited_buffer: None, }); cx.emit(Event::ExcerptsRemoved { ids }); cx.notify(); @@ -1464,6 +1549,33 @@ impl MultiBuffer { excerpts } + pub fn excerpt_ranges_for_buffer( + &self, + buffer_id: BufferId, + cx: &AppContext, + ) -> Vec> { + let snapshot = self.read(cx); + let buffers = self.buffers.borrow(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, Point)>(&()); + buffers + .get(&buffer_id) + .into_iter() + .flat_map(|state| &state.excerpts) + .filter_map(move |locator| { + cursor.seek_forward(&Some(locator), Bias::Left, &()); + cursor.item().and_then(|excerpt| { + if excerpt.locator == *locator { + let excerpt_start = cursor.start().1; + let excerpt_end = excerpt_start + excerpt.text_summary.lines; + Some(excerpt_start..excerpt_end) + } else { + None + } + }) + }) + .collect() + } + pub fn excerpt_buffer_ids(&self) -> Vec { self.snapshot .borrow() @@ -1555,42 +1667,6 @@ impl MultiBuffer { }) } - pub fn range_to_buffer_ranges( - &self, - range: Range, - cx: &AppContext, - ) -> Vec<(Model, Range, ExcerptId)> { - let snapshot = self.read(cx); - let start = range.start.to_offset(&snapshot); - let end = range.end.to_offset(&snapshot); - - let mut result = Vec::new(); - let mut cursor = snapshot.excerpts.cursor::(&()); - cursor.seek(&start, Bias::Right, &()); - if cursor.item().is_none() { - cursor.prev(&()); - } - - while let Some(excerpt) = cursor.item() { - if *cursor.start() > end { - break; - } - - let mut end_before_newline = cursor.end(&()); - if excerpt.has_trailing_newline { - end_before_newline -= 1; - } - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let start = excerpt_start + (cmp::max(start, *cursor.start()) - *cursor.start()); - let end = 
excerpt_start + (cmp::min(end, end_before_newline) - *cursor.start()); - let buffer = self.buffers.borrow()[&excerpt.buffer_id].buffer.clone(); - result.push((buffer, start..end, excerpt.id)); - cursor.next(&()); - } - - result - } - pub fn remove_excerpts( &mut self, excerpt_ids: impl IntoIterator, @@ -1674,6 +1750,7 @@ impl MultiBuffer { self.subscriptions.publish_mut(edits); cx.emit(Event::Edited { singleton_buffer_edited: false, + edited_buffer: None, }); cx.emit(Event::ExcerptsRemoved { ids }); cx.notify(); @@ -1737,14 +1814,13 @@ impl MultiBuffer { cx.emit(match event { language::BufferEvent::Edited => Event::Edited { singleton_buffer_edited: true, + edited_buffer: Some(buffer.clone()), }, language::BufferEvent::DirtyChanged => Event::DirtyChanged, language::BufferEvent::Saved => Event::Saved, language::BufferEvent::FileHandleChanged => Event::FileHandleChanged, language::BufferEvent::Reloaded => Event::Reloaded, language::BufferEvent::ReloadNeeded => Event::ReloadNeeded, - language::BufferEvent::DiffBaseChanged => Event::DiffBaseChanged, - language::BufferEvent::DiffUpdated => Event::DiffUpdated { buffer }, language::BufferEvent::LanguageChanged => { Event::LanguageChanged(buffer.read(cx).remote_id()) } @@ -1902,6 +1978,7 @@ impl MultiBuffer { self.subscriptions.publish_mut(edits); cx.emit(Event::Edited { singleton_buffer_edited: false, + edited_buffer: None, }); cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); cx.notify(); @@ -1999,6 +2076,7 @@ impl MultiBuffer { self.subscriptions.publish_mut(edits); cx.emit(Event::Edited { singleton_buffer_edited: false, + edited_buffer: None, }); cx.emit(Event::ExcerptsExpanded { ids }); cx.notify(); @@ -2035,7 +2113,9 @@ impl MultiBuffer { edited |= buffer_edited; non_text_state_updated |= buffer_non_text_state_updated; is_dirty |= buffer.is_dirty(); - has_deleted_file |= buffer.file().map_or(false, |file| file.is_deleted()); + has_deleted_file |= buffer + .file() + .map_or(false, |file| file.disk_state() == 
DiskState::Deleted); has_conflict |= buffer.has_conflict(); } if edited { @@ -2472,8 +2552,8 @@ impl MultiBufferSnapshot { self.excerpts.summary().text.len == 0 } - pub fn max_buffer_row(&self) -> MultiBufferRow { - self.excerpts.summary().max_buffer_row + pub fn widest_line_number(&self) -> u32 { + self.excerpts.summary().widest_line_number + 1 } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2942,6 +3022,10 @@ impl MultiBufferSnapshot { self.text_summary().lines } + pub fn max_row(&self) -> MultiBufferRow { + MultiBufferRow(self.text_summary().lines.row) + } + pub fn text_summary(&self) -> TextSummary { self.excerpts.summary().text.clone() } @@ -3329,6 +3413,24 @@ impl MultiBufferSnapshot { } } + pub fn buffer_ids_in_selected_rows( + &self, + selection: Selection, + ) -> impl Iterator + '_ { + let mut cursor = self.excerpts.cursor::(&()); + cursor.seek(&Point::new(selection.start.row, 0), Bias::Right, &()); + cursor.prev(&()); + + iter::from_fn(move || { + cursor.next(&()); + if cursor.start().row <= selection.end.row { + cursor.item().map(|item| item.buffer_id) + } else { + None + } + }) + } + pub fn excerpts( &self, ) -> impl Iterator)> { @@ -3337,26 +3439,89 @@ impl MultiBufferSnapshot { .map(|excerpt| (excerpt.id, &excerpt.buffer, excerpt.range.clone())) } - fn excerpts_for_range( + pub fn all_excerpts(&self) -> impl Iterator { + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); + cursor.next(&()); + std::iter::from_fn(move || { + let excerpt = cursor.item()?; + let excerpt = MultiBufferExcerpt::new(excerpt, *cursor.start()); + cursor.next(&()); + Some(excerpt) + }) + } + + pub fn excerpts_for_range( &self, range: Range, - ) -> impl Iterator + '_ { + ) -> impl Iterator + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(&()); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.seek(&range.start, Bias::Right, &()); cursor.prev(&()); 
iter::from_fn(move || { cursor.next(&()); - if cursor.start() < &range.end { - cursor.item().map(|item| (item, *cursor.start())) + if cursor.start().0 < range.end { + cursor + .item() + .map(|item| MultiBufferExcerpt::new(item, *cursor.start())) } else { None } }) } + pub fn excerpts_for_range_rev( + &self, + range: Range, + ) -> impl Iterator + '_ { + let range = range.start.to_offset(self)..range.end.to_offset(self); + + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); + cursor.seek(&range.end, Bias::Left, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + + std::iter::from_fn(move || { + let excerpt = cursor.item()?; + let excerpt = MultiBufferExcerpt::new(excerpt, *cursor.start()); + cursor.prev(&()); + Some(excerpt) + }) + } + + pub fn excerpt_before(&self, id: ExcerptId) -> Option> { + let start_locator = self.excerpt_locator_for_id(id); + let mut cursor = self.excerpts.cursor::(&()); + cursor.seek(start_locator, Bias::Left, &()); + cursor.prev(&()); + let excerpt = cursor.item()?; + let excerpt_offset = cursor.start().text.len; + let excerpt_position = cursor.start().text.lines; + Some(MultiBufferExcerpt { + excerpt, + excerpt_offset, + excerpt_position, + }) + } + + pub fn excerpt_after(&self, id: ExcerptId) -> Option> { + let start_locator = self.excerpt_locator_for_id(id); + let mut cursor = self.excerpts.cursor::(&()); + cursor.seek(start_locator, Bias::Left, &()); + cursor.next(&()); + let excerpt = cursor.item()?; + let excerpt_offset = cursor.start().text.len; + let excerpt_position = cursor.start().text.lines; + Some(MultiBufferExcerpt { + excerpt, + excerpt_offset, + excerpt_position, + }) + } + pub fn excerpt_boundaries_in_range( &self, range: R, @@ -3406,6 +3571,7 @@ impl MultiBufferSnapshot { buffer: excerpt.buffer.clone(), buffer_id: excerpt.buffer_id, range: excerpt.range.clone(), + text_summary: excerpt.text_summary.clone(), }); if next.is_none() { @@ -3421,6 +3587,7 @@ impl MultiBufferSnapshot { buffer: 
prev_excerpt.buffer.clone(), buffer_id: prev_excerpt.buffer_id, range: prev_excerpt.range.clone(), + text_summary: prev_excerpt.text_summary.clone(), }); let row = MultiBufferRow(cursor.start().1.row); @@ -3536,22 +3703,12 @@ impl MultiBufferSnapshot { ) -> impl Iterator> + 'a { let range = range.start.to_offset(self)..range.end.to_offset(self); self.excerpts_for_range(range.clone()) - .filter(move |&(excerpt, _)| redaction_enabled(excerpt.buffer.file())) - .flat_map(move |(excerpt, excerpt_offset)| { - let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - + .filter(move |excerpt| redaction_enabled(excerpt.buffer().file())) + .flat_map(move |excerpt| { excerpt - .buffer - .redacted_ranges(excerpt.range.context.clone()) - .map(move |mut redacted_range| { - // Re-base onto the excerpts coordinates in the multibuffer - redacted_range.start = excerpt_offset - + redacted_range.start.saturating_sub(excerpt_buffer_start); - redacted_range.end = excerpt_offset - + redacted_range.end.saturating_sub(excerpt_buffer_start); - - redacted_range - }) + .buffer() + .redacted_ranges(excerpt.buffer_range().clone()) + .map(move |redacted_range| excerpt.map_range_from_buffer(redacted_range)) .skip_while(move |redacted_range| redacted_range.end < range.start) .take_while(move |redacted_range| redacted_range.start < range.end) }) @@ -3563,12 +3720,13 @@ impl MultiBufferSnapshot { ) -> impl Iterator + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); self.excerpts_for_range(range.clone()) - .flat_map(move |(excerpt, excerpt_offset)| { - let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + .flat_map(move |excerpt| { + let excerpt_buffer_start = + excerpt.buffer_range().start.to_offset(&excerpt.buffer()); excerpt - .buffer - .runnable_ranges(excerpt.range.context.clone()) + .buffer() + .runnable_ranges(excerpt.buffer_range()) .filter_map(move |mut runnable| { // Re-base onto the excerpts coordinates in 
the multibuffer // @@ -3577,15 +3735,14 @@ impl MultiBufferSnapshot { if runnable.run_range.start < excerpt_buffer_start { return None; } - if language::ToPoint::to_point(&runnable.run_range.end, &excerpt.buffer).row - > excerpt.max_buffer_row + if language::ToPoint::to_point(&runnable.run_range.end, &excerpt.buffer()) + .row + > excerpt.max_buffer_row() { return None; } - runnable.run_range.start = - excerpt_offset + runnable.run_range.start - excerpt_buffer_start; - runnable.run_range.end = - excerpt_offset + runnable.run_range.end - excerpt_buffer_start; + runnable.run_range = excerpt.map_range_from_buffer(runnable.run_range); + Some(runnable) }) .skip_while(move |runnable| runnable.run_range.end < range.start) @@ -3619,15 +3776,15 @@ impl MultiBufferSnapshot { let range = range.start.to_offset(self)..range.end.to_offset(self); self.excerpts_for_range(range.clone()) - .flat_map(move |(excerpt, excerpt_offset)| { + .flat_map(move |excerpt| { let excerpt_buffer_start_row = - excerpt.range.context.start.to_point(&excerpt.buffer).row; - let excerpt_offset_row = crate::ToPoint::to_point(&excerpt_offset, self).row; + excerpt.buffer_range().start.to_point(&excerpt.buffer()).row; + let excerpt_offset_row = excerpt.start_point().row; excerpt - .buffer + .buffer() .indent_guides_in_range( - excerpt.range.context.clone(), + excerpt.buffer_range(), ignore_disabled_for_language, cx, ) @@ -3714,191 +3871,55 @@ impl MultiBufferSnapshot { .any(|excerpt| excerpt.buffer.has_diagnostics()) } - pub fn diagnostic_group<'a, O>( - &'a self, + pub fn diagnostic_group( + &self, group_id: usize, - ) -> impl Iterator> + 'a - where - O: text::FromAnchor + 'a, - { - self.as_singleton() - .into_iter() - .flat_map(move |(_, _, buffer)| buffer.diagnostic_group(group_id)) + ) -> impl Iterator> + '_ { + self.all_excerpts().flat_map(move |excerpt| { + excerpt.buffer().diagnostic_group(group_id).map( + move |DiagnosticEntry { diagnostic, range }| DiagnosticEntry { + diagnostic, + range: 
self.anchor_in_excerpt(excerpt.id(), range.start).unwrap() + ..self.anchor_in_excerpt(excerpt.id(), range.end).unwrap(), + }, + ) + }) } - pub fn diagnostics_in_range<'a, T, O>( + pub fn diagnostics_in_range<'a, T>( &'a self, range: Range, reversed: bool, - ) -> impl Iterator> + 'a + ) -> impl Iterator> + 'a where T: 'a + ToOffset, - O: 'a + text::FromAnchor + Ord, { - self.as_singleton() - .into_iter() - .flat_map(move |(_, _, buffer)| { - buffer.diagnostics_in_range( - range.start.to_offset(self)..range.end.to_offset(self), - reversed, - ) - }) - } - - pub fn has_git_diffs(&self) -> bool { - for excerpt in self.excerpts.iter() { - if excerpt.buffer.has_git_diff() { - return true; - } + let mut ranges = self.range_to_buffer_ranges(range); + if reversed { + ranges.reverse(); } - false - } - - pub fn git_diff_hunks_in_range_rev( - &self, - row_range: Range, - ) -> impl Iterator + '_ { - let mut cursor = self.excerpts.cursor::(&()); - - cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &()); - if cursor.item().is_none() { - cursor.prev(&()); - } - - std::iter::from_fn(move || { - let excerpt = cursor.item()?; - let multibuffer_start = *cursor.start(); - let multibuffer_end = multibuffer_start + excerpt.text_summary.lines; - if multibuffer_start.row >= row_range.end.0 { - return None; - } - - let mut buffer_start = excerpt.range.context.start; - let mut buffer_end = excerpt.range.context.end; - let excerpt_start_point = buffer_start.to_point(&excerpt.buffer); - let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines; - - if row_range.start.0 > multibuffer_start.row { - let buffer_start_point = - excerpt_start_point + Point::new(row_range.start.0 - multibuffer_start.row, 0); - buffer_start = excerpt.buffer.anchor_before(buffer_start_point); - } - - if row_range.end.0 < multibuffer_end.row { - let buffer_end_point = - excerpt_start_point + Point::new(row_range.end.0 - multibuffer_start.row, 0); - buffer_end = 
excerpt.buffer.anchor_before(buffer_end_point); - } - - let buffer_hunks = excerpt - .buffer - .git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end) - .map(move |hunk| { - let start = multibuffer_start.row - + hunk.row_range.start.saturating_sub(excerpt_start_point.row); - let end = multibuffer_start.row - + hunk - .row_range - .end - .min(excerpt_end_point.row + 1) - .saturating_sub(excerpt_start_point.row); - - MultiBufferDiffHunk { - row_range: MultiBufferRow(start)..MultiBufferRow(end), - diff_base_byte_range: hunk.diff_base_byte_range.clone(), - buffer_range: hunk.buffer_range.clone(), - buffer_id: excerpt.buffer_id, - } - }); - - cursor.prev(&()); - - Some(buffer_hunks) + ranges.into_iter().flat_map(move |(excerpt, range)| { + let excerpt_id = excerpt.id(); + excerpt.buffer().diagnostics_in_range(range, reversed).map( + move |DiagnosticEntry { diagnostic, range }| DiagnosticEntry { + diagnostic, + range: self.anchor_in_excerpt(excerpt_id, range.start).unwrap() + ..self.anchor_in_excerpt(excerpt_id, range.end).unwrap(), + }, + ) }) - .flatten() } - pub fn git_diff_hunks_in_range( + pub fn syntax_ancestor( &self, - row_range: Range, - ) -> impl Iterator + '_ { - let mut cursor = self.excerpts.cursor::(&()); - - cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &()); - - std::iter::from_fn(move || { - let excerpt = cursor.item()?; - let multibuffer_start = *cursor.start(); - let multibuffer_end = multibuffer_start + excerpt.text_summary.lines; - let mut buffer_start = excerpt.range.context.start; - let mut buffer_end = excerpt.range.context.end; - - let excerpt_rows = match multibuffer_start.row.cmp(&row_range.end.0) { - cmp::Ordering::Less => { - let excerpt_start_point = buffer_start.to_point(&excerpt.buffer); - let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines; - - if row_range.start.0 > multibuffer_start.row { - let buffer_start_point = excerpt_start_point - + Point::new(row_range.start.0 - multibuffer_start.row, 
0); - buffer_start = excerpt.buffer.anchor_before(buffer_start_point); - } - - if row_range.end.0 < multibuffer_end.row { - let buffer_end_point = excerpt_start_point - + Point::new(row_range.end.0 - multibuffer_start.row, 0); - buffer_end = excerpt.buffer.anchor_before(buffer_end_point); - } - excerpt_start_point.row..excerpt_end_point.row - } - cmp::Ordering::Equal if row_range.end.0 == 0 => { - buffer_end = buffer_start; - 0..0 - } - cmp::Ordering::Greater | cmp::Ordering::Equal => return None, - }; - - let buffer_hunks = excerpt - .buffer - .git_diff_hunks_intersecting_range(buffer_start..buffer_end) - .map(move |hunk| { - let buffer_range = if excerpt_rows.start == 0 && excerpt_rows.end == 0 { - MultiBufferRow(0)..MultiBufferRow(1) - } else { - let start = multibuffer_start.row - + hunk.row_range.start.saturating_sub(excerpt_rows.start); - let end = multibuffer_start.row - + hunk - .row_range - .end - .min(excerpt_rows.end + 1) - .saturating_sub(excerpt_rows.start); - MultiBufferRow(start)..MultiBufferRow(end) - }; - MultiBufferDiffHunk { - row_range: buffer_range, - diff_base_byte_range: hunk.diff_base_byte_range.clone(), - buffer_range: hunk.buffer_range.clone(), - buffer_id: excerpt.buffer_id, - } - }); - - cursor.next(&()); - - Some(buffer_hunks) - }) - .flatten() - } - - pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + range: Range, + ) -> Option<(tree_sitter::Node, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); let excerpt = self.excerpt_containing(range.clone())?; - - let ancestor_buffer_range = excerpt + let node = excerpt .buffer() - .range_for_syntax_ancestor(excerpt.map_range_to_buffer(range))?; - - Some(excerpt.map_range_from_buffer(ancestor_buffer_range)) + .syntax_ancestor(excerpt.map_range_to_buffer(range))?; + Some((node, excerpt.map_range_from_buffer(node.byte_range()))) } pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option> { @@ -4068,7 +4089,7 @@ impl MultiBufferSnapshot { pub fn 
excerpt_containing(&self, range: Range) -> Option { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(&()); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.seek(&range.start, Bias::Right, &()); let start_excerpt = cursor.item()?; @@ -4093,12 +4114,12 @@ impl MultiBufferSnapshot { I: IntoIterator> + 'a, { let mut ranges = ranges.into_iter().map(|range| range.to_offset(self)); - let mut cursor = self.excerpts.cursor::(&()); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.next(&()); let mut current_range = ranges.next(); iter::from_fn(move || { let range = current_range.clone()?; - if range.start >= cursor.end(&()) { + if range.start >= cursor.end(&()).0 { cursor.seek_forward(&range.start, Bias::Right, &()); if range.start == self.len() { cursor.prev(&()); @@ -4106,11 +4127,11 @@ impl MultiBufferSnapshot { } let excerpt = cursor.item()?; - let range_start_in_excerpt = cmp::max(range.start, *cursor.start()); + let range_start_in_excerpt = cmp::max(range.start, cursor.start().0); let range_end_in_excerpt = if excerpt.has_trailing_newline { - cmp::min(range.end, cursor.end(&()) - 1) + cmp::min(range.end, cursor.end(&()).0 - 1) } else { - cmp::min(range.end, cursor.end(&())) + cmp::min(range.end, cursor.end(&()).0) }; let buffer_range = MultiBufferExcerpt::new(excerpt, *cursor.start()) .map_range_to_buffer(range_start_in_excerpt..range_end_in_excerpt); @@ -4126,7 +4147,7 @@ impl MultiBufferSnapshot { text_anchor: excerpt.buffer.anchor_after(buffer_range.end), }; - if range.end > cursor.end(&()) { + if range.end > cursor.end(&()).0 { cursor.next(&()); } else { current_range = ranges.next(); @@ -4136,6 +4157,42 @@ impl MultiBufferSnapshot { }) } + pub fn range_to_buffer_ranges( + &self, + range: Range, + ) -> Vec<(MultiBufferExcerpt<'_>, Range)> { + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); + + let mut result = Vec::new(); + let mut 
cursor = self.excerpts.cursor::<(usize, Point)>(&()); + cursor.seek(&start, Bias::Right, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + + while let Some(excerpt) = cursor.item() { + if cursor.start().0 > end { + break; + } + + let mut end_before_newline = cursor.end(&()).0; + if excerpt.has_trailing_newline { + end_before_newline -= 1; + } + let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let start = excerpt_start + (cmp::max(start, cursor.start().0) - cursor.start().0); + let end = excerpt_start + (cmp::min(end, end_before_newline) - cursor.start().0); + result.push(( + MultiBufferExcerpt::new(&excerpt, *cursor.start()), + start..end, + )); + cursor.next(&()); + } + + result + } + /// Returns excerpts overlapping the given ranges. If range spans multiple excerpts returns one range for each excerpt /// /// The ranges are specified in the coordinate space of the multibuffer, not the individual excerpted buffers. @@ -4145,12 +4202,12 @@ impl MultiBufferSnapshot { ranges: impl IntoIterator>, ) -> impl Iterator)> { let mut ranges = ranges.into_iter().map(|range| range.to_offset(self)); - let mut cursor = self.excerpts.cursor::(&()); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.next(&()); let mut current_range = ranges.next(); iter::from_fn(move || { let range = current_range.clone()?; - if range.start >= cursor.end(&()) { + if range.start >= cursor.end(&()).0 { cursor.seek_forward(&range.start, Bias::Right, &()); if range.start == self.len() { cursor.prev(&()); @@ -4158,16 +4215,16 @@ impl MultiBufferSnapshot { } let excerpt = cursor.item()?; - let range_start_in_excerpt = cmp::max(range.start, *cursor.start()); + let range_start_in_excerpt = cmp::max(range.start, cursor.start().0); let range_end_in_excerpt = if excerpt.has_trailing_newline { - cmp::min(range.end, cursor.end(&()) - 1) + cmp::min(range.end, cursor.end(&()).0 - 1) } else { - cmp::min(range.end, cursor.end(&())) + cmp::min(range.end, 
cursor.end(&()).0) }; let buffer_range = MultiBufferExcerpt::new(excerpt, *cursor.start()) .map_range_to_buffer(range_start_in_excerpt..range_end_in_excerpt); - if range.end > cursor.end(&()) { + if range.end > cursor.end(&()).0 { cursor.next(&()); } else { current_range = ranges.next(); @@ -4591,6 +4648,11 @@ impl Excerpt { self.range.context.start.to_offset(&self.buffer) } + /// The [`Excerpt`]'s start point in its [`Buffer`] + fn buffer_start_point(&self) -> Point { + self.range.context.start.to_point(&self.buffer) + } + /// The [`Excerpt`]'s end offset in its [`Buffer`] fn buffer_end_offset(&self) -> usize { self.buffer_start_offset() + self.text_summary.len @@ -4598,10 +4660,35 @@ impl Excerpt { } impl<'a> MultiBufferExcerpt<'a> { - fn new(excerpt: &'a Excerpt, excerpt_offset: usize) -> Self { + fn new(excerpt: &'a Excerpt, (excerpt_offset, excerpt_position): (usize, Point)) -> Self { MultiBufferExcerpt { excerpt, excerpt_offset, + excerpt_position, + } + } + + pub fn id(&self) -> ExcerptId { + self.excerpt.id + } + + pub fn buffer_id(&self) -> BufferId { + self.excerpt.buffer_id + } + + pub fn start_anchor(&self) -> Anchor { + Anchor { + buffer_id: Some(self.excerpt.buffer_id), + excerpt_id: self.excerpt.id, + text_anchor: self.excerpt.range.context.start, + } + } + + pub fn end_anchor(&self) -> Anchor { + Anchor { + buffer_id: Some(self.excerpt.buffer_id), + excerpt_id: self.excerpt.id, + text_anchor: self.excerpt.range.context.end, } } @@ -4609,9 +4696,32 @@ impl<'a> MultiBufferExcerpt<'a> { &self.excerpt.buffer } + pub fn buffer_range(&self) -> Range { + self.excerpt.range.context.clone() + } + + pub fn start_offset(&self) -> usize { + self.excerpt_offset + } + + pub fn start_point(&self) -> Point { + self.excerpt_position + } + /// Maps an offset within the [`MultiBuffer`] to an offset within the [`Buffer`] pub fn map_offset_to_buffer(&self, offset: usize) -> usize { - self.excerpt.buffer_start_offset() + offset.saturating_sub(self.excerpt_offset) + 
self.excerpt.buffer_start_offset() + + offset + .saturating_sub(self.excerpt_offset) + .min(self.excerpt.text_summary.len) + } + + /// Maps a point within the [`MultiBuffer`] to a point within the [`Buffer`] + pub fn map_point_to_buffer(&self, point: Point) -> Point { + self.excerpt.buffer_start_point() + + point + .saturating_sub(self.excerpt_position) + .min(self.excerpt.text_summary.lines) } /// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`] @@ -4621,14 +4731,20 @@ impl<'a> MultiBufferExcerpt<'a> { /// Map an offset within the [`Buffer`] to an offset within the [`MultiBuffer`] pub fn map_offset_from_buffer(&self, buffer_offset: usize) -> usize { - let mut buffer_offset_in_excerpt = - buffer_offset.saturating_sub(self.excerpt.buffer_start_offset()); - buffer_offset_in_excerpt = - cmp::min(buffer_offset_in_excerpt, self.excerpt.text_summary.len); - + let buffer_offset_in_excerpt = buffer_offset + .saturating_sub(self.excerpt.buffer_start_offset()) + .min(self.excerpt.text_summary.len); self.excerpt_offset + buffer_offset_in_excerpt } + /// Map a point within the [`Buffer`] to a point within the [`MultiBuffer`] + pub fn map_point_from_buffer(&self, buffer_position: Point) -> Point { + let position_in_excerpt = buffer_position.saturating_sub(self.excerpt.buffer_start_point()); + let position_in_excerpt = + position_in_excerpt.min(self.excerpt.text_summary.lines + Point::new(1, 0)); + self.excerpt_position + position_in_excerpt + } + /// Map a range within the [`Buffer`] to a range within the [`MultiBuffer`] pub fn map_range_from_buffer(&self, buffer_range: Range) -> Range { self.map_offset_from_buffer(buffer_range.start) @@ -4640,6 +4756,10 @@ impl<'a> MultiBufferExcerpt<'a> { range.start >= self.excerpt.buffer_start_offset() && range.end <= self.excerpt.buffer_end_offset() } + + pub fn max_buffer_row(&self) -> u32 { + self.excerpt.max_buffer_row + } } impl ExcerptId { @@ -4696,7 +4816,7 @@ impl sum_tree::Item for Excerpt { ExcerptSummary { 
excerpt_id: self.id, excerpt_locator: self.locator.clone(), - max_buffer_row: MultiBufferRow(self.max_buffer_row), + widest_line_number: self.max_buffer_row, text, } } @@ -4741,7 +4861,7 @@ impl sum_tree::Summary for ExcerptSummary { debug_assert!(summary.excerpt_locator > self.excerpt_locator); self.excerpt_locator = summary.excerpt_locator.clone(); self.text.add_summary(&summary.text, &()); - self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row); + self.widest_line_number = cmp::max(self.widest_line_number, summary.widest_line_number); } } @@ -5171,1997 +5291,3 @@ where (excerpt_ranges, range_counts) } - -#[cfg(test)] -mod tests { - use super::*; - use gpui::{AppContext, Context, TestAppContext}; - use language::{Buffer, Rope}; - use parking_lot::RwLock; - use rand::prelude::*; - use settings::SettingsStore; - use std::env; - use util::test::sample_text; - - #[ctor::ctor] - fn init_logger() { - if std::env::var("RUST_LOG").is_ok() { - env_logger::init(); - } - } - - #[gpui::test] - fn test_singleton(cx: &mut AppContext) { - let buffer = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); - - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot.text(), buffer.read(cx).text()); - - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), - (0..buffer.read(cx).row_count()) - .map(Some) - .collect::>() - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(1..3, "XXX\n")], None, cx)); - let snapshot = multibuffer.read(cx).snapshot(cx); - - assert_eq!(snapshot.text(), buffer.read(cx).text()); - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), - (0..buffer.read(cx).row_count()) - .map(Some) - .collect::>() - ); - } - - #[gpui::test] - fn test_remote(cx: &mut AppContext) { - let host_buffer = cx.new_model(|cx| Buffer::local("a", cx)); - let guest_buffer = cx.new_model(|cx| { - let state = 
host_buffer.read(cx).to_proto(cx); - let ops = cx - .background_executor() - .block(host_buffer.read(cx).serialize_ops(None, cx)); - let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer.apply_ops( - ops.into_iter() - .map(|op| language::proto::deserialize_operation(op).unwrap()), - cx, - ); - buffer - }); - let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx)); - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot.text(), "a"); - - guest_buffer.update(cx, |buffer, cx| buffer.edit([(1..1, "b")], None, cx)); - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot.text(), "ab"); - - guest_buffer.update(cx, |buffer, cx| buffer.edit([(2..2, "c")], None, cx)); - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot.text(), "abc"); - } - - #[gpui::test] - fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - - let events = Arc::new(RwLock::new(Vec::::new())); - multibuffer.update(cx, |_, cx| { - let events = events.clone(); - cx.subscribe(&multibuffer, move |_, _, event, _| { - if let Event::Edited { .. 
} = event { - events.write().push(event.clone()) - } - }) - .detach(); - }); - - let subscription = multibuffer.update(cx, |multibuffer, cx| { - let subscription = multibuffer.subscribe(); - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: Point::new(1, 2)..Point::new(2, 5), - primary: None, - }], - cx, - ); - assert_eq!( - subscription.consume().into_inner(), - [Edit { - old: 0..0, - new: 0..10 - }] - ); - - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: Point::new(3, 3)..Point::new(4, 4), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: Point::new(3, 1)..Point::new(3, 3), - primary: None, - }], - cx, - ); - assert_eq!( - subscription.consume().into_inner(), - [Edit { - old: 10..10, - new: 10..22 - }] - ); - - subscription - }); - - // Adding excerpts emits an edited event. - assert_eq!( - events.read().as_slice(), - &[ - Event::Edited { - singleton_buffer_edited: false - }, - Event::Edited { - singleton_buffer_edited: false - }, - Event::Edited { - singleton_buffer_edited: false - } - ] - ); - - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!( - snapshot.text(), - concat!( - "bbbb\n", // Preserve newlines - "ccccc\n", // - "ddd\n", // - "eeee\n", // - "jj" // - ) - ); - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), - [Some(1), Some(2), Some(3), Some(4), Some(3)] - ); - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(2)).collect::>(), - [Some(3), Some(4), Some(3)] - ); - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(4)).collect::>(), - [Some(3)] - ); - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(5)).collect::>(), - [] - ); - - assert_eq!( - boundaries_in_range(Point::new(0, 0)..Point::new(4, 2), &snapshot), - &[ - (MultiBufferRow(0), "bbbb\nccccc".to_string(), true), - (MultiBufferRow(2), "ddd\neeee".to_string(), false), - (MultiBufferRow(4), "jj".to_string(), true), - ] - ); - assert_eq!( - 
boundaries_in_range(Point::new(0, 0)..Point::new(2, 0), &snapshot), - &[(MultiBufferRow(0), "bbbb\nccccc".to_string(), true)] - ); - assert_eq!( - boundaries_in_range(Point::new(1, 0)..Point::new(1, 5), &snapshot), - &[] - ); - assert_eq!( - boundaries_in_range(Point::new(1, 0)..Point::new(2, 0), &snapshot), - &[] - ); - assert_eq!( - boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), - &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] - ); - assert_eq!( - boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), - &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] - ); - assert_eq!( - boundaries_in_range(Point::new(2, 0)..Point::new(3, 0), &snapshot), - &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] - ); - assert_eq!( - boundaries_in_range(Point::new(4, 0)..Point::new(4, 2), &snapshot), - &[(MultiBufferRow(4), "jj".to_string(), true)] - ); - assert_eq!( - boundaries_in_range(Point::new(4, 2)..Point::new(4, 2), &snapshot), - &[] - ); - - buffer_1.update(cx, |buffer, cx| { - let text = "\n"; - buffer.edit( - [ - (Point::new(0, 0)..Point::new(0, 0), text), - (Point::new(2, 1)..Point::new(2, 3), text), - ], - None, - cx, - ); - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!( - snapshot.text(), - concat!( - "bbbb\n", // Preserve newlines - "c\n", // - "cc\n", // - "ddd\n", // - "eeee\n", // - "jj" // - ) - ); - - assert_eq!( - subscription.consume().into_inner(), - [Edit { - old: 6..8, - new: 6..7 - }] - ); - - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!( - snapshot.clip_point(Point::new(0, 5), Bias::Left), - Point::new(0, 4) - ); - assert_eq!( - snapshot.clip_point(Point::new(0, 5), Bias::Right), - Point::new(0, 4) - ); - assert_eq!( - snapshot.clip_point(Point::new(5, 1), Bias::Right), - Point::new(5, 1) - ); - assert_eq!( - snapshot.clip_point(Point::new(5, 2), Bias::Right), - Point::new(5, 2) - ); - assert_eq!( - snapshot.clip_point(Point::new(5, 3), Bias::Right), - Point::new(5, 
2) - ); - - let snapshot = multibuffer.update(cx, |multibuffer, cx| { - let (buffer_2_excerpt_id, _) = - multibuffer.excerpts_for_buffer(&buffer_2, cx)[0].clone(); - multibuffer.remove_excerpts([buffer_2_excerpt_id], cx); - multibuffer.snapshot(cx) - }); - - assert_eq!( - snapshot.text(), - concat!( - "bbbb\n", // Preserve newlines - "c\n", // - "cc\n", // - "ddd\n", // - "eeee", // - ) - ); - - fn boundaries_in_range( - range: Range, - snapshot: &MultiBufferSnapshot, - ) -> Vec<(MultiBufferRow, String, bool)> { - snapshot - .excerpt_boundaries_in_range(range) - .filter_map(|boundary| { - let starts_new_buffer = boundary.starts_new_buffer(); - boundary.next.map(|next| { - ( - boundary.row, - next.buffer - .text_for_range(next.range.context) - .collect::(), - starts_new_buffer, - ) - }) - }) - .collect::>() - } - } - - #[gpui::test] - fn test_excerpt_events(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'm'), cx)); - - let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let follower_edit_event_count = Arc::new(RwLock::new(0)); - - follower_multibuffer.update(cx, |_, cx| { - let follower_edit_event_count = follower_edit_event_count.clone(); - cx.subscribe( - &leader_multibuffer, - move |follower, _, event, cx| match event.clone() { - Event::ExcerptsAdded { - buffer, - predecessor, - excerpts, - } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx), - Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx), - Event::Edited { .. 
} => { - *follower_edit_event_count.write() += 1; - } - _ => {} - }, - ) - .detach(); - }); - - leader_multibuffer.update(cx, |leader, cx| { - leader.push_excerpts( - buffer_1.clone(), - [ - ExcerptRange { - context: 0..8, - primary: None, - }, - ExcerptRange { - context: 12..16, - primary: None, - }, - ], - cx, - ); - leader.insert_excerpts_after( - leader.excerpt_ids()[0], - buffer_2.clone(), - [ - ExcerptRange { - context: 0..5, - primary: None, - }, - ExcerptRange { - context: 10..15, - primary: None, - }, - ], - cx, - ) - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 2); - - leader_multibuffer.update(cx, |leader, cx| { - let excerpt_ids = leader.excerpt_ids(); - leader.remove_excerpts([excerpt_ids[1], excerpt_ids[3]], cx); - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 3); - - // Removing an empty set of excerpts is a noop. - leader_multibuffer.update(cx, |leader, cx| { - leader.remove_excerpts([], cx); - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 3); - - // Adding an empty set of excerpts is a noop. 
- leader_multibuffer.update(cx, |leader, cx| { - leader.push_excerpts::(buffer_2.clone(), [], cx); - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 3); - - leader_multibuffer.update(cx, |leader, cx| { - leader.clear(cx); - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 4); - } - - #[gpui::test] - fn test_expand_excerpts(cx: &mut AppContext) { - let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts_with_context_lines( - buffer.clone(), - vec![ - // Note that in this test, this first excerpt - // does not contain a new line - Point::new(3, 2)..Point::new(3, 3), - Point::new(7, 1)..Point::new(7, 3), - Point::new(15, 0)..Point::new(15, 0), - ], - 1, - cx, - ) - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - assert_eq!( - snapshot.text(), - concat!( - "ccc\n", // - "ddd\n", // - "eee", // - "\n", // End of excerpt - "ggg\n", // - "hhh\n", // - "iii", // - "\n", // End of excerpt - "ooo\n", // - "ppp\n", // - "qqq", // End of excerpt - ) - ); - drop(snapshot); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_excerpts( - multibuffer.excerpt_ids(), - 1, - ExpandExcerptDirection::UpAndDown, - cx, - ) - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - // Expanding context lines causes the line containing 'fff' to appear in two different excerpts. - // We don't attempt to merge them, because removing the excerpt could create inconsistency with other layers - // that are tracking excerpt ids. 
- assert_eq!( - snapshot.text(), - concat!( - "bbb\n", // - "ccc\n", // - "ddd\n", // - "eee\n", // - "fff\n", // End of excerpt - "fff\n", // - "ggg\n", // - "hhh\n", // - "iii\n", // - "jjj\n", // End of excerpt - "nnn\n", // - "ooo\n", // - "ppp\n", // - "qqq\n", // - "rrr", // End of excerpt - ) - ); - } - - #[gpui::test] - fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { - let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts_with_context_lines( - buffer.clone(), - vec![ - // Note that in this test, this first excerpt - // does contain a new line - Point::new(3, 2)..Point::new(4, 2), - Point::new(7, 1)..Point::new(7, 3), - Point::new(15, 0)..Point::new(15, 0), - ], - 2, - cx, - ) - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!( - snapshot.text(), - concat!( - "bbb\n", // Preserve newlines - "ccc\n", // - "ddd\n", // - "eee\n", // - "fff\n", // - "ggg\n", // - "hhh\n", // - "iii\n", // - "jjj\n", // - "nnn\n", // - "ooo\n", // - "ppp\n", // - "qqq\n", // - "rrr", // - ) - ); - - assert_eq!( - anchor_ranges - .iter() - .map(|range| range.to_point(&snapshot)) - .collect::>(), - vec![ - Point::new(2, 2)..Point::new(3, 2), - Point::new(6, 1)..Point::new(6, 3), - Point::new(11, 0)..Point::new(11, 0) - ] - ); - } - - #[gpui::test(iterations = 100)] - async fn test_push_multiple_excerpts_with_context_lines(cx: &mut TestAppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(15, 4, 'a'), cx)); - let snapshot_1 = buffer_1.update(cx, |buffer, _| buffer.snapshot()); - let snapshot_2 = buffer_2.update(cx, |buffer, _| buffer.snapshot()); - let ranges_1 = vec![ - snapshot_1.anchor_before(Point::new(3, 2))..snapshot_1.anchor_before(Point::new(4, 
2)), - snapshot_1.anchor_before(Point::new(7, 1))..snapshot_1.anchor_before(Point::new(7, 3)), - snapshot_1.anchor_before(Point::new(15, 0)) - ..snapshot_1.anchor_before(Point::new(15, 0)), - ]; - let ranges_2 = vec![ - snapshot_2.anchor_before(Point::new(2, 1))..snapshot_2.anchor_before(Point::new(3, 1)), - snapshot_2.anchor_before(Point::new(10, 0)) - ..snapshot_2.anchor_before(Point::new(10, 2)), - ]; - - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let anchor_ranges = multibuffer - .update(cx, |multibuffer, cx| { - multibuffer.push_multiple_excerpts_with_context_lines( - vec![(buffer_1.clone(), ranges_1), (buffer_2.clone(), ranges_2)], - 2, - cx, - ) - }) - .await; - - let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - assert_eq!( - snapshot.text(), - concat!( - "bbb\n", // buffer_1 - "ccc\n", // - "ddd\n", // <-- excerpt 1 - "eee\n", // <-- excerpt 1 - "fff\n", // - "ggg\n", // - "hhh\n", // <-- excerpt 2 - "iii\n", // - "jjj\n", // - // - "nnn\n", // - "ooo\n", // - "ppp\n", // <-- excerpt 3 - "qqq\n", // - "rrr\n", // - // - "aaaa\n", // buffer 2 - "bbbb\n", // - "cccc\n", // <-- excerpt 4 - "dddd\n", // <-- excerpt 4 - "eeee\n", // - "ffff\n", // - // - "iiii\n", // - "jjjj\n", // - "kkkk\n", // <-- excerpt 5 - "llll\n", // - "mmmm", // - ) - ); - - assert_eq!( - anchor_ranges - .iter() - .map(|range| range.to_point(&snapshot)) - .collect::>(), - vec![ - Point::new(2, 2)..Point::new(3, 2), - Point::new(6, 1)..Point::new(6, 3), - Point::new(11, 0)..Point::new(11, 0), - Point::new(16, 1)..Point::new(17, 1), - Point::new(22, 0)..Point::new(22, 2) - ] - ); - } - - #[gpui::test] - fn test_empty_multibuffer(cx: &mut AppContext) { - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - - let snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot.text(), ""); - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), - &[Some(0)] - ); - assert_eq!( - 
snapshot.buffer_rows(MultiBufferRow(1)).collect::>(), - &[] - ); - } - - #[gpui::test] - fn test_singleton_multibuffer_anchors(cx: &mut AppContext) { - let buffer = cx.new_model(|cx| Buffer::local("abcd", cx)); - let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); - let old_snapshot = multibuffer.read(cx).snapshot(cx); - buffer.update(cx, |buffer, cx| { - buffer.edit([(0..0, "X")], None, cx); - buffer.edit([(5..5, "Y")], None, cx); - }); - let new_snapshot = multibuffer.read(cx).snapshot(cx); - - assert_eq!(old_snapshot.text(), "abcd"); - assert_eq!(new_snapshot.text(), "XabcdY"); - - assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); - assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); - assert_eq!(old_snapshot.anchor_before(4).to_offset(&new_snapshot), 5); - assert_eq!(old_snapshot.anchor_after(4).to_offset(&new_snapshot), 6); - } - - #[gpui::test] - fn test_multibuffer_anchors(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local("efghi", cx)); - let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..4, - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..5, - primary: None, - }], - cx, - ); - multibuffer - }); - let old_snapshot = multibuffer.read(cx).snapshot(cx); - - assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 0); - assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 0); - assert_eq!(Anchor::min().to_offset(&old_snapshot), 0); - assert_eq!(Anchor::min().to_offset(&old_snapshot), 0); - assert_eq!(Anchor::max().to_offset(&old_snapshot), 10); - assert_eq!(Anchor::max().to_offset(&old_snapshot), 10); - - buffer_1.update(cx, |buffer, cx| { - buffer.edit([(0..0, "W")], None, cx); - 
buffer.edit([(5..5, "X")], None, cx); - }); - buffer_2.update(cx, |buffer, cx| { - buffer.edit([(0..0, "Y")], None, cx); - buffer.edit([(6..6, "Z")], None, cx); - }); - let new_snapshot = multibuffer.read(cx).snapshot(cx); - - assert_eq!(old_snapshot.text(), "abcd\nefghi"); - assert_eq!(new_snapshot.text(), "WabcdX\nYefghiZ"); - - assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); - assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); - assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 2); - assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 2); - assert_eq!(old_snapshot.anchor_before(2).to_offset(&new_snapshot), 3); - assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3); - assert_eq!(old_snapshot.anchor_before(5).to_offset(&new_snapshot), 7); - assert_eq!(old_snapshot.anchor_after(5).to_offset(&new_snapshot), 8); - assert_eq!(old_snapshot.anchor_before(10).to_offset(&new_snapshot), 13); - assert_eq!(old_snapshot.anchor_after(10).to_offset(&new_snapshot), 14); - } - - #[gpui::test] - fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - - // Create an insertion id in buffer 1 that doesn't exist in buffer 2. - // Add an excerpt from buffer 1 that spans this new insertion. - buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx)); - let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..7, - primary: None, - }], - cx, - ) - .pop() - .unwrap() - }); - - let snapshot_1 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_1.text(), "abcd123"); - - // Replace the buffer 1 excerpt with new excerpts from buffer 2. 
- let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([excerpt_id_1], cx); - let mut ids = multibuffer - .push_excerpts( - buffer_2.clone(), - [ - ExcerptRange { - context: 0..4, - primary: None, - }, - ExcerptRange { - context: 6..10, - primary: None, - }, - ExcerptRange { - context: 12..16, - primary: None, - }, - ], - cx, - ) - .into_iter(); - (ids.next().unwrap(), ids.next().unwrap()) - }); - let snapshot_2 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP"); - - // The old excerpt id doesn't get reused. - assert_ne!(excerpt_id_2, excerpt_id_1); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The current excerpts are from a different buffer, so we don't attempt to - // resolve the old text anchor in the new buffer. - assert_eq!( - snapshot_2.summary_for_anchor::(&snapshot_1.anchor_before(2)), - 0 - ); - assert_eq!( - snapshot_2.summaries_for_anchors::(&[ - snapshot_1.anchor_before(2), - snapshot_1.anchor_after(3) - ]), - vec![0, 0] - ); - - // Refresh anchors from the old snapshot. The return value indicates that both - // anchors lost their original excerpt. - let refresh = - snapshot_2.refresh_anchors(&[snapshot_1.anchor_before(2), snapshot_1.anchor_after(3)]); - assert_eq!( - refresh, - &[ - (0, snapshot_2.anchor_before(0), false), - (1, snapshot_2.anchor_after(0), false), - ] - ); - - // Replace the middle excerpt with a smaller excerpt in buffer 2, - // that intersects the old excerpt. 
- let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([excerpt_id_3], cx); - multibuffer - .insert_excerpts_after( - excerpt_id_2, - buffer_2.clone(), - [ExcerptRange { - context: 5..8, - primary: None, - }], - cx, - ) - .pop() - .unwrap() - }); - - let snapshot_3 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP"); - assert_ne!(excerpt_id_5, excerpt_id_3); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The third anchor can't be resolved, since its excerpt has been removed, - // so it resolves to the same position as its predecessor. - let anchors = [ - snapshot_2.anchor_before(0), - snapshot_2.anchor_after(2), - snapshot_2.anchor_after(6), - snapshot_2.anchor_after(14), - ]; - assert_eq!( - snapshot_3.summaries_for_anchors::(&anchors), - &[0, 2, 9, 13] - ); - - let new_anchors = snapshot_3.refresh_anchors(&anchors); - assert_eq!( - new_anchors.iter().map(|a| (a.0, a.2)).collect::>(), - &[(0, true), (1, true), (2, true), (3, true)] - ); - assert_eq!( - snapshot_3.summaries_for_anchors::(new_anchors.iter().map(|a| &a.1)), - &[0, 2, 7, 13] - ); - } - - #[gpui::test(iterations = 100)] - fn test_random_multibuffer(cx: &mut AppContext, mut rng: StdRng) { - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(10); - - let mut buffers: Vec> = Vec::new(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let mut excerpt_ids = Vec::::new(); - let mut expected_excerpts = Vec::<(Model, Range)>::new(); - let mut anchors = Vec::new(); - let mut old_versions = Vec::new(); - - for _ in 0..operations { - match rng.gen_range(0..100) { - 0..=14 if !buffers.is_empty() => { - let buffer = buffers.choose(&mut rng).unwrap(); - buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); - } - 15..=19 if !expected_excerpts.is_empty() => { - multibuffer.update(cx, |multibuffer, cx| { - 
let ids = multibuffer.excerpt_ids(); - let mut excerpts = HashSet::default(); - for _ in 0..rng.gen_range(0..ids.len()) { - excerpts.extend(ids.choose(&mut rng).copied()); - } - - let line_count = rng.gen_range(0..5); - - let excerpt_ixs = excerpts - .iter() - .map(|id| excerpt_ids.iter().position(|i| i == id).unwrap()) - .collect::>(); - log::info!("Expanding excerpts {excerpt_ixs:?} by {line_count} lines"); - multibuffer.expand_excerpts( - excerpts.iter().cloned(), - line_count, - ExpandExcerptDirection::UpAndDown, - cx, - ); - - if line_count > 0 { - for id in excerpts { - let excerpt_ix = excerpt_ids.iter().position(|&i| i == id).unwrap(); - let (buffer, range) = &mut expected_excerpts[excerpt_ix]; - let snapshot = buffer.read(cx).snapshot(); - let mut point_range = range.to_point(&snapshot); - point_range.start = - Point::new(point_range.start.row.saturating_sub(line_count), 0); - point_range.end = snapshot.clip_point( - Point::new(point_range.end.row + line_count, 0), - Bias::Left, - ); - point_range.end.column = snapshot.line_len(point_range.end.row); - *range = snapshot.anchor_before(point_range.start) - ..snapshot.anchor_after(point_range.end); - } - } - }); - } - 20..=29 if !expected_excerpts.is_empty() => { - let mut ids_to_remove = vec![]; - for _ in 0..rng.gen_range(1..=3) { - if expected_excerpts.is_empty() { - break; - } - - let ix = rng.gen_range(0..expected_excerpts.len()); - ids_to_remove.push(excerpt_ids.remove(ix)); - let (buffer, range) = expected_excerpts.remove(ix); - let buffer = buffer.read(cx); - log::info!( - "Removing excerpt {}: {:?}", - ix, - buffer - .text_for_range(range.to_offset(buffer)) - .collect::(), - ); - } - let snapshot = multibuffer.read(cx).read(cx); - ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts(ids_to_remove, cx) - }); - } - 30..=39 if !expected_excerpts.is_empty() => { - let multibuffer = 
multibuffer.read(cx).read(cx); - let offset = - multibuffer.clip_offset(rng.gen_range(0..=multibuffer.len()), Bias::Left); - let bias = if rng.gen() { Bias::Left } else { Bias::Right }; - log::info!("Creating anchor at {} with bias {:?}", offset, bias); - anchors.push(multibuffer.anchor_at(offset, bias)); - anchors.sort_by(|a, b| a.cmp(b, &multibuffer)); - } - 40..=44 if !anchors.is_empty() => { - let multibuffer = multibuffer.read(cx).read(cx); - let prev_len = anchors.len(); - anchors = multibuffer - .refresh_anchors(&anchors) - .into_iter() - .map(|a| a.1) - .collect(); - - // Ensure the newly-refreshed anchors point to a valid excerpt and don't - // overshoot its boundaries. - assert_eq!(anchors.len(), prev_len); - for anchor in &anchors { - if anchor.excerpt_id == ExcerptId::min() - || anchor.excerpt_id == ExcerptId::max() - { - continue; - } - - let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap(); - assert_eq!(excerpt.id, anchor.excerpt_id); - assert!(excerpt.contains(anchor)); - } - } - _ => { - let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { - let base_text = util::RandomCharIter::new(&mut rng) - .take(25) - .collect::(); - - buffers.push(cx.new_model(|cx| Buffer::local(base_text, cx))); - buffers.last().unwrap() - } else { - buffers.choose(&mut rng).unwrap() - }; - - let buffer = buffer_handle.read(cx); - let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); - let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); - let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); - let prev_excerpt_ix = rng.gen_range(0..=expected_excerpts.len()); - let prev_excerpt_id = excerpt_ids - .get(prev_excerpt_ix) - .cloned() - .unwrap_or_else(ExcerptId::max); - let excerpt_ix = (prev_excerpt_ix + 1).min(expected_excerpts.len()); - - log::info!( - "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}", - excerpt_ix, - expected_excerpts.len(), - 
buffer_handle.read(cx).remote_id(), - buffer.text(), - start_ix..end_ix, - &buffer.text()[start_ix..end_ix] - ); - - let excerpt_id = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .insert_excerpts_after( - prev_excerpt_id, - buffer_handle.clone(), - [ExcerptRange { - context: start_ix..end_ix, - primary: None, - }], - cx, - ) - .pop() - .unwrap() - }); - - excerpt_ids.insert(excerpt_ix, excerpt_id); - expected_excerpts.insert(excerpt_ix, (buffer_handle.clone(), anchor_range)); - } - } - - if rng.gen_bool(0.3) { - multibuffer.update(cx, |multibuffer, cx| { - old_versions.push((multibuffer.snapshot(cx), multibuffer.subscribe())); - }) - } - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let mut excerpt_starts = Vec::new(); - let mut expected_text = String::new(); - let mut expected_buffer_rows = Vec::new(); - for (buffer, range) in &expected_excerpts { - let buffer = buffer.read(cx); - let buffer_range = range.to_offset(buffer); - - excerpt_starts.push(TextSummary::from(expected_text.as_str())); - expected_text.extend(buffer.text_for_range(buffer_range.clone())); - expected_text.push('\n'); - - let buffer_row_range = buffer.offset_to_point(buffer_range.start).row - ..=buffer.offset_to_point(buffer_range.end).row; - for row in buffer_row_range { - expected_buffer_rows.push(Some(row)); - } - } - // Remove final trailing newline. 
- if !expected_excerpts.is_empty() { - expected_text.pop(); - } - - // Always report one buffer row - if expected_buffer_rows.is_empty() { - expected_buffer_rows.push(Some(0)); - } - - assert_eq!(snapshot.text(), expected_text); - log::info!("MultiBuffer text: {:?}", expected_text); - - assert_eq!( - snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), - expected_buffer_rows, - ); - - for _ in 0..5 { - let start_row = rng.gen_range(0..=expected_buffer_rows.len()); - assert_eq!( - snapshot - .buffer_rows(MultiBufferRow(start_row as u32)) - .collect::>(), - &expected_buffer_rows[start_row..], - "buffer_rows({})", - start_row - ); - } - - assert_eq!( - snapshot.max_buffer_row().0, - expected_buffer_rows.into_iter().flatten().max().unwrap() - ); - - let mut excerpt_starts = excerpt_starts.into_iter(); - for (buffer, range) in &expected_excerpts { - let buffer = buffer.read(cx); - let buffer_id = buffer.remote_id(); - let buffer_range = range.to_offset(buffer); - let buffer_start_point = buffer.offset_to_point(buffer_range.start); - let buffer_start_point_utf16 = - buffer.text_summary_for_range::(0..buffer_range.start); - - let excerpt_start = excerpt_starts.next().unwrap(); - let mut offset = excerpt_start.len; - let mut buffer_offset = buffer_range.start; - let mut point = excerpt_start.lines; - let mut buffer_point = buffer_start_point; - let mut point_utf16 = excerpt_start.lines_utf16(); - let mut buffer_point_utf16 = buffer_start_point_utf16; - for ch in buffer - .snapshot() - .chunks(buffer_range.clone(), false) - .flat_map(|c| c.text.chars()) - { - for _ in 0..ch.len_utf8() { - let left_offset = snapshot.clip_offset(offset, Bias::Left); - let right_offset = snapshot.clip_offset(offset, Bias::Right); - let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left); - let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right); - assert_eq!( - left_offset, - excerpt_start.len + (buffer_left_offset - buffer_range.start), - "clip_offset({:?}, 
Left). buffer: {:?}, buffer offset: {:?}", - offset, - buffer_id, - buffer_offset, - ); - assert_eq!( - right_offset, - excerpt_start.len + (buffer_right_offset - buffer_range.start), - "clip_offset({:?}, Right). buffer: {:?}, buffer offset: {:?}", - offset, - buffer_id, - buffer_offset, - ); - - let left_point = snapshot.clip_point(point, Bias::Left); - let right_point = snapshot.clip_point(point, Bias::Right); - let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left); - let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right); - assert_eq!( - left_point, - excerpt_start.lines + (buffer_left_point - buffer_start_point), - "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}", - point, - buffer_id, - buffer_point, - ); - assert_eq!( - right_point, - excerpt_start.lines + (buffer_right_point - buffer_start_point), - "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}", - point, - buffer_id, - buffer_point, - ); - - assert_eq!( - snapshot.point_to_offset(left_point), - left_offset, - "point_to_offset({:?})", - left_point, - ); - assert_eq!( - snapshot.offset_to_point(left_offset), - left_point, - "offset_to_point({:?})", - left_offset, - ); - - offset += 1; - buffer_offset += 1; - if ch == '\n' { - point += Point::new(1, 0); - buffer_point += Point::new(1, 0); - } else { - point += Point::new(0, 1); - buffer_point += Point::new(0, 1); - } - } - - for _ in 0..ch.len_utf16() { - let left_point_utf16 = - snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Left); - let right_point_utf16 = - snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Right); - let buffer_left_point_utf16 = - buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Left); - let buffer_right_point_utf16 = - buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Right); - assert_eq!( - left_point_utf16, - excerpt_start.lines_utf16() - + (buffer_left_point_utf16 - buffer_start_point_utf16), - "clip_point_utf16({:?}, Left). 
buffer: {:?}, buffer point_utf16: {:?}", - point_utf16, - buffer_id, - buffer_point_utf16, - ); - assert_eq!( - right_point_utf16, - excerpt_start.lines_utf16() - + (buffer_right_point_utf16 - buffer_start_point_utf16), - "clip_point_utf16({:?}, Right). buffer: {:?}, buffer point_utf16: {:?}", - point_utf16, - buffer_id, - buffer_point_utf16, - ); - - if ch == '\n' { - point_utf16 += PointUtf16::new(1, 0); - buffer_point_utf16 += PointUtf16::new(1, 0); - } else { - point_utf16 += PointUtf16::new(0, 1); - buffer_point_utf16 += PointUtf16::new(0, 1); - } - } - } - } - - for (row, line) in expected_text.split('\n').enumerate() { - assert_eq!( - snapshot.line_len(MultiBufferRow(row as u32)), - line.len() as u32, - "line_len({}).", - row - ); - } - - let text_rope = Rope::from(expected_text.as_str()); - for _ in 0..10 { - let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); - let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); - - let text_for_range = snapshot - .text_for_range(start_ix..end_ix) - .collect::(); - assert_eq!( - text_for_range, - &expected_text[start_ix..end_ix], - "incorrect text for range {:?}", - start_ix..end_ix - ); - - let excerpted_buffer_ranges = multibuffer - .read(cx) - .range_to_buffer_ranges(start_ix..end_ix, cx); - let excerpted_buffers_text = excerpted_buffer_ranges - .iter() - .map(|(buffer, buffer_range, _)| { - buffer - .read(cx) - .text_for_range(buffer_range.clone()) - .collect::() - }) - .collect::>() - .join("\n"); - assert_eq!(excerpted_buffers_text, text_for_range); - if !expected_excerpts.is_empty() { - assert!(!excerpted_buffer_ranges.is_empty()); - } - - let expected_summary = TextSummary::from(&expected_text[start_ix..end_ix]); - assert_eq!( - snapshot.text_summary_for_range::(start_ix..end_ix), - expected_summary, - "incorrect summary for range {:?}", - start_ix..end_ix - ); - } - - // Anchor resolution - let summaries = snapshot.summaries_for_anchors::(&anchors); - 
assert_eq!(anchors.len(), summaries.len()); - for (anchor, resolved_offset) in anchors.iter().zip(summaries) { - assert!(resolved_offset <= snapshot.len()); - assert_eq!( - snapshot.summary_for_anchor::(anchor), - resolved_offset - ); - } - - for _ in 0..10 { - let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); - assert_eq!( - snapshot.reversed_chars_at(end_ix).collect::(), - expected_text[..end_ix].chars().rev().collect::(), - ); - } - - for _ in 0..10 { - let end_ix = rng.gen_range(0..=text_rope.len()); - let start_ix = rng.gen_range(0..=end_ix); - assert_eq!( - snapshot - .bytes_in_range(start_ix..end_ix) - .flatten() - .copied() - .collect::>(), - expected_text.as_bytes()[start_ix..end_ix].to_vec(), - "bytes_in_range({:?})", - start_ix..end_ix, - ); - } - } - - let snapshot = multibuffer.read(cx).snapshot(cx); - for (old_snapshot, subscription) in old_versions { - let edits = subscription.consume().into_inner(); - - log::info!( - "applying subscription edits to old text: {:?}: {:?}", - old_snapshot.text(), - edits, - ); - - let mut text = old_snapshot.text(); - for edit in edits { - let new_text: String = snapshot.text_for_range(edit.new.clone()).collect(); - text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text); - } - assert_eq!(text.to_string(), snapshot.text()); - } - } - - #[gpui::test] - fn test_history(cx: &mut AppContext) { - let test_settings = SettingsStore::test(cx); - cx.set_global(test_settings); - let group_interval: Duration = Duration::from_millis(1); - let buffer_1 = cx.new_model(|cx| { - let mut buf = Buffer::local("1234", cx); - buf.set_group_interval(group_interval); - buf - }); - let buffer_2 = cx.new_model(|cx| { - let mut buf = Buffer::local("5678", cx); - buf.set_group_interval(group_interval); - buf - }); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - multibuffer.update(cx, |this, _| { - this.history.group_interval = group_interval; - }); - 
multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - ); - }); - - let mut now = Instant::now(); - - multibuffer.update(cx, |multibuffer, cx| { - let transaction_1 = multibuffer.start_transaction_at(now, cx).unwrap(); - multibuffer.edit( - [ - (Point::new(0, 0)..Point::new(0, 0), "A"), - (Point::new(1, 0)..Point::new(1, 0), "A"), - ], - None, - cx, - ); - multibuffer.edit( - [ - (Point::new(0, 1)..Point::new(0, 1), "B"), - (Point::new(1, 1)..Point::new(1, 1), "B"), - ], - None, - cx, - ); - multibuffer.end_transaction_at(now, cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); - - // Verify edited ranges for transaction 1 - assert_eq!( - multibuffer.edited_ranges_for_transaction(transaction_1, cx), - &[ - Point::new(0, 0)..Point::new(0, 2), - Point::new(1, 0)..Point::new(1, 2) - ] - ); - - // Edit buffer 1 through the multibuffer - now += 2 * group_interval; - multibuffer.start_transaction_at(now, cx); - multibuffer.edit([(2..2, "C")], None, cx); - multibuffer.end_transaction_at(now, cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); - - // Edit buffer 1 independently - buffer_1.update(cx, |buffer_1, cx| { - buffer_1.start_transaction_at(now); - buffer_1.edit([(3..3, "D")], None, cx); - buffer_1.end_transaction_at(now, cx); - - now += 2 * group_interval; - buffer_1.start_transaction_at(now); - buffer_1.edit([(4..4, "E")], None, cx); - buffer_1.end_transaction_at(now, cx); - }); - assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678"); - - // An undo in the multibuffer undoes the multibuffer transaction - // and also any individual buffer edits that have occurred since - // that transaction. 
- multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); - - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); - - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); - - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678"); - - // Undo buffer 2 independently. - buffer_2.update(cx, |buffer_2, cx| buffer_2.undo(cx)); - assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\n5678"); - - // An undo in the multibuffer undoes the components of the - // the last multibuffer transaction that are not already undone. - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\n5678"); - - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); - - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); - - buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx)); - assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678"); - - // Redo stack gets cleared after an edit. - now += 2 * group_interval; - multibuffer.start_transaction_at(now, cx); - multibuffer.edit([(0..0, "X")], None, cx); - multibuffer.end_transaction_at(now, cx); - assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678"); - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); - - // Transactions can be grouped manually. 
- multibuffer.redo(cx); - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); - multibuffer.group_until_transaction(transaction_1, cx); - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); - }); - } - - #[gpui::test] - fn test_excerpts_in_ranges_no_ranges(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - ); - }); - - let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - - let mut excerpts = snapshot.excerpts_in_ranges(iter::from_fn(|| None)); - - assert!(excerpts.next().is_none()); - } - - fn validate_excerpts( - actual: &[(ExcerptId, BufferId, Range)], - expected: &Vec<(ExcerptId, BufferId, Range)>, - ) { - assert_eq!(actual.len(), expected.len()); - - actual - .iter() - .zip(expected) - .map(|(actual, expected)| { - assert_eq!(actual.0, expected.0); - assert_eq!(actual.1, expected.1); - assert_eq!(actual.2.start, expected.2.start); - assert_eq!(actual.2.end, expected.2.end); - }) - .collect_vec(); - } - - fn map_range_from_excerpt( - snapshot: &MultiBufferSnapshot, - excerpt_id: ExcerptId, - excerpt_buffer: &BufferSnapshot, - range: Range, - ) -> Range { - snapshot - .anchor_in_excerpt(excerpt_id, excerpt_buffer.anchor_before(range.start)) - .unwrap() - ..snapshot - .anchor_in_excerpt(excerpt_id, excerpt_buffer.anchor_after(range.end)) - .unwrap() - } - - fn 
make_expected_excerpt_info( - snapshot: &MultiBufferSnapshot, - cx: &mut AppContext, - excerpt_id: ExcerptId, - buffer: &Model, - range: Range, - ) -> (ExcerptId, BufferId, Range) { - ( - excerpt_id, - buffer.read(cx).remote_id(), - map_range_from_excerpt(snapshot, excerpt_id, &buffer.read(cx).snapshot(), range), - ) - } - - #[gpui::test] - fn test_excerpts_in_ranges_range_inside_the_excerpt(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let mut expected_excerpt_id = ExcerptId(0); - - multibuffer.update(cx, |multibuffer, cx| { - expected_excerpt_id = multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - )[0]; - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - ); - }); - - let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - - let range = snapshot - .anchor_in_excerpt(expected_excerpt_id, buffer_1.read(cx).anchor_before(1)) - .unwrap() - ..snapshot - .anchor_in_excerpt( - expected_excerpt_id, - buffer_1.read(cx).anchor_after(buffer_len / 2), - ) - .unwrap(); - - let expected_excerpts = vec![make_expected_excerpt_info( - &snapshot, - cx, - expected_excerpt_id, - &buffer_1, - 1..(buffer_len / 2), - )]; - - let excerpts = snapshot - .excerpts_in_ranges(vec![range.clone()].into_iter()) - .map(|(excerpt_id, buffer, actual_range)| { - ( - excerpt_id, - buffer.remote_id(), - map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), - ) - }) - .collect_vec(); - - validate_excerpts(&excerpts, &expected_excerpts); - } - - #[gpui::test] - fn test_excerpts_in_ranges_range_crosses_excerpts_boundary(cx: &mut AppContext) 
{ - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let mut excerpt_1_id = ExcerptId(0); - let mut excerpt_2_id = ExcerptId(0); - - multibuffer.update(cx, |multibuffer, cx| { - excerpt_1_id = multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - )[0]; - excerpt_2_id = multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - )[0]; - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let expected_range = snapshot - .anchor_in_excerpt( - excerpt_1_id, - buffer_1.read(cx).anchor_before(buffer_len / 2), - ) - .unwrap() - ..snapshot - .anchor_in_excerpt(excerpt_2_id, buffer_2.read(cx).anchor_after(buffer_len / 2)) - .unwrap(); - - let expected_excerpts = vec![ - make_expected_excerpt_info( - &snapshot, - cx, - excerpt_1_id, - &buffer_1, - (buffer_len / 2)..buffer_len, - ), - make_expected_excerpt_info(&snapshot, cx, excerpt_2_id, &buffer_2, 0..buffer_len / 2), - ]; - - let excerpts = snapshot - .excerpts_in_ranges(vec![expected_range.clone()].into_iter()) - .map(|(excerpt_id, buffer, actual_range)| { - ( - excerpt_id, - buffer.remote_id(), - map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), - ) - }) - .collect_vec(); - - validate_excerpts(&excerpts, &expected_excerpts); - } - - #[gpui::test] - fn test_excerpts_in_ranges_range_encloses_excerpt(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'r'), cx)); - let buffer_len = buffer_1.read(cx).len(); - let 
multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let mut excerpt_1_id = ExcerptId(0); - let mut excerpt_2_id = ExcerptId(0); - let mut excerpt_3_id = ExcerptId(0); - - multibuffer.update(cx, |multibuffer, cx| { - excerpt_1_id = multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - )[0]; - excerpt_2_id = multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - )[0]; - excerpt_3_id = multibuffer.push_excerpts( - buffer_3.clone(), - [ExcerptRange { - context: 0..buffer_3.read(cx).len(), - primary: None, - }], - cx, - )[0]; - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let expected_range = snapshot - .anchor_in_excerpt( - excerpt_1_id, - buffer_1.read(cx).anchor_before(buffer_len / 2), - ) - .unwrap() - ..snapshot - .anchor_in_excerpt(excerpt_3_id, buffer_3.read(cx).anchor_after(buffer_len / 2)) - .unwrap(); - - let expected_excerpts = vec![ - make_expected_excerpt_info( - &snapshot, - cx, - excerpt_1_id, - &buffer_1, - (buffer_len / 2)..buffer_len, - ), - make_expected_excerpt_info(&snapshot, cx, excerpt_2_id, &buffer_2, 0..buffer_len), - make_expected_excerpt_info(&snapshot, cx, excerpt_3_id, &buffer_3, 0..buffer_len / 2), - ]; - - let excerpts = snapshot - .excerpts_in_ranges(vec![expected_range.clone()].into_iter()) - .map(|(excerpt_id, buffer, actual_range)| { - ( - excerpt_id, - buffer.remote_id(), - map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), - ) - }) - .collect_vec(); - - validate_excerpts(&excerpts, &expected_excerpts); - } - - #[gpui::test] - fn test_excerpts_in_ranges_multiple_ranges(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let buffer_len = buffer_1.read(cx).len(); - let multibuffer = 
cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let mut excerpt_1_id = ExcerptId(0); - let mut excerpt_2_id = ExcerptId(0); - - multibuffer.update(cx, |multibuffer, cx| { - excerpt_1_id = multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - )[0]; - excerpt_2_id = multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - )[0]; - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let ranges = vec![ - 1..(buffer_len / 4), - (buffer_len / 3)..(buffer_len / 2), - (buffer_len / 4 * 3)..(buffer_len), - ]; - - let expected_excerpts = ranges - .iter() - .map(|range| { - make_expected_excerpt_info(&snapshot, cx, excerpt_1_id, &buffer_1, range.clone()) - }) - .collect_vec(); - - let ranges = ranges.into_iter().map(|range| { - map_range_from_excerpt( - &snapshot, - excerpt_1_id, - &buffer_1.read(cx).snapshot(), - range, - ) - }); - - let excerpts = snapshot - .excerpts_in_ranges(ranges) - .map(|(excerpt_id, buffer, actual_range)| { - ( - excerpt_id, - buffer.remote_id(), - map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), - ) - }) - .collect_vec(); - - validate_excerpts(&excerpts, &expected_excerpts); - } - - #[gpui::test] - fn test_excerpts_in_ranges_range_ends_at_excerpt_end(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let mut excerpt_1_id = ExcerptId(0); - let mut excerpt_2_id = ExcerptId(0); - - multibuffer.update(cx, |multibuffer, cx| { - excerpt_1_id = multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - )[0]; - excerpt_2_id = 
multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - )[0]; - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let ranges = [0..buffer_len, (buffer_len / 3)..(buffer_len / 2)]; - - let expected_excerpts = vec![ - make_expected_excerpt_info(&snapshot, cx, excerpt_1_id, &buffer_1, ranges[0].clone()), - make_expected_excerpt_info(&snapshot, cx, excerpt_2_id, &buffer_2, ranges[1].clone()), - ]; - - let ranges = [ - map_range_from_excerpt( - &snapshot, - excerpt_1_id, - &buffer_1.read(cx).snapshot(), - ranges[0].clone(), - ), - map_range_from_excerpt( - &snapshot, - excerpt_2_id, - &buffer_2.read(cx).snapshot(), - ranges[1].clone(), - ), - ]; - - let excerpts = snapshot - .excerpts_in_ranges(ranges.into_iter()) - .map(|(excerpt_id, buffer, actual_range)| { - ( - excerpt_id, - buffer.remote_id(), - map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), - ) - }) - .collect_vec(); - - validate_excerpts(&excerpts, &expected_excerpts); - } - - #[gpui::test] - fn test_split_ranges(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - ); - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let buffer_1_len = buffer_1.read(cx).len(); - let buffer_2_len = buffer_2.read(cx).len(); - let buffer_1_midpoint = buffer_1_len / 2; - let buffer_2_start = buffer_1_len + '\n'.len_utf8(); - let buffer_2_midpoint = buffer_2_start + buffer_2_len / 2; - let 
total_len = buffer_2_start + buffer_2_len; - - let input_ranges = [ - 0..buffer_1_midpoint, - buffer_1_midpoint..buffer_2_midpoint, - buffer_2_midpoint..total_len, - ] - .map(|range| snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end)); - - let actual_ranges = snapshot - .split_ranges(input_ranges.into_iter()) - .map(|range| range.to_offset(&snapshot)) - .collect::>(); - - let expected_ranges = vec![ - 0..buffer_1_midpoint, - buffer_1_midpoint..buffer_1_len, - buffer_2_start..buffer_2_midpoint, - buffer_2_midpoint..total_len, - ]; - - assert_eq!(actual_ranges, expected_ranges); - } - - #[gpui::test] - fn test_split_ranges_single_range_spanning_three_excerpts(cx: &mut AppContext) { - let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'm'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( - buffer_1.clone(), - [ExcerptRange { - context: 0..buffer_1.read(cx).len(), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ExcerptRange { - context: 0..buffer_2.read(cx).len(), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - buffer_3.clone(), - [ExcerptRange { - context: 0..buffer_3.read(cx).len(), - primary: None, - }], - cx, - ); - }); - - let snapshot = multibuffer.read(cx).snapshot(cx); - - let buffer_1_len = buffer_1.read(cx).len(); - let buffer_2_len = buffer_2.read(cx).len(); - let buffer_3_len = buffer_3.read(cx).len(); - let buffer_2_start = buffer_1_len + '\n'.len_utf8(); - let buffer_3_start = buffer_2_start + buffer_2_len + '\n'.len_utf8(); - let buffer_1_midpoint = buffer_1_len / 2; - let buffer_3_midpoint = buffer_3_start + buffer_3_len / 2; - - let input_range = - 
snapshot.anchor_before(buffer_1_midpoint)..snapshot.anchor_after(buffer_3_midpoint); - - let actual_ranges = snapshot - .split_ranges(std::iter::once(input_range)) - .map(|range| range.to_offset(&snapshot)) - .collect::>(); - - let expected_ranges = vec![ - buffer_1_midpoint..buffer_1_len, - buffer_2_start..buffer_2_start + buffer_2_len, - buffer_3_start..buffer_3_midpoint, - ]; - - assert_eq!(actual_ranges, expected_ranges); - } -} diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs new file mode 100644 index 0000000000..059b279b78 --- /dev/null +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -0,0 +1,1989 @@ +use super::*; +use gpui::{AppContext, Context, TestAppContext}; +use language::{Buffer, Rope}; +use parking_lot::RwLock; +use rand::prelude::*; +use settings::SettingsStore; +use std::env; +use util::test::sample_text; + +#[ctor::ctor] +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} + +#[gpui::test] +fn test_singleton(cx: &mut AppContext) { + let buffer = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), buffer.read(cx).text()); + + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), + (0..buffer.read(cx).row_count()) + .map(Some) + .collect::>() + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(1..3, "XXX\n")], None, cx)); + let snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(snapshot.text(), buffer.read(cx).text()); + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), + (0..buffer.read(cx).row_count()) + .map(Some) + .collect::>() + ); +} + +#[gpui::test] +fn test_remote(cx: &mut AppContext) { + let host_buffer = cx.new_model(|cx| Buffer::local("a", cx)); + let guest_buffer = cx.new_model(|cx| { + let state = 
host_buffer.read(cx).to_proto(cx); + let ops = cx + .background_executor() + .block(host_buffer.read(cx).serialize_ops(None, cx)); + let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| language::proto::deserialize_operation(op).unwrap()), + cx, + ); + buffer + }); + let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx)); + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), "a"); + + guest_buffer.update(cx, |buffer, cx| buffer.edit([(1..1, "b")], None, cx)); + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), "ab"); + + guest_buffer.update(cx, |buffer, cx| buffer.edit([(2..2, "c")], None, cx)); + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), "abc"); +} + +#[gpui::test] +fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + let events = Arc::new(RwLock::new(Vec::::new())); + multibuffer.update(cx, |_, cx| { + let events = events.clone(); + cx.subscribe(&multibuffer, move |_, _, event, _| { + if let Event::Edited { .. 
} = event { + events.write().push(event.clone()) + } + }) + .detach(); + }); + + let subscription = multibuffer.update(cx, |multibuffer, cx| { + let subscription = multibuffer.subscribe(); + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: Point::new(1, 2)..Point::new(2, 5), + primary: None, + }], + cx, + ); + assert_eq!( + subscription.consume().into_inner(), + [Edit { + old: 0..0, + new: 0..10 + }] + ); + + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: Point::new(3, 3)..Point::new(4, 4), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: Point::new(3, 1)..Point::new(3, 3), + primary: None, + }], + cx, + ); + assert_eq!( + subscription.consume().into_inner(), + [Edit { + old: 10..10, + new: 10..22 + }] + ); + + subscription + }); + + // Adding excerpts emits an edited event. + assert_eq!( + events.read().as_slice(), + &[ + Event::Edited { + singleton_buffer_edited: false, + edited_buffer: None, + }, + Event::Edited { + singleton_buffer_edited: false, + edited_buffer: None, + }, + Event::Edited { + singleton_buffer_edited: false, + edited_buffer: None, + } + ] + ); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!( + snapshot.text(), + concat!( + "bbbb\n", // Preserve newlines + "ccccc\n", // + "ddd\n", // + "eeee\n", // + "jj" // + ) + ); + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), + [Some(1), Some(2), Some(3), Some(4), Some(3)] + ); + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(2)).collect::>(), + [Some(3), Some(4), Some(3)] + ); + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(4)).collect::>(), + [Some(3)] + ); + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(5)).collect::>(), + [] + ); + + assert_eq!( + boundaries_in_range(Point::new(0, 0)..Point::new(4, 2), &snapshot), + &[ + (MultiBufferRow(0), "bbbb\nccccc".to_string(), true), + (MultiBufferRow(2), "ddd\neeee".to_string(), false), + 
(MultiBufferRow(4), "jj".to_string(), true), + ] + ); + assert_eq!( + boundaries_in_range(Point::new(0, 0)..Point::new(2, 0), &snapshot), + &[(MultiBufferRow(0), "bbbb\nccccc".to_string(), true)] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(1, 5), &snapshot), + &[] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(2, 0), &snapshot), + &[] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), + &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), + &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] + ); + assert_eq!( + boundaries_in_range(Point::new(2, 0)..Point::new(3, 0), &snapshot), + &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] + ); + assert_eq!( + boundaries_in_range(Point::new(4, 0)..Point::new(4, 2), &snapshot), + &[(MultiBufferRow(4), "jj".to_string(), true)] + ); + assert_eq!( + boundaries_in_range(Point::new(4, 2)..Point::new(4, 2), &snapshot), + &[] + ); + + buffer_1.update(cx, |buffer, cx| { + let text = "\n"; + buffer.edit( + [ + (Point::new(0, 0)..Point::new(0, 0), text), + (Point::new(2, 1)..Point::new(2, 3), text), + ], + None, + cx, + ); + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!( + snapshot.text(), + concat!( + "bbbb\n", // Preserve newlines + "c\n", // + "cc\n", // + "ddd\n", // + "eeee\n", // + "jj" // + ) + ); + + assert_eq!( + subscription.consume().into_inner(), + [Edit { + old: 6..8, + new: 6..7 + }] + ); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!( + snapshot.clip_point(Point::new(0, 5), Bias::Left), + Point::new(0, 4) + ); + assert_eq!( + snapshot.clip_point(Point::new(0, 5), Bias::Right), + Point::new(0, 4) + ); + assert_eq!( + snapshot.clip_point(Point::new(5, 1), Bias::Right), + Point::new(5, 1) + ); + assert_eq!( + snapshot.clip_point(Point::new(5, 2), Bias::Right), + Point::new(5, 2) + ); + assert_eq!( 
+ snapshot.clip_point(Point::new(5, 3), Bias::Right), + Point::new(5, 2) + ); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| { + let (buffer_2_excerpt_id, _) = multibuffer.excerpts_for_buffer(&buffer_2, cx)[0].clone(); + multibuffer.remove_excerpts([buffer_2_excerpt_id], cx); + multibuffer.snapshot(cx) + }); + + assert_eq!( + snapshot.text(), + concat!( + "bbbb\n", // Preserve newlines + "c\n", // + "cc\n", // + "ddd\n", // + "eeee", // + ) + ); + + fn boundaries_in_range( + range: Range, + snapshot: &MultiBufferSnapshot, + ) -> Vec<(MultiBufferRow, String, bool)> { + snapshot + .excerpt_boundaries_in_range(range) + .filter_map(|boundary| { + let starts_new_buffer = boundary.starts_new_buffer(); + boundary.next.map(|next| { + ( + boundary.row, + next.buffer + .text_for_range(next.range.context) + .collect::(), + starts_new_buffer, + ) + }) + }) + .collect::>() + } +} + +#[gpui::test] +fn test_excerpt_events(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'm'), cx)); + + let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let follower_edit_event_count = Arc::new(RwLock::new(0)); + + follower_multibuffer.update(cx, |_, cx| { + let follower_edit_event_count = follower_edit_event_count.clone(); + cx.subscribe( + &leader_multibuffer, + move |follower, _, event, cx| match event.clone() { + Event::ExcerptsAdded { + buffer, + predecessor, + excerpts, + } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx), + Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx), + Event::Edited { .. 
} => { + *follower_edit_event_count.write() += 1; + } + _ => {} + }, + ) + .detach(); + }); + + leader_multibuffer.update(cx, |leader, cx| { + leader.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: 0..8, + primary: None, + }, + ExcerptRange { + context: 12..16, + primary: None, + }, + ], + cx, + ); + leader.insert_excerpts_after( + leader.excerpt_ids()[0], + buffer_2.clone(), + [ + ExcerptRange { + context: 0..5, + primary: None, + }, + ExcerptRange { + context: 10..15, + primary: None, + }, + ], + cx, + ) + }); + assert_eq!( + leader_multibuffer.read(cx).snapshot(cx).text(), + follower_multibuffer.read(cx).snapshot(cx).text(), + ); + assert_eq!(*follower_edit_event_count.read(), 2); + + leader_multibuffer.update(cx, |leader, cx| { + let excerpt_ids = leader.excerpt_ids(); + leader.remove_excerpts([excerpt_ids[1], excerpt_ids[3]], cx); + }); + assert_eq!( + leader_multibuffer.read(cx).snapshot(cx).text(), + follower_multibuffer.read(cx).snapshot(cx).text(), + ); + assert_eq!(*follower_edit_event_count.read(), 3); + + // Removing an empty set of excerpts is a noop. + leader_multibuffer.update(cx, |leader, cx| { + leader.remove_excerpts([], cx); + }); + assert_eq!( + leader_multibuffer.read(cx).snapshot(cx).text(), + follower_multibuffer.read(cx).snapshot(cx).text(), + ); + assert_eq!(*follower_edit_event_count.read(), 3); + + // Adding an empty set of excerpts is a noop. 
+ leader_multibuffer.update(cx, |leader, cx| { + leader.push_excerpts::(buffer_2.clone(), [], cx); + }); + assert_eq!( + leader_multibuffer.read(cx).snapshot(cx).text(), + follower_multibuffer.read(cx).snapshot(cx).text(), + ); + assert_eq!(*follower_edit_event_count.read(), 3); + + leader_multibuffer.update(cx, |leader, cx| { + leader.clear(cx); + }); + assert_eq!( + leader_multibuffer.read(cx).snapshot(cx).text(), + follower_multibuffer.read(cx).snapshot(cx).text(), + ); + assert_eq!(*follower_edit_event_count.read(), 4); +} + +#[gpui::test] +fn test_expand_excerpts(cx: &mut AppContext) { + let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts_with_context_lines( + buffer.clone(), + vec![ + // Note that in this test, this first excerpt + // does not contain a new line + Point::new(3, 2)..Point::new(3, 3), + Point::new(7, 1)..Point::new(7, 3), + Point::new(15, 0)..Point::new(15, 0), + ], + 1, + cx, + ) + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!( + snapshot.text(), + concat!( + "ccc\n", // + "ddd\n", // + "eee", // + "\n", // End of excerpt + "ggg\n", // + "hhh\n", // + "iii", // + "\n", // End of excerpt + "ooo\n", // + "ppp\n", // + "qqq", // End of excerpt + ) + ); + drop(snapshot); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.expand_excerpts( + multibuffer.excerpt_ids(), + 1, + ExpandExcerptDirection::UpAndDown, + cx, + ) + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + // Expanding context lines causes the line containing 'fff' to appear in two different excerpts. + // We don't attempt to merge them, because removing the excerpt could create inconsistency with other layers + // that are tracking excerpt ids. 
+ assert_eq!( + snapshot.text(), + concat!( + "bbb\n", // + "ccc\n", // + "ddd\n", // + "eee\n", // + "fff\n", // End of excerpt + "fff\n", // + "ggg\n", // + "hhh\n", // + "iii\n", // + "jjj\n", // End of excerpt + "nnn\n", // + "ooo\n", // + "ppp\n", // + "qqq\n", // + "rrr", // End of excerpt + ) + ); +} + +#[gpui::test] +fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { + let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts_with_context_lines( + buffer.clone(), + vec![ + // Note that in this test, this first excerpt + // does contain a new line + Point::new(3, 2)..Point::new(4, 2), + Point::new(7, 1)..Point::new(7, 3), + Point::new(15, 0)..Point::new(15, 0), + ], + 2, + cx, + ) + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!( + snapshot.text(), + concat!( + "bbb\n", // Preserve newlines + "ccc\n", // + "ddd\n", // + "eee\n", // + "fff\n", // + "ggg\n", // + "hhh\n", // + "iii\n", // + "jjj\n", // + "nnn\n", // + "ooo\n", // + "ppp\n", // + "qqq\n", // + "rrr", // + ) + ); + + assert_eq!( + anchor_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(2, 2)..Point::new(3, 2), + Point::new(6, 1)..Point::new(6, 3), + Point::new(11, 0)..Point::new(11, 0) + ] + ); +} + +#[gpui::test(iterations = 100)] +async fn test_push_multiple_excerpts_with_context_lines(cx: &mut TestAppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(15, 4, 'a'), cx)); + let snapshot_1 = buffer_1.update(cx, |buffer, _| buffer.snapshot()); + let snapshot_2 = buffer_2.update(cx, |buffer, _| buffer.snapshot()); + let ranges_1 = vec![ + snapshot_1.anchor_before(Point::new(3, 2))..snapshot_1.anchor_before(Point::new(4, 2)), + 
snapshot_1.anchor_before(Point::new(7, 1))..snapshot_1.anchor_before(Point::new(7, 3)), + snapshot_1.anchor_before(Point::new(15, 0))..snapshot_1.anchor_before(Point::new(15, 0)), + ]; + let ranges_2 = vec![ + snapshot_2.anchor_before(Point::new(2, 1))..snapshot_2.anchor_before(Point::new(3, 1)), + snapshot_2.anchor_before(Point::new(10, 0))..snapshot_2.anchor_before(Point::new(10, 2)), + ]; + + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let anchor_ranges = multibuffer + .update(cx, |multibuffer, cx| { + multibuffer.push_multiple_excerpts_with_context_lines( + vec![(buffer_1.clone(), ranges_1), (buffer_2.clone(), ranges_2)], + 2, + cx, + ) + }) + .await; + + let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + assert_eq!( + snapshot.text(), + concat!( + "bbb\n", // buffer_1 + "ccc\n", // + "ddd\n", // <-- excerpt 1 + "eee\n", // <-- excerpt 1 + "fff\n", // + "ggg\n", // + "hhh\n", // <-- excerpt 2 + "iii\n", // + "jjj\n", // + // + "nnn\n", // + "ooo\n", // + "ppp\n", // <-- excerpt 3 + "qqq\n", // + "rrr\n", // + // + "aaaa\n", // buffer 2 + "bbbb\n", // + "cccc\n", // <-- excerpt 4 + "dddd\n", // <-- excerpt 4 + "eeee\n", // + "ffff\n", // + // + "iiii\n", // + "jjjj\n", // + "kkkk\n", // <-- excerpt 5 + "llll\n", // + "mmmm", // + ) + ); + + assert_eq!( + anchor_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(2, 2)..Point::new(3, 2), + Point::new(6, 1)..Point::new(6, 3), + Point::new(11, 0)..Point::new(11, 0), + Point::new(16, 1)..Point::new(17, 1), + Point::new(22, 0)..Point::new(22, 2) + ] + ); +} + +#[gpui::test] +fn test_empty_multibuffer(cx: &mut AppContext) { + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), ""); + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), + &[Some(0)] + ); + assert_eq!( + 
snapshot.buffer_rows(MultiBufferRow(1)).collect::>(), + &[] + ); +} + +#[gpui::test] +fn test_singleton_multibuffer_anchors(cx: &mut AppContext) { + let buffer = cx.new_model(|cx| Buffer::local("abcd", cx)); + let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + let old_snapshot = multibuffer.read(cx).snapshot(cx); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "X")], None, cx); + buffer.edit([(5..5, "Y")], None, cx); + }); + let new_snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(old_snapshot.text(), "abcd"); + assert_eq!(new_snapshot.text(), "XabcdY"); + + assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(4).to_offset(&new_snapshot), 5); + assert_eq!(old_snapshot.anchor_after(4).to_offset(&new_snapshot), 6); +} + +#[gpui::test] +fn test_multibuffer_anchors(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local("efghi", cx)); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..4, + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..5, + primary: None, + }], + cx, + ); + multibuffer + }); + let old_snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 0); + assert_eq!(Anchor::min().to_offset(&old_snapshot), 0); + assert_eq!(Anchor::min().to_offset(&old_snapshot), 0); + assert_eq!(Anchor::max().to_offset(&old_snapshot), 10); + assert_eq!(Anchor::max().to_offset(&old_snapshot), 10); + + buffer_1.update(cx, |buffer, cx| { + buffer.edit([(0..0, "W")], None, cx); + buffer.edit([(5..5, 
"X")], None, cx); + }); + buffer_2.update(cx, |buffer, cx| { + buffer.edit([(0..0, "Y")], None, cx); + buffer.edit([(6..6, "Z")], None, cx); + }); + let new_snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(old_snapshot.text(), "abcd\nefghi"); + assert_eq!(new_snapshot.text(), "WabcdX\nYefghiZ"); + + assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 2); + assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 2); + assert_eq!(old_snapshot.anchor_before(2).to_offset(&new_snapshot), 3); + assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3); + assert_eq!(old_snapshot.anchor_before(5).to_offset(&new_snapshot), 7); + assert_eq!(old_snapshot.anchor_after(5).to_offset(&new_snapshot), 8); + assert_eq!(old_snapshot.anchor_before(10).to_offset(&new_snapshot), 13); + assert_eq!(old_snapshot.anchor_after(10).to_offset(&new_snapshot), 14); +} + +#[gpui::test] +fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + // Create an insertion id in buffer 1 that doesn't exist in buffer 2. + // Add an excerpt from buffer 1 that spans this new insertion. + buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx)); + let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { + multibuffer + .push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..7, + primary: None, + }], + cx, + ) + .pop() + .unwrap() + }); + + let snapshot_1 = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot_1.text(), "abcd123"); + + // Replace the buffer 1 excerpt with new excerpts from buffer 2. 
+ let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts([excerpt_id_1], cx); + let mut ids = multibuffer + .push_excerpts( + buffer_2.clone(), + [ + ExcerptRange { + context: 0..4, + primary: None, + }, + ExcerptRange { + context: 6..10, + primary: None, + }, + ExcerptRange { + context: 12..16, + primary: None, + }, + ], + cx, + ) + .into_iter(); + (ids.next().unwrap(), ids.next().unwrap()) + }); + let snapshot_2 = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP"); + + // The old excerpt id doesn't get reused. + assert_ne!(excerpt_id_2, excerpt_id_1); + + // Resolve some anchors from the previous snapshot in the new snapshot. + // The current excerpts are from a different buffer, so we don't attempt to + // resolve the old text anchor in the new buffer. + assert_eq!( + snapshot_2.summary_for_anchor::(&snapshot_1.anchor_before(2)), + 0 + ); + assert_eq!( + snapshot_2.summaries_for_anchors::(&[ + snapshot_1.anchor_before(2), + snapshot_1.anchor_after(3) + ]), + vec![0, 0] + ); + + // Refresh anchors from the old snapshot. The return value indicates that both + // anchors lost their original excerpt. + let refresh = + snapshot_2.refresh_anchors(&[snapshot_1.anchor_before(2), snapshot_1.anchor_after(3)]); + assert_eq!( + refresh, + &[ + (0, snapshot_2.anchor_before(0), false), + (1, snapshot_2.anchor_after(0), false), + ] + ); + + // Replace the middle excerpt with a smaller excerpt in buffer 2, + // that intersects the old excerpt. 
+ let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts([excerpt_id_3], cx); + multibuffer + .insert_excerpts_after( + excerpt_id_2, + buffer_2.clone(), + [ExcerptRange { + context: 5..8, + primary: None, + }], + cx, + ) + .pop() + .unwrap() + }); + + let snapshot_3 = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP"); + assert_ne!(excerpt_id_5, excerpt_id_3); + + // Resolve some anchors from the previous snapshot in the new snapshot. + // The third anchor can't be resolved, since its excerpt has been removed, + // so it resolves to the same position as its predecessor. + let anchors = [ + snapshot_2.anchor_before(0), + snapshot_2.anchor_after(2), + snapshot_2.anchor_after(6), + snapshot_2.anchor_after(14), + ]; + assert_eq!( + snapshot_3.summaries_for_anchors::(&anchors), + &[0, 2, 9, 13] + ); + + let new_anchors = snapshot_3.refresh_anchors(&anchors); + assert_eq!( + new_anchors.iter().map(|a| (a.0, a.2)).collect::>(), + &[(0, true), (1, true), (2, true), (3, true)] + ); + assert_eq!( + snapshot_3.summaries_for_anchors::(new_anchors.iter().map(|a| &a.1)), + &[0, 2, 7, 13] + ); +} + +#[gpui::test(iterations = 100)] +fn test_random_multibuffer(cx: &mut AppContext, mut rng: StdRng) { + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let mut buffers: Vec> = Vec::new(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let mut excerpt_ids = Vec::::new(); + let mut expected_excerpts = Vec::<(Model, Range)>::new(); + let mut anchors = Vec::new(); + let mut old_versions = Vec::new(); + + for _ in 0..operations { + match rng.gen_range(0..100) { + 0..=14 if !buffers.is_empty() => { + let buffer = buffers.choose(&mut rng).unwrap(); + buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); + } + 15..=19 if !expected_excerpts.is_empty() => { + multibuffer.update(cx, |multibuffer, cx| { + 
let ids = multibuffer.excerpt_ids(); + let mut excerpts = HashSet::default(); + for _ in 0..rng.gen_range(0..ids.len()) { + excerpts.extend(ids.choose(&mut rng).copied()); + } + + let line_count = rng.gen_range(0..5); + + let excerpt_ixs = excerpts + .iter() + .map(|id| excerpt_ids.iter().position(|i| i == id).unwrap()) + .collect::>(); + log::info!("Expanding excerpts {excerpt_ixs:?} by {line_count} lines"); + multibuffer.expand_excerpts( + excerpts.iter().cloned(), + line_count, + ExpandExcerptDirection::UpAndDown, + cx, + ); + + if line_count > 0 { + for id in excerpts { + let excerpt_ix = excerpt_ids.iter().position(|&i| i == id).unwrap(); + let (buffer, range) = &mut expected_excerpts[excerpt_ix]; + let snapshot = buffer.read(cx).snapshot(); + let mut point_range = range.to_point(&snapshot); + point_range.start = + Point::new(point_range.start.row.saturating_sub(line_count), 0); + point_range.end = snapshot.clip_point( + Point::new(point_range.end.row + line_count, 0), + Bias::Left, + ); + point_range.end.column = snapshot.line_len(point_range.end.row); + *range = snapshot.anchor_before(point_range.start) + ..snapshot.anchor_after(point_range.end); + } + } + }); + } + 20..=29 if !expected_excerpts.is_empty() => { + let mut ids_to_remove = vec![]; + for _ in 0..rng.gen_range(1..=3) { + if expected_excerpts.is_empty() { + break; + } + + let ix = rng.gen_range(0..expected_excerpts.len()); + ids_to_remove.push(excerpt_ids.remove(ix)); + let (buffer, range) = expected_excerpts.remove(ix); + let buffer = buffer.read(cx); + log::info!( + "Removing excerpt {}: {:?}", + ix, + buffer + .text_for_range(range.to_offset(buffer)) + .collect::(), + ); + } + let snapshot = multibuffer.read(cx).read(cx); + ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); + drop(snapshot); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts(ids_to_remove, cx) + }); + } + 30..=39 if !expected_excerpts.is_empty() => { + let multibuffer = 
multibuffer.read(cx).read(cx); + let offset = + multibuffer.clip_offset(rng.gen_range(0..=multibuffer.len()), Bias::Left); + let bias = if rng.gen() { Bias::Left } else { Bias::Right }; + log::info!("Creating anchor at {} with bias {:?}", offset, bias); + anchors.push(multibuffer.anchor_at(offset, bias)); + anchors.sort_by(|a, b| a.cmp(b, &multibuffer)); + } + 40..=44 if !anchors.is_empty() => { + let multibuffer = multibuffer.read(cx).read(cx); + let prev_len = anchors.len(); + anchors = multibuffer + .refresh_anchors(&anchors) + .into_iter() + .map(|a| a.1) + .collect(); + + // Ensure the newly-refreshed anchors point to a valid excerpt and don't + // overshoot its boundaries. + assert_eq!(anchors.len(), prev_len); + for anchor in &anchors { + if anchor.excerpt_id == ExcerptId::min() + || anchor.excerpt_id == ExcerptId::max() + { + continue; + } + + let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap(); + assert_eq!(excerpt.id, anchor.excerpt_id); + assert!(excerpt.contains(anchor)); + } + } + _ => { + let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { + let base_text = util::RandomCharIter::new(&mut rng) + .take(25) + .collect::(); + + buffers.push(cx.new_model(|cx| Buffer::local(base_text, cx))); + buffers.last().unwrap() + } else { + buffers.choose(&mut rng).unwrap() + }; + + let buffer = buffer_handle.read(cx); + let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); + let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); + let prev_excerpt_ix = rng.gen_range(0..=expected_excerpts.len()); + let prev_excerpt_id = excerpt_ids + .get(prev_excerpt_ix) + .cloned() + .unwrap_or_else(ExcerptId::max); + let excerpt_ix = (prev_excerpt_ix + 1).min(expected_excerpts.len()); + + log::info!( + "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}", + excerpt_ix, + expected_excerpts.len(), + 
buffer_handle.read(cx).remote_id(), + buffer.text(), + start_ix..end_ix, + &buffer.text()[start_ix..end_ix] + ); + + let excerpt_id = multibuffer.update(cx, |multibuffer, cx| { + multibuffer + .insert_excerpts_after( + prev_excerpt_id, + buffer_handle.clone(), + [ExcerptRange { + context: start_ix..end_ix, + primary: None, + }], + cx, + ) + .pop() + .unwrap() + }); + + excerpt_ids.insert(excerpt_ix, excerpt_id); + expected_excerpts.insert(excerpt_ix, (buffer_handle.clone(), anchor_range)); + } + } + + if rng.gen_bool(0.3) { + multibuffer.update(cx, |multibuffer, cx| { + old_versions.push((multibuffer.snapshot(cx), multibuffer.subscribe())); + }) + } + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let mut excerpt_starts = Vec::new(); + let mut expected_text = String::new(); + let mut expected_buffer_rows = Vec::new(); + for (buffer, range) in &expected_excerpts { + let buffer = buffer.read(cx); + let buffer_range = range.to_offset(buffer); + + excerpt_starts.push(TextSummary::from(expected_text.as_str())); + expected_text.extend(buffer.text_for_range(buffer_range.clone())); + expected_text.push('\n'); + + let buffer_row_range = buffer.offset_to_point(buffer_range.start).row + ..=buffer.offset_to_point(buffer_range.end).row; + for row in buffer_row_range { + expected_buffer_rows.push(Some(row)); + } + } + // Remove final trailing newline. 
+ if !expected_excerpts.is_empty() { + expected_text.pop(); + } + + // Always report one buffer row + if expected_buffer_rows.is_empty() { + expected_buffer_rows.push(Some(0)); + } + + assert_eq!(snapshot.text(), expected_text); + log::info!("MultiBuffer text: {:?}", expected_text); + + assert_eq!( + snapshot.buffer_rows(MultiBufferRow(0)).collect::>(), + expected_buffer_rows, + ); + + for _ in 0..5 { + let start_row = rng.gen_range(0..=expected_buffer_rows.len()); + assert_eq!( + snapshot + .buffer_rows(MultiBufferRow(start_row as u32)) + .collect::>(), + &expected_buffer_rows[start_row..], + "buffer_rows({})", + start_row + ); + } + + assert_eq!( + snapshot.widest_line_number(), + expected_buffer_rows.into_iter().flatten().max().unwrap() + 1 + ); + + let mut excerpt_starts = excerpt_starts.into_iter(); + for (buffer, range) in &expected_excerpts { + let buffer = buffer.read(cx); + let buffer_id = buffer.remote_id(); + let buffer_range = range.to_offset(buffer); + let buffer_start_point = buffer.offset_to_point(buffer_range.start); + let buffer_start_point_utf16 = + buffer.text_summary_for_range::(0..buffer_range.start); + + let excerpt_start = excerpt_starts.next().unwrap(); + let mut offset = excerpt_start.len; + let mut buffer_offset = buffer_range.start; + let mut point = excerpt_start.lines; + let mut buffer_point = buffer_start_point; + let mut point_utf16 = excerpt_start.lines_utf16(); + let mut buffer_point_utf16 = buffer_start_point_utf16; + for ch in buffer + .snapshot() + .chunks(buffer_range.clone(), false) + .flat_map(|c| c.text.chars()) + { + for _ in 0..ch.len_utf8() { + let left_offset = snapshot.clip_offset(offset, Bias::Left); + let right_offset = snapshot.clip_offset(offset, Bias::Right); + let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left); + let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right); + assert_eq!( + left_offset, + excerpt_start.len + (buffer_left_offset - buffer_range.start), + 
"clip_offset({:?}, Left). buffer: {:?}, buffer offset: {:?}", + offset, + buffer_id, + buffer_offset, + ); + assert_eq!( + right_offset, + excerpt_start.len + (buffer_right_offset - buffer_range.start), + "clip_offset({:?}, Right). buffer: {:?}, buffer offset: {:?}", + offset, + buffer_id, + buffer_offset, + ); + + let left_point = snapshot.clip_point(point, Bias::Left); + let right_point = snapshot.clip_point(point, Bias::Right); + let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left); + let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right); + assert_eq!( + left_point, + excerpt_start.lines + (buffer_left_point - buffer_start_point), + "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}", + point, + buffer_id, + buffer_point, + ); + assert_eq!( + right_point, + excerpt_start.lines + (buffer_right_point - buffer_start_point), + "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}", + point, + buffer_id, + buffer_point, + ); + + assert_eq!( + snapshot.point_to_offset(left_point), + left_offset, + "point_to_offset({:?})", + left_point, + ); + assert_eq!( + snapshot.offset_to_point(left_offset), + left_point, + "offset_to_point({:?})", + left_offset, + ); + + offset += 1; + buffer_offset += 1; + if ch == '\n' { + point += Point::new(1, 0); + buffer_point += Point::new(1, 0); + } else { + point += Point::new(0, 1); + buffer_point += Point::new(0, 1); + } + } + + for _ in 0..ch.len_utf16() { + let left_point_utf16 = + snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Left); + let right_point_utf16 = + snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Right); + let buffer_left_point_utf16 = + buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Left); + let buffer_right_point_utf16 = + buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Right); + assert_eq!( + left_point_utf16, + excerpt_start.lines_utf16() + + (buffer_left_point_utf16 - buffer_start_point_utf16), + "clip_point_utf16({:?}, Left). 
buffer: {:?}, buffer point_utf16: {:?}", + point_utf16, + buffer_id, + buffer_point_utf16, + ); + assert_eq!( + right_point_utf16, + excerpt_start.lines_utf16() + + (buffer_right_point_utf16 - buffer_start_point_utf16), + "clip_point_utf16({:?}, Right). buffer: {:?}, buffer point_utf16: {:?}", + point_utf16, + buffer_id, + buffer_point_utf16, + ); + + if ch == '\n' { + point_utf16 += PointUtf16::new(1, 0); + buffer_point_utf16 += PointUtf16::new(1, 0); + } else { + point_utf16 += PointUtf16::new(0, 1); + buffer_point_utf16 += PointUtf16::new(0, 1); + } + } + } + } + + for (row, line) in expected_text.split('\n').enumerate() { + assert_eq!( + snapshot.line_len(MultiBufferRow(row as u32)), + line.len() as u32, + "line_len({}).", + row + ); + } + + let text_rope = Rope::from(expected_text.as_str()); + for _ in 0..10 { + let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); + let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + + let text_for_range = snapshot + .text_for_range(start_ix..end_ix) + .collect::(); + assert_eq!( + text_for_range, + &expected_text[start_ix..end_ix], + "incorrect text for range {:?}", + start_ix..end_ix + ); + + let snapshot = multibuffer.read(cx).snapshot(cx); + let excerpted_buffer_ranges = snapshot.range_to_buffer_ranges(start_ix..end_ix); + let excerpted_buffers_text = excerpted_buffer_ranges + .iter() + .map(|(excerpt, buffer_range)| { + excerpt + .buffer() + .text_for_range(buffer_range.clone()) + .collect::() + }) + .collect::>() + .join("\n"); + assert_eq!(excerpted_buffers_text, text_for_range); + if !expected_excerpts.is_empty() { + assert!(!excerpted_buffer_ranges.is_empty()); + } + + let expected_summary = TextSummary::from(&expected_text[start_ix..end_ix]); + assert_eq!( + snapshot.text_summary_for_range::(start_ix..end_ix), + expected_summary, + "incorrect summary for range {:?}", + start_ix..end_ix + ); + } + + // Anchor resolution + let summaries = 
snapshot.summaries_for_anchors::(&anchors); + assert_eq!(anchors.len(), summaries.len()); + for (anchor, resolved_offset) in anchors.iter().zip(summaries) { + assert!(resolved_offset <= snapshot.len()); + assert_eq!( + snapshot.summary_for_anchor::(anchor), + resolved_offset + ); + } + + for _ in 0..10 { + let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); + assert_eq!( + snapshot.reversed_chars_at(end_ix).collect::(), + expected_text[..end_ix].chars().rev().collect::(), + ); + } + + for _ in 0..10 { + let end_ix = rng.gen_range(0..=text_rope.len()); + let start_ix = rng.gen_range(0..=end_ix); + assert_eq!( + snapshot + .bytes_in_range(start_ix..end_ix) + .flatten() + .copied() + .collect::>(), + expected_text.as_bytes()[start_ix..end_ix].to_vec(), + "bytes_in_range({:?})", + start_ix..end_ix, + ); + } + } + + let snapshot = multibuffer.read(cx).snapshot(cx); + for (old_snapshot, subscription) in old_versions { + let edits = subscription.consume().into_inner(); + + log::info!( + "applying subscription edits to old text: {:?}: {:?}", + old_snapshot.text(), + edits, + ); + + let mut text = old_snapshot.text(); + for edit in edits { + let new_text: String = snapshot.text_for_range(edit.new.clone()).collect(); + text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text); + } + assert_eq!(text.to_string(), snapshot.text()); + } +} + +#[gpui::test] +fn test_history(cx: &mut AppContext) { + let test_settings = SettingsStore::test(cx); + cx.set_global(test_settings); + let group_interval: Duration = Duration::from_millis(1); + let buffer_1 = cx.new_model(|cx| { + let mut buf = Buffer::local("1234", cx); + buf.set_group_interval(group_interval); + buf + }); + let buffer_2 = cx.new_model(|cx| { + let mut buf = Buffer::local("5678", cx); + buf.set_group_interval(group_interval); + buf + }); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |this, _| { + 
this.history.group_interval = group_interval; + }); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + ); + }); + + let mut now = Instant::now(); + + multibuffer.update(cx, |multibuffer, cx| { + let transaction_1 = multibuffer.start_transaction_at(now, cx).unwrap(); + multibuffer.edit( + [ + (Point::new(0, 0)..Point::new(0, 0), "A"), + (Point::new(1, 0)..Point::new(1, 0), "A"), + ], + None, + cx, + ); + multibuffer.edit( + [ + (Point::new(0, 1)..Point::new(0, 1), "B"), + (Point::new(1, 1)..Point::new(1, 1), "B"), + ], + None, + cx, + ); + multibuffer.end_transaction_at(now, cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); + + // Verify edited ranges for transaction 1 + assert_eq!( + multibuffer.edited_ranges_for_transaction(transaction_1, cx), + &[ + Point::new(0, 0)..Point::new(0, 2), + Point::new(1, 0)..Point::new(1, 2) + ] + ); + + // Edit buffer 1 through the multibuffer + now += 2 * group_interval; + multibuffer.start_transaction_at(now, cx); + multibuffer.edit([(2..2, "C")], None, cx); + multibuffer.end_transaction_at(now, cx); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); + + // Edit buffer 1 independently + buffer_1.update(cx, |buffer_1, cx| { + buffer_1.start_transaction_at(now); + buffer_1.edit([(3..3, "D")], None, cx); + buffer_1.end_transaction_at(now, cx); + + now += 2 * group_interval; + buffer_1.start_transaction_at(now); + buffer_1.edit([(4..4, "E")], None, cx); + buffer_1.end_transaction_at(now, cx); + }); + assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678"); + + // An undo in the multibuffer undoes the multibuffer transaction + // and also any individual buffer edits that have occurred since + // that transaction. 
+ multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678"); + + // Undo buffer 2 independently. + buffer_2.update(cx, |buffer_2, cx| buffer_2.undo(cx)); + assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\n5678"); + + // An undo in the multibuffer undoes the components of the + // the last multibuffer transaction that are not already undone. + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\n5678"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); + + buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx)); + assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678"); + + // Redo stack gets cleared after an edit. + now += 2 * group_interval; + multibuffer.start_transaction_at(now, cx); + multibuffer.edit([(0..0, "X")], None, cx); + multibuffer.end_transaction_at(now, cx); + assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678"); + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); + + // Transactions can be grouped manually. 
+ multibuffer.redo(cx); + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); + multibuffer.group_until_transaction(transaction_1, cx); + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678"); + }); +} + +#[gpui::test] +fn test_excerpts_in_ranges_no_ranges(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + ); + }); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let mut excerpts = snapshot.excerpts_in_ranges(iter::from_fn(|| None)); + + assert!(excerpts.next().is_none()); +} + +fn validate_excerpts( + actual: &[(ExcerptId, BufferId, Range)], + expected: &Vec<(ExcerptId, BufferId, Range)>, +) { + assert_eq!(actual.len(), expected.len()); + + actual + .iter() + .zip(expected) + .map(|(actual, expected)| { + assert_eq!(actual.0, expected.0); + assert_eq!(actual.1, expected.1); + assert_eq!(actual.2.start, expected.2.start); + assert_eq!(actual.2.end, expected.2.end); + }) + .collect_vec(); +} + +fn map_range_from_excerpt( + snapshot: &MultiBufferSnapshot, + excerpt_id: ExcerptId, + excerpt_buffer: &BufferSnapshot, + range: Range, +) -> Range { + snapshot + .anchor_in_excerpt(excerpt_id, excerpt_buffer.anchor_before(range.start)) + .unwrap() + ..snapshot + .anchor_in_excerpt(excerpt_id, excerpt_buffer.anchor_after(range.end)) + .unwrap() +} + +fn 
make_expected_excerpt_info( + snapshot: &MultiBufferSnapshot, + cx: &mut AppContext, + excerpt_id: ExcerptId, + buffer: &Model, + range: Range, +) -> (ExcerptId, BufferId, Range) { + ( + excerpt_id, + buffer.read(cx).remote_id(), + map_range_from_excerpt(snapshot, excerpt_id, &buffer.read(cx).snapshot(), range), + ) +} + +#[gpui::test] +fn test_excerpts_in_ranges_range_inside_the_excerpt(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_len = buffer_1.read(cx).len(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let mut expected_excerpt_id = ExcerptId(0); + + multibuffer.update(cx, |multibuffer, cx| { + expected_excerpt_id = multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + )[0]; + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + ); + }); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let range = snapshot + .anchor_in_excerpt(expected_excerpt_id, buffer_1.read(cx).anchor_before(1)) + .unwrap() + ..snapshot + .anchor_in_excerpt( + expected_excerpt_id, + buffer_1.read(cx).anchor_after(buffer_len / 2), + ) + .unwrap(); + + let expected_excerpts = vec![make_expected_excerpt_info( + &snapshot, + cx, + expected_excerpt_id, + &buffer_1, + 1..(buffer_len / 2), + )]; + + let excerpts = snapshot + .excerpts_in_ranges(vec![range.clone()].into_iter()) + .map(|(excerpt_id, buffer, actual_range)| { + ( + excerpt_id, + buffer.remote_id(), + map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), + ) + }) + .collect_vec(); + + validate_excerpts(&excerpts, &expected_excerpts); +} + +#[gpui::test] +fn test_excerpts_in_ranges_range_crosses_excerpts_boundary(cx: &mut AppContext) { + 
let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_len = buffer_1.read(cx).len(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let mut excerpt_1_id = ExcerptId(0); + let mut excerpt_2_id = ExcerptId(0); + + multibuffer.update(cx, |multibuffer, cx| { + excerpt_1_id = multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + )[0]; + excerpt_2_id = multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + )[0]; + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let expected_range = snapshot + .anchor_in_excerpt( + excerpt_1_id, + buffer_1.read(cx).anchor_before(buffer_len / 2), + ) + .unwrap() + ..snapshot + .anchor_in_excerpt(excerpt_2_id, buffer_2.read(cx).anchor_after(buffer_len / 2)) + .unwrap(); + + let expected_excerpts = vec![ + make_expected_excerpt_info( + &snapshot, + cx, + excerpt_1_id, + &buffer_1, + (buffer_len / 2)..buffer_len, + ), + make_expected_excerpt_info(&snapshot, cx, excerpt_2_id, &buffer_2, 0..buffer_len / 2), + ]; + + let excerpts = snapshot + .excerpts_in_ranges(vec![expected_range.clone()].into_iter()) + .map(|(excerpt_id, buffer, actual_range)| { + ( + excerpt_id, + buffer.remote_id(), + map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), + ) + }) + .collect_vec(); + + validate_excerpts(&excerpts, &expected_excerpts); +} + +#[gpui::test] +fn test_excerpts_in_ranges_range_encloses_excerpt(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'r'), cx)); + let buffer_len = buffer_1.read(cx).len(); + let multibuffer = 
cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let mut excerpt_1_id = ExcerptId(0); + let mut excerpt_2_id = ExcerptId(0); + let mut excerpt_3_id = ExcerptId(0); + + multibuffer.update(cx, |multibuffer, cx| { + excerpt_1_id = multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + )[0]; + excerpt_2_id = multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + )[0]; + excerpt_3_id = multibuffer.push_excerpts( + buffer_3.clone(), + [ExcerptRange { + context: 0..buffer_3.read(cx).len(), + primary: None, + }], + cx, + )[0]; + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let expected_range = snapshot + .anchor_in_excerpt( + excerpt_1_id, + buffer_1.read(cx).anchor_before(buffer_len / 2), + ) + .unwrap() + ..snapshot + .anchor_in_excerpt(excerpt_3_id, buffer_3.read(cx).anchor_after(buffer_len / 2)) + .unwrap(); + + let expected_excerpts = vec![ + make_expected_excerpt_info( + &snapshot, + cx, + excerpt_1_id, + &buffer_1, + (buffer_len / 2)..buffer_len, + ), + make_expected_excerpt_info(&snapshot, cx, excerpt_2_id, &buffer_2, 0..buffer_len), + make_expected_excerpt_info(&snapshot, cx, excerpt_3_id, &buffer_3, 0..buffer_len / 2), + ]; + + let excerpts = snapshot + .excerpts_in_ranges(vec![expected_range.clone()].into_iter()) + .map(|(excerpt_id, buffer, actual_range)| { + ( + excerpt_id, + buffer.remote_id(), + map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), + ) + }) + .collect_vec(); + + validate_excerpts(&excerpts, &expected_excerpts); +} + +#[gpui::test] +fn test_excerpts_in_ranges_multiple_ranges(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_len = buffer_1.read(cx).len(); + let multibuffer = cx.new_model(|_| 
MultiBuffer::new(Capability::ReadWrite)); + let mut excerpt_1_id = ExcerptId(0); + let mut excerpt_2_id = ExcerptId(0); + + multibuffer.update(cx, |multibuffer, cx| { + excerpt_1_id = multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + )[0]; + excerpt_2_id = multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + )[0]; + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let ranges = vec![ + 1..(buffer_len / 4), + (buffer_len / 3)..(buffer_len / 2), + (buffer_len / 4 * 3)..(buffer_len), + ]; + + let expected_excerpts = ranges + .iter() + .map(|range| { + make_expected_excerpt_info(&snapshot, cx, excerpt_1_id, &buffer_1, range.clone()) + }) + .collect_vec(); + + let ranges = ranges.into_iter().map(|range| { + map_range_from_excerpt( + &snapshot, + excerpt_1_id, + &buffer_1.read(cx).snapshot(), + range, + ) + }); + + let excerpts = snapshot + .excerpts_in_ranges(ranges) + .map(|(excerpt_id, buffer, actual_range)| { + ( + excerpt_id, + buffer.remote_id(), + map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), + ) + }) + .collect_vec(); + + validate_excerpts(&excerpts, &expected_excerpts); +} + +#[gpui::test] +fn test_excerpts_in_ranges_range_ends_at_excerpt_end(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_len = buffer_1.read(cx).len(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let mut excerpt_1_id = ExcerptId(0); + let mut excerpt_2_id = ExcerptId(0); + + multibuffer.update(cx, |multibuffer, cx| { + excerpt_1_id = multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + )[0]; + excerpt_2_id = multibuffer.push_excerpts( + 
buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + )[0]; + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let ranges = [0..buffer_len, (buffer_len / 3)..(buffer_len / 2)]; + + let expected_excerpts = vec![ + make_expected_excerpt_info(&snapshot, cx, excerpt_1_id, &buffer_1, ranges[0].clone()), + make_expected_excerpt_info(&snapshot, cx, excerpt_2_id, &buffer_2, ranges[1].clone()), + ]; + + let ranges = [ + map_range_from_excerpt( + &snapshot, + excerpt_1_id, + &buffer_1.read(cx).snapshot(), + ranges[0].clone(), + ), + map_range_from_excerpt( + &snapshot, + excerpt_2_id, + &buffer_2.read(cx).snapshot(), + ranges[1].clone(), + ), + ]; + + let excerpts = snapshot + .excerpts_in_ranges(ranges.into_iter()) + .map(|(excerpt_id, buffer, actual_range)| { + ( + excerpt_id, + buffer.remote_id(), + map_range_from_excerpt(&snapshot, excerpt_id, buffer, actual_range), + ) + }) + .collect_vec(); + + validate_excerpts(&excerpts, &expected_excerpts); +} + +#[gpui::test] +fn test_split_ranges(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + ); + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let buffer_1_len = buffer_1.read(cx).len(); + let buffer_2_len = buffer_2.read(cx).len(); + let buffer_1_midpoint = buffer_1_len / 2; + let buffer_2_start = buffer_1_len + '\n'.len_utf8(); + let buffer_2_midpoint = buffer_2_start + buffer_2_len / 2; + let total_len = buffer_2_start + 
buffer_2_len; + + let input_ranges = [ + 0..buffer_1_midpoint, + buffer_1_midpoint..buffer_2_midpoint, + buffer_2_midpoint..total_len, + ] + .map(|range| snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end)); + + let actual_ranges = snapshot + .split_ranges(input_ranges.into_iter()) + .map(|range| range.to_offset(&snapshot)) + .collect::>(); + + let expected_ranges = vec![ + 0..buffer_1_midpoint, + buffer_1_midpoint..buffer_1_len, + buffer_2_start..buffer_2_midpoint, + buffer_2_midpoint..total_len, + ]; + + assert_eq!(actual_ranges, expected_ranges); +} + +#[gpui::test] +fn test_split_ranges_single_range_spanning_three_excerpts(cx: &mut AppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'm'), cx)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: 0..buffer_1.read(cx).len(), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: 0..buffer_2.read(cx).len(), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_3.clone(), + [ExcerptRange { + context: 0..buffer_3.read(cx).len(), + primary: None, + }], + cx, + ); + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + + let buffer_1_len = buffer_1.read(cx).len(); + let buffer_2_len = buffer_2.read(cx).len(); + let buffer_3_len = buffer_3.read(cx).len(); + let buffer_2_start = buffer_1_len + '\n'.len_utf8(); + let buffer_3_start = buffer_2_start + buffer_2_len + '\n'.len_utf8(); + let buffer_1_midpoint = buffer_1_len / 2; + let buffer_3_midpoint = buffer_3_start + buffer_3_len / 2; + + let input_range = + snapshot.anchor_before(buffer_1_midpoint)..snapshot.anchor_after(buffer_3_midpoint); + 
+ let actual_ranges = snapshot + .split_ranges(std::iter::once(input_range)) + .map(|range| range.to_offset(&snapshot)) + .collect::>(); + + let expected_ranges = vec![ + buffer_1_midpoint..buffer_1_len, + buffer_2_start..buffer_2_start + buffer_2_len, + buffer_3_start..buffer_3_midpoint, + ]; + + assert_eq!(actual_ranges, expected_ranges); +} diff --git a/crates/node_runtime/Cargo.toml b/crates/node_runtime/Cargo.toml index d852b7ebdf..20b6be407f 100644 --- a/crates/node_runtime/Cargo.toml +++ b/crates/node_runtime/Cargo.toml @@ -37,7 +37,6 @@ which.workspace = true [target.'cfg(windows)'.dependencies] async-std = { version = "1.12.0", features = ["unstable"] } -windows.workspace = true [dev-dependencies] tempfile.workspace = true diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 9ad14bddc4..33df4f7d15 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -9,7 +9,7 @@ use http_client::{HttpClient, Uri}; use semver::Version; use serde::Deserialize; use smol::io::BufReader; -use smol::{fs, lock::Mutex, process::Command}; +use smol::{fs, lock::Mutex}; use std::ffi::OsString; use std::io; use std::process::{Output, Stdio}; @@ -20,9 +20,6 @@ use std::{ }; use util::ResultExt; -#[cfg(windows)] -use smol::process::windows::CommandExt; - #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct NodeBinaryOptions { pub allow_path_lookup: bool, @@ -315,9 +312,7 @@ impl ManagedNodeRuntime { let node_binary = node_dir.join(Self::NODE_PATH); let npm_file = node_dir.join(Self::NPM_PATH); - let mut command = Command::new(&node_binary); - - command + let result = util::command::new_smol_command(&node_binary) .env_clear() .arg(npm_file) .arg("--version") @@ -326,12 +321,9 @@ impl ManagedNodeRuntime { .stderr(Stdio::null()) .args(["--cache".into(), node_dir.join("cache")]) .args(["--userconfig".into(), node_dir.join("blank_user_npmrc")]) - .args(["--globalconfig".into(), 
node_dir.join("blank_global_npmrc")]); - - #[cfg(windows)] - command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - - let result = command.status().await; + .args(["--globalconfig".into(), node_dir.join("blank_global_npmrc")]) + .status() + .await; let valid = matches!(result, Ok(status) if status.success()); if !valid { @@ -412,7 +404,7 @@ impl NodeRuntimeTrait for ManagedNodeRuntime { return Err(anyhow!("missing npm file")); } - let mut command = Command::new(node_binary); + let mut command = util::command::new_smol_command(node_binary); command.env_clear(); command.env("PATH", env_path); command.arg(npm_file).arg(subcommand); @@ -473,7 +465,7 @@ pub struct SystemNodeRuntime { impl SystemNodeRuntime { const MIN_VERSION: semver::Version = Version::new(18, 0, 0); async fn new(node: PathBuf, npm: PathBuf) -> Result> { - let output = Command::new(&node) + let output = util::command::new_smol_command(&node) .arg("--version") .output() .await @@ -543,7 +535,7 @@ impl NodeRuntimeTrait for SystemNodeRuntime { subcommand: &str, args: &[&str], ) -> anyhow::Result { - let mut command = Command::new(self.npm.clone()); + let mut command = util::command::new_smol_command(self.npm.clone()); command .env_clear() .env("PATH", std::env::var_os("PATH").unwrap_or_default()) @@ -639,7 +631,11 @@ impl NodeRuntimeTrait for UnavailableNodeRuntime { } } -fn configure_npm_command(command: &mut Command, directory: Option<&Path>, proxy: Option<&Uri>) { +fn configure_npm_command( + command: &mut smol::process::Command, + directory: Option<&Path>, + proxy: Option<&Uri>, +) { if let Some(directory) = directory { command.current_dir(directory); command.args(["--prefix".into(), directory.to_path_buf()]); @@ -674,6 +670,5 @@ fn configure_npm_command(command: &mut Command, directory: Option<&Path>, proxy: { command.env("ComSpec", val); } - command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); } } diff --git 
a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index 5c3de53ee1..a61f1da1c4 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -238,11 +238,8 @@ impl NotificationStore { ) -> Result<()> { this.update(&mut cx, |this, cx| { if let Some(notification) = envelope.payload.notification { - if let Some(rpc::Notification::ChannelMessageMention { - message_id, - sender_id: _, - channel_id: _, - }) = Notification::from_proto(¬ification) + if let Some(rpc::Notification::ChannelMessageMention { message_id, .. }) = + Notification::from_proto(¬ification) { let fetch_message_task = this.channel_store.update(cx, |this, cx| { this.fetch_channel_messages(vec![message_id], cx) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index a133085020..5168da38be 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -81,9 +81,10 @@ fn get_max_tokens(name: &str) -> usize { "llama2" | "yi" | "vicuna" | "stablelm2" => 4096, "llama3" | "gemma2" | "gemma" | "codegemma" | "starcoder" | "aya" => 8192, "codellama" | "starcoder2" => 16384, - "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768, + "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "qwen2.5-coder" + | "dolphin-mixtral" => 32768, "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" - | "llama3.2" | "qwen2.5-coder" => 128000, + | "llama3.2" => 128000, _ => DEFAULT_TOKENS, } .clamp(1, MAXIMUM_TOKENS) diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index dfafff2089..1e92841249 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -169,6 +169,24 @@ pub struct Request { pub tools: Vec, } +#[derive(Debug, Serialize, Deserialize)] +pub struct CompletionRequest { + pub model: String, + pub prompt: String, + pub max_tokens: u32, + pub temperature: f32, + 
#[serde(default, skip_serializing_if = "Option::is_none")] + pub prediction: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub rewrite_speculation: Option, +} + +#[derive(Clone, Deserialize, Serialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum Prediction { + Content { content: String }, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(untagged)] pub enum ToolChoice { @@ -285,6 +303,21 @@ pub struct ResponseStreamEvent { pub usage: Option, } +#[derive(Serialize, Deserialize, Debug)] +pub struct CompletionResponse { + pub id: String, + pub object: String, + pub created: u64, + pub model: String, + pub choices: Vec, + pub usage: Usage, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct CompletionChoice { + pub text: String, +} + #[derive(Serialize, Deserialize, Debug)] pub struct Response { pub id: String, @@ -355,6 +388,56 @@ pub async fn complete( } } +pub async fn complete_text( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: CompletionRequest, +) -> Result { + let uri = format!("{api_url}/completions"); + let request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_key)); + + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; + let mut response = client.send(request).await?; + + if response.status().is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + let response = serde_json::from_str(&body)?; + Ok(response) + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + #[derive(Deserialize)] + struct OpenAiResponse { + error: OpenAiError, + } + + #[derive(Deserialize)] + struct OpenAiError { + message: String, + } + + match serde_json::from_str::(&body) { + Ok(response) if !response.error.message.is_empty() => Err(anyhow!( + 
"Failed to connect to OpenAI API: {}", + response.error.message, + )), + + _ => Err(anyhow!( + "Failed to connect to OpenAI API: {} {}", + response.status(), + body, + )), + } + } +} + fn adapt_response_to_stream(response: Response) -> ResponseStreamEvent { ResponseStreamEvent { created: response.created as u32, diff --git a/crates/outline/Cargo.toml b/crates/outline/Cargo.toml index 6f385f5d8d..064c8b3ef8 100644 --- a/crates/outline/Cargo.toml +++ b/crates/outline/Cargo.toml @@ -25,6 +25,7 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 154b9297a3..8235a7e41e 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -4,9 +4,7 @@ use std::{ sync::Arc, }; -use editor::{ - actions::ToggleOutline, scroll::Autoscroll, Anchor, AnchorRangeExt, Editor, EditorMode, -}; +use editor::{scroll::Autoscroll, Anchor, AnchorRangeExt, Editor, EditorMode}; use fuzzy::StringMatch; use gpui::{ div, rems, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, HighlightStyle, @@ -24,9 +22,22 @@ use workspace::{DismissDecision, ModalView}; pub fn init(cx: &mut AppContext) { cx.observe_new_views(OutlineView::register).detach(); + zed_actions::outline::TOGGLE_OUTLINE + .set(|view, cx| { + let Ok(view) = view.downcast::() else { + return; + }; + + toggle(view, &Default::default(), cx); + }) + .ok(); } -pub fn toggle(editor: View, _: &ToggleOutline, cx: &mut WindowContext) { +pub fn toggle( + editor: View, + _: &zed_actions::outline::ToggleOutline, + cx: &mut WindowContext, +) { let outline = editor .read(cx) .buffer() @@ -280,7 +291,7 @@ impl PickerDelegate for OutlineViewDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child( div() .text_ui(cx) @@ 
-459,7 +470,7 @@ mod tests { workspace: &View, cx: &mut VisualTestContext, ) -> View> { - cx.dispatch_action(ToggleOutline); + cx.dispatch_action(zed_actions::outline::ToggleOutline); workspace.update(cx, |workspace, cx| { workspace .active_modal::(cx) diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index 6dfe1ceccc..0333e487cc 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -19,8 +19,8 @@ db.workspace = true editor.workspace = true file_icons.workspace = true fuzzy.workspace = true -itertools.workspace = true gpui.workspace = true +itertools.workspace = true language.workspace = true log.workspace = true menu.workspace = true @@ -36,8 +36,8 @@ smol.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -worktree.workspace = true workspace.workspace = true +worktree.workspace = true [dev-dependencies] search = { workspace = true, features = ["test-support"] } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index a6d1903282..31c9e76ec2 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -36,7 +36,7 @@ use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrev}; use outline_panel_settings::{OutlinePanelDockPosition, OutlinePanelSettings, ShowIndentGuides}; -use project::{File, Fs, Item, Project}; +use project::{File, Fs, Project, ProjectItem}; use search::{BufferSearchBar, ProjectSearchView}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; @@ -51,12 +51,12 @@ use workspace::{ ui::{ h_flex, v_flex, ActiveTheme, ButtonCommon, Clickable, Color, ContextMenu, FluentBuilder, HighlightedLabel, Icon, IconButton, IconButtonShape, IconName, IconSize, Label, - LabelCommon, ListItem, Scrollbar, ScrollbarState, Selectable, StyledExt, StyledTypography, + LabelCommon, ListItem, 
Scrollbar, ScrollbarState, StyledExt, StyledTypography, Toggleable, Tooltip, }, OpenInTerminal, WeakItemHandle, Workspace, }; -use worktree::{Entry, ProjectEntryId, WorktreeId}; +use worktree::{Entry, GitEntry, ProjectEntryId, WorktreeId}; actions!( outline_panel, @@ -103,6 +103,7 @@ pub struct OutlinePanel { active_item: Option, _subscriptions: Vec, updating_fs_entries: bool, + new_entries_for_fs_update: HashSet, fs_entries_update_task: Task<()>, cached_entries_update_task: Task<()>, reveal_selection_task: Task>, @@ -116,6 +117,7 @@ pub struct OutlinePanel { horizontal_scrollbar_state: ScrollbarState, hide_scrollbar_task: Option>, max_width_item_index: Option, + preserve_selection_on_buffer_fold_toggles: HashSet, } #[derive(Debug)] @@ -147,7 +149,7 @@ impl SearchState { previous_matches: HashMap, Arc>>, new_matches: Vec>, theme: Arc, - cx: &mut ViewContext<'_, OutlinePanel>, + cx: &mut ViewContext, ) -> Self { let (highlight_search_match_tx, highlight_search_match_rx) = channel::unbounded(); let (notify_tx, notify_rx) = channel::unbounded::<()>(); @@ -346,10 +348,17 @@ enum ExcerptOutlines { NotFetched, } +#[derive(Clone, Debug, PartialEq, Eq)] +struct FoldedDirsEntry { + worktree_id: WorktreeId, + entries: Vec, +} + +// TODO: collapse the inner enums into panel entry #[derive(Clone, Debug)] enum PanelEntry { Fs(FsEntry), - FoldedDirs(WorktreeId, Vec), + FoldedDirs(FoldedDirsEntry), Outline(OutlineEntry), Search(SearchEntry), } @@ -381,7 +390,16 @@ impl PartialEq for PanelEntry { fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::Fs(a), Self::Fs(b)) => a == b, - (Self::FoldedDirs(a1, a2), Self::FoldedDirs(b1, b2)) => a1 == b1 && a2 == b2, + ( + Self::FoldedDirs(FoldedDirsEntry { + worktree_id: worktree_id_a, + entries: entries_a, + }), + Self::FoldedDirs(FoldedDirsEntry { + worktree_id: worktree_id_b, + entries: entries_b, + }), + ) => worktree_id_a == worktree_id_b && entries_a == entries_b, (Self::Outline(a), Self::Outline(b)) => a == b, ( 
Self::Search(SearchEntry { @@ -503,54 +521,123 @@ impl SearchData { } } -#[derive(Clone, Debug, PartialEq, Eq)] -enum OutlineEntry { - Excerpt(BufferId, ExcerptId, ExcerptRange), - Outline(BufferId, ExcerptId, Outline), +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +struct OutlineEntryExcerpt { + id: ExcerptId, + buffer_id: BufferId, + range: ExcerptRange, } #[derive(Clone, Debug, Eq)] -enum FsEntry { - ExternalFile(BufferId, Vec), - Directory(WorktreeId, Entry), - File(WorktreeId, Entry, BufferId, Vec), +struct OutlineEntryOutline { + buffer_id: BufferId, + excerpt_id: ExcerptId, + outline: Outline, } -impl PartialEq for FsEntry { +impl PartialEq for OutlineEntryOutline { fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::ExternalFile(id_a, _), Self::ExternalFile(id_b, _)) => id_a == id_b, - (Self::Directory(id_a, entry_a), Self::Directory(id_b, entry_b)) => { - id_a == id_b && entry_a.id == entry_b.id - } - ( - Self::File(worktree_a, entry_a, id_a, ..), - Self::File(worktree_b, entry_b, id_b, ..), - ) => worktree_a == worktree_b && entry_a.id == entry_b.id && id_a == id_b, - _ => false, + self.buffer_id == other.buffer_id + && self.excerpt_id == other.excerpt_id + && self.outline.depth == other.outline.depth + && self.outline.range == other.outline.range + && self.outline.text == other.outline.text + } +} + +impl Hash for OutlineEntryOutline { + fn hash(&self, state: &mut H) { + ( + self.buffer_id, + self.excerpt_id, + self.outline.depth, + &self.outline.range, + &self.outline.text, + ) + .hash(state); + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +enum OutlineEntry { + Excerpt(OutlineEntryExcerpt), + Outline(OutlineEntryOutline), +} + +impl OutlineEntry { + fn ids(&self) -> (BufferId, ExcerptId) { + match self { + OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id), + OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id), } } } -impl Hash for FsEntry { - fn hash(&self, state: &mut H) { - match self { - 
Self::ExternalFile(buffer_id, _) => { - buffer_id.hash(state); - } - Self::Directory(worktree_id, entry) => { - worktree_id.hash(state); - entry.id.hash(state); - } - Self::File(worktree_id, entry, buffer_id, _) => { - worktree_id.hash(state); - entry.id.hash(state); - buffer_id.hash(state); - } - } +#[derive(Debug, Clone, Eq)] +struct FsEntryFile { + worktree_id: WorktreeId, + entry: GitEntry, + buffer_id: BufferId, + excerpts: Vec, +} + +impl PartialEq for FsEntryFile { + fn eq(&self, other: &Self) -> bool { + self.worktree_id == other.worktree_id + && self.entry.id == other.entry.id + && self.buffer_id == other.buffer_id } } +impl Hash for FsEntryFile { + fn hash(&self, state: &mut H) { + (self.buffer_id, self.entry.id, self.worktree_id).hash(state); + } +} + +#[derive(Debug, Clone, Eq)] +struct FsEntryDirectory { + worktree_id: WorktreeId, + entry: GitEntry, +} + +impl PartialEq for FsEntryDirectory { + fn eq(&self, other: &Self) -> bool { + self.worktree_id == other.worktree_id && self.entry.id == other.entry.id + } +} + +impl Hash for FsEntryDirectory { + fn hash(&self, state: &mut H) { + (self.worktree_id, self.entry.id).hash(state); + } +} + +#[derive(Debug, Clone, Eq)] +struct FsEntryExternalFile { + buffer_id: BufferId, + excerpts: Vec, +} + +impl PartialEq for FsEntryExternalFile { + fn eq(&self, other: &Self) -> bool { + self.buffer_id == other.buffer_id + } +} + +impl Hash for FsEntryExternalFile { + fn hash(&self, state: &mut H) { + self.buffer_id.hash(state); + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +enum FsEntry { + ExternalFile(FsEntryExternalFile), + Directory(FsEntryDirectory), + File(FsEntryFile), +} + struct ActiveItem { item_handle: Box, active_editor: WeakView, @@ -716,6 +803,8 @@ impl OutlinePanel { active_item: None, pending_serialization: Task::ready(None), updating_fs_entries: false, + new_entries_for_fs_update: HashSet::default(), + preserve_selection_on_buffer_fold_toggles: HashSet::default(), fs_entries_update_task: 
Task::ready(()), cached_entries_update_task: Task::ready(()), reveal_selection_task: Task::ready(Ok(())), @@ -771,7 +860,12 @@ impl OutlinePanel { } fn unfold_directory(&mut self, _: &UnfoldDirectory, cx: &mut ViewContext) { - if let Some(PanelEntry::FoldedDirs(worktree_id, entries)) = self.selected_entry().cloned() { + if let Some(PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + })) = self.selected_entry().cloned() + { self.unfolded_dirs .entry(worktree_id) .or_default() @@ -782,11 +876,11 @@ impl OutlinePanel { fn fold_directory(&mut self, _: &FoldDirectory, cx: &mut ViewContext) { let (worktree_id, entry) = match self.selected_entry().cloned() { - Some(PanelEntry::Fs(FsEntry::Directory(worktree_id, entry))) => { - (worktree_id, Some(entry)) + Some(PanelEntry::Fs(FsEntry::Directory(directory))) => { + (directory.worktree_id, Some(directory.entry)) } - Some(PanelEntry::FoldedDirs(worktree_id, entries)) => { - (worktree_id, entries.last().cloned()) + Some(PanelEntry::FoldedDirs(folded_dirs)) => { + (folded_dirs.worktree_id, folded_dirs.entries.last().cloned()) } _ => return, }; @@ -811,7 +905,8 @@ impl OutlinePanel { if self.filter_editor.focus_handle(cx).is_focused(cx) { cx.propagate() } else if let Some(selected_entry) = self.selected_entry().cloned() { - self.open_entry(&selected_entry, true, cx); + self.toggle_expanded(&selected_entry, cx); + self.scroll_editor_to_entry(&selected_entry, true, false, cx); } } @@ -834,7 +929,7 @@ impl OutlinePanel { } else if let Some((active_editor, selected_entry)) = self.active_editor().zip(self.selected_entry().cloned()) { - self.open_entry(&selected_entry, true, cx); + self.scroll_editor_to_entry(&selected_entry, true, true, cx); active_editor.update(cx, |editor, cx| editor.open_excerpts(action, cx)); } } @@ -849,15 +944,16 @@ impl OutlinePanel { } else if let Some((active_editor, selected_entry)) = self.active_editor().zip(self.selected_entry().cloned()) { - self.open_entry(&selected_entry, true, 
cx); + self.scroll_editor_to_entry(&selected_entry, true, true, cx); active_editor.update(cx, |editor, cx| editor.open_excerpts_in_split(action, cx)); } } - fn open_entry( + fn scroll_editor_to_entry( &mut self, entry: &PanelEntry, - change_selection: bool, + prefer_selection_change: bool, + change_focus: bool, cx: &mut ViewContext, ) { let Some(active_editor) = self.active_editor() else { @@ -865,33 +961,32 @@ impl OutlinePanel { }; let active_multi_buffer = active_editor.read(cx).buffer().clone(); let multi_buffer_snapshot = active_multi_buffer.read(cx).snapshot(cx); - let offset_from_top = if active_multi_buffer.read(cx).is_singleton() { - Point::default() - } else { - Point::new(0.0, -(active_editor.read(cx).file_header_size() as f32)) - }; - + let mut change_selection = prefer_selection_change; + let mut scroll_to_buffer = None; let scroll_target = match entry { PanelEntry::FoldedDirs(..) | PanelEntry::Fs(FsEntry::Directory(..)) => None, - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - let scroll_target = multi_buffer_snapshot.excerpts().find_map( + PanelEntry::Fs(FsEntry::ExternalFile(file)) => { + change_selection = false; + scroll_to_buffer = Some(file.buffer_id); + multi_buffer_snapshot.excerpts().find_map( |(excerpt_id, buffer_snapshot, excerpt_range)| { - if &buffer_snapshot.remote_id() == buffer_id { + if buffer_snapshot.remote_id() == file.buffer_id { multi_buffer_snapshot .anchor_in_excerpt(excerpt_id, excerpt_range.context.start) } else { None } }, - ); - Some(offset_from_top).zip(scroll_target) + ) } - PanelEntry::Fs(FsEntry::File(_, file_entry, ..)) => { - let scroll_target = self - .project + + PanelEntry::Fs(FsEntry::File(file)) => { + change_selection = false; + scroll_to_buffer = Some(file.buffer_id); + self.project .update(cx, |project, cx| { project - .path_for_entry(file_entry.id, cx) + .path_for_entry(file.entry.id, cx) .and_then(|path| project.get_open_buffer(&path, cx)) }) .map(|buffer| { @@ -903,35 +998,29 @@ impl 
OutlinePanel { let (excerpt_id, excerpt_range) = excerpts.first()?; multi_buffer_snapshot .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) - }); - Some(offset_from_top).zip(scroll_target) + }) } - PanelEntry::Outline(OutlineEntry::Outline(_, excerpt_id, outline)) => { - let scroll_target = multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, outline.range.start) - .or_else(|| { - multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, outline.range.end) - }); - Some(Point::default()).zip(scroll_target) - } - PanelEntry::Outline(OutlineEntry::Excerpt(_, excerpt_id, excerpt_range)) => { - let scroll_target = multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start); - Some(Point::default()).zip(scroll_target) - } - PanelEntry::Search(SearchEntry { match_range, .. }) => { - Some((Point::default(), match_range.start)) + PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot + .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start) + .or_else(|| { + multi_buffer_snapshot + .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end) + }), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + change_selection = false; + multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start) } + PanelEntry::Search(search_entry) => Some(search_entry.match_range.start), }; - if let Some((offset, anchor)) = scroll_target { + if let Some(anchor) = scroll_target { let activate = self .workspace .update(cx, |workspace, cx| match self.active_item() { Some(active_item) => { - workspace.activate_item(active_item.as_ref(), true, change_selection, cx) + workspace.activate_item(active_item.as_ref(), true, change_focus, cx) } - None => workspace.activate_item(&active_editor, true, change_selection, cx), + None => workspace.activate_item(&active_editor, true, change_focus, cx), }); if activate.is_ok() { @@ -939,16 +1028,58 @@ impl OutlinePanel { if change_selection { active_editor.update(cx, 
|editor, cx| { editor.change_selections( - Some(Autoscroll::Strategy(AutoscrollStrategy::Top)), + Some(Autoscroll::Strategy(AutoscrollStrategy::Center)), cx, |s| s.select_ranges(Some(anchor..anchor)), ); }); - active_editor.focus_handle(cx).focus(cx); } else { + let mut offset = Point::default(); + let show_excerpt_controls = active_editor + .read(cx) + .display_map + .read(cx) + .show_excerpt_controls(); + let expand_excerpt_control_height = 1.0; + if let Some(buffer_id) = scroll_to_buffer { + let current_folded = active_editor.read(cx).buffer_folded(buffer_id, cx); + if current_folded { + if show_excerpt_controls { + let previous_buffer_id = self + .fs_entries + .iter() + .rev() + .filter_map(|entry| match entry { + FsEntry::File(file) => Some(file.buffer_id), + FsEntry::ExternalFile(external_file) => { + Some(external_file.buffer_id) + } + FsEntry::Directory(..) => None, + }) + .skip_while(|id| *id != buffer_id) + .nth(1); + if let Some(previous_buffer_id) = previous_buffer_id { + if !active_editor.read(cx).buffer_folded(previous_buffer_id, cx) + { + offset.y += expand_excerpt_control_height; + } + } + } + } else { + offset.y = -(active_editor.read(cx).file_header_size() as f32); + if show_excerpt_controls { + offset.y -= expand_excerpt_control_height; + } + } + } active_editor.update(cx, |editor, cx| { editor.set_scroll_anchor(ScrollAnchor { offset, anchor }, cx); }); + } + + if change_focus { + active_editor.focus_handle(cx).focus(cx); + } else { self.focus_handle.focus(cx); } } @@ -969,7 +1100,7 @@ impl OutlinePanel { self.select_first(&SelectFirst {}, cx) } if let Some(selected_entry) = self.selected_entry().cloned() { - self.open_entry(&selected_entry, false, cx); + self.scroll_editor_to_entry(&selected_entry, true, false, cx); } } @@ -988,7 +1119,7 @@ impl OutlinePanel { self.select_last(&SelectLast, cx) } if let Some(selected_entry) = self.selected_entry().cloned() { - self.open_entry(&selected_entry, false, cx); + 
self.scroll_editor_to_entry(&selected_entry, true, false, cx); } } @@ -1004,69 +1135,68 @@ impl OutlinePanel { match &selected_entry { PanelEntry::Fs(fs_entry) => match fs_entry { FsEntry::ExternalFile(..) => None, - FsEntry::File(worktree_id, entry, ..) - | FsEntry::Directory(worktree_id, entry) => { - entry.path.parent().and_then(|parent_path| { - previous_entries.find(|entry| match entry { - PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) => { - dir_worktree_id == worktree_id - && dir_entry.path.as_ref() == parent_path - } - PanelEntry::FoldedDirs(dirs_worktree_id, dirs) => { - dirs_worktree_id == worktree_id - && dirs - .last() - .map_or(false, |dir| dir.path.as_ref() == parent_path) - } - _ => false, - }) + FsEntry::File(FsEntryFile { + worktree_id, entry, .. + }) + | FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + }) => entry.path.parent().and_then(|parent_path| { + previous_entries.find(|entry| match entry { + PanelEntry::Fs(FsEntry::Directory(directory)) => { + directory.worktree_id == *worktree_id + && directory.entry.path.as_ref() == parent_path + } + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id: dirs_worktree_id, + entries: dirs, + .. 
+ }) => { + dirs_worktree_id == worktree_id + && dirs + .last() + .map_or(false, |dir| dir.path.as_ref() == parent_path) + } + _ => false, }) - } + }), }, - PanelEntry::FoldedDirs(worktree_id, entries) => entries + PanelEntry::FoldedDirs(folded_dirs) => folded_dirs + .entries .first() .and_then(|entry| entry.path.parent()) .and_then(|parent_path| { previous_entries.find(|entry| { - if let PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) = - entry - { - dir_worktree_id == worktree_id - && dir_entry.path.as_ref() == parent_path + if let PanelEntry::Fs(FsEntry::Directory(directory)) = entry { + directory.worktree_id == folded_dirs.worktree_id + && directory.entry.path.as_ref() == parent_path } else { false } }) }), - PanelEntry::Outline(OutlineEntry::Excerpt(excerpt_buffer_id, excerpt_id, _)) => { + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { previous_entries.find(|entry| match entry { - PanelEntry::Fs(FsEntry::File(_, _, file_buffer_id, file_excerpts)) => { - file_buffer_id == excerpt_buffer_id - && file_excerpts.contains(excerpt_id) + PanelEntry::Fs(FsEntry::File(file)) => { + file.buffer_id == excerpt.buffer_id + && file.excerpts.contains(&excerpt.id) } - PanelEntry::Fs(FsEntry::ExternalFile(file_buffer_id, file_excerpts)) => { - file_buffer_id == excerpt_buffer_id - && file_excerpts.contains(excerpt_id) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + external_file.buffer_id == excerpt.buffer_id + && external_file.excerpts.contains(&excerpt.id) } _ => false, }) } - PanelEntry::Outline(OutlineEntry::Outline( - outline_buffer_id, - outline_excerpt_id, - _, - )) => previous_entries.find(|entry| { - if let PanelEntry::Outline(OutlineEntry::Excerpt( - excerpt_buffer_id, - excerpt_id, - _, - )) = entry - { - outline_buffer_id == excerpt_buffer_id && outline_excerpt_id == excerpt_id - } else { - false - } - }), + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + previous_entries.find(|entry| { + if let 
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry { + outline.buffer_id == excerpt.buffer_id + && outline.excerpt_id == excerpt.id + } else { + false + } + }) + } PanelEntry::Search(_) => { previous_entries.find(|entry| !matches!(entry, PanelEntry::Search(_))) } @@ -1124,8 +1254,12 @@ impl OutlinePanel { ) { self.select_entry(entry.clone(), true, cx); let is_root = match &entry { - PanelEntry::Fs(FsEntry::File(worktree_id, entry, ..)) - | PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, entry, .. + })) + | PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => self .project .read(cx) .worktree_for_id(*worktree_id, cx) @@ -1133,7 +1267,11 @@ impl OutlinePanel { worktree.read(cx).root_entry().map(|entry| entry.id) == Some(entry.id) }) .unwrap_or(false), - PanelEntry::FoldedDirs(worktree_id, entries) => entries + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) => entries .first() .and_then(|entry| { self.project @@ -1192,9 +1330,11 @@ impl OutlinePanel { fn is_foldable(&self, entry: &PanelEntry) -> bool { let (directory_worktree, directory_entry) = match entry { - PanelEntry::Fs(FsEntry::Directory(directory_worktree, directory_entry)) => { - (*directory_worktree, Some(directory_entry)) - } + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry: directory_entry, + .. 
+ })) => (*worktree_id, Some(directory_entry)), _ => return false, }; let Some(directory_entry) = directory_entry else { @@ -1222,23 +1362,44 @@ impl OutlinePanel { } fn expand_selected_entry(&mut self, _: &ExpandSelectedEntry, cx: &mut ViewContext) { - let entry_to_expand = match self.selected_entry() { - Some(PanelEntry::FoldedDirs(worktree_id, dir_entries)) => dir_entries - .last() - .map(|entry| CollapsedEntry::Dir(*worktree_id, entry.id)), - Some(PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry))) => { - Some(CollapsedEntry::Dir(*worktree_id, dir_entry.id)) + let Some(active_editor) = self.active_editor() else { + return; + }; + let Some(selected_entry) = self.selected_entry().cloned() else { + return; + }; + let mut buffers_to_unfold = HashSet::default(); + let entry_to_expand = match &selected_entry { + PanelEntry::FoldedDirs(FoldedDirsEntry { + entries: dir_entries, + worktree_id, + .. + }) => dir_entries.last().map(|entry| { + buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry)); + CollapsedEntry::Dir(*worktree_id, entry.id) + }), + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => { + buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry)); + Some(CollapsedEntry::Dir(*worktree_id, entry.id)) } - Some(PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _))) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. 
+ })) => { + buffers_to_unfold.insert(*buffer_id); Some(CollapsedEntry::File(*worktree_id, *buffer_id)) } - Some(PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _))) => { - Some(CollapsedEntry::ExternalFile(*buffer_id)) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_unfold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } - Some(PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _))) => { - Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) } - None | Some(PanelEntry::Search(_)) | Some(PanelEntry::Outline(..)) => None, + PanelEntry::Search(_) | PanelEntry::Outline(..) => return, }; let Some(collapsed_entry) = entry_to_expand else { return; @@ -1246,132 +1407,238 @@ impl OutlinePanel { let expanded = self.collapsed_entries.remove(&collapsed_entry); if expanded { if let CollapsedEntry::Dir(worktree_id, dir_entry_id) = collapsed_entry { - self.project.update(cx, |project, cx| { - project.expand_entry(worktree_id, dir_entry_id, cx); + let task = self.project.update(cx, |project, cx| { + project.expand_entry(worktree_id, dir_entry_id, cx) }); + if let Some(task) = task { + task.detach_and_log_err(cx); + } + }; + + active_editor.update(cx, |editor, cx| { + buffers_to_unfold.retain(|buffer_id| editor.buffer_folded(*buffer_id, cx)); + }); + self.select_entry(selected_entry, true, cx); + if buffers_to_unfold.is_empty() { + self.update_cached_entries(None, cx); + } else { + self.toggle_buffers_fold(buffers_to_unfold, false, cx) + .detach(); } - self.update_cached_entries(None, cx); } else { self.select_next(&SelectNext, cx) } } fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext) { + let Some(active_editor) = self.active_editor() else { + return; + }; let Some(selected_entry) = self.selected_entry().cloned() else { return; }; - 
match &selected_entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, selected_dir_entry)) => { - self.collapsed_entries - .insert(CollapsedEntry::Dir(*worktree_id, selected_dir_entry.id)); - self.select_entry(selected_entry, true, cx); - self.update_cached_entries(None, cx); - } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - self.collapsed_entries - .insert(CollapsedEntry::File(*worktree_id, *buffer_id)); - self.select_entry(selected_entry, true, cx); - self.update_cached_entries(None, cx); - } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - self.collapsed_entries - .insert(CollapsedEntry::ExternalFile(*buffer_id)); - self.select_entry(selected_entry, true, cx); - self.update_cached_entries(None, cx); - } - PanelEntry::FoldedDirs(worktree_id, dir_entries) => { - if let Some(dir_entry) = dir_entries.last() { - if self - .collapsed_entries - .insert(CollapsedEntry::Dir(*worktree_id, dir_entry.id)) - { - self.select_entry(selected_entry, true, cx); - self.update_cached_entries(None, cx); - } - } - } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { + + let mut buffers_to_fold = HashSet::default(); + let collapsed = match &selected_entry { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => { if self .collapsed_entries - .insert(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + .insert(CollapsedEntry::Dir(*worktree_id, entry.id)) { - self.select_entry(selected_entry, true, cx); - self.update_cached_entries(None, cx); + buffers_to_fold.extend(self.buffers_inside_directory(*worktree_id, entry)); + true + } else { + false } } - PanelEntry::Search(_) | PanelEntry::Outline(..) => {} + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. 
+ })) => { + if self + .collapsed_entries + .insert(CollapsedEntry::File(*worktree_id, *buffer_id)) + { + buffers_to_fold.insert(*buffer_id); + true + } else { + false + } + } + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + if self + .collapsed_entries + .insert(CollapsedEntry::ExternalFile(external_file.buffer_id)) + { + buffers_to_fold.insert(external_file.buffer_id); + true + } else { + false + } + } + PanelEntry::FoldedDirs(folded_dirs) => { + let mut folded = false; + if let Some(dir_entry) = folded_dirs.entries.last() { + if self + .collapsed_entries + .insert(CollapsedEntry::Dir(folded_dirs.worktree_id, dir_entry.id)) + { + folded = true; + buffers_to_fold.extend( + self.buffers_inside_directory(folded_dirs.worktree_id, dir_entry), + ); + } + } + folded + } + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self + .collapsed_entries + .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)), + PanelEntry::Search(_) | PanelEntry::Outline(..) => false, + }; + + if collapsed { + active_editor.update(cx, |editor, cx| { + buffers_to_fold.retain(|buffer_id| !editor.buffer_folded(*buffer_id, cx)); + }); + self.select_entry(selected_entry, true, cx); + if buffers_to_fold.is_empty() { + self.update_cached_entries(None, cx); + } else { + self.toggle_buffers_fold(buffers_to_fold, true, cx).detach(); + } + } else { + self.select_parent(&SelectParent, cx); } } pub fn expand_all_entries(&mut self, _: &ExpandAllEntries, cx: &mut ViewContext) { + let Some(active_editor) = self.active_editor() else { + return; + }; + let mut buffers_to_unfold = HashSet::default(); let expanded_entries = self.fs_entries .iter() .fold(HashSet::default(), |mut entries, fs_entry| { match fs_entry { - FsEntry::ExternalFile(buffer_id, _) => { - entries.insert(CollapsedEntry::ExternalFile(*buffer_id)); - entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.iter().map(|(excerpt_id, _)| { - CollapsedEntry::Excerpt(*buffer_id, 
*excerpt_id) - }) - }, + FsEntry::ExternalFile(external_file) => { + buffers_to_unfold.insert(external_file.buffer_id); + entries.insert(CollapsedEntry::ExternalFile(external_file.buffer_id)); + entries.extend( + self.excerpts + .get(&external_file.buffer_id) + .into_iter() + .flat_map(|excerpts| { + excerpts.iter().map(|(excerpt_id, _)| { + CollapsedEntry::Excerpt( + external_file.buffer_id, + *excerpt_id, + ) + }) + }), + ); + } + FsEntry::Directory(directory) => { + entries.insert(CollapsedEntry::Dir( + directory.worktree_id, + directory.entry.id, )); } - FsEntry::Directory(worktree_id, entry) => { - entries.insert(CollapsedEntry::Dir(*worktree_id, entry.id)); + FsEntry::File(file) => { + buffers_to_unfold.insert(file.buffer_id); + entries.insert(CollapsedEntry::File(file.worktree_id, file.buffer_id)); + entries.extend( + self.excerpts.get(&file.buffer_id).into_iter().flat_map( + |excerpts| { + excerpts.iter().map(|(excerpt_id, _)| { + CollapsedEntry::Excerpt(file.buffer_id, *excerpt_id) + }) + }, + ), + ); } - FsEntry::File(worktree_id, _, buffer_id, _) => { - entries.insert(CollapsedEntry::File(*worktree_id, *buffer_id)); - entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.iter().map(|(excerpt_id, _)| { - CollapsedEntry::Excerpt(*buffer_id, *excerpt_id) - }) - }, - )); - } - } + }; entries }); self.collapsed_entries .retain(|entry| !expanded_entries.contains(entry)); - self.update_cached_entries(None, cx); + active_editor.update(cx, |editor, cx| { + buffers_to_unfold.retain(|buffer_id| editor.buffer_folded(*buffer_id, cx)); + }); + if buffers_to_unfold.is_empty() { + self.update_cached_entries(None, cx); + } else { + self.toggle_buffers_fold(buffers_to_unfold, false, cx) + .detach(); + } } pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext) { + let Some(active_editor) = self.active_editor() else { + return; + }; + let mut buffers_to_fold = HashSet::default(); let new_entries = self 
.cached_entries .iter() .flat_map(|cached_entry| match &cached_entry.entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => { - Some(CollapsedEntry::Dir(*worktree_id, entry.id)) - } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)), + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { + buffers_to_fold.insert(*buffer_id); Some(CollapsedEntry::File(*worktree_id, *buffer_id)) } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - Some(CollapsedEntry::ExternalFile(*buffer_id)) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_fold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } - PanelEntry::FoldedDirs(worktree_id, entries) => { - Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)) - } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) } PanelEntry::Search(_) | PanelEntry::Outline(..) 
=> None, }) .collect::>(); self.collapsed_entries.extend(new_entries); - self.update_cached_entries(None, cx); + + active_editor.update(cx, |editor, cx| { + buffers_to_fold.retain(|buffer_id| !editor.buffer_folded(*buffer_id, cx)); + }); + if buffers_to_fold.is_empty() { + self.update_cached_entries(None, cx); + } else { + self.toggle_buffers_fold(buffers_to_fold, true, cx).detach(); + } } fn toggle_expanded(&mut self, entry: &PanelEntry, cx: &mut ViewContext) { + let Some(active_editor) = self.active_editor() else { + return; + }; + let mut fold = false; + let mut buffers_to_toggle = HashSet::default(); match entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry: dir_entry, + .. + })) => { let entry_id = dir_entry.id; let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id); + buffers_to_toggle.extend(self.buffers_inside_directory(*worktree_id, dir_entry)); if self.collapsed_entries.remove(&collapsed_entry) { self.project .update(cx, |project, cx| { @@ -1381,23 +1648,39 @@ impl OutlinePanel { .detach_and_log_err(cx); } else { self.collapsed_entries.insert(collapsed_entry); + fold = true; } } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. 
+ })) => { let collapsed_entry = CollapsedEntry::File(*worktree_id, *buffer_id); + buffers_to_toggle.insert(*buffer_id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); + fold = true; } } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - let collapsed_entry = CollapsedEntry::ExternalFile(*buffer_id); + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + let collapsed_entry = CollapsedEntry::ExternalFile(external_file.buffer_id); + buffers_to_toggle.insert(external_file.buffer_id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); + fold = true; } } - PanelEntry::FoldedDirs(worktree_id, dir_entries) => { - if let Some(entry_id) = dir_entries.first().map(|entry| entry.id) { + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries: dir_entries, + .. + }) => { + if let Some(dir_entry) = dir_entries.first() { + let entry_id = dir_entry.id; let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id); + buffers_to_toggle + .extend(self.buffers_inside_directory(*worktree_id, dir_entry)); if self.collapsed_entries.remove(&collapsed_entry) { self.project .update(cx, |project, cx| { @@ -1407,11 +1690,12 @@ impl OutlinePanel { .detach_and_log_err(cx); } else { self.collapsed_entries.insert(collapsed_entry); + fold = true; } } } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - let collapsed_entry = CollapsedEntry::Excerpt(*buffer_id, *excerpt_id); + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } @@ -1419,8 +1703,56 @@ impl OutlinePanel { PanelEntry::Search(_) | PanelEntry::Outline(..) 
=> return, } + active_editor.update(cx, |editor, cx| { + buffers_to_toggle.retain(|buffer_id| { + let folded = editor.buffer_folded(*buffer_id, cx); + if fold { + !folded + } else { + folded + } + }); + }); + self.select_entry(entry.clone(), true, cx); - self.update_cached_entries(None, cx); + if buffers_to_toggle.is_empty() { + self.update_cached_entries(None, cx); + } else { + self.toggle_buffers_fold(buffers_to_toggle, fold, cx) + .detach(); + } + } + + fn toggle_buffers_fold( + &self, + buffers: HashSet, + fold: bool, + cx: &mut ViewContext, + ) -> Task<()> { + let Some(active_editor) = self.active_editor() else { + return Task::ready(()); + }; + cx.spawn(|outline_panel, mut cx| async move { + outline_panel + .update(&mut cx, |outline_panel, cx| { + active_editor.update(cx, |editor, cx| { + for buffer_id in buffers { + outline_panel + .preserve_selection_on_buffer_fold_toggles + .insert(buffer_id); + if fold { + editor.fold_buffer(buffer_id, cx); + } else { + editor.unfold_buffer(buffer_id, cx); + } + } + }); + if let Some(selection) = outline_panel.selected_entry().cloned() { + outline_panel.scroll_editor_to_entry(&selection, false, false, cx); + } + }) + .ok(); + }) } fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext) { @@ -1438,7 +1770,9 @@ impl OutlinePanel { .selected_entry() .and_then(|entry| match entry { PanelEntry::Fs(entry) => self.relative_path(entry, cx), - PanelEntry::FoldedDirs(_, dirs) => dirs.last().map(|entry| entry.path.clone()), + PanelEntry::FoldedDirs(folded_dirs) => { + folded_dirs.entries.last().map(|entry| entry.path.clone()) + } PanelEntry::Search(_) | PanelEntry::Outline(..) 
=> None, }) .map(|p| p.to_string_lossy().to_string()) @@ -1474,8 +1808,11 @@ impl OutlinePanel { } } - fn reveal_entry_for_selection(&mut self, editor: View, cx: &mut ViewContext<'_, Self>) { - if !self.active || !OutlinePanelSettings::get_global(cx).auto_reveal_entries { + fn reveal_entry_for_selection(&mut self, editor: View, cx: &mut ViewContext) { + if !self.active + || !OutlinePanelSettings::get_global(cx).auto_reveal_entries + || self.focus_handle.contains_focused(cx) + { return; } let project = self.project.clone(); @@ -1492,23 +1829,24 @@ impl OutlinePanel { return Ok(()); }; let related_buffer_entry = match &entry_with_selection { - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - project.update(&mut cx, |project, cx| { - let entry_id = project - .buffer_for_id(*buffer_id, cx) - .and_then(|buffer| buffer.read(cx).entry_id(cx)); - project - .worktree_for_id(*worktree_id, cx) - .zip(entry_id) - .and_then(|(worktree, entry_id)| { - let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); - Some((worktree, entry)) - }) - })? - } + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. 
+ })) => project.update(&mut cx, |project, cx| { + let entry_id = project + .buffer_for_id(*buffer_id, cx) + .and_then(|buffer| buffer.read(cx).entry_id(cx)); + project + .worktree_for_id(*worktree_id, cx) + .zip(entry_id) + .and_then(|(worktree, entry_id)| { + let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); + Some((worktree, entry)) + }) + })?, PanelEntry::Outline(outline_entry) => { - let &(OutlineEntry::Outline(buffer_id, excerpt_id, _) - | OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) = outline_entry; + let (buffer_id, excerpt_id) = outline_entry.ids(); outline_panel.update(&mut cx, |outline_panel, cx| { outline_panel .collapsed_entries @@ -1621,25 +1959,21 @@ impl OutlinePanel { fn render_excerpt( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - range: &ExcerptRange, + excerpt: &OutlineEntryExcerpt, depth: usize, cx: &mut ViewContext, ) -> Option> { - let item_id = ElementId::from(excerpt_id.to_proto() as usize); + let item_id = ElementId::from(excerpt.id.to_proto() as usize); let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Excerpt( - selected_buffer_id, - selected_excerpt_id, - _, - ))) => selected_buffer_id == &buffer_id && selected_excerpt_id == &excerpt_id, + Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => { + selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id + } _ => false, }; let has_outlines = self .excerpts - .get(&buffer_id) - .and_then(|excerpts| match &excerpts.get(&excerpt_id)?.outlines { + .get(&excerpt.buffer_id) + .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines { ExcerptOutlines::Outlines(outlines) => Some(outlines), ExcerptOutlines::Invalidated(outlines) => Some(outlines), ExcerptOutlines::NotFetched => None, @@ -1647,7 +1981,7 @@ impl OutlinePanel { .map_or(false, |outlines| !outlines.is_empty()); let is_expanded = !self .collapsed_entries - .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + 
.contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); let color = entry_git_aware_label_color(None, false, is_active); let icon = if has_outlines { FileIcons::get_chevron_icon(is_expanded, cx) @@ -1657,14 +1991,14 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let label = self.excerpt_label(buffer_id, range, cx)?; + let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?; let label_element = Label::new(label) .single_line() .color(color) .into_any_element(); Some(self.entry_element( - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, range.clone())), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())), item_id, depth, Some(icon), @@ -1691,50 +2025,40 @@ impl OutlinePanel { fn render_outline( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - rendered_outline: &Outline, + outline: &OutlineEntryOutline, depth: usize, string_match: Option<&StringMatch>, cx: &mut ViewContext, ) -> Stateful
{ - let (item_id, label_element) = ( - ElementId::from(SharedString::from(format!( - "{buffer_id:?}|{excerpt_id:?}{:?}|{:?}", - rendered_outline.range, &rendered_outline.text, - ))), - outline::render_item( - rendered_outline, - string_match - .map(|string_match| string_match.ranges().collect::>()) - .unwrap_or_default(), - cx, - ) - .into_any_element(), - ); + let item_id = ElementId::from(SharedString::from(format!( + "{:?}|{:?}{:?}|{:?}", + outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text, + ))); + + let label_element = outline::render_item( + &outline.outline, + string_match + .map(|string_match| string_match.ranges().collect::>()) + .unwrap_or_default(), + cx, + ) + .into_any_element(); + let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Outline( - selected_buffer_id, - selected_excerpt_id, - selected_entry, - ))) => { - selected_buffer_id == &buffer_id - && selected_excerpt_id == &excerpt_id - && selected_entry == rendered_outline + Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => { + outline == selected && outline.outline == selected.outline } _ => false, }; + let icon = if self.is_singleton_active(cx) { None } else { Some(empty_icon()) }; + self.entry_element( - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - rendered_outline.clone(), - )), + PanelEntry::Outline(OutlineEntry::Outline(outline.clone())), item_id, depth, icon, @@ -1757,7 +2081,9 @@ impl OutlinePanel { _ => false, }; let (item_id, label_element, icon) = match rendered_entry { - FsEntry::File(worktree_id, entry, ..) => { + FsEntry::File(FsEntryFile { + worktree_id, entry, .. 
+ }) => { let name = self.entry_name(worktree_id, entry, cx); let color = entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active); @@ -1780,14 +2106,18 @@ impl OutlinePanel { icon.unwrap_or_else(empty_icon), ) } - FsEntry::Directory(worktree_id, entry) => { - let name = self.entry_name(worktree_id, entry, cx); + FsEntry::Directory(directory) => { + let name = self.entry_name(&directory.worktree_id, &directory.entry, cx); - let is_expanded = !self - .collapsed_entries - .contains(&CollapsedEntry::Dir(*worktree_id, entry.id)); - let color = - entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active); + let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Dir( + directory.worktree_id, + directory.entry.id, + )); + let color = entry_git_aware_label_color( + directory.entry.git_status, + directory.entry.is_ignored, + is_active, + ); let icon = if settings.folder_icons { FileIcons::get_folder_icon(is_expanded, cx) } else { @@ -1796,7 +2126,7 @@ impl OutlinePanel { .map(Icon::from_path) .map(|icon| icon.color(color).into_any_element()); ( - ElementId::from(entry.id.to_proto() as usize), + ElementId::from(directory.entry.id.to_proto() as usize), HighlightedLabel::new( name, string_match @@ -1808,9 +2138,9 @@ impl OutlinePanel { icon.unwrap_or_else(empty_icon), ) } - FsEntry::ExternalFile(buffer_id, ..) 
=> { + FsEntry::ExternalFile(external_file) => { let color = entry_label_color(is_active); - let (icon, name) = match self.buffer_snapshot_for_id(*buffer_id, cx) { + let (icon, name) = match self.buffer_snapshot_for_id(external_file.buffer_id, cx) { Some(buffer_snapshot) => match buffer_snapshot.file() { Some(file) => { let path = file.path(); @@ -1828,7 +2158,7 @@ impl OutlinePanel { None => (None, "Unknown buffer".to_string()), }; ( - ElementId::from(buffer_id.to_proto() as usize), + ElementId::from(external_file.buffer_id.to_proto() as usize), HighlightedLabel::new( name, string_match @@ -1855,29 +2185,32 @@ impl OutlinePanel { fn render_folded_dirs( &self, - worktree_id: WorktreeId, - dir_entries: &[Entry], + folded_dir: &FoldedDirsEntry, depth: usize, string_match: Option<&StringMatch>, cx: &mut ViewContext, ) -> Stateful
{ let settings = OutlinePanelSettings::get_global(cx); let is_active = match self.selected_entry() { - Some(PanelEntry::FoldedDirs(selected_worktree_id, selected_entries)) => { - selected_worktree_id == &worktree_id && selected_entries == dir_entries + Some(PanelEntry::FoldedDirs(selected_dirs)) => { + selected_dirs.worktree_id == folded_dir.worktree_id + && selected_dirs.entries == folded_dir.entries } _ => false, }; let (item_id, label_element, icon) = { - let name = self.dir_names_string(dir_entries, worktree_id, cx); + let name = self.dir_names_string(&folded_dir.entries, folded_dir.worktree_id, cx); - let is_expanded = dir_entries.iter().all(|dir| { + let is_expanded = folded_dir.entries.iter().all(|dir| { !self .collapsed_entries - .contains(&CollapsedEntry::Dir(worktree_id, dir.id)) + .contains(&CollapsedEntry::Dir(folded_dir.worktree_id, dir.id)) }); - let is_ignored = dir_entries.iter().any(|entry| entry.is_ignored); - let git_status = dir_entries.first().and_then(|entry| entry.git_status); + let is_ignored = folded_dir.entries.iter().any(|entry| entry.is_ignored); + let git_status = folded_dir + .entries + .first() + .and_then(|entry| entry.git_status); let color = entry_git_aware_label_color(git_status, is_ignored, is_active); let icon = if settings.folder_icons { FileIcons::get_folder_icon(is_expanded, cx) @@ -1888,10 +2221,12 @@ impl OutlinePanel { .map(|icon| icon.color(color).into_any_element()); ( ElementId::from( - dir_entries + folded_dir + .entries .last() .map(|entry| entry.id.to_proto()) - .unwrap_or_else(|| worktree_id.to_proto()) as usize, + .unwrap_or_else(|| folded_dir.worktree_id.to_proto()) + as usize, ), HighlightedLabel::new( name, @@ -1906,7 +2241,7 @@ impl OutlinePanel { }; self.entry_element( - PanelEntry::FoldedDirs(worktree_id, dir_entries.to_vec()), + PanelEntry::FoldedDirs(folded_dir.clone()), item_id, depth, Some(icon), @@ -2027,9 +2362,9 @@ impl OutlinePanel { if event.down.button == MouseButton::Right || event.down.first_mouse 
{ return; } - let change_selection = event.down.click_count > 1; + let change_focus = event.down.click_count > 1; outline_panel.toggle_expanded(&clicked_entry, cx); - outline_panel.open_entry(&clicked_entry, change_selection, cx); + outline_panel.scroll_editor_to_entry(&clicked_entry, true, change_focus, cx); }) }) .cursor_pointer() @@ -2037,7 +2372,7 @@ impl OutlinePanel { ListItem::new(item_id) .indent_level(depth) .indent_step_size(px(settings.indent_size)) - .selected(is_active) + .toggle_state(is_active) .when_some(icon_element, |list_item, icon_element| { list_item.child(h_flex().child(icon_element)) }) @@ -2099,8 +2434,7 @@ impl OutlinePanel { fn update_fs_entries( &mut self, - active_editor: &View, - new_entries: HashSet, + active_editor: View, debounce: Option, cx: &mut ViewContext, ) { @@ -2110,6 +2444,7 @@ impl OutlinePanel { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; let active_multi_buffer = active_editor.read(cx).buffer().clone(); + let new_entries = self.new_entries_for_fs_update.clone(); self.updating_fs_entries = true; self.fs_entries_update_task = cx.spawn(|outline_panel, mut cx| async move { if let Some(debounce) = debounce { @@ -2133,10 +2468,11 @@ impl OutlinePanel { let worktree = file.map(|file| file.worktree.read(cx).snapshot()); let is_new = new_entries.contains(&excerpt_id) || !outline_panel.excerpts.contains_key(&buffer_id); + let is_folded = active_editor.read(cx).buffer_folded(buffer_id, cx); buffer_excerpts .entry(buffer_id) - .or_insert_with(|| (is_new, Vec::new(), entry_id, worktree)) - .1 + .or_insert_with(|| (is_new, is_folded, Vec::new(), entry_id, worktree)) + .2 .push(excerpt_id); let outlines = match outline_panel @@ -2179,15 +2515,28 @@ impl OutlinePanel { .spawn(async move { let mut processed_external_buffers = HashSet::default(); let mut new_worktree_entries = - HashMap::)>::default(); + HashMap::>::default(); let mut worktree_excerpts = HashMap::< WorktreeId, HashMap)>, >::default(); let mut 
external_excerpts = HashMap::default(); - for (buffer_id, (is_new, excerpts, entry_id, worktree)) in buffer_excerpts { - if is_new { + for (buffer_id, (is_new, is_folded, excerpts, entry_id, worktree)) in + buffer_excerpts + { + if is_folded { + match &worktree { + Some(worktree) => { + new_collapsed_entries + .insert(CollapsedEntry::File(worktree.id(), buffer_id)); + } + None => { + new_collapsed_entries + .insert(CollapsedEntry::ExternalFile(buffer_id)); + } + } + } else if is_new { match &worktree { Some(worktree) => { new_collapsed_entries @@ -2206,14 +2555,15 @@ impl OutlinePanel { match entry_id.and_then(|id| worktree.entry_for_id(id)).cloned() { Some(entry) => { - let mut traversal = worktree.traverse_from_path( - true, - true, - true, - entry.path.as_ref(), - ); + let entry = GitEntry { + git_status: worktree.status_for_file(&entry.path), + entry, + }; + let mut traversal = worktree + .traverse_from_path(true, true, true, entry.path.as_ref()) + .with_git_statuses(); - let mut entries_to_add = HashSet::default(); + let mut entries_to_add = HashMap::default(); worktree_excerpts .entry(worktree_id) .or_default() @@ -2238,10 +2588,12 @@ impl OutlinePanel { } } - let new_entry_added = entries_to_add.insert(current_entry); + let new_entry_added = entries_to_add + .insert(current_entry.id, current_entry) + .is_none(); if new_entry_added && traversal.back_to_parent() { if let Some(parent_entry) = traversal.entry() { - current_entry = parent_entry.clone(); + current_entry = parent_entry.to_owned(); continue; } } @@ -2249,8 +2601,7 @@ impl OutlinePanel { } new_worktree_entries .entry(worktree_id) - .or_insert_with(|| (worktree.clone(), HashSet::default())) - .1 + .or_insert_with(HashMap::default) .extend(entries_to_add); } None => { @@ -2275,11 +2626,9 @@ impl OutlinePanel { let worktree_entries = new_worktree_entries .into_iter() - .map(|(worktree_id, (worktree_snapshot, entries))| { - let mut entries = entries.into_iter().collect::>(); - // For a proper git status 
propagation, we have to keep the entries sorted lexicographically. + .map(|(worktree_id, entries)| { + let mut entries = entries.into_values().collect::>(); entries.sort_by(|a, b| a.path.as_ref().cmp(b.path.as_ref())); - worktree_snapshot.propagate_git_statuses(&mut entries); (worktree_id, entries) }) .flat_map(|(worktree_id, entries)| { @@ -2303,19 +2652,22 @@ impl OutlinePanel { } if entry.is_dir() { - Some(FsEntry::Directory(worktree_id, entry)) + Some(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry, + })) } else { let (buffer_id, excerpts) = worktree_excerpts .get_mut(&worktree_id) .and_then(|worktree_excerpts| { worktree_excerpts.remove(&entry.id) })?; - Some(FsEntry::File( + Some(FsEntry::File(FsEntryFile { worktree_id, - entry, buffer_id, + entry, excerpts, - )) + })) } }) .collect::>() @@ -2328,25 +2680,29 @@ impl OutlinePanel { let new_visible_entries = external_excerpts .into_iter() .sorted_by_key(|(id, _)| *id) - .map(|(buffer_id, excerpts)| FsEntry::ExternalFile(buffer_id, excerpts)) + .map(|(buffer_id, excerpts)| { + FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts, + }) + }) .chain(worktree_entries) .filter(|visible_item| { match visible_item { - FsEntry::Directory(worktree_id, dir_entry) => { + FsEntry::Directory(directory) => { let parent_id = back_to_common_visited_parent( &mut visited_dirs, - worktree_id, - dir_entry, + &directory.worktree_id, + &directory.entry, ); - let depth = if root_entries.contains(&dir_entry.id) { - 0 - } else { + let mut depth = 0; + if !root_entries.contains(&directory.entry.id) { if auto_fold_dirs { let children = new_children_count - .get(worktree_id) + .get(&directory.worktree_id) .and_then(|children_count| { - children_count.get(&dir_entry.path) + children_count.get(&directory.entry.path) }) .copied() .unwrap_or_default(); @@ -2357,7 +2713,7 @@ impl OutlinePanel { .last() .map(|(parent_dir_id, _)| { new_unfolded_dirs - .get(worktree_id) + .get(&directory.worktree_id) .map_or(true, 
|unfolded_dirs| { unfolded_dirs .contains(parent_dir_id) @@ -2366,23 +2722,29 @@ impl OutlinePanel { .unwrap_or(true)) { new_unfolded_dirs - .entry(*worktree_id) + .entry(directory.worktree_id) .or_default() - .insert(dir_entry.id); + .insert(directory.entry.id); } } - parent_id + depth = parent_id .and_then(|(worktree_id, id)| { new_depth_map.get(&(worktree_id, id)).copied() }) .unwrap_or(0) - + 1 + + 1; }; - visited_dirs.push((dir_entry.id, dir_entry.path.clone())); - new_depth_map.insert((*worktree_id, dir_entry.id), depth); + visited_dirs + .push((directory.entry.id, directory.entry.path.clone())); + new_depth_map + .insert((directory.worktree_id, directory.entry.id), depth); } - FsEntry::File(worktree_id, file_entry, ..) => { + FsEntry::File(FsEntryFile { + worktree_id, + entry: file_entry, + .. + }) => { let parent_id = back_to_common_visited_parent( &mut visited_dirs, worktree_id, @@ -2426,6 +2788,7 @@ impl OutlinePanel { outline_panel .update(&mut cx, |outline_panel, cx| { outline_panel.updating_fs_entries = false; + outline_panel.new_entries_for_fs_update.clear(); outline_panel.excerpts = new_excerpts; outline_panel.collapsed_entries = new_collapsed_entries; outline_panel.unfolded_dirs = new_unfolded_dirs; @@ -2450,7 +2813,7 @@ impl OutlinePanel { self.clear_previous(cx); let buffer_search_subscription = cx.subscribe( &new_active_editor, - |outline_panel: &mut Self, _, e: &SearchEvent, cx: &mut ViewContext<'_, Self>| { + |outline_panel: &mut Self, _, e: &SearchEvent, cx: &mut ViewContext| { if matches!(e, SearchEvent::MatchesInvalidated) { outline_panel.update_search_matches(cx); }; @@ -2463,13 +2826,13 @@ impl OutlinePanel { item_handle: new_active_item.downgrade_item(), active_editor: new_active_editor.downgrade(), }); - let new_entries = - HashSet::from_iter(new_active_editor.read(cx).buffer().read(cx).excerpt_ids()); + self.new_entries_for_fs_update + .extend(new_active_editor.read(cx).buffer().read(cx).excerpt_ids()); self.selected_entry.invalidate(); 
- self.update_fs_entries(&new_active_editor, new_entries, None, cx); + self.update_fs_entries(new_active_editor, None, cx); } - fn clear_previous(&mut self, cx: &mut WindowContext<'_>) { + fn clear_previous(&mut self, cx: &mut WindowContext) { self.fs_entries_update_task = Task::ready(()); self.outline_fetch_tasks.clear(); self.cached_entries_update_task = Task::ready(()); @@ -2505,6 +2868,26 @@ impl OutlinePanel { .read(cx) .excerpt_containing(selection, cx)?; let buffer_id = buffer.read(cx).remote_id(); + + if editor.read(cx).buffer_folded(buffer_id, cx) { + return self + .fs_entries + .iter() + .find(|fs_entry| match fs_entry { + FsEntry::Directory(..) => false, + FsEntry::File(FsEntryFile { + buffer_id: other_buffer_id, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id: other_buffer_id, + .. + }) => buffer_id == *other_buffer_id, + }) + .cloned() + .map(PanelEntry::Fs); + } + let selection_display_point = selection.to_display_point(&editor_snapshot); match &self.mode { @@ -2649,26 +3032,31 @@ impl OutlinePanel { .cloned(); let closest_container = match outline_item { - Some(outline) => { - PanelEntry::Outline(OutlineEntry::Outline(buffer_id, excerpt_id, outline)) - } + Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { + buffer_id, + excerpt_id, + outline, + })), None => { self.cached_entries.iter().rev().find_map(|cached_entry| { match &cached_entry.entry { - PanelEntry::Outline(OutlineEntry::Excerpt( - entry_buffer_id, - entry_excerpt_id, - _, - )) => { - if entry_buffer_id == &buffer_id && entry_excerpt_id == &excerpt_id { + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id { Some(cached_entry.entry.clone()) } else { None } } PanelEntry::Fs( - FsEntry::ExternalFile(file_buffer_id, file_excerpts) - | FsEntry::File(_, _, file_buffer_id, file_excerpts), + FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id: file_buffer_id, + excerpts: 
file_excerpts, + }) + | FsEntry::File(FsEntryFile { + buffer_id: file_buffer_id, + excerpts: file_excerpts, + .. + }), ) => { if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) { Some(cached_entry.entry.clone()) @@ -2767,8 +3155,15 @@ impl OutlinePanel { .iter() .fold(HashMap::default(), |mut excerpts_to_fetch, fs_entry| { match fs_entry { - FsEntry::File(_, _, buffer_id, file_excerpts) - | FsEntry::ExternalFile(buffer_id, file_excerpts) => { + FsEntry::File(FsEntryFile { + buffer_id, + excerpts: file_excerpts, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts: file_excerpts, + }) => { let excerpts = self.excerpts.get(buffer_id); for &file_excerpt in file_excerpts { if let Some(excerpt) = excerpts @@ -2818,21 +3213,28 @@ impl OutlinePanel { fn abs_path(&self, entry: &PanelEntry, cx: &AppContext) -> Option { match entry { PanelEntry::Fs( - FsEntry::File(_, _, buffer_id, _) | FsEntry::ExternalFile(buffer_id, _), + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }), ) => self .buffer_snapshot_for_id(*buffer_id, cx) .and_then(|buffer_snapshot| { let file = File::from_dyn(buffer_snapshot.file())?; file.worktree.read(cx).absolutize(&file.path).ok() }), - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => self .project .read(cx) .worktree_for_id(*worktree_id, cx)? .read(cx) .absolutize(&entry.path) .ok(), - PanelEntry::FoldedDirs(worktree_id, dirs) => dirs.last().and_then(|entry| { + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries: dirs, + .. 
+ }) => dirs.last().and_then(|entry| { self.project .read(cx) .worktree_for_id(*worktree_id, cx) @@ -2844,12 +3246,12 @@ impl OutlinePanel { fn relative_path(&self, entry: &FsEntry, cx: &AppContext) -> Option> { match entry { - FsEntry::ExternalFile(buffer_id, _) => { + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { let buffer_snapshot = self.buffer_snapshot_for_id(*buffer_id, cx)?; Some(buffer_snapshot.file()?.path().clone()) } - FsEntry::Directory(_, entry) => Some(entry.path.clone()), - FsEntry::File(_, entry, ..) => Some(entry.path.clone()), + FsEntry::Directory(FsEntryDirectory { entry, .. }) => Some(entry.path.clone()), + FsEntry::File(FsEntryFile { entry, .. }) => Some(entry.path.clone()), } } @@ -2904,15 +3306,18 @@ impl OutlinePanel { &self, is_singleton: bool, query: Option, - cx: &mut ViewContext<'_, Self>, + cx: &mut ViewContext, ) -> Task<(Vec, Option)> { let project = self.project.clone(); + let Some(active_editor) = self.active_editor() else { + return Task::ready((Vec::new(), None)); + }; cx.spawn(|outline_panel, mut cx| async move { let mut generation_state = GenerationState::default(); let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; - let mut folded_dirs_entry = None::<(usize, WorktreeId, Vec)>; + let mut folded_dirs_entry = None::<(usize, FoldedDirsEntry)>; let track_matches = query.is_some(); #[derive(Debug)] @@ -2926,29 +3331,29 @@ impl OutlinePanel { for entry in outline_panel.fs_entries.clone() { let is_expanded = outline_panel.is_expanded(&entry); let (depth, should_add) = match &entry { - FsEntry::Directory(worktree_id, dir_entry) => { + FsEntry::Directory(directory_entry) => { let mut should_add = true; let is_root = project .read(cx) - .worktree_for_id(*worktree_id, cx) + .worktree_for_id(directory_entry.worktree_id, cx) .map_or(false, |worktree| { - worktree.read(cx).root_entry() == Some(dir_entry) + 
worktree.read(cx).root_entry() == Some(&directory_entry.entry) }); let folded = auto_fold_dirs && !is_root && outline_panel .unfolded_dirs - .get(worktree_id) + .get(&directory_entry.worktree_id) .map_or(true, |unfolded_dirs| { - !unfolded_dirs.contains(&dir_entry.id) + !unfolded_dirs.contains(&directory_entry.entry.id) }); let fs_depth = outline_panel .fs_entries_depth - .get(&(*worktree_id, dir_entry.id)) + .get(&(directory_entry.worktree_id, directory_entry.entry.id)) .copied() .unwrap_or(0); while let Some(parent) = parent_dirs.last() { - if dir_entry.path.starts_with(&parent.path) { + if directory_entry.entry.path.starts_with(&parent.path) { break; } parent_dirs.pop(); @@ -2956,11 +3361,14 @@ impl OutlinePanel { let auto_fold = match parent_dirs.last() { Some(parent) => { parent.folded - && Some(parent.path.as_ref()) == dir_entry.path.parent() + && Some(parent.path.as_ref()) + == directory_entry.entry.path.parent() && outline_panel .fs_children_count - .get(worktree_id) - .and_then(|entries| entries.get(&dir_entry.path)) + .get(&directory_entry.worktree_id) + .and_then(|entries| { + entries.get(&directory_entry.entry.path) + }) .copied() .unwrap_or_default() .may_be_fold_part() @@ -2978,7 +3386,7 @@ impl OutlinePanel { parent.depth + 1 }; parent_dirs.push(ParentStats { - path: dir_entry.path.clone(), + path: directory_entry.entry.path.clone(), folded, expanded: parent_expanded && is_expanded, depth: new_depth, @@ -2987,7 +3395,7 @@ impl OutlinePanel { } None => { parent_dirs.push(ParentStats { - path: dir_entry.path.clone(), + path: directory_entry.entry.path.clone(), folded, expanded: is_expanded, depth: fs_depth, @@ -2996,37 +3404,38 @@ impl OutlinePanel { } }; - if let Some((folded_depth, folded_worktree_id, mut folded_dirs)) = - folded_dirs_entry.take() + if let Some((folded_depth, mut folded_dirs)) = folded_dirs_entry.take() { if folded - && worktree_id == &folded_worktree_id - && dir_entry.path.parent() - == folded_dirs.last().map(|entry| 
entry.path.as_ref()) + && directory_entry.worktree_id == folded_dirs.worktree_id + && directory_entry.entry.path.parent() + == folded_dirs + .entries + .last() + .map(|entry| entry.path.as_ref()) { - folded_dirs.push(dir_entry.clone()); - folded_dirs_entry = - Some((folded_depth, folded_worktree_id, folded_dirs)) + folded_dirs.entries.push(directory_entry.entry.clone()); + folded_dirs_entry = Some((folded_depth, folded_dirs)) } else { if !is_singleton { let start_of_collapsed_dir_sequence = !parent_expanded && parent_dirs .iter() .rev() - .nth(folded_dirs.len() + 1) + .nth(folded_dirs.entries.len() + 1) .map_or(true, |parent| parent.expanded); if start_of_collapsed_dir_sequence || parent_expanded || query.is_some() { if parent_folded { - folded_dirs.push(dir_entry.clone()); + folded_dirs + .entries + .push(directory_entry.entry.clone()); should_add = false; } - let new_folded_dirs = PanelEntry::FoldedDirs( - folded_worktree_id, - folded_dirs, - ); + let new_folded_dirs = + PanelEntry::FoldedDirs(folded_dirs.clone()); outline_panel.push_entry( &mut generation_state, track_matches, @@ -3040,12 +3449,23 @@ impl OutlinePanel { folded_dirs_entry = if parent_folded { None } else { - Some((depth, *worktree_id, vec![dir_entry.clone()])) + Some(( + depth, + FoldedDirsEntry { + worktree_id: directory_entry.worktree_id, + entries: vec![directory_entry.entry.clone()], + }, + )) }; } } else if folded { - folded_dirs_entry = - Some((depth, *worktree_id, vec![dir_entry.clone()])); + folded_dirs_entry = Some(( + depth, + FoldedDirsEntry { + worktree_id: directory_entry.worktree_id, + entries: vec![directory_entry.entry.clone()], + }, + )); } let should_add = @@ -3053,21 +3473,22 @@ impl OutlinePanel { (depth, should_add) } FsEntry::ExternalFile(..) 
=> { - if let Some((folded_depth, worktree_id, folded_dirs)) = - folded_dirs_entry.take() - { + if let Some((folded_depth, folded_dir)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() .find(|parent| { - folded_dirs.iter().all(|entry| entry.path != parent.path) + folded_dir + .entries + .iter() + .all(|entry| entry.path != parent.path) }) .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dir), folded_depth, cx, ); @@ -3076,22 +3497,23 @@ impl OutlinePanel { parent_dirs.clear(); (0, true) } - FsEntry::File(worktree_id, file_entry, ..) => { - if let Some((folded_depth, worktree_id, folded_dirs)) = - folded_dirs_entry.take() - { + FsEntry::File(file) => { + if let Some((folded_depth, folded_dirs)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() .find(|parent| { - folded_dirs.iter().all(|entry| entry.path != parent.path) + folded_dirs + .entries + .iter() + .all(|entry| entry.path != parent.path) }) .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dirs), folded_depth, cx, ); @@ -3100,23 +3522,22 @@ impl OutlinePanel { let fs_depth = outline_panel .fs_entries_depth - .get(&(*worktree_id, file_entry.id)) + .get(&(file.worktree_id, file.entry.id)) .copied() .unwrap_or(0); while let Some(parent) = parent_dirs.last() { - if file_entry.path.starts_with(&parent.path) { + if file.entry.path.starts_with(&parent.path) { break; } parent_dirs.pop(); } - let (depth, should_add) = match parent_dirs.last() { + match parent_dirs.last() { Some(parent) => { let new_depth = parent.depth + 1; (new_depth, parent.expanded) } None => (fs_depth, 
true), - }; - (depth, should_add) + } } }; @@ -3137,6 +3558,7 @@ impl OutlinePanel { if is_singleton || query.is_some() || (should_add && is_expanded) { outline_panel.add_search_entries( &mut generation_state, + &active_editor, entry.clone(), depth, query.clone(), @@ -3149,28 +3571,34 @@ impl OutlinePanel { let excerpts_to_consider = if is_singleton || query.is_some() || (should_add && is_expanded) { match &entry { - FsEntry::File(_, _, buffer_id, entry_excerpts) => { - Some((*buffer_id, entry_excerpts)) - } - FsEntry::ExternalFile(buffer_id, entry_excerpts) => { - Some((*buffer_id, entry_excerpts)) - } + FsEntry::File(FsEntryFile { + buffer_id, + excerpts, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts, + .. + }) => Some((*buffer_id, excerpts)), _ => None, } } else { None }; if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider { - outline_panel.add_excerpt_entries( - &mut generation_state, - buffer_id, - entry_excerpts, - depth, - track_matches, - is_singleton, - query.as_deref(), - cx, - ); + if !active_editor.read(cx).buffer_folded(buffer_id, cx) { + outline_panel.add_excerpt_entries( + &mut generation_state, + buffer_id, + entry_excerpts, + depth, + track_matches, + is_singleton, + query.as_deref(), + cx, + ); + } } } } @@ -3191,17 +3619,22 @@ impl OutlinePanel { } } - if let Some((folded_depth, worktree_id, folded_dirs)) = folded_dirs_entry.take() { + if let Some((folded_depth, folded_dirs)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() - .find(|parent| folded_dirs.iter().all(|entry| entry.path != parent.path)) + .find(|parent| { + folded_dirs + .entries + .iter() + .all(|entry| entry.path != parent.path) + }) .map_or(true, |parent| parent.expanded); if parent_expanded || query.is_some() { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dirs), folded_depth, cx, ); @@ -3264,13 +3697,16 
@@ impl OutlinePanel { depth: usize, cx: &mut WindowContext, ) { - let entry = if let PanelEntry::FoldedDirs(worktree_id, entries) = &entry { - match entries.len() { + let entry = if let PanelEntry::FoldedDirs(folded_dirs_entry) = &entry { + match folded_dirs_entry.entries.len() { 0 => { debug_panic!("Empty folded dirs receiver"); return; } - 1 => PanelEntry::Fs(FsEntry::Directory(*worktree_id, entries[0].clone())), + 1 => PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id: folded_dirs_entry.worktree_id, + entry: folded_dirs_entry.entries[0].clone(), + })), _ => entry, } } else { @@ -3284,40 +3720,32 @@ impl OutlinePanel { if let Some(file_name) = self.relative_path(fs_entry, cx).as_deref().map(file_name) { - state.match_candidates.push(StringMatchCandidate { - id, - string: file_name.to_string(), - char_bag: file_name.chars().collect(), - }); + state + .match_candidates + .push(StringMatchCandidate::new(id, &file_name)); } } - PanelEntry::FoldedDirs(worktree_id, entries) => { - let dir_names = self.dir_names_string(entries, *worktree_id, cx); + PanelEntry::FoldedDirs(folded_dir_entry) => { + let dir_names = self.dir_names_string( + &folded_dir_entry.entries, + folded_dir_entry.worktree_id, + cx, + ); { - state.match_candidates.push(StringMatchCandidate { - id, - string: dir_names.clone(), - char_bag: dir_names.chars().collect(), - }); + state + .match_candidates + .push(StringMatchCandidate::new(id, &dir_names)); } } - PanelEntry::Outline(outline_entry) => match outline_entry { - OutlineEntry::Outline(_, _, outline) => { - state.match_candidates.push(StringMatchCandidate { - id, - string: outline.text.clone(), - char_bag: outline.text.chars().collect(), - }); - } - OutlineEntry::Excerpt(..) 
=> {} - }, + PanelEntry::Outline(OutlineEntry::Outline(outline_entry)) => state + .match_candidates + .push(StringMatchCandidate::new(id, &outline_entry.outline.text)), + PanelEntry::Outline(OutlineEntry::Excerpt(_)) => {} PanelEntry::Search(new_search_entry) => { if let Some(search_data) = new_search_entry.render_data.get() { - state.match_candidates.push(StringMatchCandidate { - id, - char_bag: search_data.context_text.chars().collect(), - string: search_data.context_text.clone(), - }); + state + .match_candidates + .push(StringMatchCandidate::new(id, &search_data.context_text)); } } } @@ -3340,7 +3768,7 @@ impl OutlinePanel { fn dir_names_string( &self, - entries: &[Entry], + entries: &[GitEntry], worktree_id: WorktreeId, cx: &AppContext, ) -> String { @@ -3362,11 +3790,17 @@ impl OutlinePanel { fn is_expanded(&self, entry: &FsEntry) -> bool { let entry_to_check = match entry { - FsEntry::ExternalFile(buffer_id, _) => CollapsedEntry::ExternalFile(*buffer_id), - FsEntry::File(worktree_id, _, buffer_id, _) => { - CollapsedEntry::File(*worktree_id, *buffer_id) + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + CollapsedEntry::ExternalFile(*buffer_id) } - FsEntry::Directory(worktree_id, entry) => CollapsedEntry::Dir(*worktree_id, entry.id), + FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + }) => CollapsedEntry::File(*worktree_id, *buffer_id), + FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. 
+ }) => CollapsedEntry::Dir(*worktree_id, entry.id), }; !self.collapsed_entries.contains(&entry_to_check) } @@ -3490,11 +3924,11 @@ impl OutlinePanel { self.push_entry( state, track_matches, - PanelEntry::Outline(OutlineEntry::Excerpt( + PanelEntry::Outline(OutlineEntry::Excerpt(OutlineEntryExcerpt { buffer_id, - excerpt_id, - excerpt.range.clone(), - )), + id: excerpt_id, + range: excerpt.range.clone(), + })), excerpt_depth, cx, ); @@ -3515,11 +3949,11 @@ impl OutlinePanel { self.push_entry( state, track_matches, - PanelEntry::Outline(OutlineEntry::Outline( + PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { buffer_id, excerpt_id, - outline.clone(), - )), + outline: outline.clone(), + })), outline_base_depth + outline.depth, cx, ); @@ -3532,34 +3966,49 @@ impl OutlinePanel { fn add_search_entries( &mut self, state: &mut GenerationState, + active_editor: &View, parent_entry: FsEntry, parent_depth: usize, filter_query: Option, is_singleton: bool, cx: &mut ViewContext, ) { - if self.active_editor().is_none() { - return; - }; let ItemsDisplayMode::Search(search_state) = &mut self.mode else { return; }; let kind = search_state.kind; let related_excerpts = match &parent_entry { - FsEntry::Directory(_, _) => return, - FsEntry::ExternalFile(_, excerpts) => excerpts, - FsEntry::File(_, _, _, excerpts) => excerpts, + FsEntry::Directory(_) => return, + FsEntry::ExternalFile(external) => &external.excerpts, + FsEntry::File(file) => &file.excerpts, } .iter() .copied() .collect::>(); let depth = if is_singleton { 0 } else { parent_depth + 1 }; - let new_search_matches = search_state.matches.iter().filter(|(match_range, _)| { - related_excerpts.contains(&match_range.start.excerpt_id) - || related_excerpts.contains(&match_range.end.excerpt_id) - }); + let new_search_matches = search_state + .matches + .iter() + .filter(|(match_range, _)| { + related_excerpts.contains(&match_range.start.excerpt_id) + || related_excerpts.contains(&match_range.end.excerpt_id) + }) + 
.filter(|(match_range, _)| { + let editor = active_editor.read(cx); + if let Some(buffer_id) = match_range.start.buffer_id { + if editor.buffer_folded(buffer_id, cx) { + return false; + } + } + if let Some(buffer_id) = match_range.start.buffer_id { + if editor.buffer_folded(buffer_id, cx) { + return false; + } + } + true + }); let new_search_entries = new_search_matches .map(|(match_range, search_data)| SearchEntry { @@ -3798,24 +4247,28 @@ impl OutlinePanel { fn width_estimate(&self, depth: usize, entry: &PanelEntry, cx: &AppContext) -> u64 { let item_text_chars = match entry { - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => self - .buffer_snapshot_for_id(*buffer_id, cx) + PanelEntry::Fs(FsEntry::ExternalFile(external)) => self + .buffer_snapshot_for_id(external.buffer_id, cx) .and_then(|snapshot| { Some(snapshot.file()?.path().file_name()?.to_string_lossy().len()) }) .unwrap_or_default(), - PanelEntry::Fs(FsEntry::Directory(_, directory)) => directory + PanelEntry::Fs(FsEntry::Directory(directory)) => directory + .entry .path .file_name() .map(|name| name.to_string_lossy().len()) .unwrap_or_default(), - PanelEntry::Fs(FsEntry::File(_, file, _, _)) => file + PanelEntry::Fs(FsEntry::File(file)) => file + .entry .path .file_name() .map(|name| name.to_string_lossy().len()) .unwrap_or_default(), - PanelEntry::FoldedDirs(_, dirs) => { - dirs.iter() + PanelEntry::FoldedDirs(folded_dirs) => { + folded_dirs + .entries + .iter() .map(|dir| { dir.path .file_name() @@ -3823,13 +4276,13 @@ impl OutlinePanel { .unwrap_or_default() }) .sum::() - + dirs.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() + + folded_dirs.entries.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, _, range)) => self - .excerpt_label(*buffer_id, range, cx) + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self + .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) .map(|label| label.len()) .unwrap_or_default(), - 
PanelEntry::Outline(OutlineEntry::Outline(_, _, outline)) => outline.text.len(), + PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.outline.text.len(), PanelEntry::Search(search) => search .render_data .get() @@ -3845,7 +4298,7 @@ impl OutlinePanel { query: Option, show_indent_guides: bool, indent_size: f32, - cx: &mut ViewContext<'_, Self>, + cx: &mut ViewContext, ) -> Div { let contents = if self.cached_entries.is_empty() { let header = if self.updating_fs_entries { @@ -3903,38 +4356,25 @@ impl OutlinePanel { cached_entry.string_match.as_ref(), cx, )), - PanelEntry::FoldedDirs(worktree_id, entries) => { + PanelEntry::FoldedDirs(folded_dirs_entry) => { Some(outline_panel.render_folded_dirs( - worktree_id, - &entries, + &folded_dirs_entry, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )) + } + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + outline_panel.render_excerpt(&excerpt, cached_entry.depth, cx) + } + PanelEntry::Outline(OutlineEntry::Outline(entry)) => { + Some(outline_panel.render_outline( + &entry, cached_entry.depth, cached_entry.string_match.as_ref(), cx, )) } - PanelEntry::Outline(OutlineEntry::Excerpt( - buffer_id, - excerpt_id, - excerpt, - )) => outline_panel.render_excerpt( - buffer_id, - excerpt_id, - &excerpt, - cached_entry.depth, - cx, - ), - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - outline, - )) => Some(outline_panel.render_outline( - buffer_id, - excerpt_id, - &outline, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), PanelEntry::Search(SearchEntry { match_range, render_data, @@ -4024,7 +4464,7 @@ impl OutlinePanel { deferred( anchored() .position(*position) - .anchor(gpui::AnchorCorner::TopLeft) + .anchor(gpui::Corner::TopLeft) .child(menu.clone()), ) .with_priority(1) @@ -4033,7 +4473,7 @@ impl OutlinePanel { v_flex().w_full().flex_1().overflow_hidden().child(contents) } - fn render_filter_footer(&mut self, pinned: bool, cx: &mut ViewContext<'_, 
Self>) -> Div { + fn render_filter_footer(&mut self, pinned: bool, cx: &mut ViewContext) -> Div { v_flex().flex_none().child(horizontal_separator(cx)).child( h_flex() .p_2() @@ -4067,6 +4507,42 @@ impl OutlinePanel { ), ) } + + fn buffers_inside_directory( + &self, + dir_worktree: WorktreeId, + dir_entry: &GitEntry, + ) -> HashSet { + if !dir_entry.is_dir() { + debug_panic!("buffers_inside_directory called on a non-directory entry {dir_entry:?}"); + return HashSet::default(); + } + + self.fs_entries + .iter() + .skip_while(|fs_entry| match fs_entry { + FsEntry::Directory(directory) => { + directory.worktree_id != dir_worktree || &directory.entry != dir_entry + } + _ => true, + }) + .skip(1) + .take_while(|fs_entry| match fs_entry { + FsEntry::ExternalFile(..) => false, + FsEntry::Directory(directory) => { + directory.worktree_id == dir_worktree + && directory.entry.path.starts_with(&dir_entry.path) + } + FsEntry::File(file) => { + file.worktree_id == dir_worktree && file.entry.path.starts_with(&dir_entry.path) + } + }) + .filter_map(|fs_entry| match fs_entry { + FsEntry::File(file) => Some(file.buffer_id), + _ => None, + }) + .collect() + } } fn workspace_active_editor( @@ -4179,23 +4655,27 @@ impl Panel for OutlinePanel { .update(&mut cx, |outline_panel, cx| { let old_active = outline_panel.active; outline_panel.active = active; - if active && old_active != active { - if let Some((active_item, active_editor)) = outline_panel - .workspace - .upgrade() - .and_then(|workspace| workspace_active_editor(workspace.read(cx), cx)) - { - if outline_panel.should_replace_active_item(active_item.as_ref()) { - outline_panel.replace_active_editor(active_item, active_editor, cx); - } else { - outline_panel.update_fs_entries( - &active_editor, - HashSet::default(), - None, - cx, - ) + if old_active != active { + if active { + if let Some((active_item, active_editor)) = + outline_panel.workspace.upgrade().and_then(|workspace| { + workspace_active_editor(workspace.read(cx), cx) + }) 
+ { + if outline_panel.should_replace_active_item(active_item.as_ref()) { + outline_panel.replace_active_editor( + active_item, + active_editor, + cx, + ); + } else { + outline_panel.update_fs_entries(active_editor, None, cx) + } + return; } - } else if !outline_panel.pinned { + } + + if !outline_panel.pinned { outline_panel.clear_previous(cx); } } @@ -4205,6 +4685,10 @@ impl Panel for OutlinePanel { }) .detach() } + + fn activation_priority(&self) -> u32 { + 5 + } } impl FocusableView for OutlinePanel { @@ -4338,20 +4822,20 @@ fn subscribe_for_editor_events( cx: &mut ViewContext, ) -> Subscription { let debounce = Some(UPDATE_DEBOUNCE); - cx.subscribe( - editor, - move |outline_panel, editor, e: &EditorEvent, cx| match e { + cx.subscribe(editor, move |outline_panel, editor, e: &EditorEvent, cx| { + if !outline_panel.active { + return; + } + match e { EditorEvent::SelectionsChanged { local: true } => { outline_panel.reveal_entry_for_selection(editor, cx); cx.notify(); } EditorEvent::ExcerptsAdded { excerpts, .. } => { - outline_panel.update_fs_entries( - &editor, - excerpts.iter().map(|&(excerpt_id, _)| excerpt_id).collect(), - debounce, - cx, - ); + outline_panel + .new_entries_for_fs_update + .extend(excerpts.iter().map(|&(excerpt_id, _)| excerpt_id)); + outline_panel.update_fs_entries(editor, debounce, cx); } EditorEvent::ExcerptsRemoved { ids } => { let mut ids = ids.iter().collect::>(); @@ -4361,7 +4845,7 @@ fn subscribe_for_editor_events( break; } } - outline_panel.update_fs_entries(&editor, HashSet::default(), debounce, cx); + outline_panel.update_fs_entries(editor, debounce, cx); } EditorEvent::ExcerptsExpanded { ids } => { outline_panel.invalidate_outlines(ids); @@ -4371,6 +4855,73 @@ fn subscribe_for_editor_events( outline_panel.invalidate_outlines(ids); outline_panel.update_non_fs_items(cx); } + EditorEvent::BufferFoldToggled { ids, .. 
} => { + outline_panel.invalidate_outlines(ids); + let mut latest_unfolded_buffer_id = None; + let mut latest_folded_buffer_id = None; + let mut ignore_selections_change = false; + outline_panel.new_entries_for_fs_update.extend( + ids.iter() + .filter(|id| { + outline_panel + .excerpts + .iter() + .find_map(|(buffer_id, excerpts)| { + if excerpts.contains_key(id) { + ignore_selections_change |= outline_panel + .preserve_selection_on_buffer_fold_toggles + .remove(buffer_id); + Some(buffer_id) + } else { + None + } + }) + .map(|buffer_id| { + if editor.read(cx).buffer_folded(*buffer_id, cx) { + latest_folded_buffer_id = Some(*buffer_id); + false + } else { + latest_unfolded_buffer_id = Some(*buffer_id); + true + } + }) + .unwrap_or(true) + }) + .copied(), + ); + if !ignore_selections_change { + if let Some(entry_to_select) = latest_unfolded_buffer_id + .or(latest_folded_buffer_id) + .and_then(|toggled_buffer_id| { + outline_panel + .fs_entries + .iter() + .find_map(|fs_entry| match fs_entry { + FsEntry::ExternalFile(external) => { + if external.buffer_id == toggled_buffer_id { + Some(fs_entry.clone()) + } else { + None + } + } + FsEntry::File(FsEntryFile { buffer_id, .. }) => { + if *buffer_id == toggled_buffer_id { + Some(fs_entry.clone()) + } else { + None + } + } + FsEntry::Directory(..) 
=> None, + }) + }) + .map(PanelEntry::Fs) + { + outline_panel.select_entry(entry_to_select, true, cx); + } + } + + outline_panel.update_fs_entries(editor, debounce, cx); + } EditorEvent::Reparsed(buffer_id) => { if let Some(excerpts) = outline_panel.excerpts.get_mut(buffer_id) { for (_, excerpt) in excerpts { @@ -4380,8 +4931,8 @@ fn subscribe_for_editor_events( outline_panel.update_non_fs_items(cx); } _ => {} - }, - ) + } + }) } fn empty_icon() -> AnyElement { @@ -4413,6 +4964,7 @@ impl GenerationState { #[cfg(test)] mod tests { + use db::indoc; use gpui::{TestAppContext, VisualTestContext, WindowHandle}; use language::{tree_sitter_rust, Language, LanguageConfig, LanguageMatcher}; use pretty_assertions::assert_eq; @@ -4527,6 +5079,8 @@ mod tests { outline_panel.update(cx, |outline_panel, cx| { outline_panel.collapse_selected_entry(&CollapseSelectedEntry, cx); }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); cx.run_until_parked(); outline_panel.update(cx, |outline_panel, cx| { assert_eq!( @@ -4559,6 +5113,8 @@ mod tests { outline_panel.update(cx, |outline_panel, cx| { outline_panel.expand_all_entries(&ExpandAllEntries, cx); }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); cx.run_until_parked(); outline_panel.update(cx, |outline_panel, cx| { outline_panel.select_parent(&SelectParent, cx); @@ -4587,6 +5143,8 @@ mod tests { outline_panel.update(cx, |outline_panel, cx| { outline_panel.collapse_selected_entry(&CollapseSelectedEntry, cx); }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); cx.run_until_parked(); outline_panel.update(cx, |outline_panel, cx| { assert_eq!( @@ -4611,6 +5169,8 @@ mod tests { outline_panel.update(cx, |outline_panel, cx| { outline_panel.expand_selected_entry(&ExpandSelectedEntry, cx); }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); cx.run_until_parked(); outline_panel.update(cx, |outline_panel, cx| { 
assert_eq!( @@ -4859,9 +5419,13 @@ mod tests { ), select_first_in_all_matches(navigated_outline_selection) ); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + outline_panel.update(cx, |_, cx| { assert_eq!( selected_row_text(&active_editor, cx), - initial_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes "Should still have the initial caret position after SelectNext calls" ); }); @@ -4891,9 +5455,13 @@ mod tests { ), select_first_in_all_matches(next_navigated_outline_selection) ); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + outline_panel.update(cx, |_, cx| { assert_eq!( selected_row_text(&active_editor, cx), - navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + next_navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes "Should again preserve the selection after another SelectNext call" ); }); @@ -4930,6 +5498,312 @@ mod tests { }); } + #[gpui::test] + async fn test_navigating_in_singleton(cx: &mut TestAppContext) { + init_test(cx); + + let root = "/root"; + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + root, + json!({ + "src": { + "lib.rs": indoc!(" +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +struct OutlineEntryExcerpt { + id: ExcerptId, + buffer_id: BufferId, + range: ExcerptRange, +}"), + } + }), + ) + .await; + let project = Project::test(fs.clone(), [root.as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(Arc::new( + rust_lang() + .with_outline_query( + r#" + (struct_item + (visibility_modifier)? @context + "struct" @context + name: (_) @name) @item + + (field_declaration + (visibility_modifier)? 
@context + name: (_) @name) @item +"#, + ) + .unwrap(), + )) + }); + let workspace = add_outline_panel(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let outline_panel = outline_panel(&workspace, cx); + outline_panel.update(cx, |outline_panel, cx| outline_panel.set_active(true, cx)); + + let _editor = workspace + .update(cx, |workspace, cx| { + workspace.open_abs_path(PathBuf::from("/root/src/lib.rs"), true, cx) + }) + .unwrap() + .await + .expect("Failed to open Rust source file") + .downcast::() + .expect("Should open an editor for Rust source file"); + + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id + outline: buffer_id + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_next(&SelectNext, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt <==== selected + outline: id + outline: buffer_id + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_next(&SelectNext, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id 
<==== selected + outline: buffer_id + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_next(&SelectNext, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id + outline: buffer_id <==== selected + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_next(&SelectNext, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id + outline: buffer_id + outline: range <==== selected" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_next(&SelectNext, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt <==== selected + outline: id + outline: buffer_id + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_prev(&SelectPrev, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + 
&outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id + outline: buffer_id + outline: range <==== selected" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_prev(&SelectPrev, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id + outline: buffer_id <==== selected + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_prev(&SelectPrev, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id <==== selected + outline: buffer_id + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_prev(&SelectPrev, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt <==== selected + outline: id + outline: buffer_id + outline: range" + ) + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.select_prev(&SelectPrev, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + 
outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + indoc!( + " +outline: struct OutlineEntryExcerpt + outline: id + outline: buffer_id + outline: range <==== selected" + ) + ); + }); + } + #[gpui::test(iterations = 10)] async fn test_frontend_repo_structure(cx: &mut TestAppContext) { init_test(cx); @@ -5041,6 +5915,8 @@ mod tests { } outline_panel.collapse_selected_entry(&CollapseSelectedEntry, cx); }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); cx.run_until_parked(); outline_panel.update(cx, |outline_panel, cx| { assert_eq!( @@ -5060,6 +5936,91 @@ mod tests { search: static"# ); }); + + outline_panel.update(cx, |outline_panel, cx| { + // Move to the next visible non-FS entry + for _ in 0..3 { + outline_panel.select_next(&SelectNext, cx); + } + }); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + r#"/ + public/lottie/ + syntax-tree.json + search: { "something": "static" } + src/ + app/(site)/ + components/ + ErrorBoundary.tsx + search: static <==== selected"# + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel + .active_editor() + .expect("Should have an active editor") + .update(cx, |editor, cx| { + editor.toggle_fold(&editor::actions::ToggleFold, cx) + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + r#"/ + public/lottie/ + syntax-tree.json + search: { "something": "static" } + src/ + app/(site)/ + components/ + ErrorBoundary.tsx <==== selected"# + ); + }); + 
+ outline_panel.update(cx, |outline_panel, cx| { + outline_panel + .active_editor() + .expect("Should have an active editor") + .update(cx, |editor, cx| { + editor.toggle_fold(&editor::actions::ToggleFold, cx) + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &snapshot(&outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry() + ), + r#"/ + public/lottie/ + syntax-tree.json + search: { "something": "static" } + src/ + app/(site)/ + components/ + ErrorBoundary.tsx <==== selected + search: static"# + ); + }); } async fn add_outline_panel( @@ -5110,41 +6071,46 @@ mod tests { } display_string += &match &entry.entry { PanelEntry::Fs(entry) => match entry { - FsEntry::ExternalFile(_, _) => { + FsEntry::ExternalFile(_) => { panic!("Did not cover external files with tests") } - FsEntry::Directory(_, dir_entry) => format!( + FsEntry::Directory(directory) => format!( "{}/", - dir_entry + directory + .entry .path .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_default() ), - FsEntry::File(_, file_entry, ..) => file_entry + FsEntry::File(file) => file + .entry .path .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_default(), }, - PanelEntry::FoldedDirs(_, dirs) => dirs + PanelEntry::FoldedDirs(folded_dirs) => folded_dirs + .entries .iter() .filter_map(|dir| dir.path.file_name()) .map(|name| name.to_string_lossy().to_string() + "/") .collect(), PanelEntry::Outline(outline_entry) => match outline_entry { - OutlineEntry::Excerpt(_, _, _) => continue, - OutlineEntry::Outline(_, _, outline) => format!("outline: {}", outline.text), + OutlineEntry::Excerpt(_) => continue, + OutlineEntry::Outline(outline_entry) => { + format!("outline: {}", outline_entry.outline.text) + } }, - PanelEntry::Search(SearchEntry { - render_data, - match_range, - .. 
- }) => { + PanelEntry::Search(search_entry) => { format!( "search: {}", - render_data - .get_or_init(|| SearchData::new(match_range, &multi_buffer_snapshot)) + search_entry + .render_data + .get_or_init(|| SearchData::new( + &search_entry.match_range, + &multi_buffer_snapshot + )) .context_text ) } @@ -5383,8 +6349,8 @@ mod tests { .with_injection_query( r#" (macro_invocation - (token_tree) @content - (#set! "language" "rust")) + (token_tree) @injection.content + (#set! injection.language "rust")) "#, ) .unwrap() diff --git a/crates/picker/src/head.rs b/crates/picker/src/head.rs index 5ebcaf13a5..d91a73b1b3 100644 --- a/crates/picker/src/head.rs +++ b/crates/picker/src/head.rs @@ -16,7 +16,7 @@ pub(crate) enum Head { impl Head { pub fn editor( placeholder_text: Arc, - edit_handler: impl FnMut(&mut V, View, &EditorEvent, &mut ViewContext<'_, V>) + 'static, + edit_handler: impl FnMut(&mut V, View, &EditorEvent, &mut ViewContext) + 'static, cx: &mut ViewContext, ) -> Self { let editor = cx.new_view(|cx| { @@ -29,7 +29,7 @@ impl Head { } pub fn empty( - blur_handler: impl FnMut(&mut V, &mut ViewContext<'_, V>) + 'static, + blur_handler: impl FnMut(&mut V, &mut ViewContext) + 'static, cx: &mut ViewContext, ) -> Self { let head = cx.new_view(EmptyHead::new); diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 119c412b48..c97fceeef3 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -425,6 +425,19 @@ impl Picker { self.cancel(&menu::Cancel, cx); } + pub fn refresh_placeholder(&mut self, cx: &mut WindowContext) { + match &self.head { + Head::Editor(view) => { + let placeholder = self.delegate.placeholder_text(cx); + view.update(cx, |this, cx| { + this.set_placeholder_text(placeholder, cx); + cx.notify(); + }); + } + Head::Empty(_) => {} + } + } + pub fn refresh(&mut self, cx: &mut ViewContext) { let query = self.query(cx); self.update_matches(query, cx); @@ -480,7 +493,7 @@ impl Picker { } } - pub fn set_query(&self, 
query: impl Into>, cx: &mut WindowContext<'_>) { + pub fn set_query(&self, query: impl Into>, cx: &mut WindowContext) { if let Head::Editor(ref editor) = &self.head { editor.update(cx, |editor, cx| { editor.set_text(query, cx); diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 92db62e6c6..d4c1654d92 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -58,6 +58,7 @@ impl Prettier { "prettier.config.js", "prettier.config.cjs", ".editorconfig", + ".prettierignore", ]; pub async fn locate_prettier_installation( @@ -134,6 +135,101 @@ impl Prettier { } } + pub async fn locate_prettier_ignore( + fs: &dyn Fs, + prettier_ignores: &HashSet, + locate_from: &Path, + ) -> anyhow::Result>> { + let mut path_to_check = locate_from + .components() + .take_while(|component| component.as_os_str().to_string_lossy() != "node_modules") + .collect::(); + if path_to_check != locate_from { + log::debug!( + "Skipping prettier ignore location for path {path_to_check:?} that is inside node_modules" + ); + return Ok(ControlFlow::Break(())); + } + + let path_to_check_metadata = fs + .metadata(&path_to_check) + .await + .with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))? + .with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?; + if !path_to_check_metadata.is_dir { + path_to_check.pop(); + } + + let mut closest_package_json_path = None; + loop { + if prettier_ignores.contains(&path_to_check) { + log::debug!("Found prettier ignore at {path_to_check:?}"); + return Ok(ControlFlow::Continue(Some(path_to_check))); + } else if let Some(package_json_contents) = + read_package_json(fs, &path_to_check).await? + { + let ignore_path = path_to_check.join(".prettierignore"); + if let Some(metadata) = fs + .metadata(&ignore_path) + .await + .with_context(|| format!("fetching metadata for {ignore_path:?}"))? 
+ { + if !metadata.is_dir && !metadata.is_symlink { + log::info!("Found prettier ignore at {ignore_path:?}"); + return Ok(ControlFlow::Continue(Some(path_to_check))); + } + } + match &closest_package_json_path { + None => closest_package_json_path = Some(path_to_check.clone()), + Some(closest_package_json_path) => { + if let Some(serde_json::Value::Array(workspaces)) = + package_json_contents.get("workspaces") + { + let subproject_path = closest_package_json_path + .strip_prefix(&path_to_check) + .expect("traversing path parents, should be able to strip prefix"); + + if workspaces + .iter() + .filter_map(|value| { + if let serde_json::Value::String(s) = value { + Some(s.clone()) + } else { + log::warn!( + "Skipping non-string 'workspaces' value: {value:?}" + ); + None + } + }) + .any(|workspace_definition| { + workspace_definition == subproject_path.to_string_lossy() + || PathMatcher::new(&[workspace_definition]) + .ok() + .map_or(false, |path_matcher| { + path_matcher.is_match(subproject_path) + }) + }) + { + let workspace_ignore = path_to_check.join(".prettierignore"); + if let Some(metadata) = fs.metadata(&workspace_ignore).await? 
{ + if !metadata.is_dir { + log::info!("Found prettier ignore at workspace root {workspace_ignore:?}"); + return Ok(ControlFlow::Continue(Some(path_to_check))); + } + } + } + } + } + } + } + + if !path_to_check.pop() { + log::debug!("Found no prettier ignore in ancestors of {locate_from:?}"); + return Ok(ControlFlow::Continue(None)); + } + } + } + #[cfg(any(test, feature = "test-support"))] pub async fn start( _: LanguageServerId, @@ -201,6 +297,7 @@ impl Prettier { &self, buffer: &Model, buffer_path: Option, + ignore_dir: Option, cx: &mut AsyncAppContext, ) -> anyhow::Result { match self { @@ -315,11 +412,17 @@ impl Prettier { } + let ignore_path = ignore_dir.and_then(|dir| { + let ignore_file = dir.join(".prettierignore"); + ignore_file.is_file().then_some(ignore_file) + }); + log::debug!( - "Formatting file {:?} with prettier, plugins :{:?}, options: {:?}", + "Formatting file {:?} with prettier, plugins :{:?}, options: {:?}, ignore_path: {:?}", buffer.file().map(|f| f.full_path(cx)), plugins, prettier_options, + ignore_path, ); anyhow::Ok(FormatParams { @@ -329,6 +432,7 @@ impl Prettier { plugins, path: buffer_path, prettier_options, + ignore_path, }, }) })? 
@@ -449,6 +553,7 @@ struct FormatOptions { #[serde(rename = "filepath")] path: Option, prettier_options: Option>, + ignore_path: Option, } #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] @@ -840,4 +945,150 @@ mod tests { }, }; } + + #[gpui::test] + async fn test_prettier_ignore_with_editor_prettier(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "project": { + "src": { + "index.js": "// index.js file contents", + "ignored.js": "// this file should be ignored", + }, + ".prettierignore": "ignored.js", + "package.json": r#"{ + "name": "test-project" + }"# + } + }), + ) + .await; + + assert_eq!( + Prettier::locate_prettier_ignore( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/project/src/index.js"), + ) + .await + .unwrap(), + ControlFlow::Continue(Some(PathBuf::from("/root/project"))), + "Should find prettierignore in project root" + ); + } + + #[gpui::test] + async fn test_prettier_ignore_in_monorepo_with_only_child_ignore( + cx: &mut gpui::TestAppContext, + ) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "monorepo": { + "node_modules": { + "prettier": { + "index.js": "// Dummy prettier package file", + } + }, + "packages": { + "web": { + "src": { + "index.js": "// index.js contents", + "ignored.js": "// this should be ignored", + }, + ".prettierignore": "ignored.js", + "package.json": r#"{ + "name": "web-package" + }"# + } + }, + "package.json": r#"{ + "workspaces": ["packages/*"], + "devDependencies": { + "prettier": "^2.0.0" + } + }"# + } + }), + ) + .await; + + assert_eq!( + Prettier::locate_prettier_ignore( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/monorepo/packages/web/src/index.js"), + ) + .await + .unwrap(), + ControlFlow::Continue(Some(PathBuf::from("/root/monorepo/packages/web"))), + "Should find prettierignore in child package" + ); + } + + #[gpui::test] + async fn 
test_prettier_ignore_in_monorepo_with_root_and_child_ignores( + cx: &mut gpui::TestAppContext, + ) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "monorepo": { + "node_modules": { + "prettier": { + "index.js": "// Dummy prettier package file", + } + }, + ".prettierignore": "main.js", + "packages": { + "web": { + "src": { + "main.js": "// this should not be ignored", + "ignored.js": "// this should be ignored", + }, + ".prettierignore": "ignored.js", + "package.json": r#"{ + "name": "web-package" + }"# + } + }, + "package.json": r#"{ + "workspaces": ["packages/*"], + "devDependencies": { + "prettier": "^2.0.0" + } + }"# + } + }), + ) + .await; + + assert_eq!( + Prettier::locate_prettier_ignore( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/monorepo/packages/web/src/main.js"), + ) + .await + .unwrap(), + ControlFlow::Continue(Some(PathBuf::from("/root/monorepo/packages/web"))), + "Should find child package prettierignore first" + ); + + assert_eq!( + Prettier::locate_prettier_ignore( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/monorepo/packages/web/src/ignored.js"), + ) + .await + .unwrap(), + ControlFlow::Continue(Some(PathBuf::from("/root/monorepo/packages/web"))), + "Should find child package prettierignore first" + ); + } } diff --git a/crates/prettier/src/prettier_server.js b/crates/prettier/src/prettier_server.js index d19c557f8e..abf8435b99 100644 --- a/crates/prettier/src/prettier_server.js +++ b/crates/prettier/src/prettier_server.js @@ -44,7 +44,9 @@ class Prettier { process.exit(1); } process.stderr.write( - `Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`, + `Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify( + config, + )}\n`, ); process.stdin.resume(); handleBuffer(new Prettier(prettierPath, prettier, config)); @@ -68,7 +70,9 @@ async function handleBuffer(prettier) { sendResponse({ id: message.id, ...makeError( - `error 
during message '${JSON.stringify(errorMessage)}' handling: ${e}`, + `error during message '${JSON.stringify( + errorMessage, + )}' handling: ${e}`, ), }); }); @@ -189,6 +193,22 @@ async function handleMessage(message, prettier) { if (params.options.filepath) { resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {}; + + if (params.options.ignorePath) { + const fileInfo = await prettier.prettier.getFileInfo( + params.options.filepath, + { + ignorePath: params.options.ignorePath, + }, + ); + if (fileInfo.ignored) { + process.stderr.write( + `Ignoring file '${params.options.filepath}' based on rules in '${params.options.ignorePath}'\n`, + ); + sendResponse({ id, result: { text: params.text } }); + return; + } + } } // Marking the params.options.filepath as undefined makes diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index b9fdd04be6..249f788c82 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -68,14 +68,12 @@ snippet.workspace = true snippet_provider.workspace = true terminal.workspace = true text.workspace = true +toml.workspace = true util.workspace = true url.workspace = true which.workspace = true fancy-regex.workspace = true -[target.'cfg(target_os = "windows")'.dependencies] -windows.workspace = true - [dev-dependencies] client = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 5d8fb3ab39..5e42ebc082 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -1,15 +1,16 @@ use crate::{ + lsp_store::OpenLspBufferHandle, search::SearchQuery, worktree_store::{WorktreeStore, WorktreeStoreEvent}, - Item, ProjectPath, + ProjectItem as _, ProjectPath, }; use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; -use anyhow::{anyhow, Context as _, Result}; +use anyhow::{anyhow, 
bail, Context as _, Result}; use client::Client; use collections::{hash_map, HashMap, HashSet}; use fs::Fs; -use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt}; -use git::blame::Blame; +use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt}; +use git::{blame::Blame, diff::BufferDiff, repository::RepoPath}; use gpui::{ AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Subscription, Task, WeakModel, @@ -20,47 +21,57 @@ use language::{ deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version, split_operations, }, - Buffer, BufferEvent, Capability, File as _, Language, Operation, + Buffer, BufferEvent, Capability, DiskState, File as _, Language, Operation, }; use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope}; +use serde::Deserialize; use smol::channel::Receiver; -use std::{io, ops::Range, path::Path, str::FromStr as _, sync::Arc, time::Instant}; -use text::BufferId; +use std::{ + io, + ops::Range, + path::{Path, PathBuf}, + str::FromStr as _, + sync::Arc, + time::Instant, +}; +use text::{BufferId, LineEnding, Rope}; use util::{debug_panic, maybe, ResultExt as _, TryFutureExt}; use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId}; -trait BufferStoreImpl { - fn open_buffer( - &self, - path: Arc, - worktree: Model, - cx: &mut ModelContext, - ) -> Task>>; +/// A set of open buffers. 
+pub struct BufferStore { + state: BufferStoreState, + #[allow(clippy::type_complexity)] + loading_buffers: HashMap, Arc>>>>, + #[allow(clippy::type_complexity)] + loading_change_sets: + HashMap, Arc>>>>, + worktree_store: Model, + opened_buffers: HashMap, + downstream_client: Option<(AnyProtoClient, u64)>, + shared_buffers: HashMap>, +} - fn save_buffer( - &self, - buffer: Model, - cx: &mut ModelContext, - ) -> Task>; +#[derive(Hash, Eq, PartialEq, Clone)] +struct SharedBuffer { + buffer: Model, + unstaged_changes: Option>, + lsp_handle: Option, +} - fn save_buffer_as( - &self, - buffer: Model, - path: ProjectPath, - cx: &mut ModelContext, - ) -> Task>; +#[derive(Debug)] +pub struct BufferChangeSet { + pub buffer_id: BufferId, + pub base_text: Option>, + pub diff_to_buffer: git::diff::BufferDiff, + pub recalculate_diff_task: Option>>, + pub diff_updated_futures: Vec>, + pub base_text_version: usize, +} - fn create_buffer(&self, cx: &mut ModelContext) -> Task>>; - - fn reload_buffers( - &self, - buffers: HashSet>, - push_to_history: bool, - cx: &mut ModelContext, - ) -> Task>; - - fn as_remote(&self) -> Option>; - fn as_local(&self) -> Option>; +enum BufferStoreState { + Local(LocalBufferStore), + Remote(RemoteBufferStore), } struct RemoteBufferStore { @@ -71,33 +82,20 @@ struct RemoteBufferStore { remote_buffer_listeners: HashMap, anyhow::Error>>>>, worktree_store: Model, - buffer_store: WeakModel, } struct LocalBufferStore { local_buffer_ids_by_path: HashMap, local_buffer_ids_by_entry_id: HashMap, - buffer_store: WeakModel, worktree_store: Model, _subscription: Subscription, } -/// A set of open buffers. 
-pub struct BufferStore { - state: Box, - #[allow(clippy::type_complexity)] - loading_buffers_by_path: HashMap< - ProjectPath, - postage::watch::Receiver, Arc>>>, - >, - worktree_store: Model, - opened_buffers: HashMap, - downstream_client: Option<(AnyProtoClient, u64)>, - shared_buffers: HashMap>>, -} - enum OpenBuffer { - Buffer(WeakModel), + Complete { + buffer: WeakModel, + unstaged_changes: Option>, + }, Operations(Vec), } @@ -116,17 +114,33 @@ pub struct ProjectTransaction(pub HashMap, language::Transaction>) impl EventEmitter for BufferStore {} impl RemoteBufferStore { + fn load_staged_text( + &self, + buffer_id: BufferId, + cx: &AppContext, + ) -> Task>> { + let project_id = self.project_id; + let client = self.upstream_client.clone(); + cx.background_executor().spawn(async move { + Ok(client + .request(proto::GetStagedText { + project_id, + buffer_id: buffer_id.to_proto(), + }) + .await? + .staged_text) + }) + } pub fn wait_for_remote_buffer( &mut self, id: BufferId, - cx: &mut AppContext, + cx: &mut ModelContext, ) -> Task>> { - let buffer_store = self.buffer_store.clone(); let (tx, rx) = oneshot::channel(); self.remote_buffer_listeners.entry(id).or_default().push(tx); - cx.spawn(|cx| async move { - if let Some(buffer) = buffer_store + cx.spawn(|this, cx| async move { + if let Some(buffer) = this .read_with(&cx, |buffer_store, _| buffer_store.get(id)) .ok() .flatten() @@ -144,7 +158,7 @@ impl RemoteBufferStore { &self, buffer_handle: Model, new_path: Option, - cx: &ModelContext, + cx: &ModelContext, ) -> Task> { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id().into(); @@ -176,7 +190,7 @@ impl RemoteBufferStore { envelope: TypedEnvelope, replica_id: u16, capability: Capability, - cx: &mut ModelContext, + cx: &mut ModelContext, ) -> Result>> { match envelope .payload @@ -277,7 +291,7 @@ impl RemoteBufferStore { &self, message: proto::ProjectTransaction, push_to_history: bool, - cx: &mut ModelContext, + cx: &mut ModelContext, ) -> 
Task> { cx.spawn(|this, mut cx| async move { let mut project_transaction = ProjectTransaction::default(); @@ -310,36 +324,6 @@ impl RemoteBufferStore { Ok(project_transaction) }) } -} - -impl BufferStoreImpl for Model { - fn as_remote(&self) -> Option> { - Some(self.clone()) - } - - fn as_local(&self) -> Option> { - None - } - - fn save_buffer( - &self, - buffer: Model, - cx: &mut ModelContext, - ) -> Task> { - self.update(cx, |this, cx| { - this.save_remote_buffer(buffer.clone(), None, cx) - }) - } - fn save_buffer_as( - &self, - buffer: Model, - path: ProjectPath, - cx: &mut ModelContext, - ) -> Task> { - self.update(cx, |this, cx| { - this.save_remote_buffer(buffer, Some(path.to_proto()), cx) - }) - } fn open_buffer( &self, @@ -347,46 +331,42 @@ impl BufferStoreImpl for Model { worktree: Model, cx: &mut ModelContext, ) -> Task>> { - self.update(cx, |this, cx| { - let worktree_id = worktree.read(cx).id().to_proto(); - let project_id = this.project_id; - let client = this.upstream_client.clone(); - let path_string = path.clone().to_string_lossy().to_string(); - cx.spawn(move |this, mut cx| async move { - let response = client - .request(proto::OpenBufferByPath { - project_id, - worktree_id, - path: path_string, - }) - .await?; - let buffer_id = BufferId::new(response.buffer_id)?; + let worktree_id = worktree.read(cx).id().to_proto(); + let project_id = self.project_id; + let client = self.upstream_client.clone(); + let path_string = path.clone().to_string_lossy().to_string(); + cx.spawn(move |this, mut cx| async move { + let response = client + .request(proto::OpenBufferByPath { + project_id, + worktree_id, + path: path_string, + }) + .await?; + let buffer_id = BufferId::new(response.buffer_id)?; - let buffer = this - .update(&mut cx, { - |this, cx| this.wait_for_remote_buffer(buffer_id, cx) - })? - .await?; + let buffer = this + .update(&mut cx, { + |this, cx| this.wait_for_remote_buffer(buffer_id, cx) + })? 
+ .await?; - Ok(buffer) - }) + Ok(buffer) }) } fn create_buffer(&self, cx: &mut ModelContext) -> Task>> { - self.update(cx, |this, cx| { - let create = this.upstream_client.request(proto::OpenNewBuffer { - project_id: this.project_id, - }); - cx.spawn(|this, mut cx| async move { - let response = create.await?; - let buffer_id = BufferId::new(response.buffer_id)?; + let create = self.upstream_client.request(proto::OpenNewBuffer { + project_id: self.project_id, + }); + cx.spawn(|this, mut cx| async move { + let response = create.await?; + let buffer_id = BufferId::new(response.buffer_id)?; - this.update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await - }) + this.update(&mut cx, |this, cx| { + this.wait_for_remote_buffer(buffer_id, cx) + })? + .await }) } @@ -396,37 +376,56 @@ impl BufferStoreImpl for Model { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - self.update(cx, |this, cx| { - let request = this.upstream_client.request(proto::ReloadBuffers { - project_id: this.project_id, - buffer_ids: buffers - .iter() - .map(|buffer| buffer.read(cx).remote_id().to_proto()) - .collect(), - }); + let request = self.upstream_client.request(proto::ReloadBuffers { + project_id: self.project_id, + buffer_ids: buffers + .iter() + .map(|buffer| buffer.read(cx).remote_id().to_proto()) + .collect(), + }); - cx.spawn(|this, mut cx| async move { - let response = request - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - this.update(&mut cx, |this, cx| { - this.deserialize_project_transaction(response, push_to_history, cx) - })? - .await - }) + cx.spawn(|this, mut cx| async move { + let response = request + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + this.update(&mut cx, |this, cx| { + this.deserialize_project_transaction(response, push_to_history, cx) + })? 
+ .await }) } } impl LocalBufferStore { + fn load_staged_text( + &self, + buffer: &Model, + cx: &AppContext, + ) -> Task>> { + let Some(file) = buffer.read(cx).file() else { + return Task::ready(Ok(None)); + }; + let worktree_id = file.worktree_id(cx); + let path = file.path().clone(); + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + + worktree.read(cx).load_staged_file(path.as_ref(), cx) + } + fn save_local_buffer( &self, buffer_handle: Model, worktree: Model, path: Arc, mut has_changed_file: bool, - cx: &mut ModelContext, + cx: &mut ModelContext, ) -> Task> { let buffer = buffer_handle.read(cx); @@ -434,7 +433,10 @@ impl LocalBufferStore { let line_ending = buffer.line_ending(); let version = buffer.version(); let buffer_id = buffer.remote_id(); - if buffer.file().is_some_and(|file| !file.is_created()) { + if buffer + .file() + .is_some_and(|file| file.disk_state() == DiskState::New) + { has_changed_file = true; } @@ -444,9 +446,9 @@ impl LocalBufferStore { cx.spawn(move |this, mut cx| async move { let new_file = save.await?; - let mtime = new_file.mtime; + let mtime = new_file.disk_state().mtime(); this.update(&mut cx, |this, cx| { - if let Some((downstream_client, project_id)) = this.downstream_client(cx) { + if let Some((downstream_client, project_id)) = this.downstream_client.clone() { if has_changed_file { downstream_client .send(proto::UpdateBufferFile { @@ -475,15 +477,24 @@ impl LocalBufferStore { }) } - fn subscribe_to_worktree(&mut self, worktree: &Model, cx: &mut ModelContext) { + fn subscribe_to_worktree( + &mut self, + worktree: &Model, + cx: &mut ModelContext, + ) { cx.subscribe(worktree, |this, worktree, event, cx| { if worktree.read(cx).is_local() { match event { worktree::Event::UpdatedEntries(changes) => { - this.local_worktree_entries_changed(&worktree, changes, cx); + Self::local_worktree_entries_changed(this, &worktree, 
changes, cx); } worktree::Event::UpdatedGitRepositories(updated_repos) => { - this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx) + Self::local_worktree_git_repos_changed( + this, + worktree.clone(), + updated_repos, + cx, + ) } _ => {} } @@ -493,119 +504,97 @@ impl LocalBufferStore { } fn local_worktree_entries_changed( - &mut self, + this: &mut BufferStore, worktree_handle: &Model, changes: &[(Arc, ProjectEntryId, PathChange)], - cx: &mut ModelContext, + cx: &mut ModelContext, ) { let snapshot = worktree_handle.read(cx).snapshot(); for (path, entry_id, _) in changes { - self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx); + Self::local_worktree_entry_changed( + this, + *entry_id, + path, + worktree_handle, + &snapshot, + cx, + ); } } fn local_worktree_git_repos_changed( - &mut self, + this: &mut BufferStore, worktree_handle: Model, changed_repos: &UpdatedGitRepositoriesSet, - cx: &mut ModelContext, + cx: &mut ModelContext, ) { debug_assert!(worktree_handle.read(cx).is_local()); - let Some(buffer_store) = self.buffer_store.upgrade() else { - return; - }; - // Identify the loading buffers whose containing repository that has changed. - let (future_buffers, current_buffers) = buffer_store.update(cx, |buffer_store, cx| { - let future_buffers = buffer_store - .loading_buffers() - .filter_map(|(project_path, receiver)| { - if project_path.worktree_id != worktree_handle.read(cx).id() { - return None; - } - let path = &project_path.path; - changed_repos - .iter() - .find(|(work_dir, _)| path.starts_with(work_dir))?; - let path = path.clone(); - Some(async move { - BufferStore::wait_for_loading_buffer(receiver) - .await - .ok() - .map(|buffer| (buffer, path)) - }) - }) - .collect::>(); - - // Identify the current buffers whose containing repository has changed. 
- let current_buffers = buffer_store - .buffers() - .filter_map(|buffer| { - let file = File::from_dyn(buffer.read(cx).file())?; + let buffer_change_sets = this + .opened_buffers + .values() + .filter_map(|buffer| { + if let OpenBuffer::Complete { + buffer, + unstaged_changes, + } = buffer + { + let buffer = buffer.upgrade()?.read(cx); + let file = File::from_dyn(buffer.file())?; if file.worktree != worktree_handle { return None; } changed_repos .iter() .find(|(work_dir, _)| file.path.starts_with(work_dir))?; - Some((buffer, file.path.clone())) - }) - .collect::>(); - (future_buffers, current_buffers) - }); + let unstaged_changes = unstaged_changes.as_ref()?.upgrade()?; + let snapshot = buffer.text_snapshot(); + Some((unstaged_changes, snapshot, file.path.clone())) + } else { + None + } + }) + .collect::>(); - if future_buffers.len() + current_buffers.len() == 0 { + if buffer_change_sets.is_empty() { return; } cx.spawn(move |this, mut cx| async move { - // Wait for all of the buffers to load. - let future_buffers = future_buffers.collect::>().await; - - // Reload the diff base for every buffer whose containing git repository has changed. 
let snapshot = worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; let diff_bases_by_buffer = cx .background_executor() .spawn(async move { - let mut diff_base_tasks = future_buffers + buffer_change_sets .into_iter() - .flatten() - .chain(current_buffers) - .filter_map(|(buffer, path)| { - let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; - let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; - Some(async move { - let base_text = - local_repo_entry.repo().load_index_text(&relative_path); - Some((buffer, base_text)) - }) + .filter_map(|(change_set, buffer_snapshot, path)| { + let local_repo = snapshot.local_repo_for_path(&path)?; + let relative_path = local_repo.relativize(&path).ok()?; + let base_text = local_repo.repo().load_index_text(&relative_path); + Some((change_set, buffer_snapshot, base_text)) }) - .collect::>(); - - let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); - while let Some(diff_base) = diff_base_tasks.next().await { - if let Some(diff_base) = diff_base { - diff_bases.push(diff_base); - } - } - diff_bases + .collect::>() }) .await; this.update(&mut cx, |this, cx| { - // Assign the new diff bases on all of the buffers. 
- for (buffer, diff_base) in diff_bases_by_buffer { - let buffer_id = buffer.update(cx, |buffer, cx| { - buffer.set_diff_base(diff_base.clone(), cx); - buffer.remote_id().to_proto() + for (change_set, buffer_snapshot, staged_text) in diff_bases_by_buffer { + change_set.update(cx, |change_set, cx| { + if let Some(staged_text) = staged_text.clone() { + let _ = + change_set.set_base_text(staged_text, buffer_snapshot.clone(), cx); + } else { + change_set.unset_base_text(buffer_snapshot.clone(), cx); + } }); - if let Some((client, project_id)) = &this.downstream_client(cx) { + + if let Some((client, project_id)) = &this.downstream_client.clone() { client .send(proto::UpdateDiffBase { project_id: *project_id, - buffer_id, - diff_base, + buffer_id: buffer_snapshot.remote_id().to_proto(), + staged_text, }) .log_err(); } @@ -616,79 +605,74 @@ impl LocalBufferStore { } fn local_worktree_entry_changed( - &mut self, + this: &mut BufferStore, entry_id: ProjectEntryId, path: &Arc, worktree: &Model, snapshot: &worktree::Snapshot, - cx: &mut ModelContext, + cx: &mut ModelContext, ) -> Option<()> { let project_path = ProjectPath { worktree_id: snapshot.id(), path: path.clone(), }; - let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { - Some(&buffer_id) => buffer_id, - None => self.local_buffer_ids_by_path.get(&project_path).copied()?, + + let buffer_id = { + let local = this.as_local_mut()?; + match local.local_buffer_ids_by_entry_id.get(&entry_id) { + Some(&buffer_id) => buffer_id, + None => local.local_buffer_ids_by_path.get(&project_path).copied()?, + } }; - let buffer = self - .buffer_store - .update(cx, |buffer_store, _| { - if let Some(buffer) = buffer_store.get(buffer_id) { - Some(buffer) - } else { - buffer_store.opened_buffers.remove(&buffer_id); - None - } - }) - .ok() - .flatten(); + + let buffer = if let Some(buffer) = this.get(buffer_id) { + Some(buffer) + } else { + this.opened_buffers.remove(&buffer_id); + None + }; + let buffer = if let 
Some(buffer) = buffer { buffer } else { - self.local_buffer_ids_by_path.remove(&project_path); - self.local_buffer_ids_by_entry_id.remove(&entry_id); + let this = this.as_local_mut()?; + this.local_buffer_ids_by_path.remove(&project_path); + this.local_buffer_ids_by_entry_id.remove(&entry_id); return None; }; let events = buffer.update(cx, |buffer, cx| { + let local = this.as_local_mut()?; let file = buffer.file()?; let old_file = File::from_dyn(Some(file))?; if old_file.worktree != *worktree { return None; } - let new_file = if let Some(entry) = old_file + let snapshot_entry = old_file .entry_id .and_then(|entry_id| snapshot.entry_for_id(entry_id)) - { + .or_else(|| snapshot.entry_for_path(old_file.path.as_ref())); + + let new_file = if let Some(entry) = snapshot_entry { File { + disk_state: match entry.mtime { + Some(mtime) => DiskState::Present { mtime }, + None => old_file.disk_state, + }, is_local: true, entry_id: Some(entry.id), - mtime: entry.mtime, path: entry.path.clone(), worktree: worktree.clone(), - is_deleted: false, - is_private: entry.is_private, - } - } else if let Some(entry) = snapshot.entry_for_path(old_file.path.as_ref()) { - File { - is_local: true, - entry_id: Some(entry.id), - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree.clone(), - is_deleted: false, is_private: entry.is_private, } } else { File { + disk_state: DiskState::Deleted, is_local: true, entry_id: old_file.entry_id, path: old_file.path.clone(), - mtime: old_file.mtime, worktree: worktree.clone(), - is_deleted: true, is_private: old_file.is_private, } }; @@ -699,11 +683,11 @@ impl LocalBufferStore { let mut events = Vec::new(); if new_file.path != old_file.path { - self.local_buffer_ids_by_path.remove(&ProjectPath { + local.local_buffer_ids_by_path.remove(&ProjectPath { path: old_file.path.clone(), worktree_id: old_file.worktree_id(cx), }); - self.local_buffer_ids_by_path.insert( + local.local_buffer_ids_by_path.insert( ProjectPath { worktree_id: 
new_file.worktree_id(cx), path: new_file.path.clone(), @@ -718,15 +702,16 @@ impl LocalBufferStore { if new_file.entry_id != old_file.entry_id { if let Some(entry_id) = old_file.entry_id { - self.local_buffer_ids_by_entry_id.remove(&entry_id); + local.local_buffer_ids_by_entry_id.remove(&entry_id); } if let Some(entry_id) = new_file.entry_id { - self.local_buffer_ids_by_entry_id + local + .local_buffer_ids_by_entry_id .insert(entry_id, buffer_id); } } - if let Some((client, project_id)) = &self.downstream_client(cx) { + if let Some((client, project_id)) = &this.downstream_client { client .send(proto::UpdateBufferFile { project_id: *project_id, @@ -739,25 +724,14 @@ impl LocalBufferStore { buffer.file_updated(Arc::new(new_file), cx); Some(events) })?; - self.buffer_store - .update(cx, |_buffer_store, cx| { - for event in events { - cx.emit(event); - } - }) - .log_err()?; + + for event in events { + cx.emit(event); + } None } - fn downstream_client(&self, cx: &AppContext) -> Option<(AnyProtoClient, u64)> { - self.buffer_store - .upgrade()? 
- .read(cx) - .downstream_client - .clone() - } - fn buffer_changed_file(&mut self, buffer: Model, cx: &mut AppContext) -> Option<()> { let file = File::from_dyn(buffer.read(cx).file())?; @@ -783,29 +757,17 @@ impl LocalBufferStore { Some(()) } -} - -impl BufferStoreImpl for Model { - fn as_remote(&self) -> Option> { - None - } - - fn as_local(&self) -> Option> { - Some(self.clone()) - } fn save_buffer( &self, buffer: Model, cx: &mut ModelContext, ) -> Task> { - self.update(cx, |this, cx| { - let Some(file) = File::from_dyn(buffer.read(cx).file()) else { - return Task::ready(Err(anyhow!("buffer doesn't have a file"))); - }; - let worktree = file.worktree.clone(); - this.save_local_buffer(buffer, worktree, file.path.clone(), false, cx) - }) + let Some(file) = File::from_dyn(buffer.read(cx).file()) else { + return Task::ready(Err(anyhow!("buffer doesn't have a file"))); + }; + let worktree = file.worktree.clone(); + self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx) } fn save_buffer_as( @@ -814,16 +776,14 @@ impl BufferStoreImpl for Model { path: ProjectPath, cx: &mut ModelContext, ) -> Task> { - self.update(cx, |this, cx| { - let Some(worktree) = this - .worktree_store - .read(cx) - .worktree_for_id(path.worktree_id, cx) - else { - return Task::ready(Err(anyhow!("no such worktree"))); - }; - this.save_local_buffer(buffer, worktree, path.path.clone(), true, cx) - }) + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx) } fn open_buffer( @@ -832,105 +792,76 @@ impl BufferStoreImpl for Model { worktree: Model, cx: &mut ModelContext, ) -> Task>> { - let buffer_store = cx.weak_model(); - self.update(cx, |_, cx| { - let load_buffer = worktree.update(cx, |worktree, cx| { - let load_file = worktree.load_file(path.as_ref(), cx); - let reservation = cx.reserve_model(); - 
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); - cx.spawn(move |_, mut cx| async move { - let loaded = load_file.await?; - let text_buffer = cx - .background_executor() - .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) - .await; - cx.insert_model(reservation, |_| { - Buffer::build( - text_buffer, - loaded.diff_base, - Some(loaded.file), - Capability::ReadWrite, - ) - }) + let load_buffer = worktree.update(cx, |worktree, cx| { + let load_file = worktree.load_file(path.as_ref(), cx); + let reservation = cx.reserve_model(); + let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + cx.spawn(move |_, mut cx| async move { + let loaded = load_file.await?; + let text_buffer = cx + .background_executor() + .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) + .await; + cx.insert_model(reservation, |_| { + Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite) }) - }); - - cx.spawn(move |this, mut cx| async move { - let buffer = match load_buffer.await { - Ok(buffer) => Ok(buffer), - Err(error) if is_not_found_error(&error) => cx.new_model(|cx| { - let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); - let text_buffer = text::Buffer::new(0, buffer_id, "".into()); - Buffer::build( - text_buffer, - None, - Some(Arc::new(File { - worktree, - path, - mtime: None, - entry_id: None, - is_local: true, - is_deleted: false, - is_private: false, - })), - Capability::ReadWrite, - ) - }), - Err(e) => Err(e), - }?; - this.update(&mut cx, |this, cx| { - buffer_store.update(cx, |buffer_store, cx| { - buffer_store.add_buffer(buffer.clone(), cx) - })??; - let buffer_id = buffer.read(cx).remote_id(); - if let Some(file) = File::from_dyn(buffer.read(cx).file()) { - this.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - buffer_id, - ); - - if let Some(entry_id) = file.entry_id { - this.local_buffer_ids_by_entry_id - 
.insert(entry_id, buffer_id); - } - } - - anyhow::Ok(()) - })??; - - Ok(buffer) }) + }); + + cx.spawn(move |this, mut cx| async move { + let buffer = match load_buffer.await { + Ok(buffer) => Ok(buffer), + Err(error) if is_not_found_error(&error) => cx.new_model(|cx| { + let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); + let text_buffer = text::Buffer::new(0, buffer_id, "".into()); + Buffer::build( + text_buffer, + Some(Arc::new(File { + worktree, + path, + disk_state: DiskState::New, + entry_id: None, + is_local: true, + is_private: false, + })), + Capability::ReadWrite, + ) + }), + Err(e) => Err(e), + }?; + this.update(&mut cx, |this, cx| { + this.add_buffer(buffer.clone(), cx)?; + let buffer_id = buffer.read(cx).remote_id(); + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + let this = this.as_local_mut().unwrap(); + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + + anyhow::Ok(()) + })??; + + Ok(buffer) }) } fn create_buffer(&self, cx: &mut ModelContext) -> Task>> { - let handle = self.clone(); cx.spawn(|buffer_store, mut cx| async move { let buffer = cx.new_model(|cx| { Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx) })?; buffer_store.update(&mut cx, |buffer_store, cx| { buffer_store.add_buffer(buffer.clone(), cx).log_err(); - let buffer_id = buffer.read(cx).remote_id(); - handle.update(cx, |this, cx| { - if let Some(file) = File::from_dyn(buffer.read(cx).file()) { - this.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - buffer_id, - ); - - if let Some(entry_id) = file.entry_id { - this.local_buffer_ids_by_entry_id - .insert(entry_id, buffer_id); - } - } - }); })?; Ok(buffer) }) @@ -968,39 +899,33 @@ impl BufferStore { 
client.add_model_message_handler(Self::handle_buffer_reloaded); client.add_model_message_handler(Self::handle_buffer_saved); client.add_model_message_handler(Self::handle_update_buffer_file); - client.add_model_message_handler(Self::handle_update_diff_base); client.add_model_request_handler(Self::handle_save_buffer); client.add_model_request_handler(Self::handle_blame_buffer); client.add_model_request_handler(Self::handle_reload_buffers); client.add_model_request_handler(Self::handle_get_permalink_to_line); + client.add_model_request_handler(Self::handle_get_staged_text); + client.add_model_message_handler(Self::handle_update_diff_base); } /// Creates a buffer store, optionally retaining its buffers. pub fn local(worktree_store: Model, cx: &mut ModelContext) -> Self { - let this = cx.weak_model(); Self { - state: Box::new(cx.new_model(|cx| { - let subscription = cx.subscribe( - &worktree_store, - |this: &mut LocalBufferStore, _, event, cx| { - if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { - this.subscribe_to_worktree(worktree, cx); - } - }, - ); - - LocalBufferStore { - local_buffer_ids_by_path: Default::default(), - local_buffer_ids_by_entry_id: Default::default(), - buffer_store: this, - worktree_store: worktree_store.clone(), - _subscription: subscription, - } - })), + state: BufferStoreState::Local(LocalBufferStore { + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), + worktree_store: worktree_store.clone(), + _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| { + if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { + let this = this.as_local_mut().unwrap(); + this.subscribe_to_worktree(worktree, cx); + } + }), + }), downstream_client: None, opened_buffers: Default::default(), shared_buffers: Default::default(), - loading_buffers_by_path: Default::default(), + loading_buffers: Default::default(), + loading_change_sets: Default::default(), worktree_store, } } @@ -1009,84 
+934,184 @@ impl BufferStore { worktree_store: Model, upstream_client: AnyProtoClient, remote_id: u64, - cx: &mut ModelContext, + _cx: &mut ModelContext, ) -> Self { - let this = cx.weak_model(); Self { - state: Box::new(cx.new_model(|_| RemoteBufferStore { + state: BufferStoreState::Remote(RemoteBufferStore { shared_with_me: Default::default(), loading_remote_buffers_by_id: Default::default(), remote_buffer_listeners: Default::default(), project_id: remote_id, upstream_client, worktree_store: worktree_store.clone(), - buffer_store: this, - })), + }), downstream_client: None, opened_buffers: Default::default(), - loading_buffers_by_path: Default::default(), + loading_buffers: Default::default(), + loading_change_sets: Default::default(), shared_buffers: Default::default(), worktree_store, } } + fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> { + match &mut self.state { + BufferStoreState::Local(state) => Some(state), + _ => None, + } + } + + fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> { + match &mut self.state { + BufferStoreState::Remote(state) => Some(state), + _ => None, + } + } + + fn as_remote(&self) -> Option<&RemoteBufferStore> { + match &self.state { + BufferStoreState::Remote(state) => Some(state), + _ => None, + } + } + pub fn open_buffer( &mut self, project_path: ProjectPath, cx: &mut ModelContext, ) -> Task>> { - let existing_buffer = self.get_by_path(&project_path, cx); - if let Some(existing_buffer) = existing_buffer { - return Task::ready(Ok(existing_buffer)); + if let Some(buffer) = self.get_by_path(&project_path, cx) { + return Task::ready(Ok(buffer)); } - let Some(worktree) = self - .worktree_store - .read(cx) - .worktree_for_id(project_path.worktree_id, cx) - else { - return Task::ready(Err(anyhow!("no such worktree"))); - }; - - let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) { - // If the given path is already being loaded, then wait for that existing - // task to complete and 
return the same buffer. + let task = match self.loading_buffers.entry(project_path.clone()) { hash_map::Entry::Occupied(e) => e.get().clone(), - - // Otherwise, record the fact that this path is now being loaded. hash_map::Entry::Vacant(entry) => { - let (mut tx, rx) = postage::watch::channel(); - entry.insert(rx.clone()); + let path = project_path.path.clone(); + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + let load_buffer = match &self.state { + BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx), + BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx), + }; - let project_path = project_path.clone(); - let load_buffer = self - .state - .open_buffer(project_path.path.clone(), worktree, cx); - - cx.spawn(move |this, mut cx| async move { - let load_result = load_buffer.await; - *tx.borrow_mut() = Some(this.update(&mut cx, |this, _cx| { - // Record the fact that the buffer is no longer loading. - this.loading_buffers_by_path.remove(&project_path); - let buffer = load_result.map_err(Arc::new)?; - Ok(buffer) - })?); - anyhow::Ok(()) - }) - .detach(); - rx + entry + .insert( + cx.spawn(move |this, mut cx| async move { + let load_result = load_buffer.await; + this.update(&mut cx, |this, _cx| { + // Record the fact that the buffer is no longer loading. 
+ this.loading_buffers.remove(&project_path); + }) + .ok(); + load_result.map_err(Arc::new) + }) + .shared(), + ) + .clone() } }; - cx.background_executor().spawn(async move { - Self::wait_for_loading_buffer(loading_watch) + cx.background_executor() + .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) + } + + pub fn open_unstaged_changes( + &mut self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task>> { + let buffer_id = buffer.read(cx).remote_id(); + if let Some(change_set) = self.get_unstaged_changes(buffer_id) { + return Task::ready(Ok(change_set)); + } + + let task = match self.loading_change_sets.entry(buffer_id) { + hash_map::Entry::Occupied(e) => e.get().clone(), + hash_map::Entry::Vacant(entry) => { + let load = match &self.state { + BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx), + BufferStoreState::Remote(this) => this.load_staged_text(buffer_id, cx), + }; + + entry + .insert( + cx.spawn(move |this, cx| async move { + Self::open_unstaged_changes_internal(this, load.await, buffer, cx) + .await + .map_err(Arc::new) + }) + .shared(), + ) + .clone() + } + }; + + cx.background_executor() + .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) + } + + #[cfg(any(test, feature = "test-support"))] + pub fn set_change_set(&mut self, buffer_id: BufferId, change_set: Model) { + self.loading_change_sets + .insert(buffer_id, Task::ready(Ok(change_set)).shared()); + } + + pub async fn open_unstaged_changes_internal( + this: WeakModel, + text: Result>, + buffer: Model, + mut cx: AsyncAppContext, + ) -> Result> { + let text = match text { + Err(e) => { + this.update(&mut cx, |this, cx| { + let buffer_id = buffer.read(cx).remote_id(); + this.loading_change_sets.remove(&buffer_id); + })?; + return Err(e); + } + Ok(text) => text, + }; + + let change_set = buffer.update(&mut cx, |buffer, cx| { + cx.new_model(|_| BufferChangeSet::new(buffer)) + })?; + + if let Some(text) = text { + change_set + .update(&mut cx, |change_set, cx| { + 
let snapshot = buffer.read(cx).text_snapshot(); + change_set.set_base_text(text, snapshot, cx) + })? .await - .map_err(|e| e.cloned()) - }) + .ok(); + } + + this.update(&mut cx, |this, cx| { + let buffer_id = buffer.read(cx).remote_id(); + this.loading_change_sets.remove(&buffer_id); + if let Some(OpenBuffer::Complete { + unstaged_changes, .. + }) = this.opened_buffers.get_mut(&buffer.read(cx).remote_id()) + { + *unstaged_changes = Some(change_set.downgrade()); + } + })?; + + Ok(change_set) } pub fn create_buffer(&mut self, cx: &mut ModelContext) -> Task>> { - self.state.create_buffer(cx) + match &self.state { + BufferStoreState::Local(this) => this.create_buffer(cx), + BufferStoreState::Remote(this) => this.create_buffer(cx), + } } pub fn save_buffer( @@ -1094,7 +1119,10 @@ impl BufferStore { buffer: Model, cx: &mut ModelContext, ) -> Task> { - self.state.save_buffer(buffer, cx) + match &mut self.state { + BufferStoreState::Local(this) => this.save_buffer(buffer, cx), + BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx), + } } pub fn save_buffer_as( @@ -1104,7 +1132,12 @@ impl BufferStore { cx: &mut ModelContext, ) -> Task> { let old_file = buffer.read(cx).file().cloned(); - let task = self.state.save_buffer_as(buffer.clone(), path, cx); + let task = match &self.state { + BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx), + BufferStoreState::Remote(this) => { + this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx) + } + }; cx.spawn(|this, mut cx| async move { task.await?; this.update(&mut cx, |_, cx| { @@ -1128,16 +1161,16 @@ impl BufferStore { Worktree::Local(worktree) => { let worktree = worktree.snapshot(); let blame_params = maybe!({ - let (repo_entry, local_repo_entry) = match worktree.repo_for_path(&file.path) { + let local_repo = match worktree.local_repo_for_path(&file.path) { Some(repo_for_path) => repo_for_path, None => return Ok(None), }; - let relative_path = repo_entry - 
.relativize(&worktree, &file.path) + let relative_path = local_repo + .relativize(&file.path) .context("failed to relativize buffer path")?; - let repo = local_repo_entry.repo().clone(); + let repo = local_repo.repo().clone(); let content = match version { Some(version) => buffer.rope_for_version(&version).clone(), @@ -1186,13 +1219,38 @@ impl BufferStore { return Task::ready(Err(anyhow!("buffer has no file"))); }; - match file.worktree.clone().read(cx) { + match file.worktree.read(cx) { Worktree::Local(worktree) => { - let Some(repo) = worktree.local_git_repo(file.path()) else { - return Task::ready(Err(anyhow!("no repository for buffer found"))); + let worktree_path = worktree.abs_path().clone(); + let Some((repo_entry, repo)) = + worktree.repository_for_path(file.path()).and_then(|entry| { + let repo = worktree.get_local_repo(&entry)?.repo().clone(); + Some((entry, repo)) + }) + else { + // If we're not in a Git repo, check whether this is a Rust source + // file in the Cargo registry (presumably opened with go-to-definition + // from a normal Rust file). If so, we can put together a permalink + // using crate metadata. 
+ if !buffer + .language() + .is_some_and(|lang| lang.name() == "Rust".into()) + { + return Task::ready(Err(anyhow!("no permalink available"))); + } + let file_path = worktree_path.join(file.path()); + return cx.spawn(|cx| async move { + let provider_registry = + cx.update(GitHostingProviderRegistry::default_global)?; + get_permalink_in_rust_registry_src(provider_registry, file_path, selection) + .map_err(|_| anyhow!("no permalink available")) + }); }; - let path = file.path().clone(); + let path = match repo_entry.relativize(file.path()) { + Ok(RepoPath(path)) => path, + Err(e) => return Task::ready(Err(e)), + }; cx.spawn(|cx| async move { const REMOTE_NAME: &str = "origin"; @@ -1213,7 +1271,7 @@ impl BufferStore { let path = path .to_str() - .context("failed to convert buffer path to string")?; + .ok_or_else(|| anyhow!("failed to convert path to string"))?; Ok(provider.build_permalink( remote, @@ -1250,7 +1308,10 @@ impl BufferStore { fn add_buffer(&mut self, buffer: Model, cx: &mut ModelContext) -> Result<()> { let remote_id = buffer.read(cx).remote_id(); let is_remote = buffer.read(cx).replica_id() != 0; - let open_buffer = OpenBuffer::Buffer(buffer.downgrade()); + let open_buffer = OpenBuffer::Complete { + buffer: buffer.downgrade(), + unstaged_changes: None, + }; let handle = cx.handle().downgrade(); buffer.update(cx, move |_, cx| { @@ -1296,15 +1357,11 @@ impl BufferStore { pub fn loading_buffers( &self, - ) -> impl Iterator< - Item = ( - &ProjectPath, - postage::watch::Receiver, Arc>>>, - ), - > { - self.loading_buffers_by_path - .iter() - .map(|(path, rx)| (path, rx.clone())) + ) -> impl Iterator>>)> { + self.loading_buffers.iter().map(|(path, task)| { + let task = task.clone(); + (path, async move { task.await.map_err(|e| anyhow!("{e}")) }) + }) } pub fn get_by_path(&self, path: &ProjectPath, cx: &AppContext) -> Option> { @@ -1319,9 +1376,7 @@ impl BufferStore { } pub fn get(&self, buffer_id: BufferId) -> Option> { - self.opened_buffers - .get(&buffer_id) 
- .and_then(|buffer| buffer.upgrade()) + self.opened_buffers.get(&buffer_id)?.upgrade() } pub fn get_existing(&self, buffer_id: BufferId) -> Result> { @@ -1329,22 +1384,24 @@ impl BufferStore { .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id)) } - pub fn get_possibly_incomplete( - &self, - buffer_id: BufferId, - cx: &AppContext, - ) -> Option> { + pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option> { self.get(buffer_id).or_else(|| { - self.state.as_remote().and_then(|remote| { - remote - .read(cx) - .loading_remote_buffers_by_id - .get(&buffer_id) - .cloned() - }) + self.as_remote() + .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned()) }) } + pub fn get_unstaged_changes(&self, buffer_id: BufferId) -> Option> { + if let OpenBuffer::Complete { + unstaged_changes, .. + } = self.opened_buffers.get(&buffer_id)? + { + unstaged_changes.as_ref()?.upgrade() + } else { + None + } + } + pub fn buffer_version_info( &self, cx: &AppContext, @@ -1360,9 +1417,8 @@ impl BufferStore { }) .collect(); let incomplete_buffer_ids = self - .state .as_remote() - .map(|remote| remote.read(cx).incomplete_buffer_ids()) + .map(|remote| remote.incomplete_buffer_ids()) .unwrap_or_default(); (buffers, incomplete_buffer_ids) } @@ -1380,12 +1436,10 @@ impl BufferStore { }); } - if let Some(remote) = self.state.as_remote() { - remote.update(cx, |remote, _| { - // Wake up all futures currently waiting on a buffer to get opened, - // to give them a chance to fail now that we've disconnected. - remote.remote_buffer_listeners.clear() - }) + if let Some(remote) = self.as_remote_mut() { + // Wake up all futures currently waiting on a buffer to get opened, + // to give them a chance to fail now that we've disconnected. 
+ remote.remote_buffer_listeners.clear() } } @@ -1462,6 +1516,35 @@ impl BufferStore { rx } + pub fn recalculate_buffer_diffs( + &mut self, + buffers: Vec>, + cx: &mut ModelContext, + ) -> impl Future { + let mut futures = Vec::new(); + for buffer in buffers { + let buffer = buffer.read(cx).text_snapshot(); + if let Some(OpenBuffer::Complete { + unstaged_changes, .. + }) = self.opened_buffers.get_mut(&buffer.remote_id()) + { + if let Some(unstaged_changes) = unstaged_changes + .as_ref() + .and_then(|changes| changes.upgrade()) + { + unstaged_changes.update(cx, |unstaged_changes, cx| { + futures.push(unstaged_changes.recalculate_diff(buffer.clone(), cx)); + }); + } else { + unstaged_changes.take(); + } + } + } + async move { + futures::future::join_all(futures).await; + } + } + fn on_buffer_event( &mut self, buffer: Model, @@ -1470,10 +1553,8 @@ impl BufferStore { ) { match event { BufferEvent::FileHandleChanged => { - if let Some(local) = self.state.as_local() { - local.update(cx, |local, cx| { - local.buffer_changed_file(buffer, cx); - }) + if let Some(local) = self.as_local_mut() { + local.buffer_changed_file(buffer, cx); } } BufferEvent::Reloaded => { @@ -1511,7 +1592,7 @@ impl BufferStore { match this.opened_buffers.entry(buffer_id) { hash_map::Entry::Occupied(mut e) => match e.get_mut() { OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops), - OpenBuffer::Buffer(buffer) => { + OpenBuffer::Complete { buffer, .. } => { if let Some(buffer) = buffer.upgrade() { buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } @@ -1525,6 +1606,21 @@ impl BufferStore { })? 
} + pub fn register_shared_lsp_handle( + &mut self, + peer_id: proto::PeerId, + buffer_id: BufferId, + handle: OpenLspBufferHandle, + ) { + if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) { + if let Some(buffer) = shared_buffers.get_mut(&buffer_id) { + buffer.lsp_handle = Some(handle); + return; + } + } + debug_panic!("tried to register shared lsp handle, but buffer was not shared") + } + pub fn handle_synchronize_buffers( &mut self, envelope: TypedEnvelope, @@ -1547,7 +1643,12 @@ impl BufferStore { self.shared_buffers .entry(guest_id) .or_default() - .insert(buffer.clone()); + .entry(buffer_id) + .or_insert_with(|| SharedBuffer { + buffer: buffer.clone(), + unstaged_changes: None, + lsp_handle: None, + }); let buffer = buffer.read(cx); response.buffers.push(proto::BufferVersion { @@ -1567,13 +1668,14 @@ impl BufferStore { .log_err(); } - client - .send(proto::UpdateDiffBase { - project_id, - buffer_id: buffer_id.into(), - diff_base: buffer.diff_base().map(ToString::to_string), - }) - .log_err(); + // TODO(max): do something + // client + // .send(proto::UpdateStagedText { + // project_id, + // buffer_id: buffer_id.into(), + // diff_base: buffer.diff_base().map(ToString::to_string), + // }) + // .log_err(); client .send(proto::BufferReloaded { @@ -1616,13 +1718,13 @@ impl BufferStore { capability: Capability, cx: &mut ModelContext, ) -> Result<()> { - let Some(remote) = self.state.as_remote() else { + let Some(remote) = self.as_remote_mut() else { return Err(anyhow!("buffer store is not a remote")); }; - if let Some(buffer) = remote.update(cx, |remote, cx| { - remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx) - })? { + if let Some(buffer) = + remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)? 
+ { self.add_buffer(buffer, cx)?; } @@ -1639,7 +1741,7 @@ impl BufferStore { this.update(&mut cx, |this, cx| { let payload = envelope.payload.clone(); - if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?; let worktree = this .worktree_store @@ -1677,32 +1779,6 @@ impl BufferStore { })? } - pub async fn handle_update_diff_base( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result<()> { - this.update(&mut cx, |this, cx| { - let buffer_id = envelope.payload.buffer_id; - let buffer_id = BufferId::new(buffer_id)?; - if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { - buffer.update(cx, |buffer, cx| { - buffer.set_diff_base(envelope.payload.diff_base.clone(), cx) - }); - } - if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { - downstream_client - .send(proto::UpdateDiffBase { - project_id: *project_id, - buffer_id: buffer_id.into(), - diff_base: envelope.payload.diff_base, - }) - .log_err(); - } - Ok(()) - })? 
- } - pub async fn handle_save_buffer( this: Model, envelope: TypedEnvelope, @@ -1752,16 +1828,14 @@ impl BufferStore { let peer_id = envelope.sender_id; let buffer_id = BufferId::new(envelope.payload.buffer_id)?; this.update(&mut cx, |this, _| { - if let Some(buffer) = this.get(buffer_id) { - if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { - if shared.remove(&buffer) { - if shared.is_empty() { - this.shared_buffers.remove(&peer_id); - } - return; + if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { + if shared.remove(&buffer_id).is_some() { + if shared.is_empty() { + this.shared_buffers.remove(&peer_id); } + return; } - }; + } debug_panic!( "peer_id {} closed buffer_id {} which was either not open or already closed", peer_id, @@ -1779,7 +1853,7 @@ impl BufferStore { let version = deserialize_version(&envelope.payload.version); let mtime = envelope.payload.mtime.clone().map(|time| time.into()); this.update(&mut cx, move |this, cx| { - if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { buffer.update(cx, |buffer, cx| { buffer.did_save(version, mtime, cx); }); @@ -1811,7 +1885,7 @@ impl BufferStore { .ok_or_else(|| anyhow!("missing line ending"))?, ); this.update(&mut cx, |this, cx| { - if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { buffer.update(cx, |buffer, cx| { buffer.did_reload(version, line_ending, mtime, cx); }); @@ -1877,18 +1951,66 @@ impl BufferStore { }) } - pub async fn wait_for_loading_buffer( - mut receiver: postage::watch::Receiver, Arc>>>, - ) -> Result, Arc> { - loop { - if let Some(result) = receiver.borrow().as_ref() { - match result { - Ok(buffer) => return Ok(buffer.to_owned()), - Err(e) => return Err(e.to_owned()), - } + pub async fn handle_get_staged_text( + this: Model, + request: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let buffer_id 
= BufferId::new(request.payload.buffer_id)?; + let change_set = this + .update(&mut cx, |this, cx| { + let buffer = this.get(buffer_id)?; + Some(this.open_unstaged_changes(buffer, cx)) + })? + .ok_or_else(|| anyhow!("no such buffer"))? + .await?; + this.update(&mut cx, |this, _| { + let shared_buffers = this + .shared_buffers + .entry(request.original_sender_id.unwrap_or(request.sender_id)) + .or_default(); + debug_assert!(shared_buffers.contains_key(&buffer_id)); + if let Some(shared) = shared_buffers.get_mut(&buffer_id) { + shared.unstaged_changes = Some(change_set.clone()); } - receiver.next().await; - } + })?; + let staged_text = change_set.read_with(&cx, |change_set, cx| { + change_set + .base_text + .as_ref() + .map(|buffer| buffer.read(cx).text()) + })?; + Ok(proto::GetStagedTextResponse { staged_text }) + } + + pub async fn handle_update_diff_base( + this: Model, + request: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result<()> { + let buffer_id = BufferId::new(request.payload.buffer_id)?; + let Some((buffer, change_set)) = this.update(&mut cx, |this, _| { + if let OpenBuffer::Complete { + unstaged_changes, + buffer, + } = this.opened_buffers.get(&buffer_id)? + { + Some((buffer.upgrade()?, unstaged_changes.as_ref()?.upgrade()?)) + } else { + None + } + })? 
+ else { + return Ok(()); + }; + change_set.update(&mut cx, |change_set, cx| { + if let Some(staged_text) = request.payload.staged_text { + let _ = change_set.set_base_text(staged_text, buffer.read(cx).text_snapshot(), cx); + } else { + change_set.unset_base_text(buffer.read(cx).text_snapshot(), cx) + } + })?; + Ok(()) } pub fn reload_buffers( @@ -1900,8 +2022,10 @@ impl BufferStore { if buffers.is_empty() { return Task::ready(Ok(ProjectTransaction::default())); } - - self.state.reload_buffers(buffers, push_to_history, cx) + match &self.state { + BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx), + BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx), + } } async fn handle_reload_buffers( @@ -1935,14 +2059,18 @@ impl BufferStore { cx: &mut ModelContext, ) -> Task> { let buffer_id = buffer.read(cx).remote_id(); - if !self - .shared_buffers - .entry(peer_id) - .or_default() - .insert(buffer.clone()) - { + let shared_buffers = self.shared_buffers.entry(peer_id).or_default(); + if shared_buffers.contains_key(&buffer_id) { return Task::ready(Ok(())); } + shared_buffers.insert( + buffer_id, + SharedBuffer { + buffer: buffer.clone(), + unstaged_changes: None, + lsp_handle: None, + }, + ); let Some((client, project_id)) = self.downstream_client.clone() else { return Task::ready(Ok(())); @@ -2005,8 +2133,8 @@ impl BufferStore { } } - pub fn shared_buffers(&self) -> &HashMap>> { - &self.shared_buffers + pub fn has_shared_buffers(&self) -> bool { + !self.shared_buffers.is_empty() } pub fn create_local_buffer( @@ -2023,26 +2151,23 @@ impl BufferStore { self.add_buffer(buffer.clone(), cx).log_err(); let buffer_id = buffer.read(cx).remote_id(); - let local = self - .state - .as_local() + let this = self + .as_local_mut() .expect("local-only method called in a non-local context"); - local.update(cx, |this, cx| { - if let Some(file) = File::from_dyn(buffer.read(cx).file()) { - this.local_buffer_ids_by_path.insert( - 
ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - buffer_id, - ); + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); - if let Some(entry_id) = file.entry_id { - this.local_buffer_ids_by_entry_id - .insert(entry_id, buffer_id); - } + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); } - }); + } buffer } @@ -2052,10 +2177,8 @@ impl BufferStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - if let Some(remote) = self.state.as_remote() { - remote.update(cx, |remote, cx| { - remote.deserialize_project_transaction(message, push_to_history, cx) - }) + if let Some(this) = self.as_remote_mut() { + this.deserialize_project_transaction(message, push_to_history, cx) } else { debug_panic!("not a remote buffer store"); Task::ready(Err(anyhow!("not a remote buffer store"))) @@ -2063,12 +2186,12 @@ impl BufferStore { } pub fn wait_for_remote_buffer( - &self, + &mut self, id: BufferId, - cx: &mut AppContext, + cx: &mut ModelContext, ) -> Task>> { - if let Some(remote) = self.state.as_remote() { - remote.update(cx, |remote, cx| remote.wait_for_remote_buffer(id, cx)) + if let Some(this) = self.as_remote_mut() { + this.wait_for_remote_buffer(id, cx) } else { debug_panic!("not a remote buffer store"); Task::ready(Err(anyhow!("not a remote buffer store"))) @@ -2099,10 +2222,129 @@ impl BufferStore { } } +impl BufferChangeSet { + pub fn new(buffer: &text::BufferSnapshot) -> Self { + Self { + buffer_id: buffer.remote_id(), + base_text: None, + diff_to_buffer: git::diff::BufferDiff::new(buffer), + recalculate_diff_task: None, + diff_updated_futures: Vec::new(), + base_text_version: 0, + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn new_with_base_text( + base_text: String, + buffer: text::BufferSnapshot, + cx: &mut 
ModelContext, + ) -> Self { + let mut this = Self::new(&buffer); + let _ = this.set_base_text(base_text, buffer, cx); + this + } + + pub fn diff_hunks_intersecting_range<'a>( + &'a self, + range: Range, + buffer_snapshot: &'a text::BufferSnapshot, + ) -> impl 'a + Iterator { + self.diff_to_buffer + .hunks_intersecting_range(range, buffer_snapshot) + } + + pub fn diff_hunks_intersecting_range_rev<'a>( + &'a self, + range: Range, + buffer_snapshot: &'a text::BufferSnapshot, + ) -> impl 'a + Iterator { + self.diff_to_buffer + .hunks_intersecting_range_rev(range, buffer_snapshot) + } + + #[cfg(any(test, feature = "test-support"))] + pub fn base_text_string(&self, cx: &AppContext) -> Option { + self.base_text.as_ref().map(|buffer| buffer.read(cx).text()) + } + + pub fn set_base_text( + &mut self, + mut base_text: String, + buffer_snapshot: text::BufferSnapshot, + cx: &mut ModelContext, + ) -> oneshot::Receiver<()> { + LineEnding::normalize(&mut base_text); + self.recalculate_diff_internal(base_text, buffer_snapshot, true, cx) + } + + pub fn unset_base_text( + &mut self, + buffer_snapshot: text::BufferSnapshot, + cx: &mut ModelContext, + ) { + if self.base_text.is_some() { + self.base_text = None; + self.diff_to_buffer = BufferDiff::new(&buffer_snapshot); + self.recalculate_diff_task.take(); + self.base_text_version += 1; + cx.notify(); + } + } + + pub fn recalculate_diff( + &mut self, + buffer_snapshot: text::BufferSnapshot, + cx: &mut ModelContext, + ) -> oneshot::Receiver<()> { + if let Some(base_text) = self.base_text.clone() { + self.recalculate_diff_internal(base_text.read(cx).text(), buffer_snapshot, false, cx) + } else { + oneshot::channel().1 + } + } + + fn recalculate_diff_internal( + &mut self, + base_text: String, + buffer_snapshot: text::BufferSnapshot, + base_text_changed: bool, + cx: &mut ModelContext, + ) -> oneshot::Receiver<()> { + let (tx, rx) = oneshot::channel(); + self.diff_updated_futures.push(tx); + self.recalculate_diff_task = 
Some(cx.spawn(|this, mut cx| async move { + let (base_text, diff) = cx + .background_executor() + .spawn(async move { + let diff = BufferDiff::build(&base_text, &buffer_snapshot).await; + (base_text, diff) + }) + .await; + this.update(&mut cx, |this, cx| { + if base_text_changed { + this.base_text_version += 1; + this.base_text = Some(cx.new_model(|cx| { + Buffer::local_normalized(Rope::from(base_text), LineEnding::default(), cx) + })); + } + this.diff_to_buffer = diff; + this.recalculate_diff_task.take(); + for tx in this.diff_updated_futures.drain(..) { + tx.send(()).ok(); + } + cx.notify(); + })?; + Ok(()) + })); + rx + } +} + impl OpenBuffer { fn upgrade(&self) -> Option> { match self { - OpenBuffer::Buffer(handle) => handle.upgrade(), + OpenBuffer::Complete { buffer, .. } => buffer.upgrade(), OpenBuffer::Operations(_) => None, } } @@ -2223,3 +2465,52 @@ fn deserialize_blame_buffer_response( remote_url: response.remote_url, }) } + +fn get_permalink_in_rust_registry_src( + provider_registry: Arc, + path: PathBuf, + selection: Range, +) -> Result { + #[derive(Deserialize)] + struct CargoVcsGit { + sha1: String, + } + + #[derive(Deserialize)] + struct CargoVcsInfo { + git: CargoVcsGit, + path_in_vcs: String, + } + + #[derive(Deserialize)] + struct CargoPackage { + repository: String, + } + + #[derive(Deserialize)] + struct CargoToml { + package: CargoPackage, + } + + let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| { + let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?; + Some((dir, json)) + }) else { + bail!("No .cargo_vcs_info.json found in parent directories") + }; + let cargo_vcs_info = serde_json::from_str::(&cargo_vcs_info_json)?; + let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?; + let manifest = toml::from_str::(&cargo_toml)?; + let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository) + .ok_or_else(|| anyhow!("Failed to parse package.repository 
field of manifest"))?; + let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap()); + let permalink = provider.build_permalink( + remote, + BuildPermalinkParams { + sha: &cargo_vcs_info.git.sha1, + path: &path.to_string_lossy(), + selection: Some(selection), + }, + ); + Ok(permalink) +} diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 30764f302b..a0c7d42502 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -14,8 +14,7 @@ use crate::{ pub struct ProjectEnvironment { cli_environment: Option>, - get_environment_task: Option>>>>, - cached_shell_environments: HashMap>, + environments: HashMap>>>>, environment_error_messages: HashMap, } @@ -35,27 +34,15 @@ impl ProjectEnvironment { Self { cli_environment, - get_environment_task: None, - cached_shell_environments: Default::default(), + environments: Default::default(), environment_error_messages: Default::default(), } }) } - #[cfg(any(test, feature = "test-support"))] - pub(crate) fn set_cached( - &mut self, - shell_environments: &[(WorktreeId, HashMap)], - ) { - self.cached_shell_environments = shell_environments - .iter() - .cloned() - .collect::>(); - } - pub(crate) fn remove_worktree_environment(&mut self, worktree_id: WorktreeId) { - self.cached_shell_environments.remove(&worktree_id); self.environment_error_messages.remove(&worktree_id); + self.environments.remove(&worktree_id); } /// Returns the inherited CLI environment, if this project was opened from the Zed CLI. 
@@ -91,96 +78,83 @@ impl ProjectEnvironment { worktree_abs_path: Option>, cx: &ModelContext, ) -> Shared>>> { - if let Some(task) = self.get_environment_task.as_ref() { + if cfg!(any(test, feature = "test-support")) { + return Task::ready(Some(HashMap::default())).shared(); + } + + if let Some(cli_environment) = self.get_cli_environment() { + return cx + .spawn(|_, _| async move { + let path = cli_environment + .get("PATH") + .map(|path| path.as_str()) + .unwrap_or_default(); + log::info!( + "using project environment variables from CLI. PATH={:?}", + path + ); + Some(cli_environment) + }) + .shared(); + } + + let Some((worktree_id, worktree_abs_path)) = worktree_id.zip(worktree_abs_path) else { + return Task::ready(None).shared(); + }; + + if let Some(task) = self.environments.get(&worktree_id) { task.clone() } else { let task = self - .build_environment_task(worktree_id, worktree_abs_path, cx) + .get_worktree_env(worktree_id, worktree_abs_path, cx) .shared(); - - self.get_environment_task = Some(task.clone()); + self.environments.insert(worktree_id, task.clone()); task } } - fn build_environment_task( - &mut self, - worktree_id: Option, - worktree_abs_path: Option>, - cx: &ModelContext, - ) -> Task>> { - let worktree = worktree_id.zip(worktree_abs_path); - - let cli_environment = self.get_cli_environment(); - if let Some(environment) = cli_environment { - cx.spawn(|_, _| async move { - let path = environment - .get("PATH") - .map(|path| path.as_str()) - .unwrap_or_default(); - log::info!( - "using project environment variables from CLI. 
PATH={:?}", - path - ); - Some(environment) - }) - } else if let Some((worktree_id, worktree_abs_path)) = worktree { - self.get_worktree_env(worktree_id, worktree_abs_path, cx) - } else { - Task::ready(None) - } - } - fn get_worktree_env( &mut self, worktree_id: WorktreeId, worktree_abs_path: Arc, cx: &ModelContext, ) -> Task>> { - let cached_env = self.cached_shell_environments.get(&worktree_id).cloned(); - if let Some(env) = cached_env { - Task::ready(Some(env)) - } else { - let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); + let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); - cx.spawn(|this, mut cx| async move { - let (mut shell_env, error_message) = cx - .background_executor() - .spawn({ - let cwd = worktree_abs_path.clone(); - async move { load_shell_environment(&cwd, &load_direnv).await } - }) - .await; + cx.spawn(|this, mut cx| async move { + let (mut shell_env, error_message) = cx + .background_executor() + .spawn({ + let worktree_abs_path = worktree_abs_path.clone(); + async move { + load_worktree_shell_environment(&worktree_abs_path, &load_direnv).await + } + }) + .await; - if let Some(shell_env) = shell_env.as_mut() { - let path = shell_env - .get("PATH") - .map(|path| path.as_str()) - .unwrap_or_default(); - log::info!( - "using project environment variables shell launched in {:?}. PATH={:?}", - worktree_abs_path, - path - ); - this.update(&mut cx, |this, _| { - this.cached_shell_environments - .insert(worktree_id, shell_env.clone()); - }) - .log_err(); + if let Some(shell_env) = shell_env.as_mut() { + let path = shell_env + .get("PATH") + .map(|path| path.as_str()) + .unwrap_or_default(); + log::info!( + "using project environment variables shell launched in {:?}. 
PATH={:?}", + worktree_abs_path, + path + ); - set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); - } + set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); + } - if let Some(error) = error_message { - this.update(&mut cx, |this, _| { - this.environment_error_messages.insert(worktree_id, error); - }) - .log_err(); - } + if let Some(error) = error_message { + this.update(&mut cx, |this, _| { + this.environment_error_messages.insert(worktree_id, error); + }) + .log_err(); + } - shell_env - }) - } + shell_env + }) } } @@ -213,6 +187,42 @@ impl EnvironmentErrorMessage { } } +async fn load_worktree_shell_environment( + worktree_abs_path: &Path, + load_direnv: &DirenvSettings, +) -> ( + Option>, + Option, +) { + match smol::fs::metadata(worktree_abs_path).await { + Ok(meta) => { + let dir = if meta.is_dir() { + worktree_abs_path + } else if let Some(parent) = worktree_abs_path.parent() { + parent + } else { + return ( + None, + Some(EnvironmentErrorMessage(format!( + "Failed to load shell environment in {}: not a directory", + worktree_abs_path.display() + ))), + ); + }; + + load_shell_environment(&dir, load_direnv).await + } + Err(err) => ( + None, + Some(EnvironmentErrorMessage(format!( + "Failed to load shell environment in {}: {}", + worktree_abs_path.display(), + err + ))), + ), + } +} + #[cfg(any(test, feature = "test-support"))] async fn load_shell_environment( _dir: &Path, diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index b3425e7fad..8ad3935eb3 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -1,6 +1,6 @@ use crate::{ worktree_store::{WorktreeStore, WorktreeStoreEvent}, - Project, ProjectEntryId, ProjectPath, + Project, ProjectEntryId, ProjectItem, ProjectPath, }; use anyhow::{Context as _, Result}; use collections::{hash_map, HashMap, HashSet}; @@ -9,7 +9,7 @@ use gpui::{ hash, prelude::*, AppContext, EventEmitter, Img, Model, ModelContext, Subscription, Task, 
WeakModel, }; -use language::File; +use language::{DiskState, File}; use rpc::{AnyProtoClient, ErrorExt as _}; use std::ffi::OsStr; use std::num::NonZeroU64; @@ -74,11 +74,12 @@ impl ImageItem { file_changed = true; } - if !new_file.is_deleted() { - let new_mtime = new_file.mtime(); - if new_mtime != old_file.mtime() { - file_changed = true; - cx.emit(ImageItemEvent::ReloadNeeded); + let old_state = old_file.disk_state(); + let new_state = new_file.disk_state(); + if old_state != new_state { + file_changed = true; + if matches!(new_state, DiskState::Present { .. }) { + cx.emit(ImageItemEvent::ReloadNeeded) } } @@ -113,7 +114,7 @@ impl ImageItem { } } -impl crate::Item for ImageItem { +impl ProjectItem for ImageItem { fn try_open( project: &Model, path: &ProjectPath, @@ -122,9 +123,17 @@ impl crate::Item for ImageItem { let path = path.clone(); let project = project.clone(); - let ext = path - .path + let worktree_abs_path = project + .read(cx) + .worktree_for_id(path.worktree_id, cx)? + .read(cx) + .abs_path(); + + // Resolve the file extension from either the worktree path (if it's a single file) + // or from the project path's subpath. 
+ let ext = worktree_abs_path .extension() + .or_else(|| path.path.extension()) .and_then(OsStr::to_str) .map(str::to_lowercase) .unwrap_or_default(); @@ -150,6 +159,10 @@ impl crate::Item for ImageItem { fn project_path(&self, cx: &AppContext) -> Option { Some(self.project_path(cx).clone()) } + + fn is_dirty(&self) -> bool { + false + } } trait ImageStoreImpl { @@ -503,37 +516,30 @@ impl LocalImageStore { return; } - let new_file = if let Some(entry) = old_file + let snapshot_entry = old_file .entry_id .and_then(|entry_id| snapshot.entry_for_id(entry_id)) - { + .or_else(|| snapshot.entry_for_path(old_file.path.as_ref())); + + let new_file = if let Some(entry) = snapshot_entry { worktree::File { + disk_state: match entry.mtime { + Some(mtime) => DiskState::Present { mtime }, + None => old_file.disk_state, + }, is_local: true, entry_id: Some(entry.id), - mtime: entry.mtime, path: entry.path.clone(), worktree: worktree.clone(), - is_deleted: false, - is_private: entry.is_private, - } - } else if let Some(entry) = snapshot.entry_for_path(old_file.path.as_ref()) { - worktree::File { - is_local: true, - entry_id: Some(entry.id), - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree.clone(), - is_deleted: false, is_private: entry.is_private, } } else { worktree::File { + disk_state: DiskState::Deleted, is_local: true, entry_id: old_file.entry_id, path: old_file.path.clone(), - mtime: old_file.mtime, worktree: worktree.clone(), - is_deleted: true, is_private: old_file.is_private, } }; diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 57f8cea348..abc2e0a21d 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -1,9 +1,10 @@ mod signature_help; use crate::{ - lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock, - HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip, - InlayHintTooltip, Location, LocationLink, 
MarkupContent, ProjectTransaction, ResolveState, + lsp_store::{LocalLspStore, LspStore}, + CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, + InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, + LocationLink, MarkupContent, ProjectTransaction, ResolveState, }; use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; @@ -44,6 +45,31 @@ pub fn lsp_formatting_options(settings: &LanguageSettings) -> lsp::FormattingOpt } } +pub(crate) fn file_path_to_lsp_url(path: &Path) -> Result { + match lsp::Url::from_file_path(path) { + Ok(url) => Ok(url), + Err(()) => Err(anyhow!( + "Invalid file path provided to LSP request: {path:?}" + )), + } +} + +pub(crate) fn make_text_document_identifier(path: &Path) -> Result { + Ok(lsp::TextDocumentIdentifier { + uri: file_path_to_lsp_url(path)?, + }) +} + +pub(crate) fn make_lsp_text_document_position( + path: &Path, + position: PointUtf16, +) -> Result { + Ok(lsp::TextDocumentPositionParams { + text_document: make_text_document_identifier(path)?, + position: point_to_lsp(position), + }) +} + #[async_trait(?Send)] pub trait LspCommand: 'static + Sized + Send + std::fmt::Debug { type Response: 'static + Default + Send + std::fmt::Debug; @@ -64,7 +90,7 @@ pub trait LspCommand: 'static + Sized + Send + std::fmt::Debug { buffer: &Buffer, language_server: &Arc, cx: &AppContext, - ) -> ::Params; + ) -> Result<::Params>; async fn response_from_lsp( self, @@ -201,13 +227,8 @@ impl LspCommand for PrepareRename { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::TextDocumentPositionParams { - lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - } + ) -> Result { + make_lsp_text_document_position(path, self.position) } async fn response_from_lsp( @@ -324,17 +345,12 @@ impl LspCommand for PerformRename { _: &Buffer, _: &Arc, _: 
&AppContext, - ) -> lsp::RenameParams { - lsp::RenameParams { - text_document_position: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::RenameParams { + text_document_position: make_lsp_text_document_position(path, self.position)?, new_name: self.new_name.clone(), work_done_progress_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -348,7 +364,7 @@ impl LspCommand for PerformRename { if let Some(edit) = message { let (lsp_adapter, lsp_server) = language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?; - LspStore::deserialize_workspace_edit( + LocalLspStore::deserialize_workspace_edit( lsp_store, edit, self.push_to_history, @@ -454,17 +470,12 @@ impl LspCommand for GetDefinition { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::GotoDefinitionParams { - lsp::GotoDefinitionParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::GotoDefinitionParams { + text_document_position_params: make_lsp_text_document_position(path, self.position)?, work_done_progress_params: Default::default(), partial_result_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -554,17 +565,12 @@ impl LspCommand for GetDeclaration { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::GotoDeclarationParams { - lsp::GotoDeclarationParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::GotoDeclarationParams { + text_document_position_params: make_lsp_text_document_position(path, self.position)?, 
work_done_progress_params: Default::default(), partial_result_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -647,17 +653,12 @@ impl LspCommand for GetImplementation { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::GotoImplementationParams { - lsp::GotoImplementationParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::GotoImplementationParams { + text_document_position_params: make_lsp_text_document_position(path, self.position)?, work_done_progress_params: Default::default(), partial_result_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -747,17 +748,12 @@ impl LspCommand for GetTypeDefinition { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::GotoTypeDefinitionParams { - lsp::GotoTypeDefinitionParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::GotoTypeDefinitionParams { + text_document_position_params: make_lsp_text_document_position(path, self.position)?, work_done_progress_params: Default::default(), partial_result_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -837,7 +833,7 @@ fn language_server_for_buffer( lsp_store .update(cx, |lsp_store, cx| { lsp_store - .language_server_for_buffer(buffer.read(cx), server_id, cx) + .language_server_for_local_buffer(buffer.read(cx), server_id, cx) .map(|(adapter, server)| (adapter.clone(), server.clone())) })? 
.ok_or_else(|| anyhow!("no language server found for buffer")) @@ -1060,20 +1056,15 @@ impl LspCommand for GetReferences { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::ReferenceParams { - lsp::ReferenceParams { - text_document_position: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::ReferenceParams { + text_document_position: make_lsp_text_document_position(path, self.position)?, work_done_progress_params: Default::default(), partial_result_params: Default::default(), context: lsp::ReferenceContext { include_declaration: true, }, - } + }) } async fn response_from_lsp( @@ -1236,17 +1227,12 @@ impl LspCommand for GetDocumentHighlights { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::DocumentHighlightParams { - lsp::DocumentHighlightParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::DocumentHighlightParams { + text_document_position_params: make_lsp_text_document_position(path, self.position)?, work_done_progress_params: Default::default(), partial_result_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -1390,22 +1376,12 @@ impl LspCommand for GetSignatureHelp { _: &Buffer, _: &Arc, _cx: &AppContext, - ) -> lsp::SignatureHelpParams { - let url_result = lsp::Url::from_file_path(path); - if url_result.is_err() { - log::error!("an invalid file path has been specified"); - } - - lsp::SignatureHelpParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: url_result.expect("invalid file path"), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::SignatureHelpParams { + text_document_position_params: 
make_lsp_text_document_position(path, self.position)?, context: None, work_done_progress_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -1504,16 +1480,11 @@ impl LspCommand for GetHover { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::HoverParams { - lsp::HoverParams { - text_document_position_params: lsp::TextDocumentPositionParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, - position: point_to_lsp(self.position), - }, + ) -> Result { + Ok(lsp::HoverParams { + text_document_position_params: make_lsp_text_document_position(path, self.position)?, work_done_progress_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -1727,16 +1698,13 @@ impl LspCommand for GetCompletions { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::CompletionParams { - lsp::CompletionParams { - text_document_position: lsp::TextDocumentPositionParams::new( - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path).unwrap()), - point_to_lsp(self.position), - ), + ) -> Result { + Ok(lsp::CompletionParams { + text_document_position: make_lsp_text_document_position(path, self.position)?, context: Some(self.context.clone()), work_done_progress_params: Default::default(), partial_result_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -1775,21 +1743,54 @@ impl LspCommand for GetCompletions { if let Some(item_defaults) = item_defaults { let default_data = item_defaults.data.as_ref(); let default_commit_characters = item_defaults.commit_characters.as_ref(); + let default_edit_range = item_defaults.edit_range.as_ref(); + let default_insert_text_format = item_defaults.insert_text_format.as_ref(); let default_insert_text_mode = item_defaults.insert_text_mode.as_ref(); if default_data.is_some() || default_commit_characters.is_some() + || default_edit_range.is_some() + || default_insert_text_format.is_some() || default_insert_text_mode.is_some() { for item in 
completions.iter_mut() { - if let Some(data) = default_data { - item.data = Some(data.clone()) + if item.data.is_none() && default_data.is_some() { + item.data = default_data.cloned() } - if let Some(characters) = default_commit_characters { - item.commit_characters = Some(characters.clone()) + if item.commit_characters.is_none() && default_commit_characters.is_some() { + item.commit_characters = default_commit_characters.cloned() } - if let Some(text_mode) = default_insert_text_mode { - item.insert_text_mode = Some(*text_mode) + if item.text_edit.is_none() { + if let Some(default_edit_range) = default_edit_range { + match default_edit_range { + CompletionListItemDefaultsEditRange::Range(range) => { + item.text_edit = + Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: *range, + new_text: item.label.clone(), + })) + } + CompletionListItemDefaultsEditRange::InsertAndReplace { + insert, + replace, + } => { + item.text_edit = + Some(lsp::CompletionTextEdit::InsertAndReplace( + lsp::InsertReplaceEdit { + new_text: item.label.clone(), + insert: *insert, + replace: *replace, + }, + )) + } + } + } + } + if item.insert_text_format.is_none() && default_insert_text_format.is_some() { + item.insert_text_format = default_insert_text_format.cloned() + } + if item.insert_text_mode.is_none() && default_insert_text_mode.is_some() { + item.insert_text_mode = default_insert_text_mode.cloned() } } } @@ -1884,6 +1885,7 @@ impl LspCommand for GetCompletions { new_text, server_id, lsp_completion, + resolved: false, } }) .collect()) @@ -2040,12 +2042,14 @@ impl LspCommand for GetCodeActions { buffer: &Buffer, language_server: &Arc, _: &AppContext, - ) -> lsp::CodeActionParams { - let relevant_diagnostics = buffer + ) -> Result { + let mut relevant_diagnostics = Vec::new(); + for entry in buffer .snapshot() .diagnostics_in_range::<_, language::PointUtf16>(self.range.clone(), false) - .map(|entry| entry.to_lsp_diagnostic_stub()) - .collect::>(); + { + 
relevant_diagnostics.push(entry.to_lsp_diagnostic_stub()?); + } let supported = Self::supported_code_action_kinds(language_server.adapter_server_capabilities()); @@ -2067,11 +2071,9 @@ impl LspCommand for GetCodeActions { supported }; - lsp::CodeActionParams { - text_document: lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(path).unwrap(), - ), - range: range_to_lsp(self.range.to_point_utf16(buffer)), + Ok(lsp::CodeActionParams { + text_document: make_text_document_identifier(path)?, + range: range_to_lsp(self.range.to_point_utf16(buffer))?, work_done_progress_params: Default::default(), partial_result_params: Default::default(), context: lsp::CodeActionContext { @@ -2079,7 +2081,7 @@ impl LspCommand for GetCodeActions { only, ..lsp::CodeActionContext::default() }, - } + }) } async fn response_from_lsp( @@ -2090,19 +2092,33 @@ impl LspCommand for GetCodeActions { server_id: LanguageServerId, _: AsyncAppContext, ) -> Result> { + let requested_kinds_set = if let Some(kinds) = self.kinds { + Some(kinds.into_iter().collect::>()) + } else { + None + }; + Ok(actions .unwrap_or_default() .into_iter() .filter_map(|entry| { - if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry { - Some(CodeAction { - server_id, - range: self.range.clone(), - lsp_action, - }) - } else { - None + let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry else { + return None; + }; + + if let Some((requested_kinds, kind)) = + requested_kinds_set.as_ref().zip(lsp_action.kind.as_ref()) + { + if !requested_kinds.contains(kind) { + return None; + } } + + Some(CodeAction { + server_id, + range: self.range.clone(), + lsp_action, + }) }) .collect()) } @@ -2237,15 +2253,12 @@ impl LspCommand for OnTypeFormatting { _: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::DocumentOnTypeFormattingParams { - lsp::DocumentOnTypeFormattingParams { - text_document_position: lsp::TextDocumentPositionParams::new( - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path).unwrap()), - 
point_to_lsp(self.position), - ), + ) -> Result { + Ok(lsp::DocumentOnTypeFormattingParams { + text_document_position: make_lsp_text_document_position(path, self.position)?, ch: self.trigger.clone(), options: self.options.clone(), - } + }) } async fn response_from_lsp( @@ -2259,7 +2272,7 @@ impl LspCommand for OnTypeFormatting { if let Some(edits) = message { let (lsp_adapter, lsp_server) = language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?; - LspStore::deserialize_text_edits( + LocalLspStore::deserialize_text_edits( lsp_store, buffer, edits, @@ -2743,14 +2756,14 @@ impl LspCommand for InlayHints { buffer: &Buffer, _: &Arc, _: &AppContext, - ) -> lsp::InlayHintParams { - lsp::InlayHintParams { + ) -> Result { + Ok(lsp::InlayHintParams { text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), + uri: file_path_to_lsp_url(path)?, }, - range: range_to_lsp(self.range.to_point_utf16(buffer)), + range: range_to_lsp(self.range.to_point_utf16(buffer))?, work_done_progress_params: Default::default(), - } + }) } async fn response_from_lsp( @@ -2900,15 +2913,12 @@ impl LspCommand for LinkedEditingRange { buffer: &Buffer, _server: &Arc, _: &AppContext, - ) -> lsp::LinkedEditingRangeParams { + ) -> Result { let position = self.position.to_point_utf16(&buffer.snapshot()); - lsp::LinkedEditingRangeParams { - text_document_position_params: lsp::TextDocumentPositionParams::new( - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path).unwrap()), - point_to_lsp(position), - ), + Ok(lsp::LinkedEditingRangeParams { + text_document_position_params: make_lsp_text_document_position(path, position)?, work_done_progress_params: Default::default(), - } + }) } async fn response_from_lsp( diff --git a/crates/project/src/lsp_ext_command.rs b/crates/project/src/lsp_ext_command.rs index 7890630e31..2d2610bc31 100644 --- a/crates/project/src/lsp_ext_command.rs +++ b/crates/project/src/lsp_ext_command.rs @@ -1,4 +1,4 @@ -use 
crate::{lsp_command::LspCommand, lsp_store::LspStore}; +use crate::{lsp_command::LspCommand, lsp_store::LspStore, make_text_document_identifier}; use anyhow::{Context, Result}; use async_trait::async_trait; use gpui::{AppContext, AsyncAppContext, Model}; @@ -53,13 +53,11 @@ impl LspCommand for ExpandMacro { _: &Buffer, _: &Arc, _: &AppContext, - ) -> ExpandMacroParams { - ExpandMacroParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }, + ) -> Result { + Ok(ExpandMacroParams { + text_document: make_text_document_identifier(path)?, position: point_to_lsp(self.position), - } + }) } async fn response_from_lsp( @@ -179,13 +177,13 @@ impl LspCommand for OpenDocs { _: &Buffer, _: &Arc, _: &AppContext, - ) -> OpenDocsParams { - OpenDocsParams { + ) -> Result { + Ok(OpenDocsParams { text_document: lsp::TextDocumentIdentifier { uri: lsp::Url::from_file_path(path).unwrap(), }, position: point_to_lsp(self.position), - } + }) } async fn response_from_lsp( @@ -292,10 +290,10 @@ impl LspCommand for SwitchSourceHeader { _: &Buffer, _: &Arc, _: &AppContext, - ) -> SwitchSourceHeaderParams { - SwitchSourceHeaderParams(lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), - }) + ) -> Result { + Ok(SwitchSourceHeaderParams(make_text_document_identifier( + path, + )?)) } async fn response_from_lsp( diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 0723ba689b..90467ec33d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -10,7 +10,7 @@ use crate::{ toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent}, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, - CodeAction, Completion, CoreCompletion, Hover, InlayHint, Item as _, ProjectPath, + CodeAction, Completion, CoreCompletion, Hover, InlayHint, ProjectItem as _, ProjectPath, ProjectTransaction, ResolveState, Symbol, ToolchainStore, }; use anyhow::{anyhow, Context 
as _, Result}; @@ -23,7 +23,7 @@ use futures::{ stream::FuturesUnordered, AsyncWriteExt, Future, FutureExt, StreamExt, }; -use globset::{Glob, GlobSet, GlobSetBuilder}; +use globset::{Glob, GlobBuilder, GlobMatcher, GlobSet, GlobSetBuilder}; use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, @@ -43,15 +43,16 @@ use language::{ Unclipped, }; use lsp::{ - CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, - DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, - LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, - LanguageServerName, LspRequestFuture, MessageActionItem, MessageType, OneOf, - ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, - WorkspaceFolder, + notification::DidRenameFiles, CodeActionKind, CompletionContext, DiagnosticSeverity, + DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, + FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, FileSystemWatcher, + InsertTextFormat, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, + LanguageServerId, LanguageServerName, LspRequestFuture, MessageActionItem, MessageType, OneOf, + RenameFilesParams, ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, + WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, }; use node_runtime::read_package_installed_version; -use parking_lot::{Mutex, RwLock}; +use parking_lot::Mutex; use postage::watch; use rand::prelude::*; @@ -64,12 +65,14 @@ use smol::channel::Sender; use snippet::Snippet; use std::{ any::Any, + cell::RefCell, cmp::Ordering, convert::TryInto, ffi::OsStr, iter, mem, ops::{ControlFlow, Range}, path::{self, Path, PathBuf}, + rc::Rc, str, sync::Arc, time::{Duration, Instant}, @@ -84,9 +87,8 @@ pub use language::Location; #[cfg(any(test, feature = "test-support"))] pub use 
prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; pub use worktree::{ - Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry, - UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, - FS_WATCH_LATENCY, + Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet, + UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY, }; const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); @@ -112,6 +114,10 @@ impl FormatTarget { } } +// proto::RegisterBufferWithLanguageServer {} + +pub type OpenLspBufferHandle = Model>; + // Currently, formatting operations are represented differently depending on // whether they come from a language server or an external command. #[derive(Debug)] @@ -132,24 +138,855 @@ impl FormatTrigger { } pub struct LocalLspStore { + worktree_store: Model, + toolchain_store: Model, http_client: Arc, environment: Model, fs: Arc, + languages: Arc, + language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, yarn: Model, pub language_servers: HashMap, buffers_being_formatted: HashSet, last_workspace_edits_by_language_server: HashMap, - language_server_watched_paths: HashMap>, + language_server_watched_paths: HashMap, + language_server_paths_watched_for_rename: + HashMap, language_server_watcher_registrations: HashMap>>, supplementary_language_servers: HashMap)>, prettier_store: Model, current_lsp_settings: HashMap, + next_diagnostic_group_id: usize, + diagnostics: HashMap< + WorktreeId, + HashMap< + Arc, + Vec<( + LanguageServerId, + Vec>>, + )>, + >, + >, + buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots _subscription: gpui::Subscription, + registered_buffers: HashMap, } impl LocalLspStore { + fn start_language_server( + &mut self, + worktree_handle: &Model, + delegate: Arc, + adapter: Arc, + language: LanguageName, + cx: &mut ModelContext, + ) { + let worktree 
= worktree_handle.read(cx); + let worktree_id = worktree.id(); + let root_path = worktree.abs_path(); + let key = (worktree_id, adapter.name.clone()); + + if self.language_server_ids.contains_key(&key) { + return; + } + + let project_settings = ProjectSettings::get( + Some(SettingsLocation { + worktree_id, + path: Path::new(""), + }), + cx, + ); + let lsp = project_settings.lsp.get(&adapter.name); + let override_options = lsp.and_then(|s| s.initialization_options.clone()); + + let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); + + let server_id = self.languages.next_language_server_id(); + log::info!( + "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", + adapter.name.0 + ); + + let binary = self.get_language_server_binary(adapter.clone(), delegate.clone(), true, cx); + + let pending_server = cx.spawn({ + let adapter = adapter.clone(); + let server_name = adapter.name.clone(); + let stderr_capture = stderr_capture.clone(); + + move |_lsp_store, cx| async move { + let binary = binary.await?; + + #[cfg(any(test, feature = "test-support"))] + if let Some(server) = _lsp_store + .update(&mut cx.clone(), |this, cx| { + this.languages.create_fake_language_server( + server_id, + &server_name, + binary.clone(), + cx.to_async(), + ) + }) + .ok() + .flatten() + { + return Ok(server); + } + + lsp::LanguageServer::new( + stderr_capture, + server_id, + server_name, + binary, + &root_path, + adapter.code_action_kinds(), + cx, + ) + } + }); + + let state = LanguageServerState::Starting({ + let server_name = adapter.name.0.clone(); + let delegate = delegate as Arc; + let language = language.clone(); + let key = key.clone(); + let adapter = adapter.clone(); + + cx.spawn(move |this, mut cx| async move { + let result = { + let delegate = delegate.clone(); + let adapter = adapter.clone(); + let this = this.clone(); + let toolchains = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; + let mut cx = cx.clone(); + async move 
{ + let language_server = pending_server.await?; + + let workspace_config = adapter + .adapter + .clone() + .workspace_configuration(&delegate, toolchains.clone(), &mut cx) + .await?; + + let mut initialization_options = adapter + .adapter + .clone() + .initialization_options(&(delegate)) + .await?; + + match (&mut initialization_options, override_options) { + (Some(initialization_options), Some(override_options)) => { + merge_json_value_into(override_options, initialization_options); + } + (None, override_options) => initialization_options = override_options, + _ => {} + } + + let initialization_params = cx.update(|cx| { + let mut params = language_server.default_initialize_params(cx); + params.initialization_options = initialization_options; + adapter.adapter.prepare_initialize_params(params) + })??; + + Self::setup_lsp_messages(this.clone(), &language_server, delegate, adapter); + + let language_server = cx + .update(|cx| { + language_server.initialize(Some(initialization_params), cx) + })? 
+ .await + .inspect_err(|_| { + if let Some(this) = this.upgrade() { + this.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) + }) + .ok(); + } + })?; + + language_server + .notify::( + lsp::DidChangeConfigurationParams { + settings: workspace_config, + }, + ) + .ok(); + + anyhow::Ok(language_server) + } + } + .await; + + match result { + Ok(server) => { + this.update(&mut cx, |this, mut cx| { + this.insert_newly_running_language_server( + language, + adapter, + server.clone(), + server_id, + key, + &mut cx, + ); + }) + .ok(); + stderr_capture.lock().take(); + Some(server) + } + + Err(err) => { + let log = stderr_capture.lock().take().unwrap_or_default(); + delegate.update_status( + adapter.name(), + LanguageServerBinaryStatus::Failed { + error: format!("{err}\n-- stderr--\n{}", log), + }, + ); + log::error!("Failed to start language server {server_name:?}: {err}"); + log::error!("server stderr: {:?}", log); + None + } + } + }) + }); + + self.language_servers.insert(server_id, state); + self.language_server_ids.insert(key, server_id); + } + + pub fn start_language_servers( + &mut self, + worktree: &Model, + language: LanguageName, + cx: &mut ModelContext, + ) { + let root_file = worktree + .update(cx, |tree, cx| tree.root_file(cx)) + .map(|f| f as _); + let settings = language_settings(Some(language.clone()), root_file.as_ref(), cx); + if !settings.enable_language_server { + return; + } + + let available_lsp_adapters = self.languages.clone().lsp_adapters(&language); + let available_language_servers = available_lsp_adapters + .iter() + .map(|lsp_adapter| lsp_adapter.name.clone()) + .collect::>(); + + let desired_language_servers = + settings.customized_language_servers(&available_language_servers); + + let mut enabled_lsp_adapters: Vec> = Vec::new(); + for desired_language_server in desired_language_servers { + if let Some(adapter) = available_lsp_adapters + .iter() + .find(|adapter| adapter.name == desired_language_server) + { + 
enabled_lsp_adapters.push(adapter.clone()); + continue; + } + + if let Some(adapter) = self + .languages + .load_available_lsp_adapter(&desired_language_server) + { + self.languages + .register_lsp_adapter(language.clone(), adapter.adapter.clone()); + enabled_lsp_adapters.push(adapter); + continue; + } + + log::warn!( + "no language server found matching '{}'", + desired_language_server.0 + ); + } + + for adapter in &enabled_lsp_adapters { + let delegate = LocalLspAdapterDelegate::new( + self.languages.clone(), + &self.environment, + cx.weak_model(), + &worktree, + self.http_client.clone(), + self.fs.clone(), + cx, + ); + self.start_language_server(worktree, delegate, adapter.clone(), language.clone(), cx); + } + + // After starting all the language servers, reorder them to reflect the desired order + // based on the settings. + // + // This is done, in part, to ensure that language servers loaded at different points + // (e.g., native vs extension) still end up in the right order at the end, rather than + // it being based on which language server happened to be loaded in first. 
+ self.languages + .reorder_language_servers(&language, enabled_lsp_adapters); + } + + fn get_language_server_binary( + &self, + adapter: Arc, + delegate: Arc, + allow_binary_download: bool, + cx: &mut ModelContext, + ) -> Task> { + let settings = ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: Path::new(""), + }), + cx, + ) + .lsp + .get(&adapter.name) + .and_then(|s| s.binary.clone()); + + if settings.as_ref().is_some_and(|b| b.path.is_some()) { + let settings = settings.unwrap(); + return cx.spawn(|_, _| async move { + Ok(LanguageServerBinary { + path: PathBuf::from(&settings.path.unwrap()), + env: Some(delegate.shell_env().await), + arguments: settings + .arguments + .unwrap_or_default() + .iter() + .map(Into::into) + .collect(), + }) + }); + } + let lsp_binary_options = LanguageServerBinaryOptions { + allow_path_lookup: !settings + .as_ref() + .and_then(|b| b.ignore_system_version) + .unwrap_or_default(), + allow_binary_download, + }; + let toolchains = self.toolchain_store.read(cx).as_language_toolchain_store(); + cx.spawn(|_, mut cx| async move { + let binary_result = adapter + .clone() + .get_language_server_command( + delegate.clone(), + toolchains, + lsp_binary_options, + &mut cx, + ) + .await; + + delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); + + let mut binary = binary_result?; + if let Some(arguments) = settings.and_then(|b| b.arguments) { + binary.arguments = arguments.into_iter().map(Into::into).collect(); + } + + let mut shell_env = delegate.shell_env().await; + shell_env.extend(binary.env.unwrap_or_default()); + binary.env = Some(shell_env); + Ok(binary) + }) + } + + fn setup_lsp_messages( + this: WeakModel, + language_server: &LanguageServer, + delegate: Arc, + adapter: Arc, + ) { + let name = language_server.name(); + let server_id = language_server.server_id(); + language_server + .on_notification::({ + let adapter = adapter.clone(); + let this = this.clone(); + 
move |mut params, mut cx| { + let adapter = adapter.clone(); + if let Some(this) = this.upgrade() { + adapter.process_diagnostics(&mut params); + this.update(&mut cx, |this, cx| { + this.update_diagnostics( + server_id, + params, + &adapter.disk_based_diagnostic_sources, + cx, + ) + .log_err(); + }) + .ok(); + } + } + }) + .detach(); + language_server + .on_request::({ + let adapter = adapter.adapter.clone(); + let delegate = delegate.clone(); + let this = this.clone(); + move |params, mut cx| { + let adapter = adapter.clone(); + let delegate = delegate.clone(); + let this = this.clone(); + async move { + let toolchains = + this.update(&mut cx, |this, cx| this.toolchain_store(cx))?; + let workspace_config = adapter + .workspace_configuration(&delegate, toolchains, &mut cx) + .await?; + Ok(params + .items + .into_iter() + .map(|item| { + if let Some(section) = &item.section { + workspace_config + .get(section) + .cloned() + .unwrap_or(serde_json::Value::Null) + } else { + workspace_config.clone() + } + }) + .collect()) + } + } + }) + .detach(); + + language_server + .on_request::({ + let this = this.clone(); + move |_, mut cx| { + let this = this.clone(); + async move { + let Some(server) = + this.update(&mut cx, |this, _| this.language_server_for_id(server_id))? + else { + return Ok(None); + }; + let root = server.root_path(); + let Ok(uri) = Url::from_file_path(&root) else { + return Ok(None); + }; + Ok(Some(vec![WorkspaceFolder { + uri, + name: Default::default(), + }])) + } + } + }) + .detach(); + // Even though we don't have handling for these requests, respond to them to + // avoid stalling any language server like `gopls` which waits for a response + // to these requests when initializing. 
+ language_server + .on_request::({ + let this = this.clone(); + move |params, mut cx| { + let this = this.clone(); + async move { + this.update(&mut cx, |this, _| { + if let Some(status) = this.language_server_statuses.get_mut(&server_id) + { + if let lsp::NumberOrString::String(token) = params.token { + status.progress_tokens.insert(token); + } + } + })?; + + Ok(()) + } + } + }) + .detach(); + + language_server + .on_request::({ + let this = this.clone(); + move |params, mut cx| { + let this = this.clone(); + async move { + for reg in params.registrations { + match reg.method.as_str() { + "workspace/didChangeWatchedFiles" => { + if let Some(options) = reg.register_options { + let options = serde_json::from_value(options)?; + this.update(&mut cx, |this, cx| { + this.as_local_mut()?.on_lsp_did_change_watched_files( + server_id, ®.id, options, cx, + ); + Some(()) + })?; + } + } + "textDocument/rangeFormatting" => { + this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + let options = reg + .register_options + .map(|options| { + serde_json::from_value::< + lsp::DocumentRangeFormattingOptions, + >( + options + ) + }) + .transpose()?; + let provider = match options { + None => OneOf::Left(true), + Some(options) => OneOf::Right(options), + }; + server.update_capabilities(|capabilities| { + capabilities.document_range_formatting_provider = + Some(provider); + }) + } + anyhow::Ok(()) + })??; + } + "textDocument/onTypeFormatting" => { + this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + let options = reg + .register_options + .map(|options| { + serde_json::from_value::< + lsp::DocumentOnTypeFormattingOptions, + >( + options + ) + }) + .transpose()?; + if let Some(options) = options { + server.update_capabilities(|capabilities| { + capabilities + .document_on_type_formatting_provider = + Some(options); + }) + } + } + anyhow::Ok(()) + })??; + } + "textDocument/formatting" => { 
+ this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + let options = reg + .register_options + .map(|options| { + serde_json::from_value::< + lsp::DocumentFormattingOptions, + >( + options + ) + }) + .transpose()?; + let provider = match options { + None => OneOf::Left(true), + Some(options) => OneOf::Right(options), + }; + server.update_capabilities(|capabilities| { + capabilities.document_formatting_provider = + Some(provider); + }) + } + anyhow::Ok(()) + })??; + } + _ => log::warn!("unhandled capability registration: {reg:?}"), + } + } + Ok(()) + } + } + }) + .detach(); + + language_server + .on_request::({ + let this = this.clone(); + move |params, mut cx| { + let this = this.clone(); + async move { + for unreg in params.unregisterations.iter() { + match unreg.method.as_str() { + "workspace/didChangeWatchedFiles" => { + this.update(&mut cx, |this, cx| { + this.as_local_mut()? + .on_lsp_unregister_did_change_watched_files( + server_id, &unreg.id, cx, + ); + Some(()) + })?; + } + "textDocument/rename" => { + this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + server.update_capabilities(|capabilities| { + capabilities.rename_provider = None + }) + } + })?; + } + "textDocument/rangeFormatting" => { + this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + server.update_capabilities(|capabilities| { + capabilities.document_range_formatting_provider = + None + }) + } + })?; + } + "textDocument/onTypeFormatting" => { + this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + server.update_capabilities(|capabilities| { + capabilities.document_on_type_formatting_provider = + None; + }) + } + })?; + } + "textDocument/formatting" => { + this.update(&mut cx, |this, _| { + if let Some(server) = this.language_server_for_id(server_id) + { + server.update_capabilities(|capabilities| { + 
capabilities.document_formatting_provider = None; + }) + } + })?; + } + _ => log::warn!("unhandled capability unregistration: {unreg:?}"), + } + } + Ok(()) + } + } + }) + .detach(); + + language_server + .on_request::({ + let adapter = adapter.clone(); + let this = this.clone(); + move |params, cx| { + LocalLspStore::on_lsp_workspace_edit( + this.clone(), + params, + server_id, + adapter.clone(), + cx, + ) + } + }) + .detach(); + + language_server + .on_request::({ + let this = this.clone(); + move |(), mut cx| { + let this = this.clone(); + async move { + this.update(&mut cx, |this, cx| { + cx.emit(LspStoreEvent::RefreshInlayHints); + this.downstream_client.as_ref().map(|(client, project_id)| { + client.send(proto::RefreshInlayHints { + project_id: *project_id, + }) + }) + })? + .transpose()?; + Ok(()) + } + } + }) + .detach(); + + language_server + .on_request::({ + let this = this.clone(); + let name = name.to_string(); + move |params, mut cx| { + let this = this.clone(); + let name = name.to_string(); + async move { + let actions = params.actions.unwrap_or_default(); + let (tx, mut rx) = smol::channel::bounded(1); + let request = LanguageServerPromptRequest { + level: match params.typ { + lsp::MessageType::ERROR => PromptLevel::Critical, + lsp::MessageType::WARNING => PromptLevel::Warning, + _ => PromptLevel::Info, + }, + message: params.message, + actions, + response_channel: tx, + lsp_name: name.clone(), + }; + + let did_update = this + .update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerPrompt(request)); + }) + .is_ok(); + if did_update { + let response = rx.next().await; + + Ok(response) + } else { + Ok(None) + } + } + } + }) + .detach(); + + language_server + .on_notification::({ + let this = this.clone(); + let name = name.to_string(); + move |params, mut cx| { + let this = this.clone(); + let name = name.to_string(); + if let Some(ref message) = params.message { + let message = message.trim(); + if !message.is_empty() { + let 
formatted_message = format!( + "Language server {name} (id {server_id}) status update: {message}" + ); + match params.health { + ServerHealthStatus::Ok => log::info!("{}", formatted_message), + ServerHealthStatus::Warning => log::warn!("{}", formatted_message), + ServerHealthStatus::Error => { + log::error!("{}", formatted_message); + let (tx, _rx) = smol::channel::bounded(1); + let request = LanguageServerPromptRequest { + level: PromptLevel::Critical, + message: params.message.unwrap_or_default(), + actions: Vec::new(), + response_channel: tx, + lsp_name: name.clone(), + }; + let _ = this + .update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerPrompt(request)); + }) + .ok(); + } + ServerHealthStatus::Other(status) => { + log::info!( + "Unknown server health: {status}\n{formatted_message}" + ) + } + } + } + } + } + }) + .detach(); + language_server + .on_notification::({ + let this = this.clone(); + let name = name.to_string(); + move |params, mut cx| { + let this = this.clone(); + let name = name.to_string(); + + let (tx, _) = smol::channel::bounded(1); + let request = LanguageServerPromptRequest { + level: match params.typ { + lsp::MessageType::ERROR => PromptLevel::Critical, + lsp::MessageType::WARNING => PromptLevel::Warning, + _ => PromptLevel::Info, + }, + message: params.message, + actions: vec![], + response_channel: tx, + lsp_name: name.clone(), + }; + + let _ = this.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerPrompt(request)); + }); + } + }) + .detach(); + + let disk_based_diagnostics_progress_token = + adapter.disk_based_diagnostics_progress_token.clone(); + + language_server + .on_notification::({ + let this = this.clone(); + move |params, mut cx| { + if let Some(this) = this.upgrade() { + this.update(&mut cx, |this, cx| { + this.on_lsp_progress( + params, + server_id, + disk_based_diagnostics_progress_token.clone(), + cx, + ); + }) + .ok(); + } + } + }) + .detach(); + + language_server + .on_notification::({ + let this 
= this.clone(); + move |params, mut cx| { + if let Some(this) = this.upgrade() { + this.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerLog( + server_id, + LanguageServerLogType::Log(params.typ), + params.message, + )); + }) + .ok(); + } + } + }) + .detach(); + + language_server + .on_notification::({ + let this = this.clone(); + move |params, mut cx| { + if let Some(this) = this.upgrade() { + this.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerLog( + server_id, + LanguageServerLogType::Trace(params.verbose), + params.message, + )); + }) + .ok(); + } + } + }) + .detach(); + } + fn shutdown_language_servers( &mut self, _cx: &mut ModelContext, @@ -171,6 +1008,70 @@ impl LocalLspStore { } } + fn language_servers_for_worktree( + &self, + worktree_id: WorktreeId, + ) -> impl Iterator> { + self.language_server_ids + .iter() + .filter_map(move |((language_server_worktree_id, _), id)| { + if *language_server_worktree_id == worktree_id { + if let Some(LanguageServerState::Running { server, .. }) = + self.language_servers.get(id) + { + return Some(server); + } + } + None + }) + } + + pub(crate) fn language_server_ids_for_buffer( + &self, + buffer: &Buffer, + cx: &AppContext, + ) -> Vec { + if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { + let worktree_id = file.worktree_id(cx); + self.languages + .lsp_adapters(&language.name()) + .iter() + .flat_map(|adapter| { + let key = (worktree_id, adapter.name.clone()); + self.language_server_ids.get(&key).copied() + }) + .collect() + } else { + Vec::new() + } + } + + pub(crate) fn language_servers_for_buffer<'a>( + &'a self, + buffer: &'a Buffer, + cx: &'a AppContext, + ) -> impl Iterator, &'a Arc)> { + self.language_server_ids_for_buffer(buffer, cx) + .into_iter() + .filter_map(|server_id| match self.language_servers.get(&server_id)? { + LanguageServerState::Running { + adapter, server, .. 
+ } => Some((adapter, server)), + _ => None, + }) + } + + fn primary_language_server_for_buffer<'a>( + &'a self, + buffer: &'a Buffer, + cx: &'a AppContext, + ) -> Option<(&'a Arc, &'a Arc)> { + // The list of language servers is ordered based on the `language_servers` setting + // for each language, thus we can consider the first one in the list to be the + // primary one. + self.language_servers_for_buffer(buffer, cx).next() + } + async fn format_locally( lsp_store: WeakModel, mut buffers: Vec, @@ -212,11 +1113,15 @@ impl LocalLspStore { let buffer = buffer.handle.read(cx); let adapters_and_servers = lsp_store + .as_local() + .unwrap() .language_servers_for_buffer(buffer, cx) .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())) .collect::>(); let primary_adapter = lsp_store + .as_local() + .unwrap() .primary_language_server_for_buffer(buffer, cx) .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())); @@ -260,7 +1165,7 @@ impl LocalLspStore { if !code_actions.is_empty() && !(trigger == FormatTrigger::Save && settings.format_on_save == FormatOnSave::Off) { - LspStore::execute_code_actions_on_servers( + Self::execute_code_actions_on_servers( &lsp_store, &adapters_and_servers, code_actions, @@ -529,7 +1434,7 @@ impl LocalLspStore { match format_target { FormatTarget::Buffer => Some(FormatOperation::Lsp( - LspStore::format_via_lsp( + Self::format_via_lsp( &lsp_store, &buffer.handle, buffer_abs_path, @@ -541,7 +1446,7 @@ impl LocalLspStore { .context("failed to format via language server")?, )), FormatTarget::Ranges(selections) => Some(FormatOperation::Lsp( - LspStore::format_range_via_lsp( + Self::format_range_via_lsp( &lsp_store, &buffer.handle, selections.as_slice(), @@ -578,7 +1483,7 @@ impl LocalLspStore { Formatter::CodeActions(code_actions) => { let code_actions = deserialize_code_actions(code_actions); if !code_actions.is_empty() { - LspStore::execute_code_actions_on_servers( + Self::execute_code_actions_on_servers( &lsp_store, adapters_and_servers, 
code_actions, @@ -595,6 +1500,124 @@ impl LocalLspStore { anyhow::Ok(result) } + pub async fn format_range_via_lsp( + this: &WeakModel, + buffer: &Model, + selections: &[Selection], + abs_path: &Path, + language_server: &Arc, + settings: &LanguageSettings, + cx: &mut AsyncAppContext, + ) -> Result, String)>> { + let capabilities = &language_server.capabilities(); + let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref(); + if range_formatting_provider.map_or(false, |provider| provider == &OneOf::Left(false)) { + return Err(anyhow!( + "{} language server does not support range formatting", + language_server.name() + )); + } + + let uri = lsp::Url::from_file_path(abs_path) + .map_err(|_| anyhow!("failed to convert abs path to uri"))?; + let text_document = lsp::TextDocumentIdentifier::new(uri); + + let lsp_edits = { + let ranges = selections.into_iter().map(|s| { + let start = lsp::Position::new(s.start.row, s.start.column); + let end = lsp::Position::new(s.end.row, s.end.column); + lsp::Range::new(start, end) + }); + + let mut edits = None; + for range in ranges { + if let Some(mut edit) = language_server + .request::(lsp::DocumentRangeFormattingParams { + text_document: text_document.clone(), + range, + options: lsp_command::lsp_formatting_options(settings), + work_done_progress_params: Default::default(), + }) + .await? + { + edits.get_or_insert_with(Vec::new).append(&mut edit); + } + } + edits + }; + + if let Some(lsp_edits) = lsp_edits { + this.update(cx, |this, cx| { + this.as_local_mut().unwrap().edits_from_lsp( + buffer, + lsp_edits, + language_server.server_id(), + None, + cx, + ) + })? 
+ .await + } else { + Ok(Vec::with_capacity(0)) + } + } + + #[allow(clippy::too_many_arguments)] + async fn format_via_lsp( + this: &WeakModel, + buffer: &Model, + abs_path: &Path, + language_server: &Arc, + settings: &LanguageSettings, + cx: &mut AsyncAppContext, + ) -> Result, String)>> { + let uri = lsp::Url::from_file_path(abs_path) + .map_err(|_| anyhow!("failed to convert abs path to uri"))?; + let text_document = lsp::TextDocumentIdentifier::new(uri); + let capabilities = &language_server.capabilities(); + + let formatting_provider = capabilities.document_formatting_provider.as_ref(); + let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref(); + + let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) { + language_server + .request::(lsp::DocumentFormattingParams { + text_document, + options: lsp_command::lsp_formatting_options(settings), + work_done_progress_params: Default::default(), + }) + .await? + } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) { + let buffer_start = lsp::Position::new(0, 0); + let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?; + language_server + .request::(lsp::DocumentRangeFormattingParams { + text_document: text_document.clone(), + range: lsp::Range::new(buffer_start, buffer_end), + options: lsp_command::lsp_formatting_options(settings), + work_done_progress_params: Default::default(), + }) + .await? + } else { + None + }; + + if let Some(lsp_edits) = lsp_edits { + this.update(cx, |this, cx| { + this.as_local_mut().unwrap().edits_from_lsp( + buffer, + lsp_edits, + language_server.server_id(), + None, + cx, + ) + })? 
+ .await + } else { + Ok(Vec::with_capacity(0)) + } + } + async fn format_via_external_command( buffer: &FormattableBuffer, command: &str, @@ -611,12 +1634,7 @@ impl LocalLspStore { Some(worktree_path) })?; - let mut child = smol::process::Command::new(command); - #[cfg(target_os = "windows")] - { - use smol::process::windows::CommandExt; - child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } + let mut child = util::command::new_smol_command(command); if let Some(buffer_env) = buffer.env.as_ref() { child.envs(buffer_env); @@ -672,6 +1690,1046 @@ impl LocalLspStore { .await, )) } + + async fn try_resolve_code_action( + lang_server: &LanguageServer, + action: &mut CodeAction, + ) -> anyhow::Result<()> { + if GetCodeActions::can_resolve_actions(&lang_server.capabilities()) + && action.lsp_action.data.is_some() + && (action.lsp_action.command.is_none() || action.lsp_action.edit.is_none()) + { + action.lsp_action = lang_server + .request::(action.lsp_action.clone()) + .await?; + } + + anyhow::Ok(()) + } + + fn initialize_buffer( + &mut self, + buffer_handle: &Model, + cx: &mut ModelContext, + ) { + let buffer = buffer_handle.read(cx); + + let Some(file) = File::from_dyn(buffer.file()) else { + return; + }; + if !file.is_local() { + return; + } + + let worktree_id = file.worktree_id(cx); + let language = buffer.language().cloned(); + + if let Some(diagnostics) = self.diagnostics.get(&worktree_id) { + for (server_id, diagnostics) in + diagnostics.get(file.path()).cloned().unwrap_or_default() + { + self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx) + .log_err(); + } + } + + let Some(language) = language else { + return; + }; + for adapter in self.languages.lsp_adapters(&language.name()) { + let server = self + .language_server_ids + .get(&(worktree_id, adapter.name.clone())) + .and_then(|id| self.language_servers.get(id)) + .and_then(|server_state| { + if let LanguageServerState::Running { server, .. 
} = server_state { + Some(server.clone()) + } else { + None + } + }); + let server = match server { + Some(server) => server, + None => continue, + }; + + buffer_handle.update(cx, |buffer, cx| { + buffer.set_completion_triggers( + server.server_id(), + server + .capabilities() + .completion_provider + .as_ref() + .and_then(|provider| { + provider + .trigger_characters + .as_ref() + .map(|characters| characters.iter().cloned().collect()) + }) + .unwrap_or_default(), + cx, + ); + }); + } + } + + pub(crate) fn reset_buffer( + &mut self, + buffer: &Model, + old_file: &File, + cx: &mut AppContext, + ) { + buffer.update(cx, |buffer, cx| { + let worktree_id = old_file.worktree_id(cx); + + let ids = &self.language_server_ids; + + if let Some(language) = buffer.language().cloned() { + for adapter in self.languages.lsp_adapters(&language.name()) { + if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) { + buffer.update_diagnostics(*server_id, DiagnosticSet::new([], buffer), cx); + buffer.set_completion_triggers(*server_id, Default::default(), cx); + } + } + } + }); + } + + fn update_buffer_diagnostics( + &mut self, + buffer: &Model, + server_id: LanguageServerId, + version: Option, + mut diagnostics: Vec>>, + cx: &mut ModelContext, + ) -> Result<()> { + fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering { + Ordering::Equal + .then_with(|| b.is_primary.cmp(&a.is_primary)) + .then_with(|| a.is_disk_based.cmp(&b.is_disk_based)) + .then_with(|| a.severity.cmp(&b.severity)) + .then_with(|| a.message.cmp(&b.message)) + } + + let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?; + + diagnostics.sort_unstable_by(|a, b| { + Ordering::Equal + .then_with(|| a.range.start.cmp(&b.range.start)) + .then_with(|| b.range.end.cmp(&a.range.end)) + .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic)) + }); + + let mut sanitized_diagnostics = Vec::new(); + let edits_since_save = Patch::new( + snapshot + 
.edits_since::>(buffer.read(cx).saved_version()) + .collect(), + ); + for entry in diagnostics { + let start; + let end; + if entry.diagnostic.is_disk_based { + // Some diagnostics are based on files on disk instead of buffers' + // current contents. Adjust these diagnostics' ranges to reflect + // any unsaved edits. + start = edits_since_save.old_to_new(entry.range.start); + end = edits_since_save.old_to_new(entry.range.end); + } else { + start = entry.range.start; + end = entry.range.end; + } + + let mut range = snapshot.clip_point_utf16(start, Bias::Left) + ..snapshot.clip_point_utf16(end, Bias::Right); + + // Expand empty ranges by one codepoint + if range.start == range.end { + // This will be go to the next boundary when being clipped + range.end.column += 1; + range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right); + if range.start == range.end && range.end.column > 0 { + range.start.column -= 1; + range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left); + } + } + + sanitized_diagnostics.push(DiagnosticEntry { + range, + diagnostic: entry.diagnostic, + }); + } + drop(edits_since_save); + + let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot); + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics(server_id, set, cx) + }); + Ok(()) + } + + fn register_buffer_with_language_servers( + &mut self, + buffer_handle: &Model, + cx: &mut ModelContext, + ) { + let buffer = buffer_handle.read(cx); + let buffer_id = buffer.remote_id(); + + let Some(file) = File::from_dyn(buffer.file()) else { + return; + }; + if !file.is_local() { + return; + } + + let abs_path = file.abs_path(cx); + let Some(uri) = lsp::Url::from_file_path(&abs_path).log_err() else { + return; + }; + let initial_snapshot = buffer.text_snapshot(); + let worktree_id = file.worktree_id(cx); + let worktree = file.worktree.clone(); + + let Some(language) = buffer.language().cloned() else { + return; + }; + self.start_language_servers(&worktree, 
language.name(), cx); + for adapter in self.languages.lsp_adapters(&language.name()) { + let server = self + .language_server_ids + .get(&(worktree_id, adapter.name.clone())) + .and_then(|id| self.language_servers.get(id)) + .and_then(|server_state| { + if let LanguageServerState::Running { server, .. } = server_state { + Some(server.clone()) + } else { + None + } + }); + let server = match server { + Some(server) => server, + None => continue, + }; + + server + .notify::(lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri.clone(), + adapter.language_id(&language.name()), + 0, + initial_snapshot.text(), + ), + }) + .log_err(); + + let snapshot = LspBufferSnapshot { + version: 0, + snapshot: initial_snapshot.clone(), + }; + self.buffer_snapshots + .entry(buffer_id) + .or_default() + .insert(server.server_id(), vec![snapshot]); + } + } + pub(crate) fn unregister_old_buffer_from_language_servers( + &mut self, + buffer: &Model, + old_file: &File, + + cx: &mut AppContext, + ) { + let old_path = match old_file.as_local() { + Some(local) => local.abs_path(cx), + None => return, + }; + let file_url = lsp::Url::from_file_path(old_path).unwrap(); + self.unregister_buffer_from_language_servers(buffer, file_url, cx); + } + + pub(crate) fn unregister_buffer_from_language_servers( + &mut self, + buffer: &Model, + file_url: lsp::Url, + cx: &mut AppContext, + ) { + buffer.update(cx, |buffer, cx| { + self.buffer_snapshots.remove(&buffer.remote_id()); + for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { + language_server + .notify::( + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(file_url.clone()), + }, + ) + .log_err(); + } + }); + } + + fn buffer_snapshot_for_lsp_version( + &mut self, + buffer: &Model, + server_id: LanguageServerId, + version: Option, + cx: &AppContext, + ) -> Result { + const OLD_VERSIONS_TO_RETAIN: i32 = 10; + + if let Some(version) = version { + let buffer_id = 
buffer.read(cx).remote_id(); + let snapshots = self + .buffer_snapshots + .get_mut(&buffer_id) + .and_then(|m| m.get_mut(&server_id)) + .ok_or_else(|| { + anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}") + })?; + + let found_snapshot = snapshots + .binary_search_by_key(&version, |e| e.version) + .map(|ix| snapshots[ix].snapshot.clone()) + .map_err(|_| { + anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}") + })?; + + snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version); + Ok(found_snapshot) + } else { + Ok((buffer.read(cx)).text_snapshot()) + } + } + + async fn execute_code_actions_on_servers( + this: &WeakModel, + adapters_and_servers: &[(Arc, Arc)], + code_actions: Vec, + buffer: &Model, + push_to_history: bool, + project_transaction: &mut ProjectTransaction, + cx: &mut AsyncAppContext, + ) -> Result<(), anyhow::Error> { + for (lsp_adapter, language_server) in adapters_and_servers.iter() { + let code_actions = code_actions.clone(); + + let actions = this + .update(cx, move |this, cx| { + let request = GetCodeActions { + range: text::Anchor::MIN..text::Anchor::MAX, + kinds: Some(code_actions), + }; + let server = LanguageServerToQuery::Other(language_server.server_id()); + this.request_lsp(buffer.clone(), server, request, cx) + })? 
+ .await?; + + for mut action in actions { + Self::try_resolve_code_action(language_server, &mut action) + .await + .context("resolving a formatting code action")?; + + if let Some(edit) = action.lsp_action.edit { + if edit.changes.is_none() && edit.document_changes.is_none() { + continue; + } + + let new = Self::deserialize_workspace_edit( + this.upgrade().ok_or_else(|| anyhow!("project dropped"))?, + edit, + push_to_history, + lsp_adapter.clone(), + language_server.clone(), + cx, + ) + .await?; + project_transaction.0.extend(new.0); + } + + if let Some(command) = action.lsp_action.command { + this.update(cx, |this, _| { + if let LspStoreMode::Local(mode) = &mut this.mode { + mode.last_workspace_edits_by_language_server + .remove(&language_server.server_id()); + } + })?; + + language_server + .request::(lsp::ExecuteCommandParams { + command: command.command, + arguments: command.arguments.unwrap_or_default(), + ..Default::default() + }) + .await?; + + this.update(cx, |this, _| { + if let LspStoreMode::Local(mode) = &mut this.mode { + project_transaction.0.extend( + mode.last_workspace_edits_by_language_server + .remove(&language_server.server_id()) + .unwrap_or_default() + .0, + ) + } + })?; + } + } + } + + Ok(()) + } + + pub async fn deserialize_text_edits( + this: Model, + buffer_to_edit: Model, + edits: Vec, + push_to_history: bool, + _: Arc, + language_server: Arc, + cx: &mut AsyncAppContext, + ) -> Result> { + let edits = this + .update(cx, |this, cx| { + this.as_local_mut().unwrap().edits_from_lsp( + &buffer_to_edit, + edits, + language_server.server_id(), + None, + cx, + ) + })? 
+ .await?; + + let transaction = buffer_to_edit.update(cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + for (range, text) in edits { + buffer.edit([(range, text)], None, cx); + } + + if buffer.end_transaction(cx).is_some() { + let transaction = buffer.finalize_last_transaction().unwrap().clone(); + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + Some(transaction) + } else { + None + } + })?; + + Ok(transaction) + } + + #[allow(clippy::type_complexity)] + pub(crate) fn edits_from_lsp( + &mut self, + buffer: &Model, + lsp_edits: impl 'static + Send + IntoIterator, + server_id: LanguageServerId, + version: Option, + cx: &mut ModelContext, + ) -> Task, String)>>> { + let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx); + cx.background_executor().spawn(async move { + let snapshot = snapshot?; + let mut lsp_edits = lsp_edits + .into_iter() + .map(|edit| (range_from_lsp(edit.range), edit.new_text)) + .collect::>(); + lsp_edits.sort_by_key(|(range, _)| range.start); + + let mut lsp_edits = lsp_edits.into_iter().peekable(); + let mut edits = Vec::new(); + while let Some((range, mut new_text)) = lsp_edits.next() { + // Clip invalid ranges provided by the language server. + let mut range = snapshot.clip_point_utf16(range.start, Bias::Left) + ..snapshot.clip_point_utf16(range.end, Bias::Left); + + // Combine any LSP edits that are adjacent. + // + // Also, combine LSP edits that are separated from each other by only + // a newline. This is important because for some code actions, + // Rust-analyzer rewrites the entire buffer via a series of edits that + // are separated by unchanged newline characters. + // + // In order for the diffing logic below to work properly, any edits that + // cancel each other out must be combined into one. 
+ while let Some((next_range, next_text)) = lsp_edits.peek() { + if next_range.start.0 > range.end { + if next_range.start.0.row > range.end.row + 1 + || next_range.start.0.column > 0 + || snapshot.clip_point_utf16( + Unclipped(PointUtf16::new(range.end.row, u32::MAX)), + Bias::Left, + ) > range.end + { + break; + } + new_text.push('\n'); + } + range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left); + new_text.push_str(next_text); + lsp_edits.next(); + } + + // For multiline edits, perform a diff of the old and new text so that + // we can identify the changes more precisely, preserving the locations + // of any anchors positioned in the unchanged regions. + if range.end.row > range.start.row { + let mut offset = range.start.to_offset(&snapshot); + let old_text = snapshot.text_for_range(range).collect::(); + + let diff = TextDiff::from_lines(old_text.as_str(), &new_text); + let mut moved_since_edit = true; + for change in diff.iter_all_changes() { + let tag = change.tag(); + let value = change.value(); + match tag { + ChangeTag::Equal => { + offset += value.len(); + moved_since_edit = true; + } + ChangeTag::Delete => { + let start = snapshot.anchor_after(offset); + let end = snapshot.anchor_before(offset + value.len()); + if moved_since_edit { + edits.push((start..end, String::new())); + } else { + edits.last_mut().unwrap().0.end = end; + } + offset += value.len(); + moved_since_edit = false; + } + ChangeTag::Insert => { + if moved_since_edit { + let anchor = snapshot.anchor_after(offset); + edits.push((anchor..anchor, value.to_string())); + } else { + edits.last_mut().unwrap().1.push_str(value); + } + moved_since_edit = false; + } + } + } + } else if range.end == range.start { + let anchor = snapshot.anchor_after(range.start); + edits.push((anchor..anchor, new_text)); + } else { + let edit_start = snapshot.anchor_after(range.start); + let edit_end = snapshot.anchor_before(range.end); + edits.push((edit_start..edit_end, new_text)); + } + } + + Ok(edits) 
+ }) + } + + pub(crate) async fn deserialize_workspace_edit( + this: Model, + edit: lsp::WorkspaceEdit, + push_to_history: bool, + lsp_adapter: Arc, + language_server: Arc, + cx: &mut AsyncAppContext, + ) -> Result { + let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone())?; + + let mut operations = Vec::new(); + if let Some(document_changes) = edit.document_changes { + match document_changes { + lsp::DocumentChanges::Edits(edits) => { + operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)) + } + lsp::DocumentChanges::Operations(ops) => operations = ops, + } + } else if let Some(changes) = edit.changes { + operations.extend(changes.into_iter().map(|(uri, edits)| { + lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { + text_document: lsp::OptionalVersionedTextDocumentIdentifier { + uri, + version: None, + }, + edits: edits.into_iter().map(Edit::Plain).collect(), + }) + })); + } + + let mut project_transaction = ProjectTransaction::default(); + for operation in operations { + match operation { + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => { + let abs_path = op + .uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + + if let Some(parent_path) = abs_path.parent() { + fs.create_dir(parent_path).await?; + } + if abs_path.ends_with("/") { + fs.create_dir(&abs_path).await?; + } else { + fs.create_file( + &abs_path, + op.options + .map(|options| fs::CreateOptions { + overwrite: options.overwrite.unwrap_or(false), + ignore_if_exists: options.ignore_if_exists.unwrap_or(false), + }) + .unwrap_or_default(), + ) + .await?; + } + } + + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => { + let source_abs_path = op + .old_uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + let target_abs_path = op + .new_uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + fs.rename( + &source_abs_path, + &target_abs_path, + op.options 
+ .map(|options| fs::RenameOptions { + overwrite: options.overwrite.unwrap_or(false), + ignore_if_exists: options.ignore_if_exists.unwrap_or(false), + }) + .unwrap_or_default(), + ) + .await?; + } + + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => { + let abs_path = op + .uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + let options = op + .options + .map(|options| fs::RemoveOptions { + recursive: options.recursive.unwrap_or(false), + ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false), + }) + .unwrap_or_default(); + if abs_path.ends_with("/") { + fs.remove_dir(&abs_path, options).await?; + } else { + fs.remove_file(&abs_path, options).await?; + } + } + + lsp::DocumentChangeOperation::Edit(op) => { + let buffer_to_edit = this + .update(cx, |this, cx| { + this.open_local_buffer_via_lsp( + op.text_document.uri.clone(), + language_server.server_id(), + lsp_adapter.name.clone(), + cx, + ) + })? + .await?; + + let edits = this + .update(cx, |this, cx| { + let path = buffer_to_edit.read(cx).project_path(cx); + let active_entry = this.active_entry; + let is_active_entry = path.clone().map_or(false, |project_path| { + this.worktree_store + .read(cx) + .entry_for_path(&project_path, cx) + .map_or(false, |entry| Some(entry.id) == active_entry) + }); + let local = this.as_local_mut().unwrap(); + + let (mut edits, mut snippet_edits) = (vec![], vec![]); + for edit in op.edits { + match edit { + Edit::Plain(edit) => { + if !edits.contains(&edit) { + edits.push(edit) + } + } + Edit::Annotated(edit) => { + if !edits.contains(&edit.text_edit) { + edits.push(edit.text_edit) + } + } + Edit::Snippet(edit) => { + let Ok(snippet) = Snippet::parse(&edit.snippet.value) + else { + continue; + }; + + if is_active_entry { + snippet_edits.push((edit.range, snippet)); + } else { + // Since this buffer is not focused, apply a normal edit. 
+ let new_edit = TextEdit { + range: edit.range, + new_text: snippet.text, + }; + if !edits.contains(&new_edit) { + edits.push(new_edit); + } + } + } + } + } + if !snippet_edits.is_empty() { + let buffer_id = buffer_to_edit.read(cx).remote_id(); + let version = if let Some(buffer_version) = op.text_document.version + { + local + .buffer_snapshot_for_lsp_version( + &buffer_to_edit, + language_server.server_id(), + Some(buffer_version), + cx, + ) + .ok() + .map(|snapshot| snapshot.version) + } else { + Some(buffer_to_edit.read(cx).saved_version().clone()) + }; + + let most_recent_edit = version.and_then(|version| { + version.iter().max_by_key(|timestamp| timestamp.value) + }); + // Check if the edit that triggered that edit has been made by this participant. + + if let Some(most_recent_edit) = most_recent_edit { + cx.emit(LspStoreEvent::SnippetEdit { + buffer_id, + edits: snippet_edits, + most_recent_edit, + }); + } + } + + local.edits_from_lsp( + &buffer_to_edit, + edits, + language_server.server_id(), + op.text_document.version, + cx, + ) + })? 
+ .await?; + + let transaction = buffer_to_edit.update(cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + for (range, text) in edits { + buffer.edit([(range, text)], None, cx); + } + let transaction = if buffer.end_transaction(cx).is_some() { + let transaction = buffer.finalize_last_transaction().unwrap().clone(); + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + Some(transaction) + } else { + None + }; + + transaction + })?; + if let Some(transaction) = transaction { + project_transaction.0.insert(buffer_to_edit, transaction); + } + } + } + } + + Ok(project_transaction) + } + + async fn on_lsp_workspace_edit( + this: WeakModel, + params: lsp::ApplyWorkspaceEditParams, + server_id: LanguageServerId, + adapter: Arc, + mut cx: AsyncAppContext, + ) -> Result { + let this = this + .upgrade() + .ok_or_else(|| anyhow!("project project closed"))?; + let language_server = this + .update(&mut cx, |this, _| this.language_server_for_id(server_id))? 
+ .ok_or_else(|| anyhow!("language server not found"))?; + let transaction = Self::deserialize_workspace_edit( + this.clone(), + params.edit, + true, + adapter.clone(), + language_server.clone(), + &mut cx, + ) + .await + .log_err(); + this.update(&mut cx, |this, _| { + if let Some(transaction) = transaction { + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server + .insert(server_id, transaction); + } + })?; + Ok(lsp::ApplyWorkspaceEditResponse { + applied: true, + failed_change: None, + failure_reason: None, + }) + } + fn rebuild_watched_paths_inner<'a>( + &'a self, + language_server_id: LanguageServerId, + watchers: impl Iterator, + cx: &mut ModelContext, + ) -> LanguageServerWatchedPathsBuilder { + let worktrees = self + .worktree_store + .read(cx) + .worktrees() + .filter_map(|worktree| { + self.language_servers_for_worktree(worktree.read(cx).id()) + .find(|server| server.server_id() == language_server_id) + .map(|_| worktree) + }) + .collect::>(); + + let mut worktree_globs = HashMap::default(); + let mut abs_globs = HashMap::default(); + log::trace!( + "Processing new watcher paths for language server with id {}", + language_server_id + ); + + enum PathToWatch { + Worktree { + literal_prefix: Arc, + pattern: String, + }, + Absolute { + path: Arc, + pattern: String, + }, + } + for watcher in watchers { + let mut found_host = false; + for worktree in &worktrees { + let glob_is_inside_worktree = worktree.update(cx, |tree, _| { + if let Some(worktree_root_path) = tree.abs_path().to_str() { + let path_to_watch = match &watcher.glob_pattern { + lsp::GlobPattern::String(s) => { + match s.strip_prefix(worktree_root_path) { + Some(relative) => { + let pattern = relative + .strip_prefix(std::path::MAIN_SEPARATOR) + .unwrap_or(relative) + .to_owned(); + let literal_prefix = glob_literal_prefix(&pattern); + + let literal_prefix = Arc::from(PathBuf::from( + literal_prefix + .strip_prefix(std::path::MAIN_SEPARATOR) + .unwrap_or(literal_prefix), + )); 
+ PathToWatch::Worktree { + literal_prefix, + pattern, + } + } + None => { + let path = glob_literal_prefix(s); + let glob = &s[path.len()..]; + let pattern = glob + .strip_prefix(std::path::MAIN_SEPARATOR) + .unwrap_or(glob) + .to_owned(); + let path = if Path::new(path).components().next().is_none() + { + Arc::from(Path::new(worktree_root_path)) + } else { + PathBuf::from(path).into() + }; + + PathToWatch::Absolute { path, pattern } + } + } + } + lsp::GlobPattern::Relative(rp) => { + let Ok(mut base_uri) = match &rp.base_uri { + lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri, + lsp::OneOf::Right(base_uri) => base_uri, + } + .to_file_path() else { + return false; + }; + + match base_uri.strip_prefix(worktree_root_path) { + Ok(relative) => { + let mut literal_prefix = relative.to_owned(); + literal_prefix.push(glob_literal_prefix(&rp.pattern)); + + PathToWatch::Worktree { + literal_prefix: literal_prefix.into(), + pattern: rp.pattern.clone(), + } + } + Err(_) => { + let path = glob_literal_prefix(&rp.pattern); + let glob = &rp.pattern[path.len()..]; + let pattern = glob + .strip_prefix(std::path::MAIN_SEPARATOR) + .unwrap_or(glob) + .to_owned(); + base_uri.push(path); + + let path = if base_uri.components().next().is_none() { + Arc::from(Path::new("/")) + } else { + base_uri.into() + }; + PathToWatch::Absolute { path, pattern } + } + } + } + }; + match path_to_watch { + PathToWatch::Worktree { + literal_prefix, + pattern, + } => { + if let Some((tree, glob)) = + tree.as_local_mut().zip(Glob::new(&pattern).log_err()) + { + tree.add_path_prefix_to_scan(literal_prefix); + worktree_globs + .entry(tree.id()) + .or_insert_with(GlobSetBuilder::new) + .add(glob); + } else { + return false; + } + } + PathToWatch::Absolute { path, pattern } => { + if let Some(glob) = Glob::new(&pattern).log_err() { + abs_globs + .entry(path) + .or_insert_with(GlobSetBuilder::new) + .add(glob); + } + } + } + return true; + } + false + }); + if glob_is_inside_worktree { + 
log::trace!( + "Watcher pattern `{}` has been attached to the worktree at `{}`", + serde_json::to_string(&watcher.glob_pattern).unwrap(), + worktree.read(cx).abs_path().display() + ); + found_host = true; + } + } + if !found_host { + log::error!( + "Watcher pattern `{}` has not been attached to any worktree or absolute path", + serde_json::to_string(&watcher.glob_pattern).unwrap() + ) + } + } + + let mut watch_builder = LanguageServerWatchedPathsBuilder::default(); + for (worktree_id, builder) in worktree_globs { + if let Ok(globset) = builder.build() { + watch_builder.watch_worktree(worktree_id, globset); + } + } + for (abs_path, builder) in abs_globs { + if let Ok(globset) = builder.build() { + watch_builder.watch_abs_path(abs_path, globset); + } + } + watch_builder + } + + fn rebuild_watched_paths( + &mut self, + language_server_id: LanguageServerId, + cx: &mut ModelContext, + ) { + let Some(watchers) = self + .language_server_watcher_registrations + .get(&language_server_id) + else { + return; + }; + + let watch_builder = + self.rebuild_watched_paths_inner(language_server_id, watchers.values().flatten(), cx); + let watcher = watch_builder.build(self.fs.clone(), language_server_id, cx); + self.language_server_watched_paths + .insert(language_server_id, watcher); + + cx.notify(); + } + + fn on_lsp_did_change_watched_files( + &mut self, + language_server_id: LanguageServerId, + registration_id: &str, + params: DidChangeWatchedFilesRegistrationOptions, + cx: &mut ModelContext, + ) { + let registrations = self + .language_server_watcher_registrations + .entry(language_server_id) + .or_default(); + + registrations.insert(registration_id.to_string(), params.watchers); + + self.rebuild_watched_paths(language_server_id, cx); + } + + fn on_lsp_unregister_did_change_watched_files( + &mut self, + language_server_id: LanguageServerId, + registration_id: &str, + cx: &mut ModelContext, + ) { + let registrations = self + .language_server_watcher_registrations + 
.entry(language_server_id) + .or_default(); + + if registrations.remove(registration_id).is_some() { + log::info!( + "language server {}: unregistered workspace/DidChangeWatchedFiles capability with id {}", + language_server_id, + registration_id + ); + } else { + log::warn!( + "language server {}: failed to unregister workspace/DidChangeWatchedFiles capability with id {}. not registered.", + language_server_id, + registration_id + ); + } + + self.rebuild_watched_paths(language_server_id, cx); + } } #[derive(Debug)] @@ -687,7 +2745,7 @@ pub struct RemoteLspStore { } #[allow(clippy::large_enum_variant)] -pub enum LspStoreMode { +pub(crate) enum LspStoreMode { Local(LocalLspStore), // ssh host and collab host Remote(RemoteLspStore), // collab guest } @@ -696,10 +2754,6 @@ impl LspStoreMode { fn is_local(&self) -> bool { matches!(self, LspStoreMode::Local(_)) } - - fn is_remote(&self) -> bool { - matches!(self, LspStoreMode::Remote(_)) - } } pub struct LspStore { @@ -710,26 +2764,13 @@ pub struct LspStore { buffer_store: Model, worktree_store: Model, toolchain_store: Option>, - buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots pub languages: Arc, - language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, pub language_server_statuses: BTreeMap, active_entry: Option, _maintain_workspace_config: (Task>, watch::Sender<()>), _maintain_buffer_languages: Task<()>, - next_diagnostic_group_id: usize, diagnostic_summaries: HashMap, HashMap>>, - diagnostics: HashMap< - WorktreeId, - HashMap< - Arc, - Vec<( - LanguageServerId, - Vec>>, - )>, - >, - >, } pub enum LspStoreEvent { @@ -802,6 +2843,7 @@ impl LspStore { client.add_model_request_handler(Self::handle_refresh_inlay_hints); client.add_model_request_handler(Self::handle_on_type_formatting); client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion); + client.add_model_request_handler(Self::handle_register_buffer_with_language_servers); 
client.add_model_request_handler(Self::handle_lsp_command::); client.add_model_request_handler(Self::handle_lsp_command::); client.add_model_request_handler(Self::handle_lsp_command::); @@ -900,21 +2942,30 @@ impl LspStore { }; Self { mode: LspStoreMode::Local(LocalLspStore { + worktree_store: worktree_store.clone(), + toolchain_store: toolchain_store.clone(), supplementary_language_servers: Default::default(), + languages: languages.clone(), + language_server_ids: Default::default(), language_servers: Default::default(), last_workspace_edits_by_language_server: Default::default(), language_server_watched_paths: Default::default(), + language_server_paths_watched_for_rename: Default::default(), language_server_watcher_registrations: Default::default(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), buffers_being_formatted: Default::default(), + buffer_snapshots: Default::default(), prettier_store, environment, http_client, fs, yarn, + next_diagnostic_group_id: Default::default(), + diagnostics: Default::default(), _subscription: cx.on_app_quit(|this, cx| { this.as_local_mut().unwrap().shutdown_language_servers(cx) }), + registered_buffers: HashMap::default(), }), last_formatting_failure: None, downstream_client: None, @@ -922,13 +2973,9 @@ impl LspStore { worktree_store, toolchain_store: Some(toolchain_store), languages: languages.clone(), - language_server_ids: Default::default(), language_server_statuses: Default::default(), nonce: StdRng::from_entropy().gen(), - buffer_snapshots: Default::default(), - next_diagnostic_group_id: Default::default(), diagnostic_summaries: Default::default(), - diagnostics: Default::default(), active_entry: None, _maintain_workspace_config, @@ -981,13 +3028,9 @@ impl LspStore { buffer_store, worktree_store, languages: languages.clone(), - language_server_ids: Default::default(), language_server_statuses: Default::default(), nonce: StdRng::from_entropy().gen(), - buffer_snapshots: Default::default(), - 
next_diagnostic_group_id: Default::default(), diagnostic_summaries: Default::default(), - diagnostics: Default::default(), active_entry: None, toolchain_store, _maintain_workspace_config, @@ -1014,14 +3057,26 @@ impl LspStore { ) { match event { BufferStoreEvent::BufferAdded(buffer) => { - self.register_buffer(buffer, cx).log_err(); + self.on_buffer_added(buffer, cx).log_err(); } BufferStoreEvent::BufferChangedFilePath { buffer, old_file } => { + let buffer_id = buffer.read(cx).remote_id(); if let Some(old_file) = File::from_dyn(old_file.as_ref()) { - self.unregister_buffer_from_language_servers(buffer, old_file, cx); + if let Some(local) = self.as_local_mut() { + local.reset_buffer(buffer, old_file, cx); + if local.registered_buffers.contains_key(&buffer_id) { + local.unregister_old_buffer_from_language_servers(buffer, old_file, cx); + } + } } - self.register_buffer_with_language_servers(buffer, cx); + self.detect_language_for_buffer(buffer, cx); + if let Some(local) = self.as_local_mut() { + local.initialize_buffer(buffer, cx); + if local.registered_buffers.contains_key(&buffer_id) { + local.register_buffer_with_language_servers(buffer, cx); + } + } } BufferStoreEvent::BufferDropped(_) => {} } @@ -1097,7 +3152,7 @@ impl LspStore { fn request_workspace_config_refresh(&mut self) { *self._maintain_workspace_config.1.borrow_mut() = (); } - // todo! 
+ pub fn prettier_store(&self) -> Option> { self.as_local().map(|local| local.prettier_store.clone()) } @@ -1121,7 +3176,7 @@ impl LspStore { } } - fn register_buffer( + fn on_buffer_added( &mut self, buffer: &Model, cx: &mut ModelContext, @@ -1135,29 +3190,69 @@ impl LspStore { }) .detach(); - self.register_buffer_with_language_servers(buffer, cx); - cx.observe_release(buffer, |this, buffer, cx| { - if let Some(file) = File::from_dyn(buffer.file()) { - if file.is_local() { - let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - for server in this.language_servers_for_buffer(buffer, cx) { - server - .1 - .notify::( - lsp::DidCloseTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new(uri.clone()), - }, - ) - .log_err(); - } - } - } - }) - .detach(); + self.detect_language_for_buffer(buffer, cx); + if let Some(local) = self.as_local_mut() { + local.initialize_buffer(buffer, cx); + } Ok(()) } + pub fn register_buffer_with_language_servers( + &mut self, + buffer: &Model, + cx: &mut ModelContext, + ) -> OpenLspBufferHandle { + let buffer_id = buffer.read(cx).remote_id(); + + let handle = cx.new_model(|_| buffer.clone()); + + if let Some(local) = self.as_local_mut() { + let Some(file) = File::from_dyn(buffer.read(cx).file()) else { + return handle; + }; + if !file.is_local() { + return handle; + } + let refcount = local.registered_buffers.entry(buffer_id).or_insert(0); + *refcount += 1; + if *refcount == 1 { + local.register_buffer_with_language_servers(buffer, cx); + } + + cx.observe_release(&handle, move |this, buffer, cx| { + let local = this.as_local_mut().unwrap(); + let Some(refcount) = local.registered_buffers.get_mut(&buffer_id) else { + debug_panic!("bad refcounting"); + return; + }; + *refcount -= 1; + if *refcount == 0 { + local.registered_buffers.remove(&buffer_id); + if let Some(file) = File::from_dyn(buffer.read(cx).file()).cloned() { + local.unregister_old_buffer_from_language_servers(&buffer, &file, cx); + } + } + }) + 
.detach(); + } else if let Some((upstream_client, upstream_project_id)) = self.upstream_client() { + let buffer_id = buffer.read(cx).remote_id().to_proto(); + cx.background_executor() + .spawn(async move { + upstream_client + .request(proto::RegisterBufferWithLanguageServers { + project_id: upstream_project_id, + buffer_id, + }) + .await + }) + .detach(); + } else { + panic!("oops!"); + } + handle + } + fn maintain_buffer_languages( languages: Arc, cx: &mut ModelContext, @@ -1177,11 +3272,19 @@ impl LspStore { for buffer in buffer_store.buffers() { if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() { - this.unregister_buffer_from_language_servers( - &buffer, &f, cx, - ); buffer .update(cx, |buffer, cx| buffer.set_language(None, cx)); + if let Some(local) = this.as_local_mut() { + local.reset_buffer(&buffer, &f, cx); + if local + .registered_buffers + .contains_key(&buffer.read(cx).remote_id()) + { + local.unregister_old_buffer_from_language_servers( + &buffer, &f, cx, + ); + } + } } } }); @@ -1203,7 +3306,16 @@ impl LspStore { } } for buffer in plain_text_buffers { - this.register_buffer_with_language_servers(&buffer, cx); + this.detect_language_for_buffer(&buffer, cx); + if let Some(local) = this.as_local_mut() { + local.initialize_buffer(&buffer, cx); + if local + .registered_buffers + .contains_key(&buffer.read(cx).remote_id()) + { + local.register_buffer_with_language_servers(&buffer, cx); + } + } } for buffer in buffers_with_unknown_injections { @@ -1245,12 +3357,25 @@ impl LspStore { available_language } - pub fn set_language_for_buffer( + pub(crate) fn set_language_for_buffer( &mut self, buffer: &Model, new_language: Arc, cx: &mut ModelContext, ) { + let buffer_file = buffer.read(cx).file().cloned(); + let buffer_id = buffer.read(cx).remote_id(); + if let Some(local_store) = self.as_local_mut() { + if local_store.registered_buffers.contains_key(&buffer_id) { + if let Some(abs_path) = + File::from_dyn(buffer_file.as_ref()).map(|file| 
file.abs_path(cx)) + { + if let Some(file_url) = lsp::Url::from_file_path(&abs_path).log_err() { + local_store.unregister_buffer_from_language_servers(buffer, file_url, cx); + } + } + } + } buffer.update(cx, |buffer, cx| { if buffer.language().map_or(true, |old_language| { !Arc::ptr_eq(old_language, &new_language) @@ -1259,15 +3384,18 @@ impl LspStore { } }); - let buffer_file = buffer.read(cx).file().cloned(); let settings = language_settings(Some(new_language.name()), buffer_file.as_ref(), cx).into_owned(); let buffer_file = File::from_dyn(buffer_file.as_ref()); let worktree_id = if let Some(file) = buffer_file { let worktree = file.worktree.clone(); - self.start_language_servers(&worktree, new_language.name(), cx); + if let Some(local) = self.as_local_mut() { + if local.registered_buffers.contains_key(&buffer_id) { + local.register_buffer_with_language_servers(buffer, cx); + } + } Some(worktree.read(cx).id()) } else { None @@ -1322,7 +3450,7 @@ impl LspStore { } pub fn request_lsp( - &self, + &mut self, buffer_handle: Model, server: LanguageServerToQuery, request: R, @@ -1332,8 +3460,6 @@ impl LspStore { ::Result: Send, ::Params: Send, { - let buffer = buffer_handle.read(cx); - if let Some((upstream_client, upstream_project_id)) = self.upstream_client() { return self.send_lsp_proto_request( buffer_handle, @@ -1343,21 +3469,35 @@ impl LspStore { cx, ); } - + let buffer = buffer_handle.read(cx); let language_server = match server { LanguageServerToQuery::Primary => { - match self.primary_language_server_for_buffer(buffer, cx) { + match self + .as_local() + .and_then(|local| local.primary_language_server_for_buffer(buffer, cx)) + { Some((_, server)) => Some(Arc::clone(server)), None => return Task::ready(Ok(Default::default())), } } LanguageServerToQuery::Other(id) => self - .language_server_for_buffer(buffer, id, cx) + .language_server_for_local_buffer(buffer, id, cx) .map(|(_, server)| Arc::clone(server)), }; let file = 
File::from_dyn(buffer.file()).and_then(File::as_local); if let (Some(file), Some(language_server)) = (file, language_server) { - let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx); + let lsp_params = match request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx) + { + Ok(lsp_params) => lsp_params, + Err(err) => { + log::error!( + "Preparing LSP request to {} failed: {}", + language_server.name(), + err + ); + return Task::ready(Err(err)); + } + }; let status = request.status(); return cx.spawn(move |this, cx| async move { if !request.check_capabilities(language_server.adapter_server_capabilities()) { @@ -1406,15 +3546,11 @@ impl LspStore { let result = lsp_request.await; let response = result.map_err(|err| { - log::warn!( - "Generic lsp request to {} failed: {}", - language_server.name(), - err - ); + log::warn!("LSP request to {} failed: {}", language_server.name(), err); err })?; - request + let response = request .response_from_lsp( response, this.upgrade().ok_or_else(|| anyhow!("no app context"))?, @@ -1422,7 +3558,8 @@ impl LspStore { language_server.server_id(), cx.clone(), ) - .await + .await; + response }); } @@ -1438,7 +3575,13 @@ impl LspStore { let buffer_language = buffer.language(); let settings = language_settings(buffer_language.map(|l| l.name()), buffer.file(), cx); if let Some(language) = buffer_language { - if settings.enable_language_server { + if settings.enable_language_server + && self + .as_local() + .unwrap() + .registered_buffers + .contains_key(&buffer.remote_id()) + { if let Some(file) = buffer_file { language_servers_to_start.push((file.worktree.clone(), language.name())); } @@ -1459,7 +3602,9 @@ impl LspStore { else { return; }; - for (worktree_id, started_lsp_name) in self.started_language_servers() { + for (worktree_id, started_lsp_name) in + self.as_local().unwrap().language_server_ids.keys().cloned() + { let language = languages.iter().find_map(|l| { let adapter = self .languages @@ -1512,7 
+3657,9 @@ impl LspStore { // Start all the newly-enabled language servers. for (worktree, language) in language_servers_to_start { - self.start_language_servers(&worktree, language, cx); + self.as_local_mut() + .unwrap() + .start_language_servers(&worktree, language, cx); } // Restart all language servers with changed initialization options. @@ -1523,100 +3670,6 @@ impl LspStore { cx.notify(); } - pub async fn execute_code_actions_on_servers( - this: &WeakModel, - adapters_and_servers: &[(Arc, Arc)], - code_actions: Vec, - buffer: &Model, - push_to_history: bool, - project_transaction: &mut ProjectTransaction, - cx: &mut AsyncAppContext, - ) -> Result<(), anyhow::Error> { - for (lsp_adapter, language_server) in adapters_and_servers.iter() { - let code_actions = code_actions.clone(); - - let actions = this - .update(cx, move |this, cx| { - let request = GetCodeActions { - range: text::Anchor::MIN..text::Anchor::MAX, - kinds: Some(code_actions), - }; - let server = LanguageServerToQuery::Other(language_server.server_id()); - this.request_lsp(buffer.clone(), server, request, cx) - })? 
- .await?; - - for mut action in actions { - Self::try_resolve_code_action(language_server, &mut action) - .await - .context("resolving a formatting code action")?; - - if let Some(edit) = action.lsp_action.edit { - if edit.changes.is_none() && edit.document_changes.is_none() { - continue; - } - - let new = Self::deserialize_workspace_edit( - this.upgrade().ok_or_else(|| anyhow!("project dropped"))?, - edit, - push_to_history, - lsp_adapter.clone(), - language_server.clone(), - cx, - ) - .await?; - project_transaction.0.extend(new.0); - } - - if let Some(command) = action.lsp_action.command { - this.update(cx, |this, _| { - if let LspStoreMode::Local(mode) = &mut this.mode { - mode.last_workspace_edits_by_language_server - .remove(&language_server.server_id()); - } - })?; - - language_server - .request::(lsp::ExecuteCommandParams { - command: command.command, - arguments: command.arguments.unwrap_or_default(), - ..Default::default() - }) - .await?; - - this.update(cx, |this, _| { - if let LspStoreMode::Local(mode) = &mut this.mode { - project_transaction.0.extend( - mode.last_workspace_edits_by_language_server - .remove(&language_server.server_id()) - .unwrap_or_default() - .0, - ) - } - })?; - } - } - } - - Ok(()) - } - - async fn try_resolve_code_action( - lang_server: &LanguageServer, - action: &mut CodeAction, - ) -> anyhow::Result<()> { - if GetCodeActions::can_resolve_actions(&lang_server.capabilities()) - && action.lsp_action.data.is_some() - && (action.lsp_action.command.is_none() || action.lsp_action.edit.is_none()) - { - action.lsp_action = lang_server - .request::(action.lsp_action.clone()) - .await?; - } - - anyhow::Ok(()) - } - pub fn apply_code_action( &self, buffer_handle: Model, @@ -1644,22 +3697,22 @@ impl LspStore { })? 
.await }) - } else { + } else if self.mode.is_local() { let buffer = buffer_handle.read(cx); let (lsp_adapter, lang_server) = if let Some((adapter, server)) = - self.language_server_for_buffer(buffer, action.server_id, cx) + self.language_server_for_local_buffer(buffer, action.server_id, cx) { (adapter.clone(), server.clone()) } else { return Task::ready(Ok(Default::default())); }; cx.spawn(move |this, mut cx| async move { - Self::try_resolve_code_action(&lang_server, &mut action) + LocalLspStore::try_resolve_code_action(&lang_server, &mut action) .await .context("resolving a code action")?; if let Some(edit) = action.lsp_action.edit { if edit.changes.is_some() || edit.document_changes.is_some() { - return Self::deserialize_workspace_edit( + return LocalLspStore::deserialize_workspace_edit( this.upgrade().ok_or_else(|| anyhow!("no app present"))?, edit, push_to_history, @@ -1700,6 +3753,8 @@ impl LspStore { Ok(ProjectTransaction::default()) }) + } else { + Task::ready(Err(anyhow!("no upstream client and not local"))) } } @@ -1731,7 +3786,7 @@ impl LspStore { } else { let buffer = buffer_handle.read(cx); let (_, lang_server) = if let Some((adapter, server)) = - self.language_server_for_buffer(buffer, server_id, cx) + self.language_server_for_local_buffer(buffer, server_id, cx) { (adapter.clone(), server.clone()) } else { @@ -1764,7 +3819,7 @@ impl LspStore { } pub(crate) fn linked_edit( - &self, + &mut self, buffer: &Model, position: Anchor, cx: &mut ModelContext, @@ -1772,21 +3827,25 @@ impl LspStore { let snapshot = buffer.read(cx).snapshot(); let scope = snapshot.language_scope_at(position); let Some(server_id) = self - .language_servers_for_buffer(buffer.read(cx), cx) - .filter(|(_, server)| { - server - .capabilities() - .linked_editing_range_provider - .is_some() + .as_local() + .and_then(|local| { + local + .language_servers_for_buffer(buffer.read(cx), cx) + .filter(|(_, server)| { + server + .capabilities() + .linked_editing_range_provider + .is_some() + }) 
+ .filter(|(adapter, _)| { + scope + .as_ref() + .map(|scope| scope.language_allowed(&adapter.name)) + .unwrap_or(true) + }) + .map(|(_, server)| LanguageServerToQuery::Other(server.server_id())) + .next() }) - .filter(|(adapter, _)| { - scope - .as_ref() - .map(|scope| scope.language_allowed(&adapter.name)) - .unwrap_or(true) - }) - .map(|(_, server)| LanguageServerToQuery::Other(server.server_id())) - .next() .or_else(|| { self.upstream_client() .is_some() @@ -1911,115 +3970,11 @@ impl LspStore { cx, ) } - - pub async fn format_via_lsp( - this: &WeakModel, - buffer: &Model, - abs_path: &Path, - language_server: &Arc, - settings: &LanguageSettings, - cx: &mut AsyncAppContext, - ) -> Result, String)>> { - let uri = lsp::Url::from_file_path(abs_path) - .map_err(|_| anyhow!("failed to convert abs path to uri"))?; - let text_document = lsp::TextDocumentIdentifier::new(uri); - let capabilities = &language_server.capabilities(); - - let formatting_provider = capabilities.document_formatting_provider.as_ref(); - let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref(); - - let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) { - language_server - .request::(lsp::DocumentFormattingParams { - text_document, - options: lsp_command::lsp_formatting_options(settings), - work_done_progress_params: Default::default(), - }) - .await? - } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) { - let buffer_start = lsp::Position::new(0, 0); - let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?; - language_server - .request::(lsp::DocumentRangeFormattingParams { - text_document: text_document.clone(), - range: lsp::Range::new(buffer_start, buffer_end), - options: lsp_command::lsp_formatting_options(settings), - work_done_progress_params: Default::default(), - }) - .await? 
- } else { - None - }; - - if let Some(lsp_edits) = lsp_edits { - this.update(cx, |this, cx| { - this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx) - })? - .await - } else { - Ok(Vec::with_capacity(0)) - } - } - pub async fn format_range_via_lsp( - this: &WeakModel, - buffer: &Model, - selections: &[Selection], - abs_path: &Path, - language_server: &Arc, - settings: &LanguageSettings, - cx: &mut AsyncAppContext, - ) -> Result, String)>> { - let capabilities = &language_server.capabilities(); - let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref(); - if range_formatting_provider.map_or(false, |provider| provider == &OneOf::Left(false)) { - return Err(anyhow!( - "{} language server does not support range formatting", - language_server.name() - )); - } - - let uri = lsp::Url::from_file_path(abs_path) - .map_err(|_| anyhow!("failed to convert abs path to uri"))?; - let text_document = lsp::TextDocumentIdentifier::new(uri); - - let lsp_edits = { - let ranges = selections.into_iter().map(|s| { - let start = lsp::Position::new(s.start.row, s.start.column); - let end = lsp::Position::new(s.end.row, s.end.column); - lsp::Range::new(start, end) - }); - - let mut edits = None; - for range in ranges { - if let Some(mut edit) = language_server - .request::(lsp::DocumentRangeFormattingParams { - text_document: text_document.clone(), - range, - options: lsp_command::lsp_formatting_options(settings), - work_done_progress_params: Default::default(), - }) - .await? - { - edits.get_or_insert_with(Vec::new).append(&mut edit); - } - } - edits - }; - - if let Some(lsp_edits) = lsp_edits { - this.update(cx, |this, cx| { - this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx) - })? 
- .await - } else { - Ok(Vec::with_capacity(0)) - } - } - pub fn code_actions( &mut self, buffer_handle: &Model, range: Range, + kinds: Option>, cx: &mut ModelContext, ) -> Task>> { if let Some((upstream_client, project_id)) = self.upstream_client() { @@ -2033,7 +3988,7 @@ impl LspStore { request: Some(proto::multi_lsp_query::Request::GetCodeActions( GetCodeActions { range: range.clone(), - kinds: None, + kinds: kinds.clone(), } .to_proto(project_id, buffer_handle.read(cx)), )), @@ -2059,7 +4014,7 @@ impl LspStore { .map(|code_actions_response| { GetCodeActions { range: range.clone(), - kinds: None, + kinds: kinds.clone(), } .response_from_proto( code_actions_response, @@ -2084,7 +4039,7 @@ impl LspStore { Some(range.start), GetCodeActions { range: range.clone(), - kinds: None, + kinds: kinds.clone(), }, cx, ); @@ -2137,13 +4092,13 @@ impl LspStore { .await; Ok(result) }) - } else { + } else if let Some(local) = self.as_local() { let snapshot = buffer.read(cx).snapshot(); let offset = position.to_offset(&snapshot); let scope = snapshot.language_scope_at(offset); let language = snapshot.language().cloned(); - let server_ids: Vec<_> = self + let server_ids: Vec<_> = local .language_servers_for_buffer(buffer.read(cx), cx) .filter(|(_, server)| server.capabilities().completion_provider.is_some()) .filter(|(adapter, _)| { @@ -2192,6 +4147,8 @@ impl LspStore { Ok(completions) }) + } else { + Task::ready(Err(anyhow!("No upstream client or local language server"))) } } @@ -2199,7 +4156,7 @@ impl LspStore { &self, buffer: Model, completion_indices: Vec, - completions: Arc>>, + completions: Rc>>, cx: &mut ModelContext, ) -> Task> { let client = self.upstream_client(); @@ -2212,57 +4169,62 @@ impl LspStore { let mut did_resolve = false; if let Some((client, project_id)) = client { for completion_index in completion_indices { - let (server_id, completion) = { - let completions_guard = completions.read(); - let completion = &completions_guard[completion_index]; - did_resolve = 
true; - let server_id = completion.server_id; - let completion = completion.lsp_completion.clone(); + let server_id = completions.borrow()[completion_index].server_id; - (server_id, completion) - }; - - Self::resolve_completion_remote( + if Self::resolve_completion_remote( project_id, server_id, buffer_id, completions.clone(), completion_index, - completion, client.clone(), language_registry.clone(), ) - .await; + .await + .log_err() + .is_some() + { + did_resolve = true; + } } } else { for completion_index in completion_indices { - let (server_id, completion) = { - let completions_guard = completions.read(); - let completion = &completions_guard[completion_index]; - let server_id = completion.server_id; - let completion = completion.lsp_completion.clone(); + let server_id = completions.borrow()[completion_index].server_id; - (server_id, completion) - }; - - let server = this - .read_with(&cx, |this, _| this.language_server_for_id(server_id)) + let server_and_adapter = this + .read_with(&cx, |lsp_store, _| { + let server = lsp_store.language_server_for_id(server_id)?; + let adapter = + lsp_store.language_server_adapter_for_id(server.server_id())?; + Some((server, adapter)) + }) .ok() .flatten(); - let Some(server) = server else { + let Some((server, adapter)) = server_and_adapter else { continue; }; - did_resolve = true; - Self::resolve_completion_local( + let resolved = Self::resolve_completion_local( server, &buffer_snapshot, completions.clone(), completion_index, - completion, - language_registry.clone(), ) - .await; + .await + .log_err() + .is_some(); + if resolved { + Self::regenerate_completion_labels( + adapter, + &buffer_snapshot, + completions.clone(), + completion_index, + language_registry.clone(), + ) + .await + .log_err(); + did_resolve = true; + } } } @@ -2273,11 +4235,9 @@ impl LspStore { async fn resolve_completion_local( server: Arc, snapshot: &BufferSnapshot, - completions: Arc>>, + completions: Rc>>, completion_index: usize, - completion: 
lsp::CompletionItem, - language_registry: Arc, - ) { + ) -> Result<()> { let can_resolve = server .capabilities() .completion_provider @@ -2285,30 +4245,17 @@ impl LspStore { .and_then(|options| options.resolve_provider) .unwrap_or(false); if !can_resolve { - return; + return Ok(()); } - let request = server.request::(completion); - let Some(completion_item) = request.await.log_err() else { - return; + let request = { + let completion = &completions.borrow()[completion_index]; + if completion.resolved { + return Ok(()); + } + server.request::(completion.lsp_completion.clone()) }; - - if let Some(lsp_documentation) = completion_item.documentation.as_ref() { - let documentation = language::prepare_completion_documentation( - lsp_documentation, - &language_registry, - None, // TODO: Try to reasonably work out which language the completion is for - ) - .await; - - let mut completions = completions.write(); - let completion = &mut completions[completion_index]; - completion.documentation = Some(documentation); - } else { - let mut completions = completions.write(); - let completion = &mut completions[completion_index]; - completion.documentation = Some(Documentation::Undocumented); - } + let completion_item = request.await?; if let Some(text_edit) = completion_item.text_edit.as_ref() { // Technically we don't have to parse the whole `text_edit`, since the only @@ -2320,7 +4267,7 @@ impl LspStore { if let Some((old_range, mut new_text)) = edit { LineEnding::normalize(&mut new_text); - let mut completions = completions.write(); + let mut completions = completions.borrow_mut(); let completion = &mut completions[completion_index]; completion.new_text = new_text; @@ -2329,16 +4276,82 @@ impl LspStore { } if completion_item.insert_text_format == Some(InsertTextFormat::SNIPPET) { // vtsls might change the type of completion after resolution. 
- let mut completions = completions.write(); + let mut completions = completions.borrow_mut(); let completion = &mut completions[completion_index]; if completion_item.insert_text_format != completion.lsp_completion.insert_text_format { completion.lsp_completion.insert_text_format = completion_item.insert_text_format; } } - let mut completions = completions.write(); + let mut completions = completions.borrow_mut(); let completion = &mut completions[completion_index]; completion.lsp_completion = completion_item; + completion.resolved = true; + Ok(()) + } + + async fn regenerate_completion_labels( + adapter: Arc, + snapshot: &BufferSnapshot, + completions: Rc>>, + completion_index: usize, + language_registry: Arc, + ) -> Result<()> { + let completion_item = completions.borrow()[completion_index] + .lsp_completion + .clone(); + if let Some(lsp_documentation) = completion_item.documentation.as_ref() { + let documentation = language::prepare_completion_documentation( + lsp_documentation, + &language_registry, + snapshot.language().cloned(), + ) + .await; + + let mut completions = completions.borrow_mut(); + let completion = &mut completions[completion_index]; + completion.documentation = Some(documentation); + } else { + let mut completions = completions.borrow_mut(); + let completion = &mut completions[completion_index]; + completion.documentation = Some(Documentation::Undocumented); + } + + // NB: Zed does not have `details` inside the completion resolve capabilities, but certain language servers violate the spec and do not return `details` immediately, e.g. https://github.com/yioneko/vtsls/issues/213 + // So we have to update the label here anyway... + let new_label = match snapshot.language() { + Some(language) => { + adapter + .labels_for_completions(&[completion_item.clone()], language) + .await? 
+ } + None => Vec::new(), + } + .pop() + .flatten() + .unwrap_or_else(|| { + CodeLabel::plain( + completion_item.label, + completion_item.filter_text.as_deref(), + ) + }); + + let mut completions = completions.borrow_mut(); + let completion = &mut completions[completion_index]; + if completion.label.filter_text() == new_label.filter_text() { + completion.label = new_label; + } else { + log::error!( + "Resolved completion changed display label from {} to {}. \ + Refusing to apply this because it changes the fuzzy match text from {} to {}", + completion.label.text(), + new_label.text(), + completion.label.filter_text(), + new_label.filter_text() + ); + } + + Ok(()) } #[allow(clippy::too_many_arguments)] @@ -2346,31 +4359,32 @@ impl LspStore { project_id: u64, server_id: LanguageServerId, buffer_id: BufferId, - completions: Arc>>, + completions: Rc>>, completion_index: usize, - completion: lsp::CompletionItem, client: AnyProtoClient, language_registry: Arc, - ) { + ) -> Result<()> { + let lsp_completion = { + let completion = &completions.borrow()[completion_index]; + if completion.resolved { + return Ok(()); + } + serde_json::to_string(&completion.lsp_completion) + .unwrap() + .into_bytes() + }; let request = proto::ResolveCompletionDocumentation { project_id, language_server_id: server_id.0 as u64, - lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(), + lsp_completion, buffer_id: buffer_id.into(), }; - let Some(response) = client + let response = client .request(request) .await - .context("completion documentation resolve proto request") - .log_err() - else { - return; - }; - let Some(lsp_completion) = serde_json::from_slice(&response.lsp_completion).log_err() - else { - return; - }; + .context("completion documentation resolve proto request")?; + let lsp_completion = serde_json::from_slice(&response.lsp_completion)?; let documentation = if response.documentation.is_empty() { Documentation::Undocumented @@ -2384,10 +4398,11 @@ impl LspStore { 
Documentation::MultiLinePlainText(response.documentation) }; - let mut completions = completions.write(); + let mut completions = completions.borrow_mut(); let completion = &mut completions[completion_index]; completion.documentation = Some(documentation); completion.lsp_completion = lsp_completion; + completion.resolved = true; let old_range = response .old_start @@ -2399,12 +4414,15 @@ impl LspStore { completion.old_range = old_start..old_end; } } + + Ok(()) } pub fn apply_additional_edits_for_completion( &self, buffer_handle: Model, - completion: Completion, + completions: Rc>>, + completion_index: usize, push_to_history: bool, cx: &mut ModelContext, ) -> Task>> { @@ -2413,8 +4431,9 @@ impl LspStore { if let Some((client, project_id)) = self.upstream_client() { cx.spawn(move |_, mut cx| async move { - let response = client - .request(proto::ApplyCompletionAdditionalEdits { + let request = { + let completion = completions.borrow()[completion_index].clone(); + proto::ApplyCompletionAdditionalEdits { project_id, buffer_id: buffer_id.into(), completion: Some(Self::serialize_completion(&CoreCompletion { @@ -2422,9 +4441,13 @@ impl LspStore { new_text: completion.new_text, server_id: completion.server_id, lsp_completion: completion.lsp_completion, + resolved: completion.resolved, })), - }) - .await?; + } + }; + + let response = client.request(request).await?; + completions.borrow_mut()[completion_index].resolved = true; if let Some(transaction) = response.transaction { let transaction = language::proto::deserialize_transaction(transaction)?; @@ -2444,34 +4467,31 @@ impl LspStore { } }) } else { - let server_id = completion.server_id; - let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) { + let server_id = completions.borrow()[completion_index].server_id; + let server = match self.language_server_for_local_buffer(buffer, server_id, cx) { Some((_, server)) => server.clone(), - _ => return Task::ready(Ok(Default::default())), + _ => return 
Task::ready(Ok(None)), }; + let snapshot = buffer_handle.read(&cx).snapshot(); cx.spawn(move |this, mut cx| async move { - let can_resolve = lang_server - .capabilities() - .completion_provider - .as_ref() - .and_then(|options| options.resolve_provider) - .unwrap_or(false); - let additional_text_edits = if can_resolve { - lang_server - .request::(completion.lsp_completion) - .await? - .additional_text_edits - } else { - completion.lsp_completion.additional_text_edits - }; + Self::resolve_completion_local( + server.clone(), + &snapshot, + completions.clone(), + completion_index, + ) + .await + .context("resolving completion")?; + let completion = completions.borrow()[completion_index].clone(); + let additional_text_edits = completion.lsp_completion.additional_text_edits; if let Some(edits) = additional_text_edits { let edits = this .update(&mut cx, |this, cx| { - this.edits_from_lsp( + this.as_local_mut().unwrap().edits_from_lsp( &buffer_handle, edits, - lang_server.server_id(), + server.server_id(), None, cx, ) @@ -2569,7 +4589,7 @@ impl LspStore { } pub fn signature_help( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -2643,7 +4663,7 @@ impl LspStore { } pub fn hover( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -2745,7 +4765,7 @@ impl LspStore { .await; Ok(symbols) }) - } else { + } else if let Some(local) = self.as_local() { struct WorkspaceSymbolsResult { lsp_adapter: Arc, language: LanguageName, @@ -2755,7 +4775,7 @@ impl LspStore { } let mut requests = Vec::new(); - for ((worktree_id, _), server_id) in self.language_server_ids.iter() { + for ((worktree_id, _), server_id) in local.language_server_ids.iter() { let Some(worktree_handle) = self .worktree_store .read(cx) @@ -2769,17 +4789,16 @@ impl LspStore { } let worktree_abs_path = worktree.abs_path().clone(); - let (lsp_adapter, language, server) = - match self.as_local().unwrap().language_servers.get(server_id) { - 
Some(LanguageServerState::Running { - adapter, - language, - server, - .. - }) => (adapter.clone(), language.clone(), server), + let (lsp_adapter, language, server) = match local.language_servers.get(server_id) { + Some(LanguageServerState::Running { + adapter, + language, + server, + .. + }) => (adapter.clone(), language.clone(), server), - _ => continue, - }; + _ => continue, + }; requests.push( server @@ -2883,9 +4902,20 @@ impl LspStore { Ok(symbols) }) + } else { + Task::ready(Err(anyhow!("No upstream client or local language server"))) } } + pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary { + let mut summary = DiagnosticSummary::default(); + for (_, _, path_summary) in self.diagnostic_summaries(include_ignored, cx) { + summary.error_count += path_summary.error_count; + summary.warning_count += path_summary.warning_count; + } + summary + } + pub fn diagnostic_summaries<'a>( &'a self, include_ignored: bool, @@ -2923,10 +4953,6 @@ impl LspStore { }) } - pub fn started_language_servers(&self) -> Vec<(WorktreeId, LanguageServerName)> { - self.language_server_ids.keys().cloned().collect() - } - pub fn on_buffer_edited( &mut self, buffer: Model, @@ -2939,6 +4965,8 @@ impl LspStore { let next_snapshot = buffer.text_snapshot(); let language_servers: Vec<_> = self + .as_local() + .unwrap() .language_servers_for_buffer(buffer, cx) .map(|i| i.1.clone()) .collect(); @@ -2947,6 +4975,8 @@ impl LspStore { let language_server = language_server.clone(); let buffer_snapshots = self + .as_local_mut() + .unwrap() .buffer_snapshots .get_mut(&buffer.remote_id()) .and_then(|m| m.get_mut(&language_server.server_id()))?; @@ -3037,8 +5067,9 @@ impl LspStore { let text_document = lsp::TextDocumentIdentifier { uri: lsp::Url::from_file_path(abs_path).log_err()?, }; + let local = self.as_local()?; - for server in self.language_servers_for_worktree(worktree_id) { + for server in local.language_servers_for_worktree(worktree_id) { if let 
Some(include_text) = include_text(server.as_ref()) { let text = if include_text { Some(buffer.read(cx).text()) @@ -3056,7 +5087,7 @@ impl LspStore { } } - for language_server_id in self.language_server_ids_for_buffer(buffer.read(cx), cx) { + for language_server_id in local.language_server_ids_for_buffer(buffer.read(cx), cx) { self.simulate_disk_based_diagnostics_events_if_needed(language_server_id, cx); } @@ -3070,15 +5101,27 @@ impl LspStore { maybe!(async move { let servers = this .update(&mut cx, |this, cx| { - this.language_server_ids + let Some(local) = this.as_local() else { + return Vec::default(); + }; + local + .language_server_ids .iter() .filter_map(|((worktree_id, _), server_id)| { let worktree = this .worktree_store .read(cx) .worktree_for_id(*worktree_id, cx)?; - let state = this.as_local()?.language_servers.get(server_id)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); + let state = local.language_servers.get(server_id)?; + let delegate = LocalLspAdapterDelegate::new( + local.languages.clone(), + &local.environment, + cx.weak_model(), + &worktree, + local.http_client.clone(), + local.fs.clone(), + cx, + ); match state { LanguageServerState::Starting(_) => None, LanguageServerState::Running { @@ -3144,81 +5187,70 @@ impl LspStore { }) } - fn primary_language_server_for_buffer<'a>( + pub(crate) fn language_servers_for_local_buffer<'a>( &'a self, buffer: &'a Buffer, cx: &'a AppContext, - ) -> Option<(&'a Arc, &'a Arc)> { - // The list of language servers is ordered based on the `language_servers` setting - // for each language, thus we can consider the first one in the list to be the - // primary one. - self.language_servers_for_buffer(buffer, cx).next() + ) -> impl Iterator, &'a Arc)> { + self.as_local().into_iter().flat_map(|local| { + local + .language_server_ids_for_buffer(buffer, cx) + .into_iter() + .filter_map(|server_id| match local.language_servers.get(&server_id)? 
{ + LanguageServerState::Running { + adapter, server, .. + } => Some((adapter, server)), + _ => None, + }) + }) } - pub fn language_server_for_buffer<'a>( + pub fn language_server_for_local_buffer<'a>( &'a self, buffer: &'a Buffer, server_id: LanguageServerId, cx: &'a AppContext, ) -> Option<(&'a Arc, &'a Arc)> { - self.language_servers_for_buffer(buffer, cx) + self.as_local()? + .language_servers_for_buffer(buffer, cx) .find(|(_, s)| s.server_id() == server_id) } - fn language_servers_for_worktree( - &self, - worktree_id: WorktreeId, - ) -> impl Iterator> { - self.language_server_ids - .iter() - .filter_map(move |((language_server_worktree_id, _), id)| { - if *language_server_worktree_id == worktree_id { - if let Some(LanguageServerState::Running { server, .. }) = - self.as_local()?.language_servers.get(id) - { - return Some(server); - } - } - None - }) - } - fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { - self.diagnostics.remove(&id_to_remove); self.diagnostic_summaries.remove(&id_to_remove); - - let mut servers_to_remove = HashMap::default(); - let mut servers_to_preserve = HashSet::default(); - for ((worktree_id, server_name), &server_id) in &self.language_server_ids { - if worktree_id == &id_to_remove { - servers_to_remove.insert(server_id, server_name.clone()); - } else { - servers_to_preserve.insert(server_id); - } - } - servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id)); - for (server_id_to_remove, server_name) in servers_to_remove { - self.language_server_ids - .remove(&(id_to_remove, server_name)); - self.language_server_statuses.remove(&server_id_to_remove); - if let Some(local_lsp_store) = self.as_local_mut() { - local_lsp_store - .language_server_watched_paths - .remove(&server_id_to_remove); - local_lsp_store - .last_workspace_edits_by_language_server - .remove(&server_id_to_remove); - local_lsp_store - .language_servers - .remove(&server_id_to_remove); - } - 
cx.emit(LspStoreEvent::LanguageServerRemoved(server_id_to_remove)); - } - - if let Some(local) = self.as_local() { + let to_remove = Vec::new(); + if let Some(local) = self.as_local_mut() { + local.diagnostics.remove(&id_to_remove); local.prettier_store.update(cx, |prettier_store, cx| { prettier_store.remove_worktree(id_to_remove, cx); - }) + }); + + let mut servers_to_remove = HashMap::default(); + let mut servers_to_preserve = HashSet::default(); + for ((worktree_id, server_name), &server_id) in &local.language_server_ids { + if worktree_id == &id_to_remove { + servers_to_remove.insert(server_id, server_name.clone()); + } else { + servers_to_preserve.insert(server_id); + } + } + servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id)); + for (server_id_to_remove, server_name) in servers_to_remove { + local + .language_server_ids + .remove(&(id_to_remove, server_name)); + local + .language_server_watched_paths + .remove(&server_id_to_remove); + local + .last_workspace_edits_by_language_server + .remove(&server_id_to_remove); + local.language_servers.remove(&server_id_to_remove); + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id_to_remove)); + } + } + for server in to_remove { + self.language_server_statuses.remove(&server); } } @@ -3277,149 +5309,18 @@ impl LspStore { .collect(); } - pub(crate) fn register_language_server( + fn register_local_language_server( &mut self, worktree_id: WorktreeId, language_server_name: LanguageServerName, language_server_id: LanguageServerId, ) { - self.language_server_ids + self.as_local_mut() + .unwrap() + .language_server_ids .insert((worktree_id, language_server_name), language_server_id); } - #[track_caller] - pub(crate) fn register_buffer_with_language_servers( - &mut self, - buffer_handle: &Model, - cx: &mut ModelContext, - ) { - let available_language = self.detect_language_for_buffer(buffer_handle, cx); - - let buffer = buffer_handle.read(cx); - let buffer_id = buffer.remote_id(); - - if let 
Some(file) = File::from_dyn(buffer.file()) { - if !file.is_local() { - return; - } - - let abs_path = file.abs_path(cx); - let Some(uri) = lsp::Url::from_file_path(&abs_path).log_err() else { - return; - }; - let initial_snapshot = buffer.text_snapshot(); - let worktree_id = file.worktree_id(cx); - - if let Some(diagnostics) = self.diagnostics.get(&worktree_id) { - for (server_id, diagnostics) in - diagnostics.get(file.path()).cloned().unwrap_or_default() - { - self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx) - .log_err(); - } - } - - if let Some(language) = available_language { - for adapter in self.languages.lsp_adapters(&language.name()) { - let server = self - .language_server_ids - .get(&(worktree_id, adapter.name.clone())) - .and_then(|id| self.as_local()?.language_servers.get(id)) - .and_then(|server_state| { - if let LanguageServerState::Running { server, .. } = server_state { - Some(server.clone()) - } else { - None - } - }); - let server = match server { - Some(server) => server, - None => continue, - }; - - server - .notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri.clone(), - adapter.language_id(&language.name()), - 0, - initial_snapshot.text(), - ), - }, - ) - .log_err(); - - buffer_handle.update(cx, |buffer, cx| { - buffer.set_completion_triggers( - server.server_id(), - server - .capabilities() - .completion_provider - .as_ref() - .and_then(|provider| { - provider - .trigger_characters - .as_ref() - .map(|characters| characters.iter().cloned().collect()) - }) - .unwrap_or_default(), - cx, - ); - }); - - let snapshot = LspBufferSnapshot { - version: 0, - snapshot: initial_snapshot.clone(), - }; - self.buffer_snapshots - .entry(buffer_id) - .or_default() - .insert(server.server_id(), vec![snapshot]); - } - } - } - } - - pub(crate) fn unregister_buffer_from_language_servers( - &mut self, - buffer: &Model, - old_file: &File, - cx: &mut AppContext, - ) { - let old_path = match 
old_file.as_local() { - Some(local) => local.abs_path(cx), - None => return, - }; - - buffer.update(cx, |buffer, cx| { - let worktree_id = old_file.worktree_id(cx); - - let ids = &self.language_server_ids; - - if let Some(language) = buffer.language().cloned() { - for adapter in self.languages.lsp_adapters(&language.name()) { - if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) { - buffer.update_diagnostics(*server_id, DiagnosticSet::new([], buffer), cx); - buffer.set_completion_triggers(*server_id, Default::default(), cx); - } - } - } - - self.buffer_snapshots.remove(&buffer.remote_id()); - let file_url = lsp::Url::from_file_path(old_path).unwrap(); - for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { - language_server - .notify::( - lsp::DidCloseTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new(file_url.clone()), - }, - ) - .log_err(); - } - }); - } - pub fn update_diagnostic_entries( &mut self, server_id: LanguageServerId, @@ -3441,7 +5342,13 @@ impl LspStore { }; if let Some(buffer) = self.buffer_store.read(cx).get_by_path(&project_path, cx) { - self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?; + self.as_local_mut().unwrap().update_buffer_diagnostics( + &buffer, + server_id, + version, + diagnostics.clone(), + cx, + )?; } let updated = worktree.update(cx, |worktree, cx| { @@ -3470,8 +5377,13 @@ impl LspStore { diagnostics: Vec>>, _: &mut ModelContext, ) -> Result { + let local = match &mut self.mode { + LspStoreMode::Local(local_lsp_store) => local_lsp_store, + _ => anyhow::bail!("update_worktree_diagnostics called on remote"), + }; + let summaries_for_tree = self.diagnostic_summaries.entry(worktree_id).or_default(); - let diagnostics_for_tree = self.diagnostics.entry(worktree_id).or_default(); + let diagnostics_for_tree = local.diagnostics.entry(worktree_id).or_default(); let summaries_by_server_id = 
summaries_for_tree.entry(worktree_path.clone()).or_default(); let old_summary = summaries_by_server_id @@ -3541,8 +5453,8 @@ impl LspStore { })? .await }) - } else { - let Some(&language_server_id) = self.language_server_ids.get(&( + } else if let Some(local) = self.as_local() { + let Some(&language_server_id) = local.language_server_ids.get(&( symbol.source_worktree_id, symbol.language_server_name.clone(), )) else { @@ -3575,6 +5487,8 @@ impl LspStore { symbol.language_server_name.clone(), cx, ) + } else { + Task::ready(Err(anyhow!("no upstream client or local store"))) } } @@ -3635,7 +5549,7 @@ impl LspStore { if worktree.update(&mut cx, |worktree, _| worktree.is_local())? { lsp_store .update(&mut cx, |lsp_store, cx| { - lsp_store.register_language_server( + lsp_store.register_local_language_server( worktree.read(cx).id(), language_server_name, language_server_id, @@ -3665,81 +5579,8 @@ impl LspStore { }) } - pub(crate) fn update_buffer_diagnostics( - &mut self, - buffer: &Model, - server_id: LanguageServerId, - version: Option, - mut diagnostics: Vec>>, - cx: &mut ModelContext, - ) -> Result<()> { - fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering { - Ordering::Equal - .then_with(|| b.is_primary.cmp(&a.is_primary)) - .then_with(|| a.is_disk_based.cmp(&b.is_disk_based)) - .then_with(|| a.severity.cmp(&b.severity)) - .then_with(|| a.message.cmp(&b.message)) - } - - let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?; - - diagnostics.sort_unstable_by(|a, b| { - Ordering::Equal - .then_with(|| a.range.start.cmp(&b.range.start)) - .then_with(|| b.range.end.cmp(&a.range.end)) - .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic)) - }); - - let mut sanitized_diagnostics = Vec::new(); - let edits_since_save = Patch::new( - snapshot - .edits_since::>(buffer.read(cx).saved_version()) - .collect(), - ); - for entry in diagnostics { - let start; - let end; - if entry.diagnostic.is_disk_based { - // Some 
diagnostics are based on files on disk instead of buffers' - // current contents. Adjust these diagnostics' ranges to reflect - // any unsaved edits. - start = edits_since_save.old_to_new(entry.range.start); - end = edits_since_save.old_to_new(entry.range.end); - } else { - start = entry.range.start; - end = entry.range.end; - } - - let mut range = snapshot.clip_point_utf16(start, Bias::Left) - ..snapshot.clip_point_utf16(end, Bias::Right); - - // Expand empty ranges by one codepoint - if range.start == range.end { - // This will be go to the next boundary when being clipped - range.end.column += 1; - range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right); - if range.start == range.end && range.end.column > 0 { - range.start.column -= 1; - range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left); - } - } - - sanitized_diagnostics.push(DiagnosticEntry { - range, - diagnostic: entry.diagnostic, - }); - } - drop(edits_since_save); - - let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot); - buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(server_id, set, cx) - }); - Ok(()) - } - fn request_multiple_lsp_locally( - &self, + &mut self, buffer: &Model, position: Option

, request: R, @@ -3751,11 +5592,13 @@ impl LspStore { ::Result: Send, ::Params: Send, { - debug_assert!(self.upstream_client().is_none()); + let Some(local) = self.as_local() else { + return Task::ready(Vec::new()); + }; let snapshot = buffer.read(cx).snapshot(); let scope = position.and_then(|position| snapshot.language_scope_at(position)); - let server_ids = self + let server_ids = local .language_servers_for_buffer(buffer.read(cx), cx) .filter(|(adapter, _)| { scope @@ -4015,6 +5858,35 @@ impl LspStore { }) } + async fn handle_register_buffer_with_language_servers( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let buffer_id = BufferId::new(envelope.payload.buffer_id)?; + let peer_id = envelope.original_sender_id.unwrap_or(envelope.sender_id); + this.update(&mut cx, |this, cx| { + if let Some((upstream_client, upstream_project_id)) = this.upstream_client() { + return upstream_client.send(proto::RegisterBufferWithLanguageServers { + project_id: upstream_project_id, + buffer_id: buffer_id.to_proto(), + }); + } + + let Some(buffer) = this.buffer_store().read(cx).get(buffer_id) else { + anyhow::bail!("buffer is not open"); + }; + + let handle = this.register_buffer_with_language_servers(&buffer, cx); + this.buffer_store().update(cx, |buffer_store, _| { + buffer_store.register_shared_lsp_handle(peer_id, buffer_id, handle); + }); + + Ok(()) + })??; + Ok(proto::Ack {}) + } + async fn handle_update_diagnostic_summary( this: Model, envelope: TypedEnvelope, @@ -4294,6 +6166,112 @@ impl LspStore { .map(|(key, value)| (*key, value)) } + pub(super) fn did_rename_entry( + &self, + worktree_id: WorktreeId, + old_path: &Path, + new_path: &Path, + is_dir: bool, + ) { + maybe!({ + let local_store = self.as_local()?; + + let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from)?; + let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from)?; + + for language_server in 
local_store.language_servers_for_worktree(worktree_id) { + let Some(filter) = local_store + .language_server_paths_watched_for_rename + .get(&language_server.server_id()) + else { + continue; + }; + + if filter.should_send_did_rename(&old_uri, is_dir) { + language_server + .notify::(RenameFilesParams { + files: vec![FileRename { + old_uri: old_uri.clone(), + new_uri: new_uri.clone(), + }], + }) + .log_err(); + } + } + Some(()) + }); + } + + pub(super) fn will_rename_entry( + this: WeakModel, + worktree_id: WorktreeId, + old_path: &Path, + new_path: &Path, + is_dir: bool, + cx: AsyncAppContext, + ) -> Task<()> { + let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from); + let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from); + cx.spawn(move |mut cx| async move { + let mut tasks = vec![]; + this.update(&mut cx, |this, cx| { + let local_store = this.as_local()?; + let old_uri = old_uri?; + let new_uri = new_uri?; + for language_server in local_store.language_servers_for_worktree(worktree_id) { + let Some(filter) = local_store + .language_server_paths_watched_for_rename + .get(&language_server.server_id()) + else { + continue; + }; + let Some(adapter) = + this.language_server_adapter_for_id(language_server.server_id()) + else { + continue; + }; + if filter.should_send_will_rename(&old_uri, is_dir) { + let apply_edit = cx.spawn({ + let old_uri = old_uri.clone(); + let new_uri = new_uri.clone(); + let language_server = language_server.clone(); + |this, mut cx| async move { + let edit = language_server + .request::(RenameFilesParams { + files: vec![FileRename { old_uri, new_uri }], + }) + .log_err() + .await + .flatten()?; + + LocalLspStore::deserialize_workspace_edit( + this.upgrade()?, + edit, + false, + adapter.clone(), + language_server.clone(), + &mut cx, + ) + .await + .ok(); + Some(()) + } + }); + tasks.push(apply_edit); + } + } + Some(()) + }) + .ok() + .flatten(); + for task in tasks { + // Await on tasks sequentially so that 
the order of application of edits is deterministic + // (at least with regards to the order of registration of language servers) + task.await; + } + }) + } + fn lsp_notify_abs_paths_changed( &mut self, server_id: LanguageServerId, @@ -4326,195 +6304,6 @@ impl LspStore { }); } - fn rebuild_watched_paths( - &mut self, - language_server_id: LanguageServerId, - cx: &mut ModelContext, - ) { - let worktrees = self - .worktree_store - .read(cx) - .worktrees() - .filter_map(|worktree| { - self.language_servers_for_worktree(worktree.read(cx).id()) - .find(|server| server.server_id() == language_server_id) - .map(|_| worktree) - }) - .collect::>(); - - let local_lsp_store = self.as_local_mut().unwrap(); - - let Some(watchers) = local_lsp_store - .language_server_watcher_registrations - .get(&language_server_id) - else { - return; - }; - - let mut worktree_globs = HashMap::default(); - let mut abs_globs = HashMap::default(); - log::trace!( - "Processing new watcher paths for language server with id {}", - language_server_id - ); - - enum PathToWatch { - Worktree { - literal_prefix: Arc, - pattern: String, - }, - Absolute { - path: Arc, - pattern: String, - }, - } - for watcher in watchers.values().flatten() { - let mut found_host = false; - for worktree in &worktrees { - let glob_is_inside_worktree = worktree.update(cx, |tree, _| { - if let Some(worktree_root_path) = tree.abs_path().to_str() { - let path_to_watch = match &watcher.glob_pattern { - lsp::GlobPattern::String(s) => { - match s.strip_prefix(worktree_root_path) { - Some(relative) => { - let pattern = relative - .strip_prefix(std::path::MAIN_SEPARATOR) - .unwrap_or(relative) - .to_owned(); - let literal_prefix = glob_literal_prefix(&pattern); - - let literal_prefix = Arc::from(PathBuf::from( - literal_prefix - .strip_prefix(std::path::MAIN_SEPARATOR) - .unwrap_or(literal_prefix), - )); - PathToWatch::Worktree { - literal_prefix, - pattern, - } - } - None => { - let path = glob_literal_prefix(s); - let glob = 
&s[path.len()..]; - let pattern = glob - .strip_prefix(std::path::MAIN_SEPARATOR) - .unwrap_or(glob) - .to_owned(); - let path = if Path::new(path).components().next().is_none() - { - Arc::from(Path::new(worktree_root_path)) - } else { - PathBuf::from(path).into() - }; - - PathToWatch::Absolute { path, pattern } - } - } - } - lsp::GlobPattern::Relative(rp) => { - let Ok(mut base_uri) = match &rp.base_uri { - lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri, - lsp::OneOf::Right(base_uri) => base_uri, - } - .to_file_path() else { - return false; - }; - - match base_uri.strip_prefix(worktree_root_path) { - Ok(relative) => { - let mut literal_prefix = relative.to_owned(); - literal_prefix.push(glob_literal_prefix(&rp.pattern)); - - PathToWatch::Worktree { - literal_prefix: literal_prefix.into(), - pattern: rp.pattern.clone(), - } - } - Err(_) => { - let path = glob_literal_prefix(&rp.pattern); - let glob = &rp.pattern[path.len()..]; - let pattern = glob - .strip_prefix(std::path::MAIN_SEPARATOR) - .unwrap_or(glob) - .to_owned(); - base_uri.push(path); - - let path = if base_uri.components().next().is_none() { - Arc::from(Path::new("/")) - } else { - base_uri.into() - }; - PathToWatch::Absolute { path, pattern } - } - } - } - }; - match path_to_watch { - PathToWatch::Worktree { - literal_prefix, - pattern, - } => { - if let Some((tree, glob)) = - tree.as_local_mut().zip(Glob::new(&pattern).log_err()) - { - tree.add_path_prefix_to_scan(literal_prefix); - worktree_globs - .entry(tree.id()) - .or_insert_with(GlobSetBuilder::new) - .add(glob); - } else { - return false; - } - } - PathToWatch::Absolute { path, pattern } => { - if let Some(glob) = Glob::new(&pattern).log_err() { - abs_globs - .entry(path) - .or_insert_with(GlobSetBuilder::new) - .add(glob); - } - } - } - return true; - } - false - }); - if glob_is_inside_worktree { - log::trace!( - "Watcher pattern `{}` has been attached to the worktree at `{}`", - 
serde_json::to_string(&watcher.glob_pattern).unwrap(), - worktree.read(cx).abs_path().display() - ); - found_host = true; - } - } - if !found_host { - log::error!( - "Watcher pattern `{}` has not been attached to any worktree or absolute path", - serde_json::to_string(&watcher.glob_pattern).unwrap() - ) - } - } - - let mut watch_builder = LanguageServerWatchedPathsBuilder::default(); - for (worktree_id, builder) in worktree_globs { - if let Ok(globset) = builder.build() { - watch_builder.watch_worktree(worktree_id, globset); - } - } - for (abs_path, builder) in abs_globs { - if let Ok(globset) = builder.build() { - watch_builder.watch_abs_path(abs_path, globset); - } - } - let watcher = watch_builder.build(local_lsp_store.fs.clone(), language_server_id, cx); - local_lsp_store - .language_server_watched_paths - .insert(language_server_id, watcher); - - cx.notify(); - } - pub fn language_server_for_id(&self, id: LanguageServerId) -> Option> { if let Some(local_lsp_store) = self.as_local() { if let Some(LanguageServerState::Running { server, .. }) = @@ -4533,44 +6322,6 @@ impl LspStore { } } - async fn on_lsp_workspace_edit( - this: WeakModel, - params: lsp::ApplyWorkspaceEditParams, - server_id: LanguageServerId, - adapter: Arc, - mut cx: AsyncAppContext, - ) -> Result { - let this = this - .upgrade() - .ok_or_else(|| anyhow!("project project closed"))?; - let language_server = this - .update(&mut cx, |this, _| this.language_server_for_id(server_id))? 
- .ok_or_else(|| anyhow!("language server not found"))?; - let transaction = Self::deserialize_workspace_edit( - this.clone(), - params.edit, - true, - adapter.clone(), - language_server.clone(), - &mut cx, - ) - .await - .log_err(); - this.update(&mut cx, |this, _| { - if let Some(transaction) = transaction { - this.as_local_mut() - .unwrap() - .last_workspace_edits_by_language_server - .insert(server_id, transaction); - } - })?; - Ok(lsp::ApplyWorkspaceEditResponse { - applied: true, - failed_change: None, - failure_reason: None, - }) - } - fn on_lsp_progress( &mut self, progress: lsp::ProgressParams, @@ -4743,160 +6494,6 @@ impl LspStore { }) } - fn on_lsp_did_change_watched_files( - &mut self, - language_server_id: LanguageServerId, - registration_id: &str, - params: DidChangeWatchedFilesRegistrationOptions, - cx: &mut ModelContext, - ) { - if let Some(local) = self.as_local_mut() { - let registrations = local - .language_server_watcher_registrations - .entry(language_server_id) - .or_default(); - - registrations.insert(registration_id.to_string(), params.watchers); - - self.rebuild_watched_paths(language_server_id, cx); - } - } - - fn on_lsp_unregister_did_change_watched_files( - &mut self, - language_server_id: LanguageServerId, - registration_id: &str, - cx: &mut ModelContext, - ) { - if let Some(local) = self.as_local_mut() { - let registrations = local - .language_server_watcher_registrations - .entry(language_server_id) - .or_default(); - - if registrations.remove(registration_id).is_some() { - log::info!( - "language server {}: unregistered workspace/DidChangeWatchedFiles capability with id {}", - language_server_id, - registration_id - ); - } else { - log::warn!( - "language server {}: failed to unregister workspace/DidChangeWatchedFiles capability with id {}. 
not registered.", - language_server_id, - registration_id - ); - } - - self.rebuild_watched_paths(language_server_id, cx); - } - } - - #[allow(clippy::type_complexity)] - pub(crate) fn edits_from_lsp( - &mut self, - buffer: &Model, - lsp_edits: impl 'static + Send + IntoIterator, - server_id: LanguageServerId, - version: Option, - cx: &mut ModelContext, - ) -> Task, String)>>> { - let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx); - cx.background_executor().spawn(async move { - let snapshot = snapshot?; - let mut lsp_edits = lsp_edits - .into_iter() - .map(|edit| (range_from_lsp(edit.range), edit.new_text)) - .collect::>(); - lsp_edits.sort_by_key(|(range, _)| range.start); - - let mut lsp_edits = lsp_edits.into_iter().peekable(); - let mut edits = Vec::new(); - while let Some((range, mut new_text)) = lsp_edits.next() { - // Clip invalid ranges provided by the language server. - let mut range = snapshot.clip_point_utf16(range.start, Bias::Left) - ..snapshot.clip_point_utf16(range.end, Bias::Left); - - // Combine any LSP edits that are adjacent. - // - // Also, combine LSP edits that are separated from each other by only - // a newline. This is important because for some code actions, - // Rust-analyzer rewrites the entire buffer via a series of edits that - // are separated by unchanged newline characters. - // - // In order for the diffing logic below to work properly, any edits that - // cancel each other out must be combined into one. 
- while let Some((next_range, next_text)) = lsp_edits.peek() { - if next_range.start.0 > range.end { - if next_range.start.0.row > range.end.row + 1 - || next_range.start.0.column > 0 - || snapshot.clip_point_utf16( - Unclipped(PointUtf16::new(range.end.row, u32::MAX)), - Bias::Left, - ) > range.end - { - break; - } - new_text.push('\n'); - } - range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left); - new_text.push_str(next_text); - lsp_edits.next(); - } - - // For multiline edits, perform a diff of the old and new text so that - // we can identify the changes more precisely, preserving the locations - // of any anchors positioned in the unchanged regions. - if range.end.row > range.start.row { - let mut offset = range.start.to_offset(&snapshot); - let old_text = snapshot.text_for_range(range).collect::(); - - let diff = TextDiff::from_lines(old_text.as_str(), &new_text); - let mut moved_since_edit = true; - for change in diff.iter_all_changes() { - let tag = change.tag(); - let value = change.value(); - match tag { - ChangeTag::Equal => { - offset += value.len(); - moved_since_edit = true; - } - ChangeTag::Delete => { - let start = snapshot.anchor_after(offset); - let end = snapshot.anchor_before(offset + value.len()); - if moved_since_edit { - edits.push((start..end, String::new())); - } else { - edits.last_mut().unwrap().0.end = end; - } - offset += value.len(); - moved_since_edit = false; - } - ChangeTag::Insert => { - if moved_since_edit { - let anchor = snapshot.anchor_after(offset); - edits.push((anchor..anchor, value.to_string())); - } else { - edits.last_mut().unwrap().1.push_str(value); - } - moved_since_edit = false; - } - } - } - } else if range.end == range.start { - let anchor = snapshot.anchor_after(range.start); - edits.push((anchor..anchor, new_text)); - } else { - let edit_start = snapshot.anchor_after(range.start); - let edit_end = snapshot.anchor_before(range.end); - edits.push((edit_start..edit_end, new_text)); - } - } - - Ok(edits) 
- }) - } - pub async fn handle_resolve_completion_documentation( this: Model, envelope: TypedEnvelope, @@ -5248,7 +6845,7 @@ impl LspStore { let apply_additional_edits = this.update(&mut cx, |this, cx| { this.apply_additional_edits_for_completion( buffer, - Completion { + Rc::new(RefCell::new(Box::new([Completion { old_range: completion.old_range, new_text: completion.new_text, lsp_completion: completion.lsp_completion, @@ -5260,7 +6857,9 @@ impl LspStore { filter_range: Default::default(), }, confirm: None, - }, + resolved: completion.resolved, + }]))), + 0, false, cx, ) @@ -5424,314 +7023,6 @@ impl LspStore { }) } - pub fn start_language_servers( - &mut self, - worktree: &Model, - language: LanguageName, - cx: &mut ModelContext, - ) { - let root_file = worktree - .update(cx, |tree, cx| tree.root_file(cx)) - .map(|f| f as _); - let settings = language_settings(Some(language.clone()), root_file.as_ref(), cx); - if !settings.enable_language_server || self.mode.is_remote() { - return; - } - - let available_lsp_adapters = self.languages.clone().lsp_adapters(&language); - let available_language_servers = available_lsp_adapters - .iter() - .map(|lsp_adapter| lsp_adapter.name.clone()) - .collect::>(); - - let desired_language_servers = - settings.customized_language_servers(&available_language_servers); - - let mut enabled_lsp_adapters: Vec> = Vec::new(); - for desired_language_server in desired_language_servers { - if let Some(adapter) = available_lsp_adapters - .iter() - .find(|adapter| adapter.name == desired_language_server) - { - enabled_lsp_adapters.push(adapter.clone()); - continue; - } - - if let Some(adapter) = self - .languages - .load_available_lsp_adapter(&desired_language_server) - { - self.languages - .register_lsp_adapter(language.clone(), adapter.adapter.clone()); - enabled_lsp_adapters.push(adapter); - continue; - } - - log::warn!( - "no language server found matching '{}'", - desired_language_server.0 - ); - } - - for adapter in &enabled_lsp_adapters { 
- self.start_language_server(worktree, adapter.clone(), language.clone(), cx); - } - - // After starting all the language servers, reorder them to reflect the desired order - // based on the settings. - // - // This is done, in part, to ensure that language servers loaded at different points - // (e.g., native vs extension) still end up in the right order at the end, rather than - // it being based on which language server happened to be loaded in first. - self.languages - .reorder_language_servers(&language, enabled_lsp_adapters); - } - - fn get_language_server_binary( - &self, - adapter: Arc, - delegate: Arc, - allow_binary_download: bool, - cx: &mut ModelContext, - ) -> Task> { - let settings = ProjectSettings::get( - Some(SettingsLocation { - worktree_id: delegate.worktree_id(), - path: Path::new(""), - }), - cx, - ) - .lsp - .get(&adapter.name) - .and_then(|s| s.binary.clone()); - - if settings.as_ref().is_some_and(|b| b.path.is_some()) { - let settings = settings.unwrap(); - return cx.spawn(|_, _| async move { - Ok(LanguageServerBinary { - path: PathBuf::from(&settings.path.unwrap()), - env: Some(delegate.shell_env().await), - arguments: settings - .arguments - .unwrap_or_default() - .iter() - .map(Into::into) - .collect(), - }) - }); - } - let lsp_binary_options = LanguageServerBinaryOptions { - allow_path_lookup: !settings - .as_ref() - .and_then(|b| b.ignore_system_version) - .unwrap_or_default(), - allow_binary_download, - }; - cx.spawn(|_, mut cx| async move { - let binary_result = adapter - .clone() - .get_language_server_command(delegate.clone(), lsp_binary_options, &mut cx) - .await; - - delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); - - let mut binary = binary_result?; - if let Some(arguments) = settings.and_then(|b| b.arguments) { - binary.arguments = arguments.into_iter().map(Into::into).collect(); - } - - let mut shell_env = delegate.shell_env().await; - shell_env.extend(binary.env.unwrap_or_default()); - 
binary.env = Some(shell_env); - Ok(binary) - }) - } - - fn start_language_server( - &mut self, - worktree_handle: &Model, - adapter: Arc, - language: LanguageName, - cx: &mut ModelContext, - ) { - if self.mode.is_remote() { - return; - } - - let worktree = worktree_handle.read(cx); - let worktree_id = worktree.id(); - let worktree_path = worktree.abs_path(); - let key = (worktree_id, adapter.name.clone()); - - if self.language_server_ids.contains_key(&key) { - return; - } - - let project_settings = ProjectSettings::get( - Some(SettingsLocation { - worktree_id, - path: Path::new(""), - }), - cx, - ); - let lsp = project_settings.lsp.get(&adapter.name); - let override_options = lsp.and_then(|s| s.initialization_options.clone()); - - let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx) - as Arc; - - let server_id = self.languages.next_language_server_id(); - let root_path = worktree_path.clone(); - log::info!( - "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", - adapter.name.0 - ); - - let binary = self.get_language_server_binary(adapter.clone(), delegate.clone(), true, cx); - - let pending_server = cx.spawn({ - let adapter = adapter.clone(); - let server_name = adapter.name.clone(); - let stderr_capture = stderr_capture.clone(); - - move |_lsp_store, cx| async move { - let binary = binary.await?; - - #[cfg(any(test, feature = "test-support"))] - if let Some(server) = _lsp_store - .update(&mut cx.clone(), |this, cx| { - this.languages.create_fake_language_server( - server_id, - &server_name, - binary.clone(), - cx.to_async(), - ) - }) - .ok() - .flatten() - { - return Ok(server); - } - - lsp::LanguageServer::new( - stderr_capture, - server_id, - server_name, - binary, - &root_path, - adapter.code_action_kinds(), - cx, - ) - } - }); - - let state = LanguageServerState::Starting({ - let server_name = adapter.name.0.clone(); - let delegate = delegate 
as Arc; - let language = language.clone(); - let key = key.clone(); - let adapter = adapter.clone(); - - cx.spawn(move |this, mut cx| async move { - let result = { - let delegate = delegate.clone(); - let adapter = adapter.clone(); - let this = this.clone(); - let toolchains = this - .update(&mut cx, |this, cx| this.toolchain_store(cx)) - .ok()?; - let mut cx = cx.clone(); - async move { - let language_server = pending_server.await?; - - let workspace_config = adapter - .adapter - .clone() - .workspace_configuration(&delegate, toolchains.clone(), &mut cx) - .await?; - - let mut initialization_options = adapter - .adapter - .clone() - .initialization_options(&(delegate)) - .await?; - - Self::setup_lsp_messages(this.clone(), &language_server, delegate, adapter); - - match (&mut initialization_options, override_options) { - (Some(initialization_options), Some(override_options)) => { - merge_json_value_into(override_options, initialization_options); - } - (None, override_options) => initialization_options = override_options, - _ => {} - } - - let language_server = cx - .update(|cx| language_server.initialize(initialization_options, cx))? 
- .await - .inspect_err(|_| { - if let Some(this) = this.upgrade() { - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) - }) - .ok(); - } - })?; - - language_server - .notify::( - lsp::DidChangeConfigurationParams { - settings: workspace_config, - }, - ) - .ok(); - - anyhow::Ok(language_server) - } - } - .await; - - match result { - Ok(server) => { - this.update(&mut cx, |this, mut cx| { - this.insert_newly_running_language_server( - language, - adapter, - server.clone(), - server_id, - key, - &mut cx, - ); - }) - .ok(); - stderr_capture.lock().take(); - Some(server) - } - - Err(err) => { - let log = stderr_capture.lock().take().unwrap_or_default(); - delegate.update_status( - adapter.name(), - LanguageServerBinaryStatus::Failed { - error: format!("{err}\n-- stderr--\n{}", log), - }, - ); - log::error!("Failed to start language server {server_name:?}: {err}"); - log::error!("server stderr: {:?}", log); - None - } - } - }) - }); - - self.as_local_mut() - .unwrap() - .language_servers - .insert(server_id, state); - self.language_server_ids.insert(key, server_id); - } - async fn shutdown_language_server( server_state: Option, name: LanguageServerName, @@ -5777,94 +7068,85 @@ impl LspStore { cx: &mut ModelContext, ) -> Task> { let key = (worktree_id, adapter_name); - if self.mode.is_local() { - if let Some(server_id) = self.language_server_ids.remove(&key) { - let name = key.1; - log::info!("stopping language server {name}"); - - // Remove other entries for this language server as well - let mut orphaned_worktrees = vec![worktree_id]; - let other_keys = self.language_server_ids.keys().cloned().collect::>(); - for other_key in other_keys { - if self.language_server_ids.get(&other_key) == Some(&server_id) { - self.language_server_ids.remove(&other_key); - orphaned_worktrees.push(other_key.0); - } - } - - self.buffer_store.update(cx, |buffer_store, cx| { - for buffer in buffer_store.buffers() { - buffer.update(cx, |buffer, cx| { - 
buffer.update_diagnostics( - server_id, - DiagnosticSet::new([], buffer), - cx, - ); - buffer.set_completion_triggers(server_id, Default::default(), cx); - }); - } - }); - - for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { - summaries.retain(|path, summaries_by_server_id| { - if summaries_by_server_id.remove(&server_id).is_some() { - if let Some((client, project_id)) = self.downstream_client.clone() { - client - .send(proto::UpdateDiagnosticSummary { - project_id, - worktree_id: worktree_id.to_proto(), - summary: Some(proto::DiagnosticSummary { - path: path.to_string_lossy().to_string(), - language_server_id: server_id.0 as u64, - error_count: 0, - warning_count: 0, - }), - }) - .log_err(); - } - !summaries_by_server_id.is_empty() - } else { - true - } - }); - } - - for diagnostics in self.diagnostics.values_mut() { - diagnostics.retain(|_, diagnostics_by_server_id| { - if let Ok(ix) = - diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) - { - diagnostics_by_server_id.remove(ix); - !diagnostics_by_server_id.is_empty() - } else { - true - } - }); - } - - self.as_local_mut() - .unwrap() - .language_server_watched_paths - .remove(&server_id); - self.language_server_statuses.remove(&server_id); - cx.notify(); - - let server_state = self - .as_local_mut() - .unwrap() - .language_servers - .remove(&server_id); - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); - cx.spawn(move |_, cx| async move { - Self::shutdown_language_server(server_state, name, cx).await; - orphaned_worktrees - }) - } else { - Task::ready(Vec::new()) + let local = match &mut self.mode { + LspStoreMode::Local(local) => local, + _ => { + return Task::ready(Vec::new()); + } + }; + let Some(server_id) = local.language_server_ids.remove(&key) else { + return Task::ready(Vec::new()); + }; + let name = key.1; + log::info!("stopping language server {name}"); + + // Remove other entries for this language server as well + let mut orphaned_worktrees = 
vec![worktree_id]; + let other_keys = local + .language_server_ids + .keys() + .cloned() + .collect::>(); + for other_key in other_keys { + if local.language_server_ids.get(&other_key) == Some(&server_id) { + local.language_server_ids.remove(&other_key); + orphaned_worktrees.push(other_key.0); } - } else { - Task::ready(Vec::new()) } + + self.buffer_store.update(cx, |buffer_store, cx| { + for buffer in buffer_store.buffers() { + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics(server_id, DiagnosticSet::new([], buffer), cx); + buffer.set_completion_triggers(server_id, Default::default(), cx); + }); + } + }); + + for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { + summaries.retain(|path, summaries_by_server_id| { + if summaries_by_server_id.remove(&server_id).is_some() { + if let Some((client, project_id)) = self.downstream_client.clone() { + client + .send(proto::UpdateDiagnosticSummary { + project_id, + worktree_id: worktree_id.to_proto(), + summary: Some(proto::DiagnosticSummary { + path: path.to_string_lossy().to_string(), + language_server_id: server_id.0 as u64, + error_count: 0, + warning_count: 0, + }), + }) + .log_err(); + } + !summaries_by_server_id.is_empty() + } else { + true + } + }); + } + + self.language_server_statuses.remove(&server_id); + let local = self.as_local_mut().unwrap(); + for diagnostics in local.diagnostics.values_mut() { + diagnostics.retain(|_, diagnostics_by_server_id| { + if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { + diagnostics_by_server_id.remove(ix); + !diagnostics_by_server_id.is_empty() + } else { + true + } + }); + } + local.language_server_watched_paths.remove(&server_id); + let server_state = local.language_servers.remove(&server_id); + cx.notify(); + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); + cx.spawn(move |_, cx| async move { + Self::shutdown_language_server(server_state, name, cx).await; + orphaned_worktrees + }) } pub fn 
restart_language_servers_for_buffers( @@ -5938,17 +7220,20 @@ impl LspStore { } this.update(&mut cx, |this, cx| { + let local = this.as_local_mut().unwrap(); // Restart the language server for the given worktree. - this.start_language_servers(&worktree, language.clone(), cx); + // + local.start_language_servers(&worktree, language.clone(), cx); // Lookup new server ids and set them for each of the orphaned worktrees for (affected_worktree_id, language_server_name) in affected_worktrees { - if let Some(new_server_id) = this + if let Some(new_server_id) = local .language_server_ids .get(&(worktree_id, language_server_name.clone())) .cloned() { - this.language_server_ids + local + .language_server_ids .insert((affected_worktree_id, language_server_name), new_server_id); } } @@ -5958,493 +7243,6 @@ impl LspStore { .detach(); } - fn setup_lsp_messages( - this: WeakModel, - language_server: &LanguageServer, - delegate: Arc, - adapter: Arc, - ) { - let name = language_server.name(); - let server_id = language_server.server_id(); - language_server - .on_notification::({ - let adapter = adapter.clone(); - let this = this.clone(); - move |mut params, mut cx| { - let adapter = adapter.clone(); - if let Some(this) = this.upgrade() { - adapter.process_diagnostics(&mut params); - this.update(&mut cx, |this, cx| { - this.update_diagnostics( - server_id, - params, - &adapter.disk_based_diagnostic_sources, - cx, - ) - .log_err(); - }) - .ok(); - } - } - }) - .detach(); - language_server - .on_request::({ - let adapter = adapter.adapter.clone(); - let delegate = delegate.clone(); - let this = this.clone(); - move |params, mut cx| { - let adapter = adapter.clone(); - let delegate = delegate.clone(); - let this = this.clone(); - async move { - let toolchains = - this.update(&mut cx, |this, cx| this.toolchain_store(cx))?; - let workspace_config = adapter - .workspace_configuration(&delegate, toolchains, &mut cx) - .await?; - Ok(params - .items - .into_iter() - .map(|item| { - if let 
Some(section) = &item.section { - workspace_config - .get(section) - .cloned() - .unwrap_or(serde_json::Value::Null) - } else { - workspace_config.clone() - } - }) - .collect()) - } - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.clone(); - move |_, mut cx| { - let this = this.clone(); - async move { - let Some(server) = - this.update(&mut cx, |this, _| this.language_server_for_id(server_id))? - else { - return Ok(None); - }; - let root = server.root_path(); - let Ok(uri) = Url::from_file_path(&root) else { - return Ok(None); - }; - Ok(Some(vec![WorkspaceFolder { - uri, - name: Default::default(), - }])) - } - } - }) - .detach(); - // Even though we don't have handling for these requests, respond to them to - // avoid stalling any language server like `gopls` which waits for a response - // to these requests when initializing. - language_server - .on_request::({ - let this = this.clone(); - move |params, mut cx| { - let this = this.clone(); - async move { - this.update(&mut cx, |this, _| { - if let Some(status) = this.language_server_statuses.get_mut(&server_id) - { - if let lsp::NumberOrString::String(token) = params.token { - status.progress_tokens.insert(token); - } - } - })?; - - Ok(()) - } - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.clone(); - move |params, mut cx| { - let this = this.clone(); - async move { - for reg in params.registrations { - match reg.method.as_str() { - "workspace/didChangeWatchedFiles" => { - if let Some(options) = reg.register_options { - let options = serde_json::from_value(options)?; - this.update(&mut cx, |this, cx| { - this.on_lsp_did_change_watched_files( - server_id, ®.id, options, cx, - ); - })?; - } - } - "textDocument/rangeFormatting" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::< - lsp::DocumentRangeFormattingOptions, - 
>( - options - ) - }) - .transpose()?; - let provider = match options { - None => OneOf::Left(true), - Some(options) => OneOf::Right(options), - }; - server.update_capabilities(|capabilities| { - capabilities.document_range_formatting_provider = - Some(provider); - }) - } - anyhow::Ok(()) - })??; - } - "textDocument/onTypeFormatting" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::< - lsp::DocumentOnTypeFormattingOptions, - >( - options - ) - }) - .transpose()?; - if let Some(options) = options { - server.update_capabilities(|capabilities| { - capabilities - .document_on_type_formatting_provider = - Some(options); - }) - } - } - anyhow::Ok(()) - })??; - } - "textDocument/formatting" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::< - lsp::DocumentFormattingOptions, - >( - options - ) - }) - .transpose()?; - let provider = match options { - None => OneOf::Left(true), - Some(options) => OneOf::Right(options), - }; - server.update_capabilities(|capabilities| { - capabilities.document_formatting_provider = - Some(provider); - }) - } - anyhow::Ok(()) - })??; - } - _ => log::warn!("unhandled capability registration: {reg:?}"), - } - } - Ok(()) - } - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.clone(); - move |params, mut cx| { - let this = this.clone(); - async move { - for unreg in params.unregisterations.iter() { - match unreg.method.as_str() { - "workspace/didChangeWatchedFiles" => { - this.update(&mut cx, |this, cx| { - this.on_lsp_unregister_did_change_watched_files( - server_id, &unreg.id, cx, - ); - })?; - } - "textDocument/rename" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - 
server.update_capabilities(|capabilities| { - capabilities.rename_provider = None - }) - } - })?; - } - "textDocument/rangeFormatting" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.document_range_formatting_provider = - None - }) - } - })?; - } - "textDocument/onTypeFormatting" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.document_on_type_formatting_provider = - None; - }) - } - })?; - } - "textDocument/formatting" => { - this.update(&mut cx, |this, _| { - if let Some(server) = this.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.document_formatting_provider = None; - }) - } - })?; - } - _ => log::warn!("unhandled capability unregistration: {unreg:?}"), - } - } - Ok(()) - } - } - }) - .detach(); - - language_server - .on_request::({ - let adapter = adapter.clone(); - let this = this.clone(); - move |params, cx| { - Self::on_lsp_workspace_edit( - this.clone(), - params, - server_id, - adapter.clone(), - cx, - ) - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.clone(); - move |(), mut cx| { - let this = this.clone(); - async move { - this.update(&mut cx, |this, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints); - this.downstream_client.as_ref().map(|(client, project_id)| { - client.send(proto::RefreshInlayHints { - project_id: *project_id, - }) - }) - })? 
- .transpose()?; - Ok(()) - } - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.clone(); - let name = name.to_string(); - move |params, mut cx| { - let this = this.clone(); - let name = name.to_string(); - async move { - let actions = params.actions.unwrap_or_default(); - let (tx, mut rx) = smol::channel::bounded(1); - let request = LanguageServerPromptRequest { - level: match params.typ { - lsp::MessageType::ERROR => PromptLevel::Critical, - lsp::MessageType::WARNING => PromptLevel::Warning, - _ => PromptLevel::Info, - }, - message: params.message, - actions, - response_channel: tx, - lsp_name: name.clone(), - }; - - let did_update = this - .update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerPrompt(request)); - }) - .is_ok(); - if did_update { - let response = rx.next().await; - - Ok(response) - } else { - Ok(None) - } - } - } - }) - .detach(); - - language_server - .on_notification::({ - let this = this.clone(); - let name = name.to_string(); - move |params, mut cx| { - let this = this.clone(); - let name = name.to_string(); - if let Some(ref message) = params.message { - let message = message.trim(); - if !message.is_empty() { - let formatted_message = format!( - "Language server {name} (id {server_id}) status update: {message}" - ); - match params.health { - ServerHealthStatus::Ok => log::info!("{}", formatted_message), - ServerHealthStatus::Warning => log::warn!("{}", formatted_message), - ServerHealthStatus::Error => { - log::error!("{}", formatted_message); - let (tx, _rx) = smol::channel::bounded(1); - let request = LanguageServerPromptRequest { - level: PromptLevel::Critical, - message: params.message.unwrap_or_default(), - actions: Vec::new(), - response_channel: tx, - lsp_name: name.clone(), - }; - let _ = this - .update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerPrompt(request)); - }) - .ok(); - } - ServerHealthStatus::Other(status) => { - log::info!( - "Unknown server health: 
{status}\n{formatted_message}" - ) - } - } - } - } - } - }) - .detach(); - language_server - .on_notification::({ - let this = this.clone(); - let name = name.to_string(); - move |params, mut cx| { - let this = this.clone(); - let name = name.to_string(); - - let (tx, _) = smol::channel::bounded(1); - let request = LanguageServerPromptRequest { - level: match params.typ { - lsp::MessageType::ERROR => PromptLevel::Critical, - lsp::MessageType::WARNING => PromptLevel::Warning, - _ => PromptLevel::Info, - }, - message: params.message, - actions: vec![], - response_channel: tx, - lsp_name: name.clone(), - }; - - let _ = this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerPrompt(request)); - }); - } - }) - .detach(); - - let disk_based_diagnostics_progress_token = - adapter.disk_based_diagnostics_progress_token.clone(); - - language_server - .on_notification::({ - let this = this.clone(); - move |params, mut cx| { - if let Some(this) = this.upgrade() { - this.update(&mut cx, |this, cx| { - this.on_lsp_progress( - params, - server_id, - disk_based_diagnostics_progress_token.clone(), - cx, - ); - }) - .ok(); - } - } - }) - .detach(); - - language_server - .on_notification::({ - let this = this.clone(); - move |params, mut cx| { - if let Some(this) = this.upgrade() { - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerLog( - server_id, - LanguageServerLogType::Log(params.typ), - params.message, - )); - }) - .ok(); - } - } - }) - .detach(); - - language_server - .on_notification::({ - let this = this.clone(); - move |params, mut cx| { - if let Some(this) = this.upgrade() { - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerLog( - server_id, - LanguageServerLogType::Trace(params.verbose), - params.message, - )); - }) - .ok(); - } - } - }) - .detach(); - } - pub fn update_diagnostics( &mut self, language_server_id: LanguageServerId, @@ -6452,6 +7250,9 @@ impl LspStore { disk_based_sources: &[String], cx: &mut 
ModelContext, ) -> Result<()> { + if !self.mode.is_local() { + anyhow::bail!("called update_diagnostics on remote"); + } let abs_path = params .uri .to_file_path() @@ -6494,7 +7295,7 @@ impl LspStore { (diagnostic.severity, is_unnecessary), ); } else { - let group_id = post_inc(&mut self.next_diagnostic_group_id); + let group_id = post_inc(&mut self.as_local_mut().unwrap().next_diagnostic_group_id); let is_disk_based = source.map_or(false, |source| disk_based_sources.contains(source)); @@ -6576,9 +7377,12 @@ impl LspStore { key: (WorktreeId, LanguageServerName), cx: &mut ModelContext, ) { + let Some(local) = self.as_local_mut() else { + return; + }; // If the language server for this key doesn't match the server id, don't store the // server. Which will cause it to be dropped, killing the process - if self + if local .language_server_ids .get(&key) .map(|id| id != &server_id) @@ -6589,16 +7393,31 @@ impl LspStore { // Update language_servers collection with Running variant of LanguageServerState // indicating that the server is up and running and ready - if let Some(local) = self.as_local_mut() { - local.language_servers.insert( - server_id, - LanguageServerState::Running { - adapter: adapter.clone(), - language: language.clone(), - server: language_server.clone(), - simulate_disk_based_diagnostics_completion: None, - }, - ); + local.language_servers.insert( + server_id, + LanguageServerState::Running { + adapter: adapter.clone(), + language: language.clone(), + server: language_server.clone(), + simulate_disk_based_diagnostics_completion: None, + }, + ); + if let Some(file_ops_caps) = language_server + .capabilities() + .workspace + .as_ref() + .and_then(|ws| ws.file_operations.as_ref()) + { + let did_rename_caps = file_ops_caps.did_rename.as_ref(); + let will_rename_caps = file_ops_caps.will_rename.as_ref(); + if did_rename_caps.or(will_rename_caps).is_some() { + let watcher = RenamePathsWatchedForServer::default() + .with_did_rename_patterns(did_rename_caps) + 
.with_will_rename_patterns(will_rename_caps); + local + .language_server_paths_watched_for_rename + .insert(server_id, watcher); + } } self.language_server_statuses.insert( @@ -6616,6 +7435,7 @@ impl LspStore { language_server.name(), Some(key.0), )); + cx.emit(LspStoreEvent::RefreshInlayHints); if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { downstream_client @@ -6631,7 +7451,7 @@ impl LspStore { } // Tell the language server about every open buffer in the worktree that matches the language. - self.buffer_store.update(cx, |buffer_store, cx| { + self.buffer_store.clone().update(cx, |buffer_store, cx| { for buffer_handle in buffer_store.buffers() { let buffer = buffer_handle.read(cx); let file = match File::from_dyn(buffer.file()) { @@ -6652,40 +7472,44 @@ impl LspStore { { continue; } - + // didOpen let file = match file.as_local() { Some(file) => file, None => continue, }; - let versions = self - .buffer_snapshots - .entry(buffer.remote_id()) - .or_default() - .entry(server_id) - .or_insert_with(|| { - vec![LspBufferSnapshot { - version: 0, - snapshot: buffer.text_snapshot(), - }] - }); + let local = self.as_local_mut().unwrap(); - let snapshot = versions.last().unwrap(); - let version = snapshot.version; - let initial_snapshot = &snapshot.snapshot; - let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - language_server - .notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri, - adapter.language_id(&language.name()), - version, - initial_snapshot.text(), - ), - }, - ) - .log_err(); + if local.registered_buffers.contains_key(&buffer.remote_id()) { + let versions = local + .buffer_snapshots + .entry(buffer.remote_id()) + .or_default() + .entry(server_id) + .or_insert_with(|| { + vec![LspBufferSnapshot { + version: 0, + snapshot: buffer.text_snapshot(), + }] + }); + + let snapshot = versions.last().unwrap(); + let version = snapshot.version; + let initial_snapshot = 
&snapshot.snapshot; + let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); + language_server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri, + adapter.language_id(&language.name()), + version, + initial_snapshot.text(), + ), + }, + ) + .log_err(); + } buffer_handle.update(cx, |buffer, cx| { buffer.set_completion_triggers( @@ -6710,39 +7534,6 @@ impl LspStore { cx.notify(); } - fn buffer_snapshot_for_lsp_version( - &mut self, - buffer: &Model, - server_id: LanguageServerId, - version: Option, - cx: &AppContext, - ) -> Result { - const OLD_VERSIONS_TO_RETAIN: i32 = 10; - - if let Some(version) = version { - let buffer_id = buffer.read(cx).remote_id(); - let snapshots = self - .buffer_snapshots - .get_mut(&buffer_id) - .and_then(|m| m.get_mut(&server_id)) - .ok_or_else(|| { - anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}") - })?; - - let found_snapshot = snapshots - .binary_search_by_key(&version, |e| e.version) - .map(|ix| snapshots[ix].snapshot.clone()) - .map_err(|_| { - anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}") - })?; - - snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version); - Ok(found_snapshot) - } else { - Ok((buffer.read(cx)).text_snapshot()) - } - } - pub fn language_servers_running_disk_based_diagnostics( &self, ) -> impl Iterator + '_ { @@ -6757,23 +7548,6 @@ impl LspStore { }) } - pub(crate) fn language_servers_for_buffer<'a>( - &'a self, - buffer: &'a Buffer, - cx: &'a AppContext, - ) -> impl Iterator, &'a Arc)> { - self.language_server_ids_for_buffer(buffer, cx) - .into_iter() - .filter_map( - |server_id| match self.as_local()?.language_servers.get(&server_id)? { - LanguageServerState::Running { - adapter, server, .. 
- } => Some((adapter, server)), - _ => None, - }, - ) - } - pub(crate) fn cancel_language_server_work_for_buffers( &mut self, buffers: impl IntoIterator>, @@ -6794,11 +7568,12 @@ impl LspStore { cx.background_executor() .spawn(request) .detach_and_log_err(cx); - } else { + } else if let Some(local) = self.as_local() { let servers = buffers .into_iter() .flat_map(|buffer| { - self.language_server_ids_for_buffer(buffer.read(cx), cx) + local + .language_server_ids_for_buffer(buffer.read(cx), cx) .into_iter() }) .collect::>(); @@ -6865,16 +7640,6 @@ impl LspStore { } } - pub fn language_servers( - &self, - ) -> impl '_ + Iterator { - self.language_server_ids - .iter() - .map(|((worktree_id, server_name), server_id)| { - (*server_id, server_name.clone(), *worktree_id) - }) - } - fn register_supplementary_language_server( &mut self, id: LanguageServerId, @@ -6941,7 +7706,7 @@ impl LspStore { }); let worktree_id = worktree_handle.read(cx).id(); - let mut language_server_ids = self + let mut language_server_ids = local .language_server_ids .iter() .filter_map(|((server_worktree_id, _), server_id)| { @@ -6959,7 +7724,7 @@ impl LspStore { if let Some(watched_paths) = local .language_server_watched_paths .get(server_id) - .and_then(|paths| paths.read(cx).worktree_paths.get(&worktree_id)) + .and_then(|paths| paths.worktree_paths.get(&worktree_id)) { let params = lsp::DidChangeWatchedFilesParams { changes: changes @@ -7002,281 +7767,6 @@ impl LspStore { }) } - pub(crate) fn language_server_ids_for_buffer( - &self, - buffer: &Buffer, - cx: &AppContext, - ) -> Vec { - if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { - let worktree_id = file.worktree_id(cx); - self.languages - .lsp_adapters(&language.name()) - .iter() - .flat_map(|adapter| { - let key = (worktree_id, adapter.name.clone()); - self.language_server_ids.get(&key).copied() - }) - .collect() - } else { - Vec::new() - } - } - - pub async fn deserialize_text_edits( - this: Model, - 
buffer_to_edit: Model, - edits: Vec, - push_to_history: bool, - _: Arc, - language_server: Arc, - cx: &mut AsyncAppContext, - ) -> Result> { - let edits = this - .update(cx, |this, cx| { - this.edits_from_lsp( - &buffer_to_edit, - edits, - language_server.server_id(), - None, - cx, - ) - })? - .await?; - - let transaction = buffer_to_edit.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(); - buffer.start_transaction(); - for (range, text) in edits { - buffer.edit([(range, text)], None, cx); - } - - if buffer.end_transaction(cx).is_some() { - let transaction = buffer.finalize_last_transaction().unwrap().clone(); - if !push_to_history { - buffer.forget_transaction(transaction.id); - } - Some(transaction) - } else { - None - } - })?; - - Ok(transaction) - } - - pub async fn deserialize_workspace_edit( - this: Model, - edit: lsp::WorkspaceEdit, - push_to_history: bool, - lsp_adapter: Arc, - language_server: Arc, - cx: &mut AsyncAppContext, - ) -> Result { - let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone())?; - - let mut operations = Vec::new(); - if let Some(document_changes) = edit.document_changes { - match document_changes { - lsp::DocumentChanges::Edits(edits) => { - operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)) - } - lsp::DocumentChanges::Operations(ops) => operations = ops, - } - } else if let Some(changes) = edit.changes { - operations.extend(changes.into_iter().map(|(uri, edits)| { - lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { - text_document: lsp::OptionalVersionedTextDocumentIdentifier { - uri, - version: None, - }, - edits: edits.into_iter().map(Edit::Plain).collect(), - }) - })); - } - - let mut project_transaction = ProjectTransaction::default(); - for operation in operations { - match operation { - lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => { - let abs_path = op - .uri - .to_file_path() - .map_err(|_| anyhow!("can't convert URI to path"))?; - - if let 
Some(parent_path) = abs_path.parent() { - fs.create_dir(parent_path).await?; - } - if abs_path.ends_with("/") { - fs.create_dir(&abs_path).await?; - } else { - fs.create_file( - &abs_path, - op.options - .map(|options| fs::CreateOptions { - overwrite: options.overwrite.unwrap_or(false), - ignore_if_exists: options.ignore_if_exists.unwrap_or(false), - }) - .unwrap_or_default(), - ) - .await?; - } - } - - lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => { - let source_abs_path = op - .old_uri - .to_file_path() - .map_err(|_| anyhow!("can't convert URI to path"))?; - let target_abs_path = op - .new_uri - .to_file_path() - .map_err(|_| anyhow!("can't convert URI to path"))?; - fs.rename( - &source_abs_path, - &target_abs_path, - op.options - .map(|options| fs::RenameOptions { - overwrite: options.overwrite.unwrap_or(false), - ignore_if_exists: options.ignore_if_exists.unwrap_or(false), - }) - .unwrap_or_default(), - ) - .await?; - } - - lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => { - let abs_path = op - .uri - .to_file_path() - .map_err(|_| anyhow!("can't convert URI to path"))?; - let options = op - .options - .map(|options| fs::RemoveOptions { - recursive: options.recursive.unwrap_or(false), - ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false), - }) - .unwrap_or_default(); - if abs_path.ends_with("/") { - fs.remove_dir(&abs_path, options).await?; - } else { - fs.remove_file(&abs_path, options).await?; - } - } - - lsp::DocumentChangeOperation::Edit(op) => { - let buffer_to_edit = this - .update(cx, |this, cx| { - this.open_local_buffer_via_lsp( - op.text_document.uri.clone(), - language_server.server_id(), - lsp_adapter.name.clone(), - cx, - ) - })? 
- .await?; - - let edits = this - .update(cx, |this, cx| { - let path = buffer_to_edit.read(cx).project_path(cx); - let active_entry = this.active_entry; - let is_active_entry = path.clone().map_or(false, |project_path| { - this.worktree_store - .read(cx) - .entry_for_path(&project_path, cx) - .map_or(false, |entry| Some(entry.id) == active_entry) - }); - - let (mut edits, mut snippet_edits) = (vec![], vec![]); - for edit in op.edits { - match edit { - Edit::Plain(edit) => edits.push(edit), - Edit::Annotated(edit) => edits.push(edit.text_edit), - Edit::Snippet(edit) => { - let Ok(snippet) = Snippet::parse(&edit.snippet.value) - else { - continue; - }; - - if is_active_entry { - snippet_edits.push((edit.range, snippet)); - } else { - // Since this buffer is not focused, apply a normal edit. - edits.push(TextEdit { - range: edit.range, - new_text: snippet.text, - }); - } - } - } - } - if !snippet_edits.is_empty() { - let buffer_id = buffer_to_edit.read(cx).remote_id(); - let version = if let Some(buffer_version) = op.text_document.version - { - this.buffer_snapshot_for_lsp_version( - &buffer_to_edit, - language_server.server_id(), - Some(buffer_version), - cx, - ) - .ok() - .map(|snapshot| snapshot.version) - } else { - Some(buffer_to_edit.read(cx).saved_version().clone()) - }; - - let most_recent_edit = version.and_then(|version| { - version.iter().max_by_key(|timestamp| timestamp.value) - }); - // Check if the edit that triggered that edit has been made by this participant. - - if let Some(most_recent_edit) = most_recent_edit { - cx.emit(LspStoreEvent::SnippetEdit { - buffer_id, - edits: snippet_edits, - most_recent_edit, - }); - } - } - - this.edits_from_lsp( - &buffer_to_edit, - edits, - language_server.server_id(), - op.text_document.version, - cx, - ) - })? 
- .await?; - - let transaction = buffer_to_edit.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(); - buffer.start_transaction(); - for (range, text) in edits { - buffer.edit([(range, text)], None, cx); - } - let transaction = if buffer.end_transaction(cx).is_some() { - let transaction = buffer.finalize_last_transaction().unwrap().clone(); - if !push_to_history { - buffer.forget_transaction(transaction.id); - } - Some(transaction) - } else { - None - }; - - transaction - })?; - if let Some(transaction) = transaction { - project_transaction.0.insert(buffer_to_edit, transaction); - } - } - } - } - - Ok(project_transaction) - } - fn serialize_symbol(symbol: &Symbol) -> proto::Symbol { proto::Symbol { language_server_name: symbol.language_server_name.0.to_string(), @@ -7334,6 +7824,7 @@ impl LspStore { new_text: completion.new_text.clone(), server_id: completion.server_id.0 as u64, lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(), + resolved: completion.resolved, } } @@ -7353,6 +7844,7 @@ impl LspStore { new_text: completion.new_text, server_id: LanguageServerId(completion.server_id as usize), lsp_completion, + resolved: completion.resolved, }) } @@ -7454,6 +7946,7 @@ async fn populate_labels_for_completions( documentation, lsp_completion, confirm: None, + resolved: false, }) } } @@ -7464,6 +7957,84 @@ pub enum LanguageServerToQuery { Other(LanguageServerId), } +#[derive(Default)] +struct RenamePathsWatchedForServer { + did_rename: Vec, + will_rename: Vec, +} + +impl RenamePathsWatchedForServer { + fn with_did_rename_patterns( + mut self, + did_rename: Option<&FileOperationRegistrationOptions>, + ) -> Self { + if let Some(did_rename) = did_rename { + self.did_rename = did_rename + .filters + .iter() + .filter_map(|filter| filter.try_into().log_err()) + .collect(); + } + self + } + fn with_will_rename_patterns( + mut self, + will_rename: Option<&FileOperationRegistrationOptions>, + ) -> Self { + if let Some(will_rename) = will_rename { 
+ self.will_rename = will_rename + .filters + .iter() + .filter_map(|filter| filter.try_into().log_err()) + .collect(); + } + self + } + + fn should_send_did_rename(&self, path: &str, is_dir: bool) -> bool { + self.did_rename.iter().any(|pred| pred.eval(path, is_dir)) + } + fn should_send_will_rename(&self, path: &str, is_dir: bool) -> bool { + self.will_rename.iter().any(|pred| pred.eval(path, is_dir)) + } +} + +impl TryFrom<&FileOperationFilter> for RenameActionPredicate { + type Error = globset::Error; + fn try_from(ops: &FileOperationFilter) -> Result { + Ok(Self { + kind: ops.pattern.matches.clone(), + glob: GlobBuilder::new(&ops.pattern.glob) + .case_insensitive( + ops.pattern + .options + .as_ref() + .map_or(false, |ops| ops.ignore_case.unwrap_or(false)), + ) + .build()? + .compile_matcher(), + }) + } +} +struct RenameActionPredicate { + glob: GlobMatcher, + kind: Option, +} + +impl RenameActionPredicate { + // Returns true if language server should be notified + fn eval(&self, path: &str, is_dir: bool) -> bool { + self.kind.as_ref().map_or(true, |kind| { + let expected_kind = if is_dir { + FileOperationPatternKind::Folder + } else { + FileOperationPatternKind::File + }; + kind == &expected_kind + }) && self.glob.is_match(path) + } +} + #[derive(Default)] struct LanguageServerWatchedPaths { worktree_paths: HashMap, @@ -7488,78 +8059,65 @@ impl LanguageServerWatchedPathsBuilder { fs: Arc, language_server_id: LanguageServerId, cx: &mut ModelContext, - ) -> Model { + ) -> LanguageServerWatchedPaths { let project = cx.weak_model(); - cx.new_model(|cx| { - let this_id = cx.entity_id(); - const LSP_ABS_PATH_OBSERVE: Duration = Duration::from_millis(100); - let abs_paths = self - .abs_paths - .into_iter() - .map(|(abs_path, globset)| { - let task = cx.spawn({ - let abs_path = abs_path.clone(); - let fs = fs.clone(); + const LSP_ABS_PATH_OBSERVE: Duration = Duration::from_millis(100); + let abs_paths = self + .abs_paths + .into_iter() + .map(|(abs_path, globset)| { 
+ let task = cx.spawn({ + let abs_path = abs_path.clone(); + let fs = fs.clone(); - let lsp_store = project.clone(); - |_, mut cx| async move { - maybe!(async move { - let mut push_updates = - fs.watch(&abs_path, LSP_ABS_PATH_OBSERVE).await; - while let Some(update) = push_updates.0.next().await { - let action = lsp_store - .update(&mut cx, |this, cx| { - let Some(local) = this.as_local() else { - return ControlFlow::Break(()); - }; - let Some(watcher) = local - .language_server_watched_paths - .get(&language_server_id) - else { - return ControlFlow::Break(()); - }; - if watcher.entity_id() != this_id { - // This watcher is no longer registered on the project, which means that we should - // cease operations. - return ControlFlow::Break(()); - } - let (globs, _) = watcher - .read(cx) - .abs_paths - .get(&abs_path) - .expect( - "Watched abs path is not registered with a watcher", - ); - let matching_entries = update - .into_iter() - .filter(|event| globs.is_match(&event.path)) - .collect::>(); - this.lsp_notify_abs_paths_changed( - language_server_id, - matching_entries, - ); - ControlFlow::Continue(()) - }) - .ok()?; + let lsp_store = project.clone(); + |_, mut cx| async move { + maybe!(async move { + let mut push_updates = fs.watch(&abs_path, LSP_ABS_PATH_OBSERVE).await; + while let Some(update) = push_updates.0.next().await { + let action = lsp_store + .update(&mut cx, |this, _| { + let Some(local) = this.as_local() else { + return ControlFlow::Break(()); + }; + let Some(watcher) = local + .language_server_watched_paths + .get(&language_server_id) + else { + return ControlFlow::Break(()); + }; + let (globs, _) = watcher.abs_paths.get(&abs_path).expect( + "Watched abs path is not registered with a watcher", + ); + let matching_entries = update + .into_iter() + .filter(|event| globs.is_match(&event.path)) + .collect::>(); + this.lsp_notify_abs_paths_changed( + language_server_id, + matching_entries, + ); + ControlFlow::Continue(()) + }) + .ok()?; - if 
action.is_break() { - break; - } - } - Some(()) - }) - .await; + if action.is_break() { + break; + } } - }); - (abs_path, (globset, task)) - }) - .collect(); - LanguageServerWatchedPaths { - worktree_paths: self.worktree_paths, - abs_paths, - } + Some(()) + }) + .await; + } + }); + (abs_path, (globset, task)) }) + .collect(); + LanguageServerWatchedPaths { + worktree_paths: self.worktree_paths, + abs_paths, + } } } @@ -7787,6 +8345,7 @@ impl LspAdapter for SshLspAdapter { async fn check_if_user_installed( &self, _: &dyn LspAdapterDelegate, + _: Arc, _: &AsyncAppContext, ) -> Option { Some(self.binary.clone()) @@ -7843,22 +8402,10 @@ pub struct LocalLspAdapterDelegate { } impl LocalLspAdapterDelegate { - fn for_local( - lsp_store: &LspStore, - worktree: &Model, - cx: &mut ModelContext, - ) -> Arc { - let local = lsp_store - .as_local() - .expect("LocalLspAdapterDelegate cannot be constructed on a remote"); - - let http_client = local.http_client.clone(); - - Self::new(lsp_store, worktree, http_client, local.fs.clone(), cx) - } - pub fn new( - lsp_store: &LspStore, + language_registry: Arc, + environment: &Model, + lsp_store: WeakModel, worktree: &Model, http_client: Arc, fs: Arc, @@ -7866,22 +8413,16 @@ impl LocalLspAdapterDelegate { ) -> Arc { let worktree_id = worktree.read(cx).id(); let worktree_abs_path = worktree.read(cx).abs_path(); - let load_shell_env_task = if let Some(environment) = - &lsp_store.as_local().map(|local| local.environment.clone()) - { - environment.update(cx, |env, cx| { - env.get_environment(Some(worktree_id), Some(worktree_abs_path), cx) - }) - } else { - Task::ready(None).shared() - }; + let load_shell_env_task = environment.update(cx, |env, cx| { + env.get_environment(Some(worktree_id), Some(worktree_abs_path), cx) + }); Arc::new(Self { - lsp_store: cx.weak_model(), + lsp_store, worktree: worktree.read(cx).snapshot(), fs, http_client, - language_registry: lsp_store.languages.clone(), + language_registry, load_shell_env_task, }) } @@ 
-7935,7 +8476,7 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { }; let env = self.shell_env().await; - let output = smol::process::Command::new(&npm) + let output = util::command::new_smol_command(&npm) .args(["root", "-g"]) .envs(env) .current_dir(local_package_directory) @@ -7969,7 +8510,7 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { let working_dir = self.worktree_root_path(); - let output = smol::process::Command::new(&command.path) + let output = util::command::new_smol_command(&command.path) .args(command.arguments) .envs(command.env.clone().unwrap_or_default()) .current_dir(working_dir) diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index c7ac0ffd0b..e707f9e9bc 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -36,6 +36,7 @@ pub struct PrettierStore { worktree_store: Model, default_prettier: DefaultPrettier, prettiers_per_worktree: HashMap>>, + prettier_ignores_per_worktree: HashMap>, prettier_instances: HashMap, } @@ -65,11 +66,13 @@ impl PrettierStore { worktree_store, default_prettier: DefaultPrettier::default(), prettiers_per_worktree: HashMap::default(), + prettier_ignores_per_worktree: HashMap::default(), prettier_instances: HashMap::default(), } } pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { + self.prettier_ignores_per_worktree.remove(&id_to_remove); let mut prettier_instances_to_clean = FuturesUnordered::new(); if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) { for path in prettier_paths.iter().flatten() { @@ -211,6 +214,65 @@ impl PrettierStore { } } + fn prettier_ignore_for_buffer( + &mut self, + buffer: &Model, + cx: &mut ModelContext, + ) -> Task> { + let buffer = buffer.read(cx); + let buffer_file = buffer.file(); + if buffer.language().is_none() { + return Task::ready(None); + } + match 
File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) { + Some((worktree_id, buffer_path)) => { + let fs = Arc::clone(&self.fs); + let prettier_ignores = self + .prettier_ignores_per_worktree + .get(&worktree_id) + .cloned() + .unwrap_or_default(); + cx.spawn(|lsp_store, mut cx| async move { + match cx + .background_executor() + .spawn(async move { + Prettier::locate_prettier_ignore( + fs.as_ref(), + &prettier_ignores, + &buffer_path, + ) + .await + }) + .await + { + Ok(ControlFlow::Break(())) => None, + Ok(ControlFlow::Continue(None)) => None, + Ok(ControlFlow::Continue(Some(ignore_dir))) => { + log::debug!("Found prettier ignore in {ignore_dir:?}"); + lsp_store + .update(&mut cx, |store, _| { + store + .prettier_ignores_per_worktree + .entry(worktree_id) + .or_default() + .insert(ignore_dir.clone()); + }) + .ok(); + Some(ignore_dir) + } + Err(e) => { + log::error!( + "Failed to determine prettier ignore path for buffer: {e:#}" + ); + None + } + } + }) + } + None => Task::ready(None), + } + } + fn start_prettier( node: NodeRuntime, prettier_dir: PathBuf, @@ -654,6 +716,13 @@ pub(super) async fn format_with_prettier( .ok()? .await; + let ignore_dir = prettier_store + .update(cx, |prettier_store, cx| { + prettier_store.prettier_ignore_for_buffer(buffer, cx) + }) + .ok()? 
+ .await; + let (prettier_path, prettier_task) = prettier_instance?; let prettier_description = match prettier_path.as_ref() { @@ -671,7 +740,7 @@ pub(super) async fn format_with_prettier( .flatten(); let format_result = prettier - .format(buffer, buffer_path, cx) + .format(buffer, buffer_path, ignore_dir, cx) .await .map(crate::lsp_store::FormatOperation::Prettier) .with_context(|| format!("{} failed to format buffer", prettier_description)); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 17f84a6f37..d7ffd6421e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -25,7 +25,7 @@ pub mod search_history; mod yarn; use anyhow::{anyhow, Context as _, Result}; -use buffer_store::{BufferStore, BufferStoreEvent}; +use buffer_store::{BufferChangeSet, BufferStore, BufferStoreEvent}; use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore}; use clock::ReplicaId; use collections::{BTreeSet, HashMap, HashSet}; @@ -39,7 +39,10 @@ use futures::{ pub use image_store::{ImageItem, ImageStore}; use image_store::{ImageItemEvent, ImageStoreEvent}; -use git::{blame::Blame, repository::GitRepository}; +use git::{ + blame::Blame, + repository::{GitFileStatus, GitRepository}, +}; use gpui::{ AnyModel, AppContext, AsyncAppContext, BorrowAppContext, Context as _, EventEmitter, Hsla, Model, ModelContext, SharedString, Task, WeakModel, WindowContext, @@ -47,17 +50,17 @@ use gpui::{ use itertools::Itertools; use language::{ language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent, - CachedLspAdapter, Capability, CodeLabel, DiagnosticEntry, Documentation, File as _, Language, - LanguageName, LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, - Transaction, Unclipped, + CachedLspAdapter, Capability, CodeLabel, Documentation, File as _, Language, LanguageName, + LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, 
Transaction, + Unclipped, }; use lsp::{ - CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer, LanguageServerId, - LanguageServerName, MessageActionItem, + CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer, + LanguageServerId, LanguageServerName, MessageActionItem, }; use lsp_command::*; use node_runtime::NodeRuntime; -use parking_lot::{Mutex, RwLock}; +use parking_lot::Mutex; pub use prettier_store::PrettierStore; use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; use remote::{SshConnectionOptions, SshRemoteClient}; @@ -95,9 +98,8 @@ pub use task_inventory::{ BasicContextProvider, ContextProviderWithTasks, Inventory, TaskSourceKind, }; pub use worktree::{ - Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry, - UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, - FS_WATCH_LATENCY, + Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet, + UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY, }; pub use buffer_store::ProjectTransaction; @@ -111,7 +113,7 @@ const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; const MAX_SEARCH_RESULT_FILES: usize = 5_000; const MAX_SEARCH_RESULT_RANGES: usize = 10_000; -pub trait Item { +pub trait ProjectItem { fn try_open( project: &Model, path: &ProjectPath, @@ -121,6 +123,7 @@ pub trait Item { Self: Sized; fn entry_id(&self, cx: &AppContext) -> Option; fn project_path(&self, cx: &AppContext) -> Option; + fn is_dirty(&self) -> bool; } #[derive(Clone)] @@ -239,11 +242,11 @@ pub enum Event { LanguageNotFound(Model), ActiveEntryChanged(Option), ActivateProjectPanel, - WorktreeAdded, + WorktreeAdded(WorktreeId), WorktreeOrderChanged, WorktreeRemoved(WorktreeId), WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet), - WorktreeUpdatedGitRepositories, + WorktreeUpdatedGitRepositories(WorktreeId), 
DiskBasedDiagnosticsStarted { language_server_id: LanguageServerId, }, @@ -258,7 +261,7 @@ pub enum Event { DisconnectedFromHost, DisconnectedFromSshRemote, Closed, - DeletedEntry(ProjectEntryId), + DeletedEntry(WorktreeId, ProjectEntryId), CollaboratorUpdated { old_peer_id: proto::PeerId, new_peer_id: proto::PeerId, @@ -350,6 +353,8 @@ pub struct Completion { pub documentation: Option, /// The raw completion provided by the language server. pub lsp_completion: lsp::CompletionItem, + /// Whether this completion has been resolved, to ensure it happens once per completion. + pub resolved: bool, /// An optional callback to invoke when this completion is confirmed. /// Returns, whether new completions should be retriggered after the current one. /// If `true` is returned, the editor will show a new completion menu after this completion is confirmed. @@ -377,6 +382,7 @@ pub(crate) struct CoreCompletion { new_text: String, server_id: LanguageServerId, lsp_completion: lsp::CompletionItem, + resolved: bool, } /// A code action provided by a language server. 
@@ -582,6 +588,8 @@ impl Project { client.add_model_request_handler(Self::handle_open_new_buffer); client.add_model_message_handler(Self::handle_create_buffer_for_peer); + client.add_model_request_handler(WorktreeStore::handle_rename_project_entry); + WorktreeStore::init(&client); BufferStore::init(&client); LspStore::init(&client); @@ -1133,7 +1141,7 @@ impl Project { let fs = Arc::new(RealFs::default()); let languages = LanguageRegistry::test(cx.background_executor().clone()); - let clock = Arc::new(FakeSystemClock::default()); + let clock = Arc::new(FakeSystemClock::new()); let http_client = http_client::FakeHttpClient::with_404_response(); let client = cx .update(|cx| client::Client::new(clock, http_client.clone(), cx)) @@ -1179,7 +1187,7 @@ impl Project { use gpui::Context; let languages = LanguageRegistry::test(cx.executor()); - let clock = Arc::new(FakeSystemClock::default()); + let clock = Arc::new(FakeSystemClock::new()); let http_client = http_client::FakeHttpClient::with_404_response(); let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx)); let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); @@ -1202,13 +1210,6 @@ impl Project { .await .unwrap(); - project.update(cx, |project, cx| { - let tree_id = tree.read(cx).id(); - project.environment.update(cx, |environment, _| { - environment.set_cached(&[(tree_id, HashMap::default())]) - }); - }); - tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete()) .await; } @@ -1251,6 +1252,10 @@ impl Project { self.buffer_store.read(cx).buffers().collect() } + pub fn environment(&self) -> &Model { + &self.environment + } + pub fn cli_environment(&self, cx: &AppContext) -> Option> { self.environment.read(cx).get_cli_environment() } @@ -1430,6 +1435,15 @@ impl Project { .unwrap_or(false) } + pub fn project_path_git_status( + &self, + project_path: &ProjectPath, + cx: &AppContext, + ) -> Option { + self.worktree_for_id(project_path.worktree_id, cx) + 
.and_then(|worktree| worktree.read(cx).status_for_file(&project_path.path)) + } + pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option { paths .iter() @@ -1488,11 +1502,45 @@ impl Project { new_path: impl Into>, cx: &mut ModelContext, ) -> Task> { - let Some(worktree) = self.worktree_for_entry(entry_id, cx) else { + let worktree_store = self.worktree_store.read(cx); + let new_path = new_path.into(); + let Some((worktree, old_path, is_dir)) = worktree_store + .worktree_and_entry_for_id(entry_id, cx) + .map(|(worktree, entry)| (worktree, entry.path.clone(), entry.is_dir())) + else { return Task::ready(Err(anyhow!(format!("No worktree for entry {entry_id:?}")))); }; - worktree.update(cx, |worktree, cx| { - worktree.rename_entry(entry_id, new_path, cx) + + let worktree_id = worktree.read(cx).id(); + + let lsp_store = self.lsp_store().downgrade(); + cx.spawn(|_, mut cx| async move { + let (old_abs_path, new_abs_path) = { + let root_path = worktree.update(&mut cx, |this, _| this.abs_path())?; + (root_path.join(&old_path), root_path.join(&new_path)) + }; + LspStore::will_rename_entry( + lsp_store.clone(), + worktree_id, + &old_abs_path, + &new_abs_path, + is_dir, + cx.clone(), + ) + .await; + + let entry = worktree + .update(&mut cx, |worktree, cx| { + worktree.rename_entry(entry_id, new_path.clone(), cx) + })? 
+ .await?; + + lsp_store + .update(&mut cx, |this, _| { + this.did_rename_entry(worktree_id, &old_abs_path, &new_abs_path, is_dir); + }) + .ok(); + Ok(entry) }) } @@ -1503,6 +1551,7 @@ impl Project { cx: &mut ModelContext, ) -> Option>> { let worktree = self.worktree_for_entry(entry_id, cx)?; + cx.emit(Event::DeletedEntry(worktree.read(cx).id(), entry_id)); worktree.update(cx, |worktree, cx| { worktree.delete_entry(entry_id, trash, cx) }) @@ -1805,6 +1854,19 @@ impl Project { } } + #[cfg(any(test, feature = "test-support"))] + pub fn open_local_buffer_with_lsp( + &mut self, + abs_path: impl AsRef, + cx: &mut ModelContext, + ) -> Task, lsp_store::OpenLspBufferHandle)>> { + if let Some((worktree, relative_path)) = self.find_worktree(abs_path.as_ref(), cx) { + self.open_buffer_with_lsp((worktree.read(cx).id(), relative_path), cx) + } else { + Task::ready(Err(anyhow!("no such path"))) + } + } + pub fn open_buffer( &mut self, path: impl Into, @@ -1819,6 +1881,37 @@ impl Project { }) } + #[cfg(any(test, feature = "test-support"))] + pub fn open_buffer_with_lsp( + &mut self, + path: impl Into, + cx: &mut ModelContext, + ) -> Task, lsp_store::OpenLspBufferHandle)>> { + let buffer = self.open_buffer(path, cx); + let lsp_store = self.lsp_store().clone(); + cx.spawn(|_, mut cx| async move { + let buffer = buffer.await?; + let handle = lsp_store.update(&mut cx, |lsp_store, cx| { + lsp_store.register_buffer_with_language_servers(&buffer, cx) + })?; + Ok((buffer, handle)) + }) + } + + pub fn open_unstaged_changes( + &mut self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task>> { + if self.is_disconnected(cx) { + return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); + } + + self.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.open_unstaged_changes(buffer, cx) + }) + } + pub fn open_buffer_by_id( &mut self, id: BufferId, @@ -2203,7 +2296,7 @@ impl Project { match event { WorktreeStoreEvent::WorktreeAdded(worktree) => { self.on_worktree_added(worktree, cx); - 
cx.emit(Event::WorktreeAdded); + cx.emit(Event::WorktreeAdded(worktree.read(cx).id())); } WorktreeStoreEvent::WorktreeRemoved(_, id) => { cx.emit(Event::WorktreeRemoved(*id)); @@ -2224,23 +2317,25 @@ impl Project { } } cx.observe(worktree, |_, _, cx| cx.notify()).detach(); - cx.subscribe(worktree, |project, worktree, event, cx| match event { - worktree::Event::UpdatedEntries(changes) => { - cx.emit(Event::WorktreeUpdatedEntries( - worktree.read(cx).id(), - changes.clone(), - )); + cx.subscribe(worktree, |project, worktree, event, cx| { + let worktree_id = worktree.update(cx, |worktree, _| worktree.id()); + match event { + worktree::Event::UpdatedEntries(changes) => { + cx.emit(Event::WorktreeUpdatedEntries( + worktree.read(cx).id(), + changes.clone(), + )); - let worktree_id = worktree.update(cx, |worktree, _| worktree.id()); - project - .client() - .telemetry() - .report_discovered_project_events(worktree_id, changes); + project + .client() + .telemetry() + .report_discovered_project_events(worktree_id, changes); + } + worktree::Event::UpdatedGitRepositories(_) => { + cx.emit(Event::WorktreeUpdatedGitRepositories(worktree_id)); + } + worktree::Event::DeletedEntry(id) => cx.emit(Event::DeletedEntry(worktree_id, *id)), } - worktree::Event::UpdatedGitRepositories(_) => { - cx.emit(Event::WorktreeUpdatedGitRepositories); - } - worktree::Event::DeletedEntry(id) => cx.emit(Event::DeletedEntry(*id)), }) .detach(); cx.notify(); @@ -2265,10 +2360,7 @@ impl Project { event: &BufferEvent, cx: &mut ModelContext, ) -> Option<()> { - if matches!( - event, - BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged - ) { + if matches!(event, BufferEvent::Edited { .. 
} | BufferEvent::Reloaded) { self.request_buffer_diff_recalculation(&buffer, cx); } @@ -2365,34 +2457,32 @@ impl Project { } fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext) -> Task<()> { - let buffers = self.buffers_needing_diff.drain().collect::>(); cx.spawn(move |this, mut cx| async move { - let tasks: Vec<_> = buffers - .iter() - .filter_map(|buffer| { - let buffer = buffer.upgrade()?; - buffer - .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) - .ok() - .flatten() - }) - .collect(); - - futures::future::join_all(tasks).await; - - this.update(&mut cx, |this, cx| { - if this.buffers_needing_diff.is_empty() { - // TODO: Would a `ModelContext.notify()` suffice here? - for buffer in buffers { - if let Some(buffer) = buffer.upgrade() { - buffer.update(cx, |_, cx| cx.notify()); + loop { + let task = this + .update(&mut cx, |this, cx| { + let buffers = this + .buffers_needing_diff + .drain() + .filter_map(|buffer| buffer.upgrade()) + .collect::>(); + if buffers.is_empty() { + None + } else { + Some(this.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.recalculate_buffer_diffs(buffers, cx) + })) } - } + }) + .ok() + .flatten(); + + if let Some(task) = task { + task.await; } else { - this.recalculate_buffer_diffs(cx).detach(); + break; } - }) - .ok(); + } }) } @@ -2457,13 +2547,26 @@ impl Project { .read(cx) .list_toolchains(worktree_id, language_name, cx) }) - .unwrap_or(Task::Ready(None)) + .ok()? .await }) } else { Task::ready(None) } } + + pub async fn toolchain_term( + languages: Arc, + language_name: LanguageName, + ) -> Option { + languages + .language_for_name(&language_name.0) + .await + .ok()? 
+ .toolchain_lister() + .map(|lister| lister.term()) + } + pub fn activate_toolchain( &self, worktree_id: WorktreeId, @@ -2506,31 +2609,6 @@ impl Project { .update(cx, |store, _| store.reset_last_formatting_failure()); } - pub fn update_diagnostics( - &mut self, - language_server_id: LanguageServerId, - params: lsp::PublishDiagnosticsParams, - disk_based_sources: &[String], - cx: &mut ModelContext, - ) -> Result<()> { - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.update_diagnostics(language_server_id, params, disk_based_sources, cx) - }) - } - - pub fn update_diagnostic_entries( - &mut self, - server_id: LanguageServerId, - abs_path: PathBuf, - version: Option, - diagnostics: Vec>>, - cx: &mut ModelContext, - ) -> Result<(), anyhow::Error> { - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.update_diagnostic_entries(server_id, abs_path, version, diagnostics, cx) - }) - } - pub fn reload_buffers( &self, buffers: HashSet>, @@ -2797,45 +2875,17 @@ impl Project { }) } - pub fn resolve_completions( - &self, - buffer: Model, - completion_indices: Vec, - completions: Arc>>, - cx: &mut ModelContext, - ) -> Task> { - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.resolve_completions(buffer, completion_indices, completions, cx) - }) - } - - pub fn apply_additional_edits_for_completion( - &self, - buffer_handle: Model, - completion: Completion, - push_to_history: bool, - cx: &mut ModelContext, - ) -> Task>> { - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.apply_additional_edits_for_completion( - buffer_handle, - completion, - push_to_history, - cx, - ) - }) - } - pub fn code_actions( &mut self, buffer_handle: &Model, range: Range, + kinds: Option>, cx: &mut ModelContext, ) -> Task>> { let buffer = buffer_handle.read(cx); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.code_actions(buffer_handle, range, cx) + lsp_store.code_actions(buffer_handle, 
range, kinds, cx) }) } @@ -3383,12 +3433,9 @@ impl Project { } pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary { - let mut summary = DiagnosticSummary::default(); - for (_, _, path_summary) in self.diagnostic_summaries(include_ignored, cx) { - summary.error_count += path_summary.error_count; - summary.warning_count += path_summary.warning_count; - } - summary + self.lsp_store + .read(cx) + .diagnostic_summary(include_ignored, cx) } pub fn diagnostic_summaries<'a>( @@ -3480,17 +3527,6 @@ impl Project { ) } - pub fn get_repo( - &self, - project_path: &ProjectPath, - cx: &AppContext, - ) -> Option> { - self.worktree_for_id(project_path.worktree_id, cx)? - .read(cx) - .as_local()? - .local_git_repo(&project_path.path) - } - pub fn get_first_worktree_root_repo(&self, cx: &AppContext) -> Option> { let worktree = self.visible_worktrees(cx).next()?.read(cx).as_local()?; let root_entry = worktree.root_git_entry()?; @@ -4100,13 +4136,6 @@ impl Project { Ok(()) } - pub fn language_servers<'a>( - &'a self, - cx: &'a AppContext, - ) -> impl 'a + Iterator { - self.lsp_store.read(cx).language_servers() - } - pub fn supplementary_language_servers<'a>( &'a self, cx: &'a AppContext, @@ -4114,22 +4143,18 @@ impl Project { self.lsp_store.read(cx).supplementary_language_servers() } - pub fn language_server_for_id( - &self, - id: LanguageServerId, - cx: &AppContext, - ) -> Option> { - self.lsp_store.read(cx).language_server_for_id(id) - } - - pub fn language_servers_for_buffer<'a>( + pub fn language_servers_for_local_buffer<'a>( &'a self, buffer: &'a Buffer, cx: &'a AppContext, ) -> impl Iterator, &'a Arc)> { self.lsp_store .read(cx) - .language_servers_for_buffer(buffer, cx) + .language_servers_for_local_buffer(buffer, cx) + } + + pub fn buffer_store(&self) -> &Model { + &self.buffer_store } } @@ -4340,7 +4365,7 @@ impl ResolvedPath { } } -impl Item for Buffer { +impl ProjectItem for Buffer { fn try_open( project: &Model, path: 
&ProjectPath, @@ -4359,6 +4384,10 @@ impl Item for Buffer { path: file.path().clone(), }) } + + fn is_dirty(&self) -> bool { + self.is_dirty() + } } impl Completion { @@ -4389,8 +4418,10 @@ impl Completion { } } -pub fn sort_worktree_entries(entries: &mut [Entry]) { +pub fn sort_worktree_entries(entries: &mut [impl AsRef]) { entries.sort_by(|entry_a, entry_b| { + let entry_a = entry_a.as_ref(); + let entry_b = entry_b.as_ref(); compare_paths( (&entry_a.path, entry_a.is_file()), (&entry_b.path, entry_b.is_file()), diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index bd0d7cc884..74b0db4e82 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -85,6 +85,9 @@ pub struct GitSettings { /// /// Default: tracked_files pub git_gutter: Option, + /// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter. + /// + /// Default: null pub gutter_debounce: Option, /// Whether or not to show git blame data inline in /// the currently focused line. 
diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 490d2e67b5..da2179eec6 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1,19 +1,25 @@ use crate::{Event, *}; use fs::FakeFs; use futures::{future, StreamExt}; +use git::diff::assert_hunks; use gpui::{AppContext, SemanticVersion, UpdateGlobal}; use http_client::Url; use language::{ language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent}, - tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter, - LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, + tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet, + DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding, + OffsetRangeExt, Point, ToPoint, +}; +use lsp::{ + notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter, + NumberOrString, TextDocumentEdit, WillRenameFiles, }; -use lsp::{DiagnosticSeverity, NumberOrString}; use parking_lot::Mutex; -use pretty_assertions::assert_eq; +use pretty_assertions::{assert_eq, assert_matches}; use serde_json::json; #[cfg(not(windows))] use std::os; +use std::{str::FromStr, sync::OnceLock}; use std::{mem, num::NonZeroU32, ops::Range, task::Poll}; use task::{ResolvedTask, TaskContext}; @@ -436,17 +442,17 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ); // Open a buffer without an associated language server. - let toml_buffer = project + let (toml_buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer("/the-root/Cargo.toml", cx) + project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx) }) .await .unwrap(); // Open a buffer with an associated language server before the language for it has been loaded. 
- let rust_buffer = project + let (rust_buffer, _handle2) = project .update(cx, |project, cx| { - project.open_local_buffer("/the-root/test.rs", cx) + project.open_local_buffer_with_lsp("/the-root/test.rs", cx) }) .await .unwrap(); @@ -507,9 +513,9 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ); // Open a third buffer with a different associated language server. - let json_buffer = project + let (json_buffer, _json_handle) = project .update(cx, |project, cx| { - project.open_local_buffer("/the-root/package.json", cx) + project.open_local_buffer_with_lsp("/the-root/package.json", cx) }) .await .unwrap(); @@ -544,9 +550,9 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // When opening another buffer whose language server is already running, // it is also configured based on the existing language server's capabilities. - let rust_buffer2 = project + let (rust_buffer2, _handle4) = project .update(cx, |project, cx| { - project.open_local_buffer("/the-root/test2.rs", cx) + project.open_local_buffer_with_lsp("/the-root/test2.rs", cx) }) .await .unwrap(); @@ -759,7 +765,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ); // Close notifications are reported only to servers matching the buffer's language. - cx.update(|_| drop(json_buffer)); + cx.update(|_| drop(_json_handle)); let close_message = lsp::DidCloseTextDocumentParams { text_document: lsp::TextDocumentIdentifier::new( lsp::Url::from_file_path("/the-root/package.json").unwrap(), @@ -821,9 +827,9 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon cx.executor().run_until_parked(); // Start the language server by opening a buffer with a compatible file extension. 
- let _buffer = project + let _ = project .update(cx, |project, cx| { - project.open_local_buffer("/the-root/src/a.rs", cx) + project.open_local_buffer_with_lsp("/the-root/src/a.rs", cx) }) .await .unwrap(); @@ -982,6 +988,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .await; let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let buffer_a = project .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) @@ -992,8 +999,8 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .await .unwrap(); - project.update(cx, |project, cx| { - project + lsp_store.update(cx, |lsp_store, cx| { + lsp_store .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { @@ -1010,7 +1017,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { cx, ) .unwrap(); - project + lsp_store .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { @@ -1081,6 +1088,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { .await; let project = Project::test(fs, ["/root/dir".as_ref()], cx).await; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/root/dir", true, cx) @@ -1098,8 +1106,8 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { let other_worktree_id = worktree.update(cx, |tree, _| tree.id()); let server_id = LanguageServerId(0); - project.update(cx, |project, cx| { - project + lsp_store.update(cx, |lsp_store, cx| { + lsp_store .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { @@ -1116,7 +1124,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { cx, ) .unwrap(); - project + lsp_store .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { @@ -1231,8 
+1239,10 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id()); // Cause worktree to start the fake language server - let _buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx)) + let _ = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/b.rs", cx) + }) .await .unwrap(); @@ -1251,6 +1261,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { fake_server .start_progress(format!("{}/0", progress_token)) .await; + assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints); assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsStarted { @@ -1357,8 +1368,10 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id()); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/a.rs", cx) + }) .await .unwrap(); @@ -1382,6 +1395,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC Some(worktree_id) ) ); + assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints); fake_server.start_progress(progress_token).await; assert_eq!( events.next().await.unwrap(), @@ -1430,8 +1444,10 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default()); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + let (buffer, _) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/a.rs", cx) + }) .await .unwrap(); @@ -1509,8 +1525,10 @@ async fn 
test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default()); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/a.rs", cx) + }) .await .unwrap(); @@ -1557,8 +1575,10 @@ async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) { }, ); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/a.rs", cx) + }) .await .unwrap(); @@ -1626,11 +1646,15 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { language_registry.add(js_lang()); let _rs_buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/a.rs", cx) + }) .await .unwrap(); let _js_buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx)) + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/b.js", cx) + }) .await .unwrap(); @@ -1726,6 +1750,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { fs.insert_tree("/dir", json!({ "a.rs": text })).await; let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -1742,6 +1767,10 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { .await .unwrap(); + let _handle = lsp_store.update(cx, |lsp_store, cx| { + lsp_store.register_buffer_with_language_servers(&buffer, cx) + }); + let mut fake_server = 
fake_servers.next().await.unwrap(); let open_notification = fake_server .receive_notification::() @@ -2013,9 +2042,9 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { project.update(cx, |project, cx| { project.lsp_store.update(cx, |lsp_store, cx| { lsp_store - .update_buffer_diagnostics( - &buffer, + .update_diagnostic_entries( LanguageServerId(0), + PathBuf::from("/dir/a.rs"), None, vec![ DiagnosticEntry { @@ -2073,9 +2102,10 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC .await; let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone()); - project.update(cx, |project, cx| { - project + lsp_store.update(cx, |lsp_store, cx| { + lsp_store .update_diagnostic_entries( LanguageServerId(0), Path::new("/dir/a.rs").to_owned(), @@ -2092,7 +2122,7 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC cx, ) .unwrap(); - project + lsp_store .update_diagnostic_entries( LanguageServerId(1), Path::new("/dir/a.rs").to_owned(), @@ -2111,7 +2141,7 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC .unwrap(); assert_eq!( - project.diagnostic_summary(false, cx), + lsp_store.diagnostic_summary(false, cx), DiagnosticSummary { error_count: 2, warning_count: 0, @@ -2153,8 +2183,10 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) { language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default()); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/a.rs", cx) + }) .await .unwrap(); @@ -2213,7 +2245,7 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) { let edits = lsp_store .update(cx, |lsp_store, cx| { - 
lsp_store.edits_from_lsp( + lsp_store.as_local_mut().unwrap().edits_from_lsp( &buffer, vec![ // replace body of first function @@ -2308,7 +2340,7 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAp // Rust-analyzer does this when performing a merge-imports code action. let edits = lsp_store .update(cx, |lsp_store, cx| { - lsp_store.edits_from_lsp( + lsp_store.as_local_mut().unwrap().edits_from_lsp( &buffer, [ // Replace the first use statement without editing the semicolon. @@ -2417,7 +2449,7 @@ async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) { // with ranges sometimes being inverted or pointing to invalid locations. let edits = lsp_store .update(cx, |lsp_store, cx| { - lsp_store.edits_from_lsp( + lsp_store.as_local_mut().unwrap().edits_from_lsp( &buffer, [ lsp::TextEdit { @@ -2524,8 +2556,10 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default()); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx)) + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/b.rs", cx) + }) .await .unwrap(); @@ -2629,8 +2663,8 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) { }, ); - let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) .await .unwrap(); @@ -2721,8 +2755,8 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) { }, ); - let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) .await .unwrap(); @@ -2784,15 +2818,17 @@ async fn test_apply_code_actions_with_commands(cx: &mut 
gpui::TestAppContext) { }, ); - let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) .await .unwrap(); let fake_server = fake_language_servers.next().await.unwrap(); // Language server returns code actions that contain commands, and not edits. - let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx)); + let actions = project.update(cx, |project, cx| { + project.code_actions(&buffer, 0..0, None, cx) + }); fake_server .handle_request::(|_, _| async move { Ok(Some(vec![ @@ -3239,10 +3275,22 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { Path::new("b/c/file5") ); - assert!(!buffer2.read(cx).file().unwrap().is_deleted()); - assert!(!buffer3.read(cx).file().unwrap().is_deleted()); - assert!(!buffer4.read(cx).file().unwrap().is_deleted()); - assert!(buffer5.read(cx).file().unwrap().is_deleted()); + assert_matches!( + buffer2.read(cx).file().unwrap().disk_state(), + DiskState::Present { .. } + ); + assert_matches!( + buffer3.read(cx).file().unwrap().disk_state(), + DiskState::Present { .. } + ); + assert_matches!( + buffer4.read(cx).file().unwrap().disk_state(), + DiskState::Present { .. } + ); + assert_eq!( + buffer5.read(cx).file().unwrap().disk_state(), + DiskState::Deleted + ); }); // Update the remote worktree. Check that it becomes consistent with the @@ -3416,7 +3464,11 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { ] ); events.lock().clear(); - buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx); + buffer.did_save( + buffer.version(), + buffer.file().unwrap().disk_state().mtime(), + cx, + ); }); // after saving, the buffer is not dirty, and emits a saved event. 
@@ -3675,6 +3727,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { .await; let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let buffer = project .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx)) .await @@ -3768,9 +3821,9 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { version: None, }; - project - .update(cx, |p, cx| { - p.update_diagnostics(LanguageServerId(0), message, &[], cx) + lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx) }) .unwrap(); let buffer = buffer.update(cx, |buffer, _| buffer.snapshot()); @@ -3896,6 +3949,135 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/dir", + json!({ + "one.rs": "const ONE: usize = 1;", + "two": { + "two.rs": "const TWO: usize = one::ONE + one::ONE;" + } + + }), + ) + .await; + let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let watched_paths = lsp::FileOperationRegistrationOptions { + filters: vec![ + FileOperationFilter { + scheme: Some("file".to_owned()), + pattern: lsp::FileOperationPattern { + glob: "**/*.rs".to_owned(), + matches: Some(lsp::FileOperationPatternKind::File), + options: None, + }, + }, + FileOperationFilter { + scheme: Some("file".to_owned()), + pattern: lsp::FileOperationPattern { + glob: "**/**".to_owned(), + matches: Some(lsp::FileOperationPatternKind::Folder), + options: None, + }, + }, + ], + }; + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + workspace: 
Some(lsp::WorkspaceServerCapabilities { + workspace_folders: None, + file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities { + did_rename: Some(watched_paths.clone()), + will_rename: Some(watched_paths), + ..Default::default() + }), + }), + ..Default::default() + }, + ..Default::default() + }, + ); + + let _ = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp("/dir/one.rs", cx) + }) + .await + .unwrap(); + + let fake_server = fake_servers.next().await.unwrap(); + let response = project.update(cx, |project, cx| { + let worktree = project.worktrees(cx).next().unwrap(); + let entry = worktree.read(cx).entry_for_path("one.rs").unwrap(); + project.rename_entry(entry.id, "three.rs".as_ref(), cx) + }); + let expected_edit = lsp::WorkspaceEdit { + changes: None, + document_changes: Some(DocumentChanges::Edits({ + vec![TextDocumentEdit { + edits: vec![lsp::Edit::Plain(lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 1, + }, + end: lsp::Position { + line: 0, + character: 3, + }, + }, + new_text: "This is not a drill".to_owned(), + })], + text_document: lsp::OptionalVersionedTextDocumentIdentifier { + uri: Url::from_str("file:///dir/two/two.rs").unwrap(), + version: Some(1337), + }, + }] + })), + change_annotations: None, + }; + let resolved_workspace_edit = Arc::new(OnceLock::new()); + fake_server + .handle_request::({ + let resolved_workspace_edit = resolved_workspace_edit.clone(); + let expected_edit = expected_edit.clone(); + move |params, _| { + let resolved_workspace_edit = resolved_workspace_edit.clone(); + let expected_edit = expected_edit.clone(); + async move { + assert_eq!(params.files.len(), 1); + assert_eq!(params.files[0].old_uri, "file:///dir/one.rs"); + assert_eq!(params.files[0].new_uri, "file:///dir/three.rs"); + resolved_workspace_edit.set(expected_edit.clone()).unwrap(); + Ok(Some(expected_edit)) + } + } + }) + .next() + .await + .unwrap(); + let _ = response.await.unwrap(); + 
fake_server + .handle_notification::(|params, _| { + assert_eq!(params.files.len(), 1); + assert_eq!(params.files[0].old_uri, "file:///dir/one.rs"); + assert_eq!(params.files[0].new_uri, "file:///dir/three.rs"); + }) + .next() + .await + .unwrap(); + assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit)); +} + #[gpui::test] async fn test_rename(cx: &mut gpui::TestAppContext) { // hi @@ -3929,9 +4111,9 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { }, ); - let buffer = project + let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer("/dir/one.rs", cx) + project.open_local_buffer_with_lsp("/dir/one.rs", cx) }) .await .unwrap(); @@ -4794,8 +4976,8 @@ async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) { ), ]; - let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx)) + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx)) .await .unwrap(); cx.executor().run_until_parked(); @@ -4903,8 +5085,8 @@ async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) { }, ); - let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx)) + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) .await .unwrap(); cx.executor().run_until_parked(); @@ -4945,6 +5127,84 @@ async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/dir", + json!({ + "a.ts": "a", + }), + ) + .await; + + let project = Project::test(fs, ["/dir".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(typescript_lang()); + let mut fake_language_servers = language_registry.register_fake_lsp( + "TypeScript", + FakeLspAdapter { + 
capabilities: lsp::ServerCapabilities { + code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)), + ..lsp::ServerCapabilities::default() + }, + ..FakeLspAdapter::default() + }, + ); + + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let fake_server = fake_language_servers + .next() + .await + .expect("failed to get the language server"); + + let mut request_handled = fake_server.handle_request::( + move |_, _| async move { + Ok(Some(vec![ + lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction { + title: "organize imports".to_string(), + kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS), + ..lsp::CodeAction::default() + }), + lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction { + title: "fix code".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + ..lsp::CodeAction::default() + }), + ])) + }, + ); + + let code_actions_task = project.update(cx, |project, cx| { + project.code_actions( + &buffer, + 0..buffer.read(cx).len(), + Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]), + cx, + ) + }); + + let () = request_handled + .next() + .await + .expect("The code action request should have been triggered"); + + let code_actions = code_actions_task.await.unwrap(); + assert_eq!(code_actions.len(), 1); + assert_eq!( + code_actions[0].lsp_action.kind, + Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS) + ); +} + #[gpui::test] async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -5016,8 +5276,8 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { ), ]; - let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx)) + let (buffer, _handle) = project + .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx)) .await .unwrap(); cx.executor().run_until_parked(); @@ -5076,7 +5336,7 @@ async fn test_multiple_language_server_actions(cx: 
&mut gpui::TestAppContext) { } let code_actions_task = project.update(cx, |project, cx| { - project.code_actions(&buffer, 0..buffer.read(cx).len(), cx) + project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx) }); // cx.run_until_parked(); @@ -5300,6 +5560,98 @@ async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let staged_contents = r#" + fn main() { + println!("hello world"); + } + "# + .unindent(); + let file_contents = r#" + // print goodbye + fn main() { + println!("goodbye world"); + } + "# + .unindent(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/dir", + json!({ + ".git": {}, + "src": { + "main.rs": file_contents, + } + }), + ) + .await; + + fs.set_index_for_repo( + Path::new("/dir/.git"), + &[(Path::new("src/main.rs"), staged_contents)], + ); + + let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/dir/src/main.rs", cx) + }) + .await + .unwrap(); + let unstaged_changes = project + .update(cx, |project, cx| { + project.open_unstaged_changes(buffer.clone(), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + unstaged_changes.update(cx, |unstaged_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(), + &[ + (0..1, "", "// print goodbye\n"), + ( + 2..3, + " println!(\"hello world\");\n", + " println!(\"goodbye world\");\n", + ), + ], + ); + }); + + let staged_contents = r#" + // print goodbye + fn main() { + } + "# + .unindent(); + + fs.set_index_for_repo( + Path::new("/dir/.git"), + &[(Path::new("src/main.rs"), staged_contents)], + ); + + cx.run_until_parked(); + unstaged_changes.update(cx, 
|unstaged_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(), + &[(2..3, "", " println!(\"goodbye world\");\n")], + ); + }); +} + async fn search( project: &Model, query: SearchQuery, diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index 6a2d5032e4..f70baeb6d8 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -3,20 +3,18 @@ use anyhow::Result; use client::proto; use fancy_regex::{Captures, Regex, RegexBuilder}; use gpui::Model; -use language::{Buffer, BufferSnapshot}; +use language::{Buffer, BufferSnapshot, CharKind}; use smol::future::yield_now; use std::{ borrow::Cow, io::{BufRead, BufReader, Read}, ops::Range, path::Path, - sync::{Arc, OnceLock}, + sync::{Arc, LazyLock}, }; use text::Anchor; use util::paths::PathMatcher; -static TEXT_REPLACEMENT_SPECIAL_CHARACTERS_REGEX: OnceLock = OnceLock::new(); - pub enum SearchResult { Buffer { buffer: Model, @@ -76,6 +74,12 @@ pub enum SearchQuery { }, } +static WORD_MATCH_TEST: LazyLock = LazyLock::new(|| { + RegexBuilder::new(r"\B") + .build() + .expect("Failed to create WORD_MATCH_TEST") +}); + impl SearchQuery { pub fn text( query: impl ToString, @@ -119,9 +123,17 @@ impl SearchQuery { let initial_query = Arc::from(query.as_str()); if whole_word { let mut word_query = String::new(); - word_query.push_str("\\b"); + if let Some(first) = query.get(0..1) { + if WORD_MATCH_TEST.is_match(first).is_ok_and(|x| !x) { + word_query.push_str("\\b"); + } + } word_query.push_str(&query); - word_query.push_str("\\b"); + if let Some(last) = query.get(query.len() - 1..) { + if WORD_MATCH_TEST.is_match(last).is_ok_and(|x| !x) { + word_query.push_str("\\b"); + } + } query = word_query } @@ -251,16 +263,17 @@ impl SearchQuery { regex, replacement, .. 
} => { if let Some(replacement) = replacement { - let replacement = TEXT_REPLACEMENT_SPECIAL_CHARACTERS_REGEX - .get_or_init(|| Regex::new(r"\\\\|\\n|\\t").unwrap()) - .replace_all(replacement, |c: &Captures| { - match c.get(0).unwrap().as_str() { - r"\\" => "\\", - r"\n" => "\n", - r"\t" => "\t", - x => unreachable!("Unexpected escape sequence: {}", x), - } - }); + static TEXT_REPLACEMENT_SPECIAL_CHARACTERS_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\\\\|\\n|\\t").unwrap()); + let replacement = TEXT_REPLACEMENT_SPECIAL_CHARACTERS_REGEX.replace_all( + replacement, + |c: &Captures| match c.get(0).unwrap().as_str() { + r"\\" => "\\", + r"\n" => "\n", + r"\t" => "\t", + x => unreachable!("Unexpected escape sequence: {}", x), + }, + ); Some(regex.replace(text, replacement)) } else { None @@ -313,7 +326,9 @@ impl SearchQuery { let end_kind = classifier.kind(rope.reversed_chars_at(mat.end()).next().unwrap()); let next_kind = rope.chars_at(mat.end()).next().map(|c| classifier.kind(c)); - if Some(start_kind) == prev_kind || Some(end_kind) == next_kind { + if (Some(start_kind) == prev_kind && start_kind == CharKind::Word) + || (Some(end_kind) == next_kind && end_kind == CharKind::Word) + { continue; } } diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 52583ea8fb..ede820e3e9 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -81,7 +81,8 @@ impl Inventory { } /// Pulls its task sources relevant to the worktree and the language given, - /// returns all task templates with their source kinds, in no specific order. + /// returns all task templates with their source kinds, worktree tasks first, language tasks second + /// and global tasks last. No specific order inside source kinds groups. 
pub fn list_tasks( &self, file: Option>, @@ -92,13 +93,15 @@ impl Inventory { let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { name: language.name().0, }); + let global_tasks = self.global_templates_from_settings(); let language_tasks = language .and_then(|language| language.context_provider()?.associated_tasks(file, cx)) .into_iter() .flat_map(|tasks| tasks.0.into_iter()) - .flat_map(|task| Some((task_source_kind.clone()?, task))); + .flat_map(|task| Some((task_source_kind.clone()?, task))) + .chain(global_tasks); - self.templates_from_settings(worktree) + self.worktree_templates_from_settings(worktree) .chain(language_tasks) .collect() } @@ -106,7 +109,7 @@ impl Inventory { /// Pulls its task sources relevant to the worktree and the language given and resolves them with the [`TaskContext`] given. /// Joins the new resolutions with the resolved tasks that were used (spawned) before, /// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first. - /// Deduplicates the tasks by their labels and contenxt and splits the ordered list into two: used tasks and the rest, newly resolved tasks. + /// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks. 
pub fn used_and_current_resolved_tasks( &self, worktree: Option, @@ -165,14 +168,18 @@ impl Inventory { .collect::>(); let not_used_score = post_inc(&mut lru_score); + let global_tasks = self.global_templates_from_settings(); let language_tasks = language .and_then(|language| language.context_provider()?.associated_tasks(file, cx)) .into_iter() .flat_map(|tasks| tasks.0.into_iter()) - .flat_map(|task| Some((task_source_kind.clone()?, task))); - let new_resolved_tasks = self - .templates_from_settings(worktree) - .chain(language_tasks) + .flat_map(|task| Some((task_source_kind.clone()?, task))) + .chain(global_tasks); + let worktree_tasks = self + .worktree_templates_from_settings(worktree) + .chain(language_tasks); + + let new_resolved_tasks = worktree_tasks .filter_map(|(kind, task)| { let id_base = kind.to_id_base(); Some(( @@ -235,9 +242,8 @@ impl Inventory { self.last_scheduled_tasks.retain(|(_, task)| &task.id != id); } - fn templates_from_settings( + fn global_templates_from_settings( &self, - worktree: Option, ) -> impl '_ + Iterator { self.templates_from_settings .global @@ -252,28 +258,34 @@ impl Inventory { template, ) }) - .chain(worktree.into_iter().flat_map(|worktree| { - self.templates_from_settings - .worktree - .get(&worktree) - .into_iter() - .flatten() - .flat_map(|(directory, templates)| { - templates.iter().map(move |template| (directory, template)) - }) - .map(move |(directory, template)| { - ( - TaskSourceKind::Worktree { - id: worktree, - directory_in_worktree: directory.to_path_buf(), - id_base: Cow::Owned(format!( - "local worktree tasks from directory {directory:?}" - )), - }, - template.clone(), - ) - }) - })) + } + + fn worktree_templates_from_settings( + &self, + worktree: Option, + ) -> impl '_ + Iterator { + worktree.into_iter().flat_map(|worktree| { + self.templates_from_settings + .worktree + .get(&worktree) + .into_iter() + .flatten() + .flat_map(|(directory, templates)| { + templates.iter().map(move |template| (directory, 
template)) + }) + .map(move |(directory, template)| { + ( + TaskSourceKind::Worktree { + id: worktree, + directory_in_worktree: directory.to_path_buf(), + id_base: Cow::Owned(format!( + "local worktree tasks from directory {directory:?}" + )), + }, + template.clone(), + ) + }) + }) } /// Updates in-memory task metadata from the JSON string given. @@ -366,7 +378,7 @@ mod test_inventory { use crate::Inventory; - use super::{task_source_kind_preference, TaskSourceKind}; + use super::TaskSourceKind; pub(super) fn task_template_names( inventory: &Model, @@ -408,15 +420,18 @@ mod test_inventory { worktree: Option, cx: &mut TestAppContext, ) -> Vec<(TaskSourceKind, String)> { - let (used, current) = inventory.update(cx, |inventory, cx| { - inventory.used_and_current_resolved_tasks(worktree, None, &TaskContext::default(), cx) - }); - let mut all = used; - all.extend(current); - all.into_iter() - .map(|(source_kind, task)| (source_kind, task.resolved_label)) - .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) - .collect() + inventory.update(cx, |inventory, cx| { + let task_context = &TaskContext::default(); + inventory + .list_tasks(None, None, worktree, cx) + .into_iter() + .filter_map(|(source_kind, task)| { + let id_base = source_kind.to_id_base(); + Some((source_kind, task.resolve_task(&id_base, task_context)?)) + }) + .map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label)) + .collect() + }) } } @@ -789,6 +804,30 @@ mod tests { .unwrap(); }); + assert_eq!( + list_tasks_sorted_by_last_used(&inventory, None, cx).await, + worktree_independent_tasks, + "Without a worktree, only worktree-independent tasks should be listed" + ); + assert_eq!( + list_tasks_sorted_by_last_used(&inventory, Some(worktree_1), cx).await, + worktree_1_tasks + .iter() + .chain(worktree_independent_tasks.iter()) + .cloned() + .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) + .collect::>(), + ); + assert_eq!( + 
list_tasks_sorted_by_last_used(&inventory, Some(worktree_2), cx).await, + worktree_2_tasks + .iter() + .chain(worktree_independent_tasks.iter()) + .cloned() + .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) + .collect::>(), + ); + assert_eq!( list_tasks(&inventory, None, cx).await, worktree_independent_tasks, @@ -800,7 +839,6 @@ mod tests { .iter() .chain(worktree_independent_tasks.iter()) .cloned() - .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) .collect::>(), ); assert_eq!( @@ -809,7 +847,6 @@ mod tests { .iter() .chain(worktree_independent_tasks.iter()) .cloned() - .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) .collect::>(), ); } @@ -821,7 +858,7 @@ mod tests { TaskStore::init(None); } - pub(super) async fn resolved_task_names( + async fn resolved_task_names( inventory: &Model, worktree: Option, cx: &mut TestAppContext, @@ -849,4 +886,20 @@ mod tests { )) .unwrap() } + + async fn list_tasks_sorted_by_last_used( + inventory: &Model, + worktree: Option, + cx: &mut TestAppContext, + ) -> Vec<(TaskSourceKind, String)> { + let (used, current) = inventory.update(cx, |inventory, cx| { + inventory.used_and_current_resolved_tasks(worktree, None, &TaskContext::default(), cx) + }); + let mut all = used; + all.extend(current); + all.into_iter() + .map(|(source_kind, task)| (source_kind, task.resolved_label)) + .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) + .collect() + } } diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 662a5a4d47..33916cfd7b 100644 --- a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -331,7 +331,7 @@ fn local_task_context_for_location( let worktree_id = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)); let worktree_abs_path = worktree_id .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx)) - .map(|worktree| 
worktree.read(cx).abs_path()); + .and_then(|worktree| worktree.read(cx).root_dir()); cx.spawn(|mut cx| async move { let worktree_abs_path = worktree_abs_path.clone(); diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 1320a883f3..4e028d16e6 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -1,8 +1,9 @@ use crate::Project; -use anyhow::Context as _; +use anyhow::{Context as _, Result}; use collections::HashMap; -use gpui::{AnyWindowHandle, AppContext, Context, Entity, Model, ModelContext, WeakModel}; +use gpui::{AnyWindowHandle, AppContext, Context, Entity, Model, ModelContext, Task, WeakModel}; use itertools::Itertools; +use language::LanguageName; use settings::{Settings, SettingsLocation}; use smol::channel::bounded; use std::{ @@ -10,10 +11,11 @@ use std::{ env::{self}, iter, path::{Path, PathBuf}, + sync::Arc, }; use task::{Shell, SpawnInTerminal}; use terminal::{ - terminal_settings::{self, TerminalSettings}, + terminal_settings::{self, TerminalSettings, VenvSettings}, TaskState, TaskStatus, Terminal, TerminalBuilder, }; use util::ResultExt; @@ -42,19 +44,13 @@ pub struct SshCommand { } impl Project { - pub fn active_project_directory(&self, cx: &AppContext) -> Option { + pub fn active_project_directory(&self, cx: &AppContext) -> Option> { let worktree = self .active_entry() .and_then(|entry_id| self.worktree_for_entry(entry_id, cx)) .into_iter() .chain(self.worktrees(cx)) - .find_map(|tree| { - let worktree = tree.read(cx); - worktree - .root_entry() - .filter(|entry| entry.is_dir()) - .map(|_| worktree.abs_path().to_path_buf()) - }); + .find_map(|tree| tree.read(cx).root_dir()); worktree } @@ -87,18 +83,17 @@ impl Project { kind: TerminalKind, window: AnyWindowHandle, cx: &mut ModelContext, - ) -> anyhow::Result> { - let path = match &kind { - TerminalKind::Shell(path) => path.as_ref().map(|path| path.to_path_buf()), + ) -> Task>> { + let path: Option> = match &kind { + 
TerminalKind::Shell(path) => path.as_ref().map(|path| Arc::from(path.as_ref())), TerminalKind::Task(spawn_task) => { if let Some(cwd) = &spawn_task.cwd { - Some(cwd.clone()) + Some(Arc::from(cwd.as_ref())) } else { self.active_project_directory(cx) } } }; - let ssh_details = self.ssh_details(cx); let mut settings_location = None; if let Some(path) = path.as_ref() { @@ -109,12 +104,59 @@ impl Project { }); } } - let settings = TerminalSettings::get(settings_location, cx); + let settings = TerminalSettings::get(settings_location, cx).clone(); + + cx.spawn(move |project, mut cx| async move { + let python_venv_directory = if let Some(path) = path.clone() { + project + .update(&mut cx, |this, cx| { + this.python_venv_directory(path, settings.detect_venv.clone(), cx) + })? + .await + } else { + None + }; + project.update(&mut cx, |project, cx| { + project.create_terminal_with_venv(kind, python_venv_directory, window, cx) + })? + }) + } + + pub fn create_terminal_with_venv( + &mut self, + kind: TerminalKind, + python_venv_directory: Option, + window: AnyWindowHandle, + cx: &mut ModelContext, + ) -> Result> { + let this = &mut *self; + let path: Option> = match &kind { + TerminalKind::Shell(path) => path.as_ref().map(|path| Arc::from(path.as_ref())), + TerminalKind::Task(spawn_task) => { + if let Some(cwd) = &spawn_task.cwd { + Some(Arc::from(cwd.as_ref())) + } else { + this.active_project_directory(cx) + } + } + }; + let ssh_details = this.ssh_details(cx); + + let mut settings_location = None; + if let Some(path) = path.as_ref() { + if let Some((worktree, _)) = this.find_worktree(path, cx) { + settings_location = Some(SettingsLocation { + worktree_id: worktree.read(cx).id(), + path, + }); + } + } + let settings = TerminalSettings::get(settings_location, cx).clone(); let (completion_tx, completion_rx) = bounded(1); // Start with the environment that we might have inherited from the Zed CLI. 
- let mut env = self + let mut env = this .environment .read(cx) .get_cli_environment() @@ -128,16 +170,14 @@ impl Project { } else { None }; - let python_venv_directory = path - .as_ref() - .and_then(|path| self.python_venv_directory(path, settings, cx)); + let mut python_venv_activate_command = None; let (spawn_task, shell) = match kind { TerminalKind::Shell(_) => { - if let Some(python_venv_directory) = python_venv_directory { + if let Some(python_venv_directory) = &python_venv_directory { python_venv_activate_command = - self.python_activate_command(&python_venv_directory, settings); + this.python_activate_command(python_venv_directory, &settings.detect_venv); } match &ssh_details { @@ -152,10 +192,10 @@ impl Project { .or_insert_with(|| "xterm-256color".to_string()); let (program, args) = - wrap_for_ssh(ssh_command, None, path.as_deref(), env, None); + wrap_for_ssh(&ssh_command, None, path.as_deref(), env, None); env = HashMap::default(); ( - None, + Option::::None, Shell::WithArguments { program, args, @@ -174,6 +214,8 @@ impl Project { command_label: spawn_task.command_label, hide: spawn_task.hide, status: TaskStatus::Running, + show_summary: spawn_task.show_summary, + show_command: spawn_task.show_command, completion_rx, }); @@ -192,11 +234,11 @@ impl Project { env.entry("TERM".to_string()) .or_insert_with(|| "xterm-256color".to_string()); let (program, args) = wrap_for_ssh( - ssh_command, + &ssh_command, Some((&spawn_task.command, &spawn_task.args)), path.as_deref(), env, - python_venv_directory, + python_venv_directory.as_deref(), ); env = HashMap::default(); ( @@ -225,9 +267,9 @@ impl Project { } } }; - - let terminal = TerminalBuilder::new( - local_path, + TerminalBuilder::new( + local_path.map(|path| path.to_path_buf()), + python_venv_directory, spawn_task, shell, env, @@ -242,7 +284,7 @@ impl Project { .map(|builder| { let terminal_handle = cx.new_model(|cx| builder.subscribe(cx)); - self.terminals + this.terminals .local_handles 
.push(terminal_handle.downgrade()); @@ -261,25 +303,49 @@ impl Project { .detach(); if let Some(activate_command) = python_venv_activate_command { - self.activate_python_virtual_environment(activate_command, &terminal_handle, cx); + this.activate_python_virtual_environment(activate_command, &terminal_handle, cx); } terminal_handle - }); - - terminal + }) } - pub fn python_venv_directory( + fn python_venv_directory( &self, - abs_path: &Path, - settings: &TerminalSettings, - cx: &AppContext, - ) -> Option { - let venv_settings = settings.detect_venv.as_option()?; - if let Some(path) = self.find_venv_in_worktree(abs_path, &venv_settings, cx) { - return Some(path); - } - self.find_venv_on_filesystem(abs_path, &venv_settings, cx) + abs_path: Arc, + venv_settings: VenvSettings, + cx: &ModelContext, + ) -> Task> { + cx.spawn(move |this, mut cx| async move { + if let Some((worktree, _)) = this + .update(&mut cx, |this, cx| this.find_worktree(&abs_path, cx)) + .ok()? + { + let toolchain = this + .update(&mut cx, |this, cx| { + this.active_toolchain( + worktree.read(cx).id(), + LanguageName::new("Python"), + cx, + ) + }) + .ok()? 
+ .await; + + if let Some(toolchain) = toolchain { + let toolchain_path = Path::new(toolchain.path.as_ref()); + return Some(toolchain_path.parent()?.parent()?.to_path_buf()); + } + } + let venv_settings = venv_settings.as_option()?; + this.update(&mut cx, move |this, cx| { + if let Some(path) = this.find_venv_in_worktree(&abs_path, &venv_settings, cx) { + return Some(path); + } + this.find_venv_on_filesystem(&abs_path, &venv_settings, cx) + }) + .ok() + .flatten() + }) } fn find_venv_in_worktree( @@ -335,9 +401,9 @@ impl Project { fn python_activate_command( &self, venv_base_directory: &Path, - settings: &TerminalSettings, + venv_settings: &VenvSettings, ) -> Option { - let venv_settings = settings.detect_venv.as_option()?; + let venv_settings = venv_settings.as_option()?; let activate_keyword = match venv_settings.activate_script { terminal_settings::ActivateScript::Default => match std::env::consts::OS { "windows" => ".", @@ -367,16 +433,22 @@ impl Project { "windows" => "\r", _ => "\n", }; - Some(format!("{} {}{}", activate_keyword, quoted, line_ending)) + if smol::block_on(self.fs.metadata(path.as_ref())).is_err() { + return None; + } + Some(format!( + "{} {} ; clear{}", + activate_keyword, quoted, line_ending + )) } fn activate_python_virtual_environment( &self, command: String, terminal_handle: &Model, - cx: &mut ModelContext, + cx: &mut AppContext, ) { - terminal_handle.update(cx, |this, _| this.input_bytes(command.into_bytes())); + terminal_handle.update(cx, |terminal, _| terminal.input_bytes(command.into_bytes())); } pub fn local_terminal_handles(&self) -> &Vec> { @@ -384,12 +456,12 @@ impl Project { } } -pub fn wrap_for_ssh( +fn wrap_for_ssh( ssh_command: &SshCommand, command: Option<(&String, &Vec)>, path: Option<&Path>, env: HashMap, - venv_directory: Option, + venv_directory: Option<&Path>, ) -> (String, Vec) { let to_run = if let Some((command, args)) = command { let command = Cow::Borrowed(command.as_str()); @@ -439,7 +511,7 @@ pub fn wrap_for_ssh( 
(program, args) } -fn add_environment_path(env: &mut HashMap, new_path: &Path) -> anyhow::Result<()> { +fn add_environment_path(env: &mut HashMap, new_path: &Path) -> Result<()> { let mut env_paths = vec![new_path.to_path_buf()]; if let Some(path) = env.get("PATH").or(env::var("PATH").ok().as_ref()) { let mut paths = std::env::split_paths(&path).collect::>(); diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index c601ff8f12..71228d96a4 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -1,4 +1,4 @@ -use std::sync::Arc; +use std::{str::FromStr, sync::Arc}; use anyhow::{bail, Result}; @@ -119,6 +119,7 @@ impl ToolchainStore { let toolchain = Toolchain { name: toolchain.name.into(), path: toolchain.path.into(), + as_json: serde_json::Value::from_str(&toolchain.raw_json)?, language_name, }; let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); @@ -144,6 +145,7 @@ impl ToolchainStore { toolchain: toolchain.map(|toolchain| proto::Toolchain { name: toolchain.name.into(), path: toolchain.path.into(), + raw_json: toolchain.as_json.to_string(), }), }) } @@ -182,6 +184,7 @@ impl ToolchainStore { .map(|toolchain| proto::Toolchain { name: toolchain.name.to_string(), path: toolchain.path.to_string(), + raw_json: toolchain.as_json.to_string(), }) .collect::>() } else { @@ -308,12 +311,14 @@ impl LocalToolchainStore { }) .ok()? .await; - let language = registry.language_for_name(&language_name.0).await.ok()?; - let toolchains = language - .toolchain_lister()? 
- .list(root.to_path_buf(), project_env) - .await; - Some(toolchains) + + cx.background_executor() + .spawn(async move { + let language = registry.language_for_name(&language_name.0).await.ok()?; + let toolchains = language.toolchain_lister()?; + Some(toolchains.list(root.to_path_buf(), project_env).await) + }) + .await }) } pub(crate) fn active_toolchain( @@ -352,6 +357,7 @@ impl RemoteToolchainStore { toolchain: Some(proto::Toolchain { name: toolchain.name.into(), path: toolchain.path.into(), + raw_json: toolchain.as_json.to_string(), }), }) .await @@ -383,10 +389,13 @@ impl RemoteToolchainStore { let toolchains = response .toolchains .into_iter() - .map(|toolchain| Toolchain { - language_name: language_name.clone(), - name: toolchain.name.into(), - path: toolchain.path.into(), + .filter_map(|toolchain| { + Some(Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + as_json: serde_json::Value::from_str(&toolchain.raw_json).ok()?, + }) }) .collect(); let groups = response @@ -421,10 +430,13 @@ impl RemoteToolchainStore { .await .log_err()?; - response.toolchain.map(|toolchain| Toolchain { - language_name: language_name.clone(), - name: toolchain.name.into(), - path: toolchain.path.into(), + response.toolchain.and_then(|toolchain| { + Some(Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + as_json: serde_json::Value::from_str(&toolchain.raw_json).ok()?, + }) }) }) } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index db5ae67ba7..7a285055a9 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -23,10 +23,10 @@ use smol::{ stream::StreamExt, }; use text::ReplicaId; -use util::ResultExt; +use util::{paths::SanitizedPath, ResultExt}; use worktree::{Entry, ProjectEntryId, Worktree, WorktreeId, WorktreeSettings}; -use crate::{search::SearchQuery, ProjectPath}; 
+use crate::{search::SearchQuery, LspStore, ProjectPath}; struct MatchingEntry { worktree_path: Arc, @@ -52,7 +52,7 @@ pub struct WorktreeStore { worktrees_reordered: bool, #[allow(clippy::type_complexity)] loading_worktrees: - HashMap, Shared, Arc>>>>, + HashMap, Arc>>>>, state: WorktreeStoreState, } @@ -69,7 +69,6 @@ impl EventEmitter for WorktreeStore {} impl WorktreeStore { pub fn init(client: &AnyProtoClient) { client.add_model_request_handler(Self::handle_create_project_entry); - client.add_model_request_handler(Self::handle_rename_project_entry); client.add_model_request_handler(Self::handle_copy_project_entry); client.add_model_request_handler(Self::handle_delete_project_entry); client.add_model_request_handler(Self::handle_expand_project_entry); @@ -147,11 +146,12 @@ impl WorktreeStore { pub fn find_worktree( &self, - abs_path: &Path, + abs_path: impl Into, cx: &AppContext, ) -> Option<(Model, PathBuf)> { + let abs_path: SanitizedPath = abs_path.into(); for tree in self.worktrees() { - if let Ok(relative_path) = abs_path.strip_prefix(tree.read(cx).abs_path()) { + if let Ok(relative_path) = abs_path.as_path().strip_prefix(tree.read(cx).abs_path()) { return Some((tree.clone(), relative_path.into())); } } @@ -183,6 +183,19 @@ impl WorktreeStore { .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id)) } + pub fn worktree_and_entry_for_id<'a>( + &'a self, + entry_id: ProjectEntryId, + cx: &'a AppContext, + ) -> Option<(Model, &'a Entry)> { + self.worktrees().find_map(|worktree| { + worktree + .read(cx) + .entry_for_id(entry_id) + .map(|e| (worktree.clone(), e)) + }) + } + pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option { self.worktree_for_id(path.worktree_id, cx)? 
.read(cx) @@ -192,12 +205,12 @@ impl WorktreeStore { pub fn create_worktree( &mut self, - abs_path: impl AsRef, + abs_path: impl Into, visible: bool, cx: &mut ModelContext, ) -> Task>> { - let path: Arc = abs_path.as_ref().into(); - if !self.loading_worktrees.contains_key(&path) { + let abs_path: SanitizedPath = abs_path.into(); + if !self.loading_worktrees.contains_key(&abs_path) { let task = match &self.state { WorktreeStoreState::Remote { upstream_client, .. @@ -205,20 +218,26 @@ impl WorktreeStore { if upstream_client.is_via_collab() { Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab")))) } else { - self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx) + self.create_ssh_worktree( + upstream_client.clone(), + abs_path.clone(), + visible, + cx, + ) } } WorktreeStoreState::Local { fs } => { - self.create_local_worktree(fs.clone(), abs_path, visible, cx) + self.create_local_worktree(fs.clone(), abs_path.clone(), visible, cx) } }; - self.loading_worktrees.insert(path.clone(), task.shared()); + self.loading_worktrees + .insert(abs_path.clone(), task.shared()); } - let task = self.loading_worktrees.get(&path).unwrap().clone(); + let task = self.loading_worktrees.get(&abs_path).unwrap().clone(); cx.spawn(|this, mut cx| async move { let result = task.await; - this.update(&mut cx, |this, _| this.loading_worktrees.remove(&path)) + this.update(&mut cx, |this, _| this.loading_worktrees.remove(&abs_path)) .ok(); match result { Ok(worktree) => Ok(worktree), @@ -230,12 +249,11 @@ impl WorktreeStore { fn create_ssh_worktree( &mut self, client: AnyProtoClient, - abs_path: impl AsRef, + abs_path: impl Into, visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let path_key: Arc = abs_path.as_ref().into(); - let mut abs_path = path_key.clone().to_string_lossy().to_string(); + let mut abs_path = Into::::into(abs_path).to_string(); // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` // 
in which case want to strip the leading the `/`. // On the host-side, the `~` will get expanded. @@ -293,22 +311,23 @@ impl WorktreeStore { fn create_local_worktree( &mut self, fs: Arc, - abs_path: impl AsRef, + abs_path: impl Into, visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { let next_entry_id = self.next_entry_id.clone(); - let path: Arc = abs_path.as_ref().into(); + let path: SanitizedPath = abs_path.into(); cx.spawn(move |this, mut cx| async move { let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await; let worktree = worktree?; + this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; if visible { cx.update(|cx| { - cx.add_recent_document(&path); + cx.add_recent_document(path.as_path()); }) .log_err(); } @@ -565,11 +584,11 @@ impl WorktreeStore { pub fn shared( &mut self, remote_id: u64, - downsteam_client: AnyProtoClient, + downstream_client: AnyProtoClient, cx: &mut ModelContext, ) { self.retain_worktrees = true; - self.downstream_client = Some((downsteam_client, remote_id)); + self.downstream_client = Some((downstream_client, remote_id)); // When shared, retain all worktrees for worktree_handle in self.worktrees.iter_mut() { @@ -998,16 +1017,56 @@ impl WorktreeStore { } pub async fn handle_rename_project_entry( - this: Model, + this: Model, envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); - let worktree = this.update(&mut cx, |this, cx| { - this.worktree_for_entry(entry_id, cx) - .ok_or_else(|| anyhow!("worktree not found")) - })??; - Worktree::handle_rename_entry(worktree, envelope.payload, cx).await + let (worktree_id, worktree, old_path, is_dir) = this + .update(&mut cx, |this, cx| { + this.worktree_store + .read(cx) + .worktree_and_entry_for_id(entry_id, cx) + .map(|(worktree, entry)| { + ( + worktree.read(cx).id(), + worktree, + entry.path.clone(), + entry.is_dir(), + ) + }) + })? 
+ .ok_or_else(|| anyhow!("worktree not found"))?; + let (old_abs_path, new_abs_path) = { + let root_path = worktree.update(&mut cx, |this, _| this.abs_path())?; + ( + root_path.join(&old_path), + root_path.join(&envelope.payload.new_path), + ) + }; + let lsp_store = this + .update(&mut cx, |this, _| this.lsp_store())? + .downgrade(); + LspStore::will_rename_entry( + lsp_store, + worktree_id, + &old_abs_path, + &new_abs_path, + is_dir, + cx.clone(), + ) + .await; + let response = Worktree::handle_rename_entry(worktree, envelope.payload, cx.clone()).await; + this.update(&mut cx, |this, cx| { + this.lsp_store().read(cx).did_rename_entry( + worktree_id, + &old_abs_path, + &new_abs_path, + is_dir, + ); + }) + .ok(); + response } pub async fn handle_copy_project_entry( diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index dbcabc9f83..af913d9d6b 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -15,6 +15,7 @@ doctest = false [dependencies] anyhow.workspace = true collections.workspace = true +command_palette_hooks.workspace = true db.workspace = true editor.workspace = true file_icons.workspace = true diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 94472f5576..1eccfc9e1b 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1,4 +1,5 @@ mod project_panel_settings; +mod utils; use client::{ErrorCode, ErrorExt}; use language::DiagnosticSeverity; @@ -17,6 +18,7 @@ use file_icons::FileIcons; use anyhow::{anyhow, Context as _, Result}; use collections::{hash_map, BTreeSet, HashMap}; +use command_palette_hooks::CommandPaletteFilter; use git::repository::GitFileStatus; use gpui::{ actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, @@ -38,8 +40,10 @@ use project_panel_settings::{ }; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; +use std::any::TypeId; use 
std::{ cell::OnceCell, + cmp, collections::HashSet, ffi::OsStr, ops::Range, @@ -50,16 +54,16 @@ use std::{ use theme::ThemeSettings; use ui::{ prelude::*, v_flex, ContextMenu, DecoratedIcon, Icon, IconDecoration, IconDecorationKind, - IndentGuideColors, IndentGuideLayout, KeyBinding, Label, ListItem, Scrollbar, ScrollbarState, - Tooltip, + IndentGuideColors, IndentGuideLayout, KeyBinding, Label, ListItem, ListItemSpacing, Scrollbar, + ScrollbarState, Tooltip, }; -use util::{maybe, ResultExt, TryFutureExt}; +use util::{maybe, paths::compare_paths, ResultExt, TakeUntilExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, notifications::{DetachAndPromptErr, NotifyTaskExt}, DraggedSelection, OpenInTerminal, PreviewTabsSettings, SelectedEntry, Workspace, }; -use worktree::CreatedEntry; +use worktree::{CreatedEntry, GitEntry, GitEntryRef}; const PROJECT_PANEL_KEY: &str = "ProjectPanel"; const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX; @@ -72,7 +76,7 @@ pub struct ProjectPanel { // An update loop that keeps incrementing/decrementing scroll offset while there is a dragged entry that's // hovered over the start/end of a list. hover_scroll_task: Option>, - visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, + visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, /// Maps from leaf project entry ID to the currently selected ancestor. /// Relevant only for auto-fold dirs, where a single project panel entry may actually consist of several /// project entries (and all non-leaf nodes are guaranteed to be directories). 
@@ -187,6 +191,12 @@ actions!( UnfoldDirectory, FoldDirectory, SelectParent, + SelectNextGitEntry, + SelectPrevGitEntry, + SelectNextDiagnostic, + SelectPrevDiagnostic, + SelectNextDirectory, + SelectPrevDirectory, ] ); @@ -249,16 +259,18 @@ struct ItemColors { hover: Hsla, drag_over: Hsla, marked_active: Hsla, + focused: Hsla, } fn get_item_color(cx: &ViewContext) -> ItemColors { let colors = cx.theme().colors(); ItemColors { - default: colors.surface_background, - hover: colors.element_active, + default: colors.panel_background, + hover: colors.ghost_element_hover, drag_over: colors.drop_target_background, marked_active: colors.ghost_element_selected, + focused: colors.panel_focused_border, } } @@ -269,6 +281,7 @@ impl ProjectPanel { let focus_handle = cx.focus_handle(); cx.on_focus(&focus_handle, Self::focus_in).detach(); cx.on_focus_out(&focus_handle, |this, _, cx| { + this.focus_out(cx); this.hide_scrollbar(cx); }) .detach(); @@ -298,8 +311,9 @@ impl ProjectPanel { this.update_visible_entries(None, cx); cx.notify(); } - project::Event::WorktreeUpdatedEntries(_, _) - | project::Event::WorktreeAdded + project::Event::WorktreeUpdatedGitRepositories(_) + | project::Event::WorktreeUpdatedEntries(_, _) + | project::Event::WorktreeAdded(_) | project::Event::WorktreeOrderChanged => { this.update_visible_entries(None, cx); cx.notify(); @@ -308,6 +322,15 @@ impl ProjectPanel { }) .detach(); + let trash_action = [TypeId::of::()]; + let is_remote = project.read(cx).is_via_collab(); + + if is_remote { + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_action_types(&trash_action); + }); + } + let filename_editor = cx.new_view(Editor::single_line); cx.subscribe( @@ -547,7 +570,7 @@ impl ProjectPanel { .entry((project_path.worktree_id, path_buffer.clone())) .and_modify(|strongest_diagnostic_severity| { *strongest_diagnostic_severity = - std::cmp::min(*strongest_diagnostic_severity, diagnostic_severity); + cmp::min(*strongest_diagnostic_severity, 
diagnostic_severity); }) .or_insert(diagnostic_severity); } @@ -574,6 +597,12 @@ impl ProjectPanel { } } + fn focus_out(&mut self, cx: &mut ViewContext) { + if !self.focus_handle.is_focused(cx) { + self.confirm(&Confirm, cx); + } + } + fn deploy_context_menu( &mut self, position: Point, @@ -651,9 +680,11 @@ impl ProjectPanel { .action("Copy Relative Path", Box::new(CopyRelativePath)) .separator() .action("Rename", Box::new(Rename)) - .when(!is_root, |menu| { + .when(!is_root & !is_remote, |menu| { menu.action("Trash", Box::new(Trash { skip_prompt: false })) - .action("Delete", Box::new(Delete { skip_prompt: false })) + }) + .when(!is_root, |menu| { + menu.action("Delete", Box::new(Delete { skip_prompt: false })) }) .when(!is_remote & is_root, |menu| { menu.separator() @@ -1129,7 +1160,7 @@ impl ProjectPanel { } } - fn rename(&mut self, _: &Rename, cx: &mut ViewContext) { + fn rename_impl(&mut self, selection: Option>, cx: &mut ViewContext) { if let Some(SelectedEntry { worktree_id, entry_id, @@ -1153,13 +1184,16 @@ impl ProjectPanel { .map(|s| s.to_string_lossy()) .unwrap_or_default() .to_string(); - let file_stem = entry.path.file_stem().map(|s| s.to_string_lossy()); - let selection_end = - file_stem.map_or(file_name.len(), |file_stem| file_stem.len()); + let selection = selection.unwrap_or_else(|| { + let file_stem = entry.path.file_stem().map(|s| s.to_string_lossy()); + let selection_end = + file_stem.map_or(file_name.len(), |file_stem| file_stem.len()); + 0..selection_end + }); self.filename_editor.update(cx, |editor, cx| { editor.set_text(file_name, cx); editor.change_selections(Some(Autoscroll::fit()), cx, |s| { - s.select_ranges([0..selection_end]) + s.select_ranges([selection]) }); editor.focus(cx); }); @@ -1171,6 +1205,10 @@ impl ProjectPanel { } } + fn rename(&mut self, _: &Rename, cx: &mut ViewContext) { + self.rename_impl(None, cx); + } + fn trash(&mut self, action: &Trash, cx: &mut ViewContext) { self.remove(true, action.skip_prompt, cx); } @@ -1179,17 
+1217,17 @@ impl ProjectPanel { self.remove(false, action.skip_prompt, cx); } - fn remove(&mut self, trash: bool, skip_prompt: bool, cx: &mut ViewContext<'_, ProjectPanel>) { + fn remove(&mut self, trash: bool, skip_prompt: bool, cx: &mut ViewContext) { maybe!({ - if self.marked_entries.is_empty() && self.selection.is_none() { + let items_to_delete = self.disjoint_entries(cx); + if items_to_delete.is_empty() { return None; } let project = self.project.read(cx); - let items_to_delete = self.marked_entries(); let mut dirty_buffers = 0; let file_paths = items_to_delete - .into_iter() + .iter() .filter_map(|selection| { let project_path = project.path_for_entry(selection.entry_id, cx)?; dirty_buffers += @@ -1258,28 +1296,121 @@ impl ProjectPanel { } else { None }; - - cx.spawn(|this, mut cx| async move { + let next_selection = self.find_next_selection_after_deletion(items_to_delete, cx); + cx.spawn(|panel, mut cx| async move { if let Some(answer) = answer { if answer.await != Ok(0) { - return Result::<(), anyhow::Error>::Ok(()); + return anyhow::Ok(()); } } for (entry_id, _) in file_paths { - this.update(&mut cx, |this, cx| { - this.project - .update(cx, |project, cx| project.delete_entry(entry_id, trash, cx)) - .ok_or_else(|| anyhow!("no such entry")) - })?? - .await?; + panel + .update(&mut cx, |panel, cx| { + panel + .project + .update(cx, |project, cx| project.delete_entry(entry_id, trash, cx)) + .context("no such entry") + })?? 
+ .await?; } - Result::<(), anyhow::Error>::Ok(()) + panel.update(&mut cx, |panel, cx| { + if let Some(next_selection) = next_selection { + panel.selection = Some(next_selection); + panel.autoscroll(cx); + } else { + panel.select_last(&SelectLast {}, cx); + } + })?; + Ok(()) }) .detach_and_log_err(cx); Some(()) }); } + fn find_next_selection_after_deletion( + &self, + sanitized_entries: BTreeSet, + cx: &mut ViewContext, + ) -> Option { + if sanitized_entries.is_empty() { + return None; + } + + let project = self.project.read(cx); + let (worktree_id, worktree) = sanitized_entries + .iter() + .map(|entry| entry.worktree_id) + .filter_map(|id| project.worktree_for_id(id, cx).map(|w| (id, w.read(cx)))) + .max_by(|(_, a), (_, b)| a.root_name().cmp(b.root_name()))?; + + let marked_entries_in_worktree = sanitized_entries + .iter() + .filter(|e| e.worktree_id == worktree_id) + .collect::>(); + let latest_entry = marked_entries_in_worktree + .iter() + .max_by(|a, b| { + match ( + worktree.entry_for_id(a.entry_id), + worktree.entry_for_id(b.entry_id), + ) { + (Some(a), Some(b)) => { + compare_paths((&a.path, a.is_file()), (&b.path, b.is_file())) + } + _ => cmp::Ordering::Equal, + } + }) + .and_then(|e| worktree.entry_for_id(e.entry_id))?; + + let parent_path = latest_entry.path.parent()?; + let parent_entry = worktree.entry_for_path(parent_path)?; + + // Remove all siblings that are being deleted except the last marked entry + let mut siblings: Vec<_> = worktree + .snapshot() + .child_entries(parent_path) + .with_git_statuses() + .filter(|sibling| { + sibling.id == latest_entry.id + || !marked_entries_in_worktree.contains(&&SelectedEntry { + worktree_id, + entry_id: sibling.id, + }) + }) + .map(|entry| entry.to_owned()) + .collect(); + + project::sort_worktree_entries(&mut siblings); + let sibling_entry_index = siblings + .iter() + .position(|sibling| sibling.id == latest_entry.id)?; + + if let Some(next_sibling) = sibling_entry_index + .checked_add(1) + .and_then(|i| 
siblings.get(i)) + { + return Some(SelectedEntry { + worktree_id, + entry_id: next_sibling.id, + }); + } + if let Some(prev_sibling) = sibling_entry_index + .checked_sub(1) + .and_then(|i| siblings.get(i)) + { + return Some(SelectedEntry { + worktree_id, + entry_id: prev_sibling.id, + }); + } + // No neighbour sibling found, fall back to parent + Some(SelectedEntry { + worktree_id, + entry_id: parent_entry.id, + }) + } + fn unfold_directory(&mut self, _: &UnfoldDirectory, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_entry(cx) { self.unfolded_dir_ids.insert(entry.id); @@ -1380,6 +1511,176 @@ impl ProjectPanel { } } + fn select_prev_diagnostic(&mut self, _: &SelectPrevDiagnostic, cx: &mut ViewContext) { + let selection = self.find_entry( + self.selection.as_ref(), + true, + |entry, worktree_id| { + (self.selection.is_none() + || self.selection.is_some_and(|selection| { + if selection.worktree_id == worktree_id { + selection.entry_id != entry.id + } else { + true + } + })) + && entry.is_file() + && self + .diagnostics + .contains_key(&(worktree_id, entry.path.to_path_buf())) + }, + cx, + ); + + if let Some(selection) = selection { + self.selection = Some(selection); + self.expand_entry(selection.worktree_id, selection.entry_id, cx); + self.update_visible_entries(Some((selection.worktree_id, selection.entry_id)), cx); + self.autoscroll(cx); + cx.notify(); + } + } + + fn select_next_diagnostic(&mut self, _: &SelectNextDiagnostic, cx: &mut ViewContext) { + let selection = self.find_entry( + self.selection.as_ref(), + false, + |entry, worktree_id| { + (self.selection.is_none() + || self.selection.is_some_and(|selection| { + if selection.worktree_id == worktree_id { + selection.entry_id != entry.id + } else { + true + } + })) + && entry.is_file() + && self + .diagnostics + .contains_key(&(worktree_id, entry.path.to_path_buf())) + }, + cx, + ); + + if let Some(selection) = selection { + self.selection = Some(selection); + 
self.expand_entry(selection.worktree_id, selection.entry_id, cx); + self.update_visible_entries(Some((selection.worktree_id, selection.entry_id)), cx); + self.autoscroll(cx); + cx.notify(); + } + } + + fn select_prev_git_entry(&mut self, _: &SelectPrevGitEntry, cx: &mut ViewContext) { + let selection = self.find_entry( + self.selection.as_ref(), + true, + |entry, worktree_id| { + (self.selection.is_none() + || self.selection.is_some_and(|selection| { + if selection.worktree_id == worktree_id { + selection.entry_id != entry.id + } else { + true + } + })) + && entry.is_file() + && entry + .git_status + .is_some_and(|status| matches!(status, GitFileStatus::Modified)) + }, + cx, + ); + + if let Some(selection) = selection { + self.selection = Some(selection); + self.expand_entry(selection.worktree_id, selection.entry_id, cx); + self.update_visible_entries(Some((selection.worktree_id, selection.entry_id)), cx); + self.autoscroll(cx); + cx.notify(); + } + } + + fn select_prev_directory(&mut self, _: &SelectPrevDirectory, cx: &mut ViewContext) { + let selection = self.find_visible_entry( + self.selection.as_ref(), + true, + |entry, worktree_id| { + (self.selection.is_none() + || self.selection.is_some_and(|selection| { + if selection.worktree_id == worktree_id { + selection.entry_id != entry.id + } else { + true + } + })) + && entry.is_dir() + }, + cx, + ); + + if let Some(selection) = selection { + self.selection = Some(selection); + self.autoscroll(cx); + cx.notify(); + } + } + + fn select_next_directory(&mut self, _: &SelectNextDirectory, cx: &mut ViewContext) { + let selection = self.find_visible_entry( + self.selection.as_ref(), + false, + |entry, worktree_id| { + (self.selection.is_none() + || self.selection.is_some_and(|selection| { + if selection.worktree_id == worktree_id { + selection.entry_id != entry.id + } else { + true + } + })) + && entry.is_dir() + }, + cx, + ); + + if let Some(selection) = selection { + self.selection = Some(selection); + 
self.autoscroll(cx); + cx.notify(); + } + } + + fn select_next_git_entry(&mut self, _: &SelectNextGitEntry, cx: &mut ViewContext) { + let selection = self.find_entry( + self.selection.as_ref(), + true, + |entry, worktree_id| { + (self.selection.is_none() + || self.selection.is_some_and(|selection| { + if selection.worktree_id == worktree_id { + selection.entry_id != entry.id + } else { + true + } + })) + && entry.is_file() + && entry + .git_status + .is_some_and(|status| matches!(status, GitFileStatus::Modified)) + }, + cx, + ); + + if let Some(selection) = selection { + self.selection = Some(selection); + self.expand_entry(selection.worktree_id, selection.entry_id, cx); + self.update_visible_entries(Some((selection.worktree_id, selection.entry_id)), cx); + self.autoscroll(cx); + cx.notify(); + } + } + fn select_parent(&mut self, _: &SelectParent, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { if let Some(parent) = entry.path.parent() { @@ -1450,7 +1751,7 @@ impl ProjectPanel { } fn cut(&mut self, _: &Cut, cx: &mut ViewContext) { - let entries = self.marked_entries(); + let entries = self.disjoint_entries(cx); if !entries.is_empty() { self.clipboard = Some(ClipboardEntry::Cut(entries)); cx.notify(); @@ -1458,7 +1759,7 @@ impl ProjectPanel { } fn copy(&mut self, _: &Copy, cx: &mut ViewContext) { - let entries = self.marked_entries(); + let entries = self.disjoint_entries(cx); if !entries.is_empty() { self.clipboard = Some(ClipboardEntry::Copied(entries)); cx.notify(); @@ -1470,7 +1771,7 @@ impl ProjectPanel { source: &SelectedEntry, (worktree, target_entry): (Model, &Entry), cx: &AppContext, - ) -> Option { + ) -> Option<(PathBuf, Option>)> { let mut new_path = target_entry.path.to_path_buf(); // If we're pasting into a file, or a directory into itself, go up one level. 
if target_entry.is_file() || (target_entry.is_dir() && target_entry.id == source.entry_id) { @@ -1486,6 +1787,8 @@ impl ProjectPanel { new_path.push(&clipboard_entry_file_name); let extension = new_path.extension().map(|e| e.to_os_string()); let file_name_without_extension = Path::new(&clipboard_entry_file_name).file_stem()?; + let file_name_len = file_name_without_extension.to_string_lossy().len(); + let mut disambiguation_range = None; let mut ix = 0; { let worktree = worktree.read(cx); @@ -1493,9 +1796,17 @@ impl ProjectPanel { new_path.pop(); let mut new_file_name = file_name_without_extension.to_os_string(); - new_file_name.push(" copy"); + + let disambiguation = " copy"; + let mut disambiguation_len = disambiguation.len(); + + new_file_name.push(disambiguation); + if ix > 0 { - new_file_name.push(format!(" {}", ix)); + let extra_disambiguation = format!(" {}", ix); + disambiguation_len += extra_disambiguation.len(); + + new_file_name.push(extra_disambiguation); } if let Some(extension) = extension.as_ref() { new_file_name.push("."); @@ -1503,10 +1814,11 @@ impl ProjectPanel { } new_path.push(new_file_name); + disambiguation_range = Some(file_name_len..(file_name_len + disambiguation_len)); ix += 1; } } - Some(new_path) + Some((new_path, disambiguation_range)) } fn paste(&mut self, _: &Paste, cx: &mut ViewContext) { @@ -1524,9 +1836,10 @@ impl ProjectPanel { } let mut paste_entry_tasks: IndexMap<(ProjectEntryId, bool), PasteTask> = IndexMap::default(); + let mut disambiguation_range = None; let clip_is_cut = clipboard_entries.is_cut(); for clipboard_entry in clipboard_entries.items() { - let new_path = + let (new_path, new_disambiguation_range) = self.create_paste_path(clipboard_entry, self.selected_sub_entry(cx)?, cx)?; let clip_entry_id = clipboard_entry.entry_id; let is_same_worktree = clipboard_entry.worktree_id == worktree_id; @@ -1563,8 +1876,11 @@ impl ProjectPanel { }; let needs_delete = !is_same_worktree && clip_is_cut; 
paste_entry_tasks.insert((clip_entry_id, needs_delete), task); + disambiguation_range = new_disambiguation_range.or(disambiguation_range); } + let item_count = paste_entry_tasks.len(); + cx.spawn(|project_panel, mut cx| async move { let mut last_succeed = None; let mut need_delete_ids = Vec::new(); @@ -1585,17 +1901,6 @@ impl ProjectPanel { } } } - // update selection - if let Some(entry_id) = last_succeed { - project_panel - .update(&mut cx, |project_panel, _cx| { - project_panel.selection = Some(SelectedEntry { - worktree_id, - entry_id, - }); - }) - .ok(); - } // remove entry for cut in difference worktree for entry_id in need_delete_ids { project_panel @@ -1607,6 +1912,22 @@ impl ProjectPanel { })?? .await?; } + // update selection + if let Some(entry_id) = last_succeed { + project_panel + .update(&mut cx, |project_panel, cx| { + project_panel.selection = Some(SelectedEntry { + worktree_id, + entry_id, + }); + + // if only one entry was pasted and it was disambiguated, open the rename editor + if item_count == 1 && disambiguation_range.is_some() { + project_panel.rename_impl(disambiguation_range, cx); + } + }) + .ok(); + } anyhow::Ok(()) }) @@ -1625,7 +1946,7 @@ impl ProjectPanel { fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext) { let abs_file_paths = { let project = self.project.read(cx); - self.marked_entries() + self.effective_entries() .into_iter() .filter_map(|entry| { let entry_path = project.path_for_entry(entry.entry_id, cx)?.path; @@ -1649,7 +1970,7 @@ impl ProjectPanel { fn copy_relative_path(&mut self, _: &CopyRelativePath, cx: &mut ViewContext) { let file_paths = { let project = self.project.read(cx); - self.marked_entries() + self.effective_entries() .into_iter() .filter_map(|entry| { Some( @@ -1674,8 +1995,8 @@ impl ProjectPanel { } fn remove_from_project(&mut self, _: &RemoveFromProject, cx: &mut ViewContext) { - if let Some((worktree, _)) = self.selected_sub_entry(cx) { - let worktree_id = worktree.read(cx).id(); + for entry in 
self.effective_entries().iter() { + let worktree_id = entry.worktree_id; self.project .update(cx, |project, cx| project.remove_worktree(worktree_id, cx)); } @@ -1832,25 +2153,83 @@ impl ProjectPanel { None } - // Returns list of entries that should be affected by an operation. - // When currently selected entry is not marked, it's treated as the only marked entry. - fn marked_entries(&self) -> BTreeSet { - let Some(mut selection) = self.selection else { - return Default::default(); - }; - if self.marked_entries.contains(&selection) { - self.marked_entries - .iter() - .copied() - .map(|mut entry| { - entry.entry_id = self.resolve_entry(entry.entry_id); - entry - }) - .collect() - } else { - selection.entry_id = self.resolve_entry(selection.entry_id); - BTreeSet::from_iter([selection]) + fn disjoint_entries(&self, cx: &AppContext) -> BTreeSet { + let marked_entries = self.effective_entries(); + let mut sanitized_entries = BTreeSet::new(); + if marked_entries.is_empty() { + return sanitized_entries; } + + let project = self.project.read(cx); + let marked_entries_by_worktree: HashMap> = marked_entries + .into_iter() + .filter(|entry| !project.entry_is_worktree_root(entry.entry_id, cx)) + .fold(HashMap::default(), |mut map, entry| { + map.entry(entry.worktree_id).or_default().push(entry); + map + }); + + for (worktree_id, marked_entries) in marked_entries_by_worktree { + if let Some(worktree) = project.worktree_for_id(worktree_id, cx) { + let worktree = worktree.read(cx); + let marked_dir_paths = marked_entries + .iter() + .filter_map(|entry| { + worktree.entry_for_id(entry.entry_id).and_then(|entry| { + if entry.is_dir() { + Some(entry.path.as_ref()) + } else { + None + } + }) + }) + .collect::>(); + + sanitized_entries.extend(marked_entries.into_iter().filter(|entry| { + let Some(entry_info) = worktree.entry_for_id(entry.entry_id) else { + return false; + }; + let entry_path = entry_info.path.as_ref(); + let inside_marked_dir = 
marked_dir_paths.iter().any(|&marked_dir_path| { + entry_path != marked_dir_path && entry_path.starts_with(marked_dir_path) + }); + !inside_marked_dir + })); + } + } + + sanitized_entries + } + + fn effective_entries(&self) -> BTreeSet { + if let Some(selection) = self.selection { + let selection = SelectedEntry { + entry_id: self.resolve_entry(selection.entry_id), + worktree_id: selection.worktree_id, + }; + + // Default to using just the selected item when nothing is marked. + if self.marked_entries.is_empty() { + return BTreeSet::from([selection]); + } + + // Allow operating on the selected item even when something else is marked, + // making it easier to perform one-off actions without clearing a mark. + if self.marked_entries.len() == 1 && !self.marked_entries.contains(&selection) { + return BTreeSet::from([selection]); + } + } + + // Return only marked entries since we've already handled special cases where + // only selection should take precedence. At this point, marked entries may or + // may not include the current selection, which is intentional. + self.marked_entries + .iter() + .map(|entry| SelectedEntry { + entry_id: self.resolve_entry(entry.entry_id), + worktree_id: entry.worktree_id, + }) + .collect::>() } /// Finds the currently selected subentry for a given leaf entry id. 
If a given entry @@ -1967,7 +2346,7 @@ impl ProjectPanel { } let mut visible_worktree_entries = Vec::new(); - let mut entry_iter = snapshot.entries(true, 0); + let mut entry_iter = snapshot.entries(true, 0).with_git_statuses(); let mut auto_folded_ancestors = vec![]; while let Some(entry) = entry_iter.entry() { if auto_collapse_dirs && entry.kind.is_dir() { @@ -2009,7 +2388,7 @@ impl ProjectPanel { } } auto_folded_ancestors.clear(); - visible_worktree_entries.push(entry.clone()); + visible_worktree_entries.push(entry.to_owned()); let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id { entry.id == new_entry_id || { self.ancestors.get(&entry.id).map_or(false, |entries| { @@ -2023,24 +2402,27 @@ impl ProjectPanel { false }; if precedes_new_entry { - visible_worktree_entries.push(Entry { - id: NEW_ENTRY_ID, - kind: new_entry_kind, - path: entry.path.join("\0").into(), - inode: 0, - mtime: entry.mtime, - size: entry.size, - is_ignored: entry.is_ignored, - is_external: false, - is_private: false, + visible_worktree_entries.push(GitEntry { + entry: Entry { + id: NEW_ENTRY_ID, + kind: new_entry_kind, + path: entry.path.join("\0").into(), + inode: 0, + mtime: entry.mtime, + size: entry.size, + is_ignored: entry.is_ignored, + is_external: false, + is_private: false, + is_always_included: entry.is_always_included, + canonical_path: entry.canonical_path.clone(), + char_bag: entry.char_bag, + is_fifo: entry.is_fifo, + }, git_status: entry.git_status, - canonical_path: entry.canonical_path.clone(), - char_bag: entry.char_bag, - is_fifo: entry.is_fifo, }); } let worktree_abs_path = worktree.read(cx).abs_path(); - let (depth, path) = if Some(entry) == worktree.read(cx).root_entry() { + let (depth, path) = if Some(entry.entry) == worktree.read(cx).root_entry() { let Some(path_name) = worktree_abs_path .file_name() .with_context(|| { @@ -2117,8 +2499,8 @@ impl ProjectPanel { entry_iter.advance(); } - snapshot.propagate_git_statuses(&mut visible_worktree_entries); 
project::sort_worktree_entries(&mut visible_worktree_entries); + self.visible_entries .push((worktree_id, visible_worktree_entries, OnceCell::new())); } @@ -2265,23 +2647,55 @@ impl ProjectPanel { let _ = maybe!({ let project = self.project.read(cx); let target_worktree = project.worktree_for_entry(target_entry_id, cx)?; + let worktree_id = target_worktree.read(cx).id(); let target_entry = target_worktree .read(cx) .entry_for_id(target_entry_id)? .clone(); + + let mut copy_tasks = Vec::new(); + let mut disambiguation_range = None; for selection in selections.items() { - let new_path = self.create_paste_path( + let (new_path, new_disambiguation_range) = self.create_paste_path( selection, (target_worktree.clone(), &target_entry), cx, )?; - self.project - .update(cx, |project, cx| { - project.copy_entry(selection.entry_id, None, new_path, cx) - }) - .detach_and_log_err(cx) + + let task = self.project.update(cx, |project, cx| { + project.copy_entry(selection.entry_id, None, new_path, cx) + }); + copy_tasks.push(task); + disambiguation_range = new_disambiguation_range.or(disambiguation_range); } + let item_count = copy_tasks.len(); + + cx.spawn(|project_panel, mut cx| async move { + let mut last_succeed = None; + for task in copy_tasks.into_iter() { + if let Some(Some(entry)) = task.await.log_err() { + last_succeed = Some(entry.id); + } + } + // update selection + if let Some(entry_id) = last_succeed { + project_panel + .update(&mut cx, |project_panel, cx| { + project_panel.selection = Some(SelectedEntry { + worktree_id, + entry_id, + }); + + // if only one entry was dragged and it was disambiguated, open the rename editor + if item_count == 1 && disambiguation_range.is_some() { + project_panel.rename_impl(disambiguation_range, cx); + } + }) + .ok(); + } + }) + .detach(); Some(()) }); } else { @@ -2314,13 +2728,13 @@ impl ProjectPanel { None } - fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, &Entry)> { + fn entry_at_index(&self, index: usize) -> 
Option<(WorktreeId, GitEntryRef)> { let mut offset = 0; for (worktree_id, visible_worktree_entries, _) in &self.visible_entries { if visible_worktree_entries.len() > offset + index { return visible_worktree_entries .get(index) - .map(|entry| (*worktree_id, entry)); + .map(|entry| (*worktree_id, entry.to_ref())); } offset += visible_worktree_entries.len(); } @@ -2353,7 +2767,7 @@ impl ProjectPanel { .collect() }); for entry in visible_worktree_entries[entry_range].iter() { - callback(entry, entries, cx); + callback(&entry, entries, cx); } ix = end_ix; } @@ -2422,7 +2836,7 @@ impl ProjectPanel { }; let (depth, difference) = - ProjectPanel::calculate_depth_and_difference(entry, entries); + ProjectPanel::calculate_depth_and_difference(&entry, entries); let filename = match difference { diff if diff > 1 => entry @@ -2546,6 +2960,237 @@ impl ProjectPanel { } } + fn find_entry_in_worktree( + &self, + worktree_id: WorktreeId, + reverse_search: bool, + only_visible_entries: bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, + cx: &mut ViewContext, + ) -> Option { + if only_visible_entries { + let entries = self + .visible_entries + .iter() + .find_map(|(tree_id, entries, _)| { + if worktree_id == *tree_id { + Some(entries) + } else { + None + } + })? 
+ .clone(); + + return utils::ReversibleIterable::new(entries.iter(), reverse_search) + .find(|ele| predicate(ele.to_ref(), worktree_id)) + .cloned(); + } + + let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?; + worktree.update(cx, |tree, _| { + utils::ReversibleIterable::new( + tree.entries(true, 0usize).with_git_statuses(), + reverse_search, + ) + .find_single_ended(|ele| predicate(*ele, worktree_id)) + .map(|ele| ele.to_owned()) + }) + } + + fn find_entry( + &self, + start: Option<&SelectedEntry>, + reverse_search: bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, + cx: &mut ViewContext, + ) -> Option { + let mut worktree_ids: Vec<_> = self + .visible_entries + .iter() + .map(|(worktree_id, _, _)| *worktree_id) + .collect(); + + let mut last_found: Option = None; + + if let Some(start) = start { + let worktree = self + .project + .read(cx) + .worktree_for_id(start.worktree_id, cx)?; + + let search = worktree.update(cx, |tree, _| { + let entry = tree.entry_for_id(start.entry_id)?; + let root_entry = tree.root_entry()?; + let tree_id = tree.id(); + + let mut first_iter = tree + .traverse_from_path(true, true, true, entry.path.as_ref()) + .with_git_statuses(); + + if reverse_search { + first_iter.next(); + } + + let first = first_iter + .enumerate() + .take_until(|(count, entry)| entry.entry == root_entry && *count != 0usize) + .map(|(_, entry)| entry) + .find(|ele| predicate(*ele, tree_id)) + .map(|ele| ele.to_owned()); + + let second_iter = tree.entries(true, 0usize).with_git_statuses(); + + let second = if reverse_search { + second_iter + .take_until(|ele| ele.id == start.entry_id) + .filter(|ele| predicate(*ele, tree_id)) + .last() + .map(|ele| ele.to_owned()) + } else { + second_iter + .take_while(|ele| ele.id != start.entry_id) + .filter(|ele| predicate(*ele, tree_id)) + .last() + .map(|ele| ele.to_owned()) + }; + + if reverse_search { + Some((second, first)) + } else { + Some((first, second)) + } + }); + + if let Some((first, 
second)) = search { + let first = first.map(|entry| SelectedEntry { + worktree_id: start.worktree_id, + entry_id: entry.id, + }); + + let second = second.map(|entry| SelectedEntry { + worktree_id: start.worktree_id, + entry_id: entry.id, + }); + + if first.is_some() { + return first; + } + last_found = second; + + let idx = worktree_ids + .iter() + .enumerate() + .find(|(_, ele)| **ele == start.worktree_id) + .map(|(idx, _)| idx); + + if let Some(idx) = idx { + worktree_ids.rotate_left(idx + 1usize); + worktree_ids.pop(); + } + } + } + + for tree_id in worktree_ids.into_iter() { + if let Some(found) = + self.find_entry_in_worktree(tree_id, reverse_search, false, &predicate, cx) + { + return Some(SelectedEntry { + worktree_id: tree_id, + entry_id: found.id, + }); + } + } + + last_found + } + + fn find_visible_entry( + &self, + start: Option<&SelectedEntry>, + reverse_search: bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, + cx: &mut ViewContext, + ) -> Option { + let mut worktree_ids: Vec<_> = self + .visible_entries + .iter() + .map(|(worktree_id, _, _)| *worktree_id) + .collect(); + + let mut last_found: Option = None; + + if let Some(start) = start { + let entries = self + .visible_entries + .iter() + .find(|(worktree_id, _, _)| *worktree_id == start.worktree_id) + .map(|(_, entries, _)| entries)?; + + let mut start_idx = entries + .iter() + .enumerate() + .find(|(_, ele)| ele.id == start.entry_id) + .map(|(idx, _)| idx)?; + + if reverse_search { + start_idx = start_idx.saturating_add(1usize); + } + + let (left, right) = entries.split_at_checked(start_idx)?; + + let (first_iter, second_iter) = if reverse_search { + ( + utils::ReversibleIterable::new(left.iter(), reverse_search), + utils::ReversibleIterable::new(right.iter(), reverse_search), + ) + } else { + ( + utils::ReversibleIterable::new(right.iter(), reverse_search), + utils::ReversibleIterable::new(left.iter(), reverse_search), + ) + }; + + let first_search = first_iter.find(|ele| 
predicate(ele.to_ref(), start.worktree_id)); + let second_search = second_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id)); + + if first_search.is_some() { + return first_search.map(|entry| SelectedEntry { + worktree_id: start.worktree_id, + entry_id: entry.id, + }); + } + + last_found = second_search.map(|entry| SelectedEntry { + worktree_id: start.worktree_id, + entry_id: entry.id, + }); + + let idx = worktree_ids + .iter() + .enumerate() + .find(|(_, ele)| **ele == start.worktree_id) + .map(|(idx, _)| idx); + + if let Some(idx) = idx { + worktree_ids.rotate_left(idx + 1usize); + worktree_ids.pop(); + } + } + + for tree_id in worktree_ids.into_iter() { + if let Some(found) = + self.find_entry_in_worktree(tree_id, reverse_search, true, &predicate, cx) + { + return Some(SelectedEntry { + worktree_id: tree_id, + entry_id: found.id, + }); + } + } + + last_found + } + fn calculate_depth_and_difference( entry: &Entry, visible_worktree_entries: &HashSet>, @@ -2580,6 +3225,8 @@ impl ProjectPanel { details: EntryDetails, cx: &mut ViewContext, ) -> Stateful

{ + const GROUP_NAME: &str = "project_entry"; + let kind = details.kind; let settings = ProjectPanelSettings::get_global(cx); let show_editor = details.is_editing && !details.is_processing; @@ -2625,8 +3272,39 @@ impl ProjectPanel { marked_selections: selections, }; + let default_color = if is_marked { + item_colors.marked_active + } else { + item_colors.default + }; + + let bg_hover_color = if self.mouse_down || is_marked { + item_colors.marked_active + } else if !is_active { + item_colors.hover + } else { + item_colors.default + }; + + let border_color = + if !self.mouse_down && is_active && self.focus_handle.contains_focused(cx) { + item_colors.focused + } else if self.mouse_down && is_marked || is_active { + item_colors.marked_active + } else { + item_colors.default + }; + div() .id(entry_id.to_proto() as usize) + .group(GROUP_NAME) + .cursor_pointer() + .rounded_none() + .bg(default_color) + .border_1() + .border_r_2() + .border_color(border_color) + .hover(|style| style.bg(bg_hover_color)) .when(is_local, |div| { div.on_drag_move::(cx.listener( move |this, event: &DragMoveEvent, cx| { @@ -2748,10 +3426,14 @@ impl ProjectPanel { } else if event.down.modifiers.secondary() { if event.down.click_count > 1 { this.split_entry(entry_id, cx); - } else if !this.marked_entries.insert(selection) { - this.marked_entries.remove(&selection); + } else { + this.selection = Some(selection); + if !this.marked_entries.insert(selection) { + this.marked_entries.remove(&selection); + } } } else if kind.is_dir() { + this.marked_entries.clear(); this.toggle_expanded(entry_id, cx); } else { let preview_tabs_enabled = PreviewTabsSettings::get_global(cx).enabled; @@ -2761,12 +3443,17 @@ impl ProjectPanel { this.open_entry(entry_id, focus_opened_item, allow_preview, cx); } })) - .cursor_pointer() .child( ListItem::new(entry_id.to_proto() as usize) .indent_level(depth) .indent_step_size(px(settings.indent_size)) - .selected(is_marked || is_active) + .spacing(match settings.entry_spacing 
{ + project_panel_settings::EntrySpacing::Comfortable => ListItemSpacing::Dense, + project_panel_settings::EntrySpacing::Standard => { + ListItemSpacing::ExtraDense + } + }) + .selectable(false) .when_some(canonical_path, |this, path| { this.end_slot::( div() @@ -2806,13 +3493,11 @@ impl ProjectPanel { } else { IconDecorationKind::Dot }, - if is_marked || is_active { - item_colors.marked_active - } else { - item_colors.default - }, + default_color, cx, ) + .group_name(Some(GROUP_NAME.into())) + .knockout_hover_color(bg_hover_color) .color(decoration_color.color(cx)) .position(Point { x: px(-2.), @@ -2890,7 +3575,8 @@ impl ProjectPanel { .single_line() .color(filename_text_color) .when( - is_active && index == active_index, + index == active_index + && (is_active || is_marked), |this| this.underline(true), ), ); @@ -2915,31 +3601,18 @@ impl ProjectPanel { // Stop propagation to prevent the catch-all context menu for the project // panel from being deployed. cx.stop_propagation(); + // Some context menu actions apply to all marked entries. If the user + // right-clicks on an entry that is not marked, they may not realize the + // action applies to multiple entries. To avoid inadvertent changes, all + // entries are unmarked. 
+ if !this.marked_entries.contains(&selection) { + this.marked_entries.clear(); + } this.deploy_context_menu(event.position, entry_id, cx); }, )) .overflow_x(), ) - .border_1() - .border_r_2() - .rounded_none() - .hover(|style| { - if is_active { - style - } else { - style.bg(item_colors.hover).border_color(item_colors.hover) - } - }) - .when(is_marked || is_active, |this| { - this.when(is_marked, |this| { - this.bg(item_colors.marked_active) - .border_color(item_colors.marked_active) - }) - }) - .when( - !self.mouse_down && is_active && self.focus_handle.contains_focused(cx), - |this| this.border_color(Color::Selected.color(cx)), - ) } fn render_vertical_scrollbar(&self, cx: &mut ViewContext) -> Option> { @@ -3121,7 +3794,7 @@ impl ProjectPanel { project: Model, entry_id: ProjectEntryId, skip_ignored: bool, - cx: &mut ViewContext<'_, Self>, + cx: &mut ViewContext, ) { if let Some(worktree) = project.read(cx).worktree_for_entry(entry_id, cx) { let worktree = worktree.read(cx); @@ -3226,7 +3899,7 @@ fn item_width_estimate(depth: usize, item_text_chars: usize, is_symlink: bool) - } impl Render for ProjectPanel { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let has_worktree = !self.visible_entries.is_empty(); let project = self.project.read(cx); let indent_size = ProjectPanelSettings::get_global(cx).indent_size; @@ -3313,12 +3986,23 @@ impl Render for ProjectPanel { this.hide_scrollbar(cx); } })) + .on_click(cx.listener(|this, _event, cx| { + cx.stop_propagation(); + this.selection = None; + this.marked_entries.clear(); + })) .key_context(self.dispatch_context(cx)) .on_action(cx.listener(Self::select_next)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_last)) .on_action(cx.listener(Self::select_parent)) + .on_action(cx.listener(Self::select_next_git_entry)) + 
.on_action(cx.listener(Self::select_prev_git_entry)) + .on_action(cx.listener(Self::select_next_diagnostic)) + .on_action(cx.listener(Self::select_prev_diagnostic)) + .on_action(cx.listener(Self::select_next_directory)) + .on_action(cx.listener(Self::select_prev_directory)) .on_action(cx.listener(Self::expand_selected_entry)) .on_action(cx.listener(Self::collapse_selected_entry)) .on_action(cx.listener(Self::collapse_all_entries)) @@ -3505,7 +4189,7 @@ impl Render for ProjectPanel { deferred( anchored() .position(*position) - .anchor(gpui::AnchorCorner::TopLeft) + .anchor(gpui::Corner::TopLeft) .child(menu.clone()), ) .with_priority(1) @@ -3657,6 +4341,10 @@ impl Panel for ProjectPanel { .map_or(false, |entry| entry.is_dir()) }) } + + fn activation_priority(&self) -> u32 { + 0 + } } impl FocusableView for ProjectPanel { @@ -3687,7 +4375,6 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::path::{Path, PathBuf}; - use ui::Context; use workspace::{ item::{Item, ProjectItem}, register_project_item, AppState, @@ -4685,11 +5372,22 @@ mod tests { // "v root1", " one.txt", - " one copy.txt <== selected", + " [EDITOR: 'one copy.txt'] <== selected", " one.two.txt", ] ); + panel.update(cx, |panel, cx| { + panel.filename_editor.update(cx, |editor, cx| { + let file_name_selections = editor.selections.all::(cx); + assert_eq!(file_name_selections.len(), 1, "File editing should have a single selection, but got: {file_name_selections:?}"); + let file_name_selection = &file_name_selections[0]; + assert_eq!(file_name_selection.start, "one".len(), "Should select the file name disambiguation after the original file name"); + assert_eq!(file_name_selection.end, "one copy".len(), "Should select the file name disambiguation until the extension"); + }); + assert!(panel.confirm_edit(cx).is_none()); + }); + panel.update(cx, |panel, cx| { panel.paste(&Default::default(), cx); }); @@ -4702,10 +5400,12 @@ mod tests { "v root1", " one.txt", " one copy.txt", - " one copy 
1.txt <== selected", + " [EDITOR: 'one copy 1.txt'] <== selected", " one.two.txt", ] ); + + panel.update(cx, |panel, cx| assert!(panel.confirm_edit(cx).is_none())); } #[gpui::test] @@ -4897,11 +5597,14 @@ mod tests { " four.txt", " one.txt", " three.txt", - " three copy.txt <== selected", + " [EDITOR: 'three copy.txt'] <== selected", " two.txt", ] ); + panel.update(cx, |panel, cx| panel.cancel(&menu::Cancel {}, cx)); + cx.executor().run_until_parked(); + select_path(&panel, "root1/a", cx); panel.update(cx, |panel, cx| { panel.copy(&Default::default(), cx); @@ -5005,19 +5708,13 @@ mod tests { select_path(&panel, "root", cx); panel.update(cx, |panel, cx| panel.paste(&Default::default(), cx)); cx.executor().run_until_parked(); - panel.update(cx, |panel, cx| panel.paste(&Default::default(), cx)); - cx.executor().run_until_parked(); assert_eq!( visible_entries_as_strings(&panel, 0..50, cx), &[ // "v root", " > a", - " v a copy", - " > a <== selected", - " > inner_dir", - " one.txt", - " two.txt", + " > [EDITOR: 'a copy'] <== selected", " v b", " v a", " v inner_dir", @@ -5027,6 +5724,238 @@ mod tests { " two.txt" ] ); + + let confirm = panel.update(cx, |panel, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("c", cx)); + panel.confirm_edit(cx).unwrap() + }); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + // + "v root", + " > a", + " > [PROCESSING: 'c'] <== selected", + " v b", + " v a", + " v inner_dir", + " four.txt", + " three.txt", + " one.txt", + " two.txt" + ] + ); + + confirm.await.unwrap(); + + panel.update(cx, |panel, cx| panel.paste(&Default::default(), cx)); + cx.executor().run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + // + "v root", + " > a", + " v b", + " v a", + " v inner_dir", + " four.txt", + " three.txt", + " one.txt", + " two.txt", + " v c", + " > a <== selected", + " > inner_dir", + " one.txt", + " two.txt", + ] + ); + } + + #[gpui::test] + async fn 
test_copy_paste_directory_with_sibling_file(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/test", + json!({ + "dir1": { + "a.txt": "", + "b.txt": "", + }, + "dir2": {}, + "c.txt": "", + "d.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "test/dir1", cx); + + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + + select_path_with_mark(&panel, "test/dir1", cx); + select_path_with_mark(&panel, "test/c.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt", + " b.txt", + " > dir2", + " c.txt <== selected <== marked", + " d.txt", + ], + "Initial state before copying dir1 and c.txt" + ); + + panel.update(cx, |panel, cx| { + panel.copy(&Default::default(), cx); + }); + select_path(&panel, "test/dir2", cx); + panel.update(cx, |panel, cx| { + panel.paste(&Default::default(), cx); + }); + cx.executor().run_until_parked(); + + toggle_expand_dir(&panel, "test/dir2/dir1", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt", + " b.txt", + " v dir2", + " v dir1 <== selected", + " a.txt", + " b.txt", + " c.txt", + " c.txt <== marked", + " d.txt", + ], + "Should copy dir1 as well as c.txt into dir2" + ); + + // Disambiguating multiple files should not open the rename editor. 
+ select_path(&panel, "test/dir2", cx); + panel.update(cx, |panel, cx| { + panel.paste(&Default::default(), cx); + }); + cx.executor().run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt", + " b.txt", + " v dir2", + " v dir1", + " a.txt", + " b.txt", + " > dir1 copy <== selected", + " c.txt", + " c copy.txt", + " c.txt <== marked", + " d.txt", + ], + "Should copy dir1 as well as c.txt into dir2 and disambiguate them without opening the rename editor" + ); + } + + #[gpui::test] + async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/test", + json!({ + "dir1": { + "a.txt": "", + "b.txt": "", + }, + "dir2": {}, + "c.txt": "", + "d.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "test/dir1", cx); + + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + + select_path_with_mark(&panel, "test/dir1/a.txt", cx); + select_path_with_mark(&panel, "test/dir1", cx); + select_path_with_mark(&panel, "test/c.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt <== marked", + " b.txt", + " > dir2", + " c.txt <== selected <== marked", + " d.txt", + ], + "Initial state before copying a.txt, dir1 and c.txt" + ); + + panel.update(cx, |panel, cx| { + panel.copy(&Default::default(), cx); + }); + select_path(&panel, "test/dir2", cx); + panel.update(cx, |panel, cx| { + panel.paste(&Default::default(), cx); + }); + cx.executor().run_until_parked(); + + toggle_expand_dir(&panel, 
"test/dir2/dir1", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt <== marked", + " b.txt", + " v dir2", + " v dir1 <== selected", + " a.txt", + " b.txt", + " c.txt", + " c.txt <== marked", + " d.txt", + ], + "Should copy dir1 and c.txt into dir2. a.txt is already present in copied dir1." + ); } #[gpui::test] @@ -5073,14 +6002,13 @@ mod tests { &[ "v src", " v test", - " second.rs", + " second.rs <== selected", " third.rs" ], "Project panel should have no deleted file, no other file is selected in it" ); ensure_no_open_items_and_panes(&workspace, cx); - select_path(&panel, "src/test/second.rs", cx); panel.update(cx, |panel, cx| panel.open(&Open, cx)); cx.executor().run_until_parked(); assert_eq!( @@ -5114,7 +6042,7 @@ mod tests { submit_deletion_skipping_prompt(&panel, cx); assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), - &["v src", " v test", " third.rs"], + &["v src", " v test", " third.rs <== selected"], "Project panel should have no deleted file, with one last file remaining" ); ensure_no_open_items_and_panes(&workspace, cx); @@ -5287,6 +6215,107 @@ mod tests { ); } + #[gpui::test] + async fn test_select_directory(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/project_root", + json!({ + "dir_1": { + "nested_dir": { + "file_a.py": "# File contents", + } + }, + "file_1.py": "# File contents", + "dir_2": { + + }, + "dir_3": { + + }, + "file_2.py": "# File contents", + "dir_4": { + + }, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + panel.update(cx, |panel, cx| panel.open(&Open, cx)); + cx.executor().run_until_parked(); + 
select_path(&panel, "project_root/dir_1", cx); + cx.executor().run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..10, cx), + &[ + "v project_root", + " > dir_1 <== selected", + " > dir_2", + " > dir_3", + " > dir_4", + " file_1.py", + " file_2.py", + ] + ); + panel.update(cx, |panel, cx| { + panel.select_prev_directory(&SelectPrevDirectory, cx) + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..10, cx), + &[ + "v project_root <== selected", + " > dir_1", + " > dir_2", + " > dir_3", + " > dir_4", + " file_1.py", + " file_2.py", + ] + ); + + panel.update(cx, |panel, cx| { + panel.select_prev_directory(&SelectPrevDirectory, cx) + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..10, cx), + &[ + "v project_root", + " > dir_1", + " > dir_2", + " > dir_3", + " > dir_4 <== selected", + " file_1.py", + " file_2.py", + ] + ); + + panel.update(cx, |panel, cx| { + panel.select_next_directory(&SelectNextDirectory, cx) + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..10, cx), + &[ + "v project_root <== selected", + " > dir_1", + " > dir_2", + " > dir_3", + " > dir_4", + " file_1.py", + " file_2.py", + ] + ); + } + #[gpui::test] async fn test_dir_toggle_collapse(cx: &mut gpui::TestAppContext) { init_test_with_editor(cx); @@ -5623,7 +6652,11 @@ mod tests { submit_deletion(&panel, cx); assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), - &["v project_root", " v dir_1", " v nested_dir",] + &[ + "v project_root", + " v dir_1", + " v nested_dir <== selected", + ] ); } #[gpui::test] @@ -6320,6 +7353,698 @@ mod tests { ); } + #[gpui::test] + async fn test_basic_file_deletion_scenarios(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "subdir1": {}, + "file1.txt": "", + "file2.txt": "", + }, + "dir2": { + "subdir2": {}, + "file3.txt": "", + "file4.txt": "", + }, + "file5.txt": "", + "file6.txt": "", + }), + ) + 
.await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "root/dir1", cx); + toggle_expand_dir(&panel, "root/dir2", cx); + + // Test Case 1: Delete middle file in directory + select_path(&panel, "root/dir1/file1.txt", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " > subdir1", + " file1.txt <== selected", + " file2.txt", + " v dir2", + " > subdir2", + " file3.txt", + " file4.txt", + " file5.txt", + " file6.txt", + ], + "Initial state before deleting middle file" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " > subdir1", + " file2.txt <== selected", + " v dir2", + " > subdir2", + " file3.txt", + " file4.txt", + " file5.txt", + " file6.txt", + ], + "Should select next file after deleting middle file" + ); + + // Test Case 2: Delete last file in directory + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " > subdir1 <== selected", + " v dir2", + " > subdir2", + " file3.txt", + " file4.txt", + " file5.txt", + " file6.txt", + ], + "Should select next directory when last file is deleted" + ); + + // Test Case 3: Delete root level file + select_path(&panel, "root/file6.txt", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " > subdir1", + " v dir2", + " > subdir2", + " file3.txt", + " file4.txt", + " file5.txt", + " file6.txt <== selected", + ], + "Initial state before deleting root level file" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " > subdir1", + " v 
dir2", + " > subdir2", + " file3.txt", + " file4.txt", + " file5.txt <== selected", + ], + "Should select prev entry at root level" + ); + } + + #[gpui::test] + async fn test_complex_selection_scenarios(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "subdir1": { + "a.txt": "", + "b.txt": "" + }, + "file1.txt": "", + }, + "dir2": { + "subdir2": { + "c.txt": "", + "d.txt": "" + }, + "file2.txt": "", + }, + "file3.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "root/dir1", cx); + toggle_expand_dir(&panel, "root/dir1/subdir1", cx); + toggle_expand_dir(&panel, "root/dir2", cx); + toggle_expand_dir(&panel, "root/dir2/subdir2", cx); + + // Test Case 1: Select and delete nested directory with parent + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + select_path_with_mark(&panel, "root/dir1/subdir1", cx); + select_path_with_mark(&panel, "root/dir1", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1 <== selected <== marked", + " v subdir1 <== marked", + " a.txt", + " b.txt", + " file1.txt", + " v dir2", + " v subdir2", + " c.txt", + " d.txt", + " file2.txt", + " file3.txt", + ], + "Initial state before deleting nested directory with parent" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir2 <== selected", + " v subdir2", + " c.txt", + " d.txt", + " file2.txt", + " file3.txt", + ], + "Should select next directory after deleting directory with parent" + ); + + // Test Case 2: Select mixed files and 
directories across levels + select_path_with_mark(&panel, "root/dir2/subdir2/c.txt", cx); + select_path_with_mark(&panel, "root/dir2/file2.txt", cx); + select_path_with_mark(&panel, "root/file3.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir2", + " v subdir2", + " c.txt <== marked", + " d.txt", + " file2.txt <== marked", + " file3.txt <== selected <== marked", + ], + "Initial state before deleting" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir2 <== selected", + " v subdir2", + " d.txt", + ], + "Should select sibling directory" + ); + } + + #[gpui::test] + async fn test_delete_all_files_and_directories(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "subdir1": { + "a.txt": "", + "b.txt": "" + }, + "file1.txt": "", + }, + "dir2": { + "subdir2": { + "c.txt": "", + "d.txt": "" + }, + "file2.txt": "", + }, + "file3.txt": "", + "file4.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "root/dir1", cx); + toggle_expand_dir(&panel, "root/dir1/subdir1", cx); + toggle_expand_dir(&panel, "root/dir2", cx); + toggle_expand_dir(&panel, "root/dir2/subdir2", cx); + + // Test Case 1: Select all root files and directories + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + select_path_with_mark(&panel, "root/dir1", cx); + select_path_with_mark(&panel, "root/dir2", cx); + select_path_with_mark(&panel, "root/file3.txt", cx); + select_path_with_mark(&panel, "root/file4.txt", cx); + assert_eq!( + 
visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root", + " v dir1 <== marked", + " v subdir1", + " a.txt", + " b.txt", + " file1.txt", + " v dir2 <== marked", + " v subdir2", + " c.txt", + " d.txt", + " file2.txt", + " file3.txt <== marked", + " file4.txt <== selected <== marked", + ], + "State before deleting all contents" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &["v root <== selected"], + "Only empty root directory should remain after deleting all contents" + ); + } + + #[gpui::test] + async fn test_nested_selection_deletion(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "subdir1": { + "file_a.txt": "content a", + "file_b.txt": "content b", + }, + "subdir2": { + "file_c.txt": "content c", + }, + "file1.txt": "content 1", + }, + "dir2": { + "file2.txt": "content 2", + }, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "root/dir1", cx); + toggle_expand_dir(&panel, "root/dir1/subdir1", cx); + toggle_expand_dir(&panel, "root/dir2", cx); + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + + // Test Case 1: Select parent directory, subdirectory, and a file inside the subdirectory + select_path_with_mark(&panel, "root/dir1", cx); + select_path_with_mark(&panel, "root/dir1/subdir1", cx); + select_path_with_mark(&panel, "root/dir1/subdir1/file_a.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root", + " v dir1 <== marked", + " v subdir1 <== marked", + " file_a.txt <== selected <== marked", + " file_b.txt", + " > subdir2", + " 
file1.txt", + " v dir2", + " file2.txt", + ], + "State with parent dir, subdir, and file selected" + ); + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &["v root", " v dir2 <== selected", " file2.txt",], + "Only dir2 should remain after deletion" + ); + } + + #[gpui::test] + async fn test_multiple_worktrees_deletion(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + // First worktree + fs.insert_tree( + "/root1", + json!({ + "dir1": { + "file1.txt": "content 1", + "file2.txt": "content 2", + }, + "dir2": { + "file3.txt": "content 3", + }, + }), + ) + .await; + + // Second worktree + fs.insert_tree( + "/root2", + json!({ + "dir3": { + "file4.txt": "content 4", + "file5.txt": "content 5", + }, + "file6.txt": "content 6", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + // Expand all directories for testing + toggle_expand_dir(&panel, "root1/dir1", cx); + toggle_expand_dir(&panel, "root1/dir2", cx); + toggle_expand_dir(&panel, "root2/dir3", cx); + + // Test Case 1: Delete files across different worktrees + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + select_path_with_mark(&panel, "root1/dir1/file1.txt", cx); + select_path_with_mark(&panel, "root2/dir3/file4.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root1", + " v dir1", + " file1.txt <== marked", + " file2.txt", + " v dir2", + " file3.txt", + "v root2", + " v dir3", + " file4.txt <== selected <== marked", + " file5.txt", + " file6.txt", + ], + "Initial state with files selected from different worktrees" + ); + + submit_deletion(&panel, cx); + 
assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root1", + " v dir1", + " file2.txt", + " v dir2", + " file3.txt", + "v root2", + " v dir3", + " file5.txt <== selected", + " file6.txt", + ], + "Should select next file in the last worktree after deletion" + ); + + // Test Case 2: Delete directories from different worktrees + select_path_with_mark(&panel, "root1/dir1", cx); + select_path_with_mark(&panel, "root2/dir3", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root1", + " v dir1 <== marked", + " file2.txt", + " v dir2", + " file3.txt", + "v root2", + " v dir3 <== selected <== marked", + " file5.txt", + " file6.txt", + ], + "State with directories marked from different worktrees" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root1", + " v dir2", + " file3.txt", + "v root2", + " file6.txt <== selected", + ], + "Should select remaining file in last worktree after directory deletion" + ); + + // Test Case 4: Delete all remaining files except roots + select_path_with_mark(&panel, "root1/dir2/file3.txt", cx); + select_path_with_mark(&panel, "root2/file6.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root1", + " v dir2", + " file3.txt <== marked", + "v root2", + " file6.txt <== selected <== marked", + ], + "State with all remaining files marked" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &["v root1", " v dir2", "v root2 <== selected"], + "Second parent root should be selected after deleting" + ); + } + + #[gpui::test] + async fn test_selection_vs_marked_entries_priority(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "file1.txt": "", + "file2.txt": "", + "file3.txt": "", + }, + "dir2": { + "file4.txt": "", + "file5.txt": "", + }, + }), + ) 
+ .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "root/dir1", cx); + toggle_expand_dir(&panel, "root/dir2", cx); + + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + + select_path_with_mark(&panel, "root/dir1/file2.txt", cx); + select_path(&panel, "root/dir1/file1.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " file1.txt <== selected", + " file2.txt <== marked", + " file3.txt", + " v dir2", + " file4.txt", + " file5.txt", + ], + "Initial state with one marked entry and different selection" + ); + + // Delete should operate on the selected entry (file1.txt) + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " file2.txt <== selected <== marked", + " file3.txt", + " v dir2", + " file4.txt", + " file5.txt", + ], + "Should delete selected file, not marked file" + ); + + select_path_with_mark(&panel, "root/dir1/file3.txt", cx); + select_path_with_mark(&panel, "root/dir2/file4.txt", cx); + select_path(&panel, "root/dir2/file5.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " file2.txt <== marked", + " file3.txt <== marked", + " v dir2", + " file4.txt <== marked", + " file5.txt <== selected", + ], + "Initial state with multiple marked entries and different selection" + ); + + // Delete should operate on all marked entries, ignoring the selection + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v root", + " v dir1", + " v dir2", + " file5.txt <== selected", + ], + "Should delete all marked files, leaving only the 
selected file" + ); + } + + #[gpui::test] + async fn test_selection_fallback_to_next_highest_worktree(cx: &mut gpui::TestAppContext) { + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root_b", + json!({ + "dir1": { + "file1.txt": "content 1", + "file2.txt": "content 2", + }, + }), + ) + .await; + + fs.insert_tree( + "/root_c", + json!({ + "dir2": {}, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root_b".as_ref(), "/root_c".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + toggle_expand_dir(&panel, "root_b/dir1", cx); + toggle_expand_dir(&panel, "root_c/dir2", cx); + + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + select_path_with_mark(&panel, "root_b/dir1/file1.txt", cx); + select_path_with_mark(&panel, "root_b/dir1/file2.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root_b", + " v dir1", + " file1.txt <== marked", + " file2.txt <== selected <== marked", + "v root_c", + " v dir2", + ], + "Initial state with files marked in root_b" + ); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v root_b", + " v dir1 <== selected", + "v root_c", + " v dir2", + ], + "After deletion in root_b as it's last deletion, selection should be in root_b" + ); + + select_path_with_mark(&panel, "root_c/dir2", cx); + + submit_deletion(&panel, cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &["v root_b", " v dir1", "v root_c <== selected",], + "After deleting from root_c, it should remain in root_c" + ); + } + fn toggle_expand_dir( panel: &View, path: impl AsRef, @@ -6357,6 +8082,32 @@ mod tests { }); } + fn select_path_with_mark( + panel: &View, + path: impl AsRef, + cx: &mut 
VisualTestContext, + ) { + let path = path.as_ref(); + panel.update(cx, |panel, cx| { + for worktree in panel.project.read(cx).worktrees(cx).collect::>() { + let worktree = worktree.read(cx); + if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) { + let entry_id = worktree.entry_for_path(relative_path).unwrap().id; + let entry = crate::SelectedEntry { + worktree_id: worktree.id(), + entry_id, + }; + if !panel.marked_entries.contains(&entry) { + panel.marked_entries.insert(entry); + } + panel.selection = Some(entry); + return; + } + } + panic!("no worktree for path {:?}", path); + }); + } + fn find_project_entry( panel: &View, path: impl AsRef, @@ -6570,7 +8321,7 @@ mod tests { path: ProjectPath, } - impl project::Item for TestProjectItem { + impl project::ProjectItem for TestProjectItem { fn try_open( _project: &Model, path: &ProjectPath, @@ -6587,6 +8338,10 @@ mod tests { fn project_path(&self, _: &AppContext) -> Option { Some(self.path.clone()) } + + fn is_dirty(&self) -> bool { + false + } } impl ProjectItem for TestProjectItemView { diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 92ea7cea2d..bf16a9688f 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -18,11 +18,22 @@ pub enum ShowIndentGuides { Never, } +#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum EntrySpacing { + /// Comfortable spacing of entries. + #[default] + Comfortable, + /// The standard spacing of entries. 
+ Standard, +} + #[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct ProjectPanelSettings { pub button: bool, pub default_width: Pixels, pub dock: ProjectPanelDockPosition, + pub entry_spacing: EntrySpacing, pub file_icons: bool, pub folder_icons: bool, pub git_status: bool, @@ -90,6 +101,10 @@ pub struct ProjectPanelSettingsContent { /// /// Default: left pub dock: Option, + /// Spacing between worktree entries in the project panel. + /// + /// Default: comfortable + pub entry_spacing: Option, /// Whether to show file icons in the project panel. /// /// Default: true diff --git a/crates/project_panel/src/utils.rs b/crates/project_panel/src/utils.rs new file mode 100644 index 0000000000..486def9b84 --- /dev/null +++ b/crates/project_panel/src/utils.rs @@ -0,0 +1,42 @@ +pub(crate) struct ReversibleIterable { + pub(crate) it: It, + pub(crate) reverse: bool, +} + +impl ReversibleIterable { + pub(crate) fn new(it: T, reverse: bool) -> Self { + Self { it, reverse } + } +} + +impl ReversibleIterable +where + It: Iterator, +{ + pub(crate) fn find_single_ended(mut self, pred: F) -> Option + where + F: FnMut(&Item) -> bool, + { + if self.reverse { + self.it.filter(pred).last() + } else { + self.it.find(pred) + } + } +} + +impl ReversibleIterable +where + It: DoubleEndedIterator, +{ + pub(crate) fn find(mut self, mut pred: F) -> Option + where + F: FnMut(&Item) -> bool, + { + if self.reverse { + self.it.rfind(|x| pred(x)) + } else { + self.it.find(|x| pred(x)) + } + } +} diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 80cf90bf9e..a127f98392 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -11,7 +11,7 @@ use std::{borrow::Cow, cmp::Reverse, sync::Arc}; use theme::ActiveTheme; use util::ResultExt; use workspace::{ - ui::{v_flex, Color, Label, LabelCommon, LabelLike, ListItem, ListItemSpacing, Selectable}, + ui::{v_flex, Color, Label, 
LabelCommon, LabelLike, ListItem, ListItemSpacing, Toggleable}, Workspace, }; @@ -78,10 +78,7 @@ impl ProjectSymbolsDelegate { )); let sort_key_for_match = |mat: &StringMatch| { let symbol = &self.symbols[mat.candidate_id]; - ( - Reverse(OrderedFloat(mat.score)), - &symbol.label.text[symbol.label.filter_range.clone()], - ) + (Reverse(OrderedFloat(mat.score)), symbol.label.filter_text()) }; visible_matches.sort_unstable_by_key(sort_key_for_match); @@ -177,10 +174,7 @@ impl PickerDelegate for ProjectSymbolsDelegate { .iter() .enumerate() .map(|(id, symbol)| { - StringMatchCandidate::new( - id, - symbol.label.text[symbol.label.filter_range.clone()].to_string(), - ) + StringMatchCandidate::new(id, &symbol.label.filter_text()) }) .partition(|candidate| { project @@ -240,7 +234,7 @@ impl PickerDelegate for ProjectSymbolsDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child( v_flex() .child( @@ -292,7 +286,7 @@ mod tests { let _buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/dir/test.rs", cx) + project.open_local_buffer_with_lsp("/dir/test.rs", cx) }) .await .unwrap(); @@ -313,7 +307,7 @@ mod tests { let candidates = fake_symbols .iter() .enumerate() - .map(|(id, symbol)| StringMatchCandidate::new(id, symbol.name.clone())) + .map(|(id, symbol)| StringMatchCandidate::new(id, &symbol.name)) .collect::>(); let matches = if params.query.is_empty() { Vec::new() diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index dcd62751a7..3f426f0214 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -295,9 +295,18 @@ message Envelope { GetPanicFilesResponse get_panic_files_response = 281; CancelLanguageServerWork cancel_language_server_work = 282; - + LspExtOpenDocs lsp_ext_open_docs = 283; - LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; // current max + LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; + + 
SyncExtensions sync_extensions = 285; + SyncExtensionsResponse sync_extensions_response = 286; + InstallExtension install_extension = 287; + + GetStagedText get_staged_text = 288; + GetStagedTextResponse get_staged_text_response = 289; + + RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; } reserved 87 to 88; @@ -425,7 +434,7 @@ message Room { repeated Participant participants = 2; repeated PendingParticipant pending_participants = 3; repeated Follower followers = 4; - string live_kit_room = 5; + string livekit_room = 5; } message Participant { @@ -918,6 +927,7 @@ message Completion { string new_text = 3; uint64 server_id = 4; bytes lsp_completion = 5; + bool resolved = 6; } message GetCodeActions { @@ -1758,7 +1768,7 @@ message Entry { bool is_ignored = 7; bool is_external = 8; reserved 6; - optional GitStatus git_status = 9; + reserved 9; bool is_fifo = 10; optional uint64 size = 11; optional string canonical_path = 12; @@ -1767,6 +1777,8 @@ message Entry { message RepositoryEntry { uint64 work_directory_id = 1; optional string branch = 2; + repeated StatusEntry updated_statuses = 3; + repeated string removed_statuses = 4; } message StatusEntry { @@ -1778,17 +1790,19 @@ enum GitStatus { Added = 0; Modified = 1; Conflict = 2; + Deleted = 3; } message BufferState { uint64 id = 1; optional File file = 2; string base_text = 3; - optional string diff_base = 4; LineEnding line_ending = 5; repeated VectorClockEntry saved_version = 6; - reserved 7; Timestamp saved_mtime = 8; + + reserved 7; + reserved 4; } message BufferChunk { @@ -1979,7 +1993,16 @@ message WorktreeMetadata { message UpdateDiffBase { uint64 project_id = 1; uint64 buffer_id = 2; - optional string diff_base = 3; + optional string staged_text = 3; +} + +message GetStagedText { + uint64 project_id = 1; + uint64 buffer_id = 2; +} + +message GetStagedTextResponse { + optional string staged_text = 1; } message GetNotifications { @@ -2469,6 +2492,7 @@ message ListToolchains { 
message Toolchain { string name = 1; string path = 2; + string raw_json = 3; } message ToolchainGroup { @@ -2519,7 +2543,6 @@ message UpdateGitBranch { string branch_name = 2; ProjectPath repository = 3; } - message GetPanicFiles { } @@ -2544,3 +2567,28 @@ message CancelLanguageServerWork { optional string token = 2; } } + +message Extension { + string id = 1; + string version = 2; + bool dev = 3; +} + +message SyncExtensions { + repeated Extension extensions = 1; +} + +message SyncExtensionsResponse { + string tmp_dir = 1; + repeated Extension missing_extensions = 2; +} + +message InstallExtension { + Extension extension = 1; + string tmp_dir = 2; +} + +message RegisterBufferWithLanguageServers{ + uint64 project_id = 1; + uint64 buffer_id = 2; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 2ec9f8bf55..2210bd5973 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -216,6 +216,8 @@ messages!( (GetImplementationResponse, Background), (GetLlmToken, Background), (GetLlmTokenResponse, Background), + (GetStagedText, Foreground), + (GetStagedTextResponse, Foreground), (GetUsers, Foreground), (Hello, Foreground), (IncomingCall, Foreground), @@ -368,6 +370,10 @@ messages!( (GetPanicFiles, Background), (GetPanicFilesResponse, Background), (CancelLanguageServerWork, Foreground), + (SyncExtensions, Background), + (SyncExtensionsResponse, Background), + (InstallExtension, Background), + (RegisterBufferWithLanguageServers, Background), ); request_messages!( @@ -408,6 +414,7 @@ request_messages!( (GetProjectSymbols, GetProjectSymbolsResponse), (GetReferences, GetReferencesResponse), (GetSignatureHelp, GetSignatureHelpResponse), + (GetStagedText, GetStagedTextResponse), (GetSupermavenApiKey, GetSupermavenApiKeyResponse), (GetTypeDefinition, GetTypeDefinitionResponse), (LinkedEditingRange, LinkedEditingRangeResponse), @@ -491,6 +498,9 @@ request_messages!( (GetPathMetadata, GetPathMetadataResponse), (GetPanicFiles, 
GetPanicFilesResponse), (CancelLanguageServerWork, Ack), + (SyncExtensions, SyncExtensionsResponse), + (InstallExtension, Ack), + (RegisterBufferWithLanguageServers, Ack), ); entity_messages!( @@ -520,6 +530,7 @@ entity_messages!( GetProjectSymbols, GetReferences, GetSignatureHelp, + GetStagedText, GetTypeDefinition, InlayHints, JoinProject, @@ -575,6 +586,7 @@ entity_messages!( ActiveToolchain, GetPathMetadata, CancelLanguageServerWork, + RegisterBufferWithLanguageServers, ); entity_messages!( @@ -668,7 +680,7 @@ pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator bool { - false -} - -gpui::impl_actions!(projects, [OpenRecent]); -gpui::actions!(projects, [OpenRemote]); +use zed_actions::{OpenRecent, OpenRemote}; pub fn init(cx: &mut AppContext) { SshSettings::register(cx); @@ -224,24 +210,14 @@ impl PickerDelegate for RecentProjectsDelegate { .enumerate() .filter(|(_, (id, _))| !self.is_current_workspace(*id, cx)) .map(|(id, (_, location))| { - let combined_string = match location { - SerializedWorkspaceLocation::Local(paths, order) => order - .order() - .iter() - .zip(paths.paths().iter()) - .sorted_by_key(|(i, _)| *i) - .map(|(_, path)| path.compact().to_string_lossy().into_owned()) - .collect::>() - .join(""), - SerializedWorkspaceLocation::Ssh(ssh_project) => ssh_project - .ssh_urls() - .iter() - .map(|path| path.to_string_lossy().to_string()) - .collect::>() - .join(""), - }; + let combined_string = location + .sorted_paths() + .iter() + .map(|path| path.compact().to_string_lossy().into_owned()) + .collect::>() + .join(""); - StringMatchCandidate::new(id, combined_string) + StringMatchCandidate::new(id, &combined_string) }) .collect::>(); self.matches = smol::block_on(fuzzy::match_strings( @@ -377,21 +353,11 @@ impl PickerDelegate for RecentProjectsDelegate { let (_, location) = self.workspaces.get(hit.candidate_id)?; let mut path_start_offset = 0; - let paths = match location { - SerializedWorkspaceLocation::Local(paths, order) => 
Arc::new( - order - .order() - .iter() - .zip(paths.paths().iter()) - .sorted_by_key(|(i, _)| **i) - .map(|(_, path)| path.compact()) - .collect(), - ), - SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), - }; - let (match_labels, paths): (Vec<_>, Vec<_>) = paths + let (match_labels, paths): (Vec<_>, Vec<_>) = location + .sorted_paths() .iter() + .map(|p| p.compact()) .map(|path| { let highlighted_text = highlights_for_path(path.as_ref(), &hit.positions, path_start_offset); @@ -408,7 +374,7 @@ impl PickerDelegate for RecentProjectsDelegate { Some( ListItem::new(ix) - .selected(selected) + .toggle_state(selected) .inset(true) .spacing(ListItemSpacing::Sparse) .child( diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 49d870f56b..b9b0083b93 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -61,7 +61,7 @@ struct CreateRemoteServer { } impl CreateRemoteServer { - fn new(cx: &mut WindowContext<'_>) -> Self { + fn new(cx: &mut WindowContext) -> Self { let address_editor = cx.new_view(Editor::single_line); address_editor.update(cx, |this, cx| { this.focus_handle(cx).focus(cx); @@ -88,7 +88,7 @@ struct EditNicknameState { } impl EditNicknameState { - fn new(index: usize, cx: &mut WindowContext<'_>) -> Self { + fn new(index: usize, cx: &mut WindowContext) -> Self { let this = Self { index, editor: cx.new_view(Editor::single_line), @@ -264,7 +264,7 @@ struct DefaultState { servers: Vec, } impl DefaultState { - fn new(cx: &WindowContext<'_>) -> Self { + fn new(cx: &WindowContext) -> Self { let handle = ScrollHandle::new(); let scrollbar = ScrollbarState::new(handle.clone()); let add_new_server = NavigableEntry::new(&handle, cx); @@ -309,7 +309,7 @@ enum Mode { } impl Mode { - fn default_mode(cx: &WindowContext<'_>) -> Self { + fn default_mode(cx: &WindowContext) -> Self { Self::Default(DefaultState::new(cx)) } } @@ -653,7 
+653,7 @@ impl RemoteServerProjects { })) .child( ListItem::new(("new-remote-project", ix)) - .selected( + .toggle_state( ssh_server.open_folder.focus_handle.contains_focused(cx), ) .inset(true) @@ -688,7 +688,7 @@ impl RemoteServerProjects { })) .child( ListItem::new(("server-options", ix)) - .selected( + .toggle_state( ssh_server.configure.focus_handle.contains_focused(cx), ) .inset(true) @@ -772,7 +772,7 @@ impl RemoteServerProjects { })) .child( ListItem::new((element_id_base, ix)) - .selected(navigation.focus_handle.contains_focused(cx)) + .toggle_state(navigation.focus_handle.contains_focused(cx)) .inset(true) .spacing(ui::ListItemSpacing::Sparse) .start_slot( @@ -984,7 +984,7 @@ impl RemoteServerProjects { })) .child( ListItem::new("add-nickname") - .selected(entries[0].focus_handle.contains_focused(cx)) + .toggle_state(entries[0].focus_handle.contains_focused(cx)) .inset(true) .spacing(ui::ListItemSpacing::Sparse) .start_slot(Icon::new(IconName::Pencil).color(Color::Muted)) @@ -1003,7 +1003,7 @@ impl RemoteServerProjects { fn callback( workspace: WeakView, connection_string: SharedString, - cx: &mut WindowContext<'_>, + cx: &mut WindowContext, ) { cx.write_to_clipboard(ClipboardItem::new_string( connection_string.to_string(), @@ -1043,7 +1043,7 @@ impl RemoteServerProjects { }) .child( ListItem::new("copy-server-address") - .selected(entries[1].focus_handle.contains_focused(cx)) + .toggle_state(entries[1].focus_handle.contains_focused(cx)) .inset(true) .spacing(ui::ListItemSpacing::Sparse) .start_slot(Icon::new(IconName::Copy).color(Color::Muted)) @@ -1069,7 +1069,7 @@ impl RemoteServerProjects { remote_servers: View, index: usize, connection_string: SharedString, - cx: &mut WindowContext<'_>, + cx: &mut WindowContext, ) { let prompt_message = format!("Remove server `{}`?", connection_string); @@ -1116,7 +1116,7 @@ impl RemoteServerProjects { })) .child( ListItem::new("remove-server") - .selected(entries[2].focus_handle.contains_focused(cx)) + 
.toggle_state(entries[2].focus_handle.contains_focused(cx)) .inset(true) .spacing(ui::ListItemSpacing::Sparse) .start_slot(Icon::new(IconName::Trash).color(Color::Error)) @@ -1144,7 +1144,7 @@ impl RemoteServerProjects { })) .child( ListItem::new("go-back") - .selected(entries[3].focus_handle.contains_focused(cx)) + .toggle_state(entries[3].focus_handle.contains_focused(cx)) .inset(true) .spacing(ui::ListItemSpacing::Sparse) .start_slot( @@ -1233,7 +1233,7 @@ impl RemoteServerProjects { .anchor_scroll(state.add_new_server.scroll_anchor.clone()) .child( ListItem::new("register-remove-server-button") - .selected(state.add_new_server.focus_handle.contains_focused(cx)) + .toggle_state(state.add_new_server.focus_handle.contains_focused(cx)) .inset(true) .spacing(ui::ListItemSpacing::Sparse) .start_slot(Icon::new(IconName::Plus).color(Color::Muted)) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index e70b68d374..ad8985d20f 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -4,6 +4,7 @@ use std::{path::PathBuf, sync::Arc, time::Duration}; use anyhow::{anyhow, Result}; use auto_update::AutoUpdater; use editor::Editor; +use extension_host::ExtensionStore; use futures::channel::oneshot; use gpui::{ percentage, Animation, AnimationExt, AnyWindowHandle, AsyncAppContext, DismissEvent, @@ -200,7 +201,7 @@ impl SshPrompt { selection_background_color: cx.theme().players().local().selection, ..Default::default() }; - let markdown = cx.new_view(|cx| Markdown::new_text(prompt, markdown_style, None, cx, None)); + let markdown = cx.new_view(|cx| Markdown::new_text(prompt, markdown_style, None, None, cx)); self.prompt = Some((markdown, tx)); self.status_message.take(); cx.focus_view(&self.editor); @@ -356,7 +357,7 @@ impl RenderOnce for SshConnectionHeader { } impl Render for SshConnectionModal { - fn render(&mut self, cx: &mut ui::ViewContext) -> impl 
ui::IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl ui::IntoElement { let nickname = self.prompt.read(cx).nickname.clone(); let connection_string = self.prompt.read(cx).connection_string.clone(); @@ -630,6 +631,15 @@ pub async fn open_ssh_project( } } + window + .update(cx, |workspace, cx| { + if let Some(client) = workspace.project().read(cx).ssh_client().clone() { + ExtensionStore::global(cx) + .update(cx, |store, cx| store.register_ssh_client(client, cx)); + } + }) + .ok(); + break; } diff --git a/crates/release_channel/Cargo.toml b/crates/release_channel/Cargo.toml index cf664ceff1..7c12194674 100644 --- a/crates/release_channel/Cargo.toml +++ b/crates/release_channel/Cargo.toml @@ -10,4 +10,3 @@ workspace = true [dependencies] gpui.workspace = true -once_cell.workspace = true diff --git a/crates/release_channel/src/lib.rs b/crates/release_channel/src/lib.rs index 602c4f4b5f..a41c4de226 100644 --- a/crates/release_channel/src/lib.rs +++ b/crates/release_channel/src/lib.rs @@ -2,24 +2,23 @@ #![deny(missing_docs)] -use std::{env, str::FromStr}; +use std::{env, str::FromStr, sync::LazyLock}; use gpui::{AppContext, Global, SemanticVersion}; -use once_cell::sync::Lazy; /// stable | dev | nightly | preview -pub static RELEASE_CHANNEL_NAME: Lazy = if cfg!(debug_assertions) { - Lazy::new(|| { +pub static RELEASE_CHANNEL_NAME: LazyLock = LazyLock::new(|| { + if cfg!(debug_assertions) { env::var("ZED_RELEASE_CHANNEL") .unwrap_or_else(|_| include_str!("../../zed/RELEASE_CHANNEL").trim().to_string()) - }) -} else { - Lazy::new(|| include_str!("../../zed/RELEASE_CHANNEL").trim().to_string()) -}; + } else { + include_str!("../../zed/RELEASE_CHANNEL").trim().to_string() + } +}); #[doc(hidden)] -pub static RELEASE_CHANNEL: Lazy = - Lazy::new(|| match ReleaseChannel::from_str(&RELEASE_CHANNEL_NAME) { +pub static RELEASE_CHANNEL: LazyLock = + LazyLock::new(|| match ReleaseChannel::from_str(&RELEASE_CHANNEL_NAME) { Ok(channel) => channel, _ => panic!("invalid 
release channel {}", *RELEASE_CHANNEL_NAME), }); diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 20795be201..bd9cc47058 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -253,9 +253,11 @@ impl SshSocket { // :WARNING: ssh unquotes arguments when executing on the remote :WARNING: // e.g. $ ssh host sh -c 'ls -l' is equivalent to $ ssh host sh -c ls -l // and passes -l as an argument to sh, not to ls. - // You need to do it like this: $ ssh host "sh -c 'ls -l /tmp'" + // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing + // into a machine. You must use `cd` to get back to $HOME. + // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'" fn ssh_command(&self, program: &str, args: &[&str]) -> process::Command { - let mut command = process::Command::new("ssh"); + let mut command = util::command::new_smol_command("ssh"); let to_run = iter::once(&program) .chain(args.iter()) .map(|token| { @@ -267,6 +269,7 @@ impl SshSocket { shlex::try_quote(token).unwrap() }) .join(" "); + let to_run = format!("cd; {to_run}"); log::debug!("ssh {} {:?}", self.connection_options.ssh_url(), to_run); self.ssh_options(&mut command) .arg(self.connection_options.ssh_url()) @@ -1074,7 +1077,7 @@ impl SshRemoteClient { c.connections.insert( opts.clone(), ConnectionPoolEntry::Connecting( - cx.foreground_executor() + cx.background_executor() .spawn({ let connection = connection.clone(); async move { Ok(connection.clone()) } @@ -1224,7 +1227,7 @@ trait RemoteConnection: Send + Sync { struct SshRemoteConnection { socket: SshSocket, - master_process: Mutex>, + master_process: Mutex>, remote_binary_path: Option, _temp_dir: TempDir, } @@ -1258,7 +1261,7 @@ impl RemoteConnection for SshRemoteConnection { dest_path: PathBuf, cx: &AppContext, ) -> Task> { - let mut command = process::Command::new("scp"); + let mut command = util::command::new_smol_command("scp"); let output = self .socket 
.ssh_options(&mut command) @@ -1269,6 +1272,7 @@ impl RemoteConnection for SshRemoteConnection { .map(|port| vec!["-P".to_string(), port.to_string()]) .unwrap_or_default(), ) + .arg("-C") .arg("-r") .arg(&src_path) .arg(format!( @@ -1430,7 +1434,7 @@ impl SshRemoteConnection { anyhow::ensure!( which::which("nc").is_ok(), - "Cannot find nc, which is required to connect over ssh." + "Cannot find `nc` command (netcat), which is required to connect over SSH." ); // Create an askpass script that communicates back to this process. @@ -1568,6 +1572,7 @@ impl SshRemoteConnection { // exclude armv5,6,7 as they are 32-bit. let arch = if arch.starts_with("armv8") || arch.starts_with("armv9") + || arch.starts_with("arm64") || arch.starts_with("aarch64") { "aarch64" @@ -1909,7 +1914,7 @@ impl SshRemoteConnection { async fn upload_file(&self, src_path: &Path, dest_path: &Path) -> Result<()> { log::debug!("uploading file {:?} to {:?}", src_path, dest_path); - let mut command = process::Command::new("scp"); + let mut command = util::command::new_smol_command("scp"); let output = self .socket .ssh_options(&mut command) diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index 73e52895df..82853217dc 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -29,6 +29,8 @@ chrono.workspace = true clap.workspace = true client.workspace = true env_logger.workspace = true +extension.workspace = true +extension_host.workspace = true fs.workspace = true futures.workspace = true git.workspace = true @@ -36,6 +38,7 @@ git_hosting_providers.workspace = true gpui.workspace = true http_client.workspace = true language.workspace = true +language_extension.workspace = true languages.workspace = true log.workspace = true lsp.workspace = true diff --git a/crates/remote_server/build.rs b/crates/remote_server/build.rs index fae1889773..34abdacef7 100644 --- a/crates/remote_server/build.rs +++ b/crates/remote_server/build.rs @@ -9,6 +9,10 @@ fn 
main() { "cargo:rustc-env=ZED_PKG_VERSION={}", zed_cargo_toml.package.unwrap().version.unwrap() ); + println!( + "cargo:rustc-env=TARGET={}", + std::env::var("TARGET").unwrap() + ); // If we're building this for nightly, we want to set the ZED_COMMIT_SHA if let Some(release_channel) = std::env::var("ZED_RELEASE_CHANNEL").ok() { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 74416f6ed9..2fb8330603 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -1,4 +1,6 @@ use anyhow::{anyhow, Result}; +use extension::ExtensionHostProxy; +use extension_host::headless_host::HeadlessExtensionStore; use fs::Fs; use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, PromptLevel}; use http_client::HttpClient; @@ -37,6 +39,7 @@ pub struct HeadlessProject { pub settings_observer: Model, pub next_entry_id: Arc, pub languages: Arc, + pub extensions: Model, } pub struct HeadlessAppState { @@ -45,6 +48,7 @@ pub struct HeadlessAppState { pub http_client: Arc, pub node_runtime: NodeRuntime, pub languages: Arc, + pub extension_host_proxy: Arc, } impl HeadlessProject { @@ -61,9 +65,11 @@ impl HeadlessProject { http_client, node_runtime, languages, + extension_host_proxy: proxy, }: HeadlessAppState, cx: &mut ModelContext, ) -> Self { + language_extension::init(proxy.clone(), languages.clone()); languages::init(languages.clone(), node_runtime.clone(), cx); let worktree_store = cx.new_model(|cx| { @@ -147,6 +153,15 @@ impl HeadlessProject { ) .detach(); + let extensions = HeadlessExtensionStore::new( + fs.clone(), + http_client.clone(), + paths::remote_extensions_dir().to_path_buf(), + proxy, + node_runtime, + cx, + ); + let client: AnyProtoClient = session.clone().into(); session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store); @@ -173,6 +188,15 @@ impl HeadlessProject { client.add_model_request_handler(BufferStore::handle_update_buffer); 
client.add_model_message_handler(BufferStore::handle_close_buffer); + client.add_request_handler( + extensions.clone().downgrade(), + HeadlessExtensionStore::handle_sync_extensions, + ); + client.add_request_handler( + extensions.clone().downgrade(), + HeadlessExtensionStore::handle_install_extension, + ); + BufferStore::init(&client); WorktreeStore::init(&client); SettingsObserver::init(&client); @@ -190,6 +214,7 @@ impl HeadlessProject { task_store, next_entry_id: Default::default(), languages, + extensions, } } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index e3914c7ae1..8c2e50d68e 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -1,6 +1,7 @@ use crate::headless_project::HeadlessProject; use client::{Client, UserStore}; use clock::FakeSystemClock; +use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; use gpui::{Context, Model, SemanticVersion, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; @@ -19,6 +20,7 @@ use serde_json::json; use settings::{initial_server_settings_content, Settings, SettingsLocation, SettingsStore}; use smol::stream::StreamExt; use std::{ + collections::HashSet, path::{Path, PathBuf}, sync::Arc, }; @@ -77,13 +79,22 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test }) .await .unwrap(); + let change_set = project + .update(cx, |project, cx| { + project.open_unstaged_changes(buffer.clone(), cx) + }) + .await + .unwrap(); + + change_set.update(cx, |change_set, cx| { + assert_eq!( + change_set.base_text_string(cx).unwrap(), + "fn one() -> usize { 0 }" + ); + }); buffer.update(cx, |buffer, cx| { assert_eq!(buffer.text(), "fn one() -> usize { 1 }"); - assert_eq!( - buffer.diff_base().unwrap().to_string(), - "fn one() -> usize { 0 }" - ); let ix = buffer.text().find('1').unwrap(); buffer.edit([(ix..ix + 1, "100")], None, cx); }); @@ -139,9 +150,9 @@ 
async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())], ); cx.executor().run_until_parked(); - buffer.update(cx, |buffer, _| { + change_set.update(cx, |change_set, cx| { assert_eq!( - buffer.diff_base().unwrap().to_string(), + change_set.base_text_string(cx).unwrap(), "fn one() -> usize { 100 }" ); }); @@ -212,7 +223,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes // test that the headless server is tracking which buffers we have open correctly. cx.run_until_parked(); headless.update(server_cx, |headless, cx| { - assert!(!headless.buffer_store.read(cx).shared_buffers().is_empty()) + assert!(headless.buffer_store.read(cx).has_shared_buffers()) }); do_search(&project, cx.clone()).await; @@ -221,7 +232,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes }); cx.run_until_parked(); headless.update(server_cx, |headless, cx| { - assert!(headless.buffer_store.read(cx).shared_buffers().is_empty()) + assert!(!headless.buffer_store.read(cx).has_shared_buffers()) }); do_search(&project, cx.clone()).await; @@ -430,9 +441,9 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext // Wait for the settings to synchronize cx.run_until_parked(); - let buffer = project + let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + project.open_buffer_with_lsp((worktree_id, Path::new("src/lib.rs")), cx) }) .await .unwrap(); @@ -606,9 +617,9 @@ async fn test_remote_cancel_language_server_work( cx.run_until_parked(); - let buffer = project + let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + project.open_buffer_with_lsp((worktree_id, Path::new("src/lib.rs")), cx) }) .await .unwrap(); @@ -1140,6 +1151,10 @@ async fn test_remote_git_branches(cx: &mut 
TestAppContext, server_cx: &mut TestA let (project, headless_project) = init_test(&fs, cx, server_cx).await; let branches = ["main", "dev", "feature-1"]; + let branches_set = branches + .iter() + .map(ToString::to_string) + .collect::>(); fs.insert_branches(Path::new("/code/project1/.git"), &branches); let (worktree, _) = project @@ -1163,10 +1178,10 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA let remote_branches = remote_branches .into_iter() - .map(|branch| branch.name) - .collect::>(); + .map(|branch| branch.name.to_string()) + .collect::>(); - assert_eq!(&remote_branches, &branches); + assert_eq!(&remote_branches, &branches_set); cx.update(|cx| { project.update(cx, |project, cx| { @@ -1234,6 +1249,7 @@ pub async fn init_test( let http_client = Arc::new(BlockedHttpClient); let node_runtime = NodeRuntime::unavailable(); let languages = Arc::new(LanguageRegistry::new(cx.executor())); + let proxy = Arc::new(ExtensionHostProxy::new()); server_cx.update(HeadlessProject::init); let headless = server_cx.new_model(|cx| { client::init_settings(cx); @@ -1245,6 +1261,7 @@ pub async fn init_test( http_client, node_runtime, languages, + extension_host_proxy: proxy, }, cx, ) @@ -1277,7 +1294,7 @@ fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model< let client = cx.update(|cx| { Client::new( - Arc::new(FakeSystemClock::default()), + Arc::new(FakeSystemClock::new()), FakeHttpClient::with_404_response(), cx, ) diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index 467fd452f8..5fd8c7f656 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -3,6 +3,7 @@ use crate::HeadlessProject; use anyhow::{anyhow, Context, Result}; use chrono::Utc; use client::{telemetry, ProxySettings}; +use extension::ExtensionHostProxy; use fs::{Fs, RealFs}; use futures::channel::mpsc; use futures::{select, select_biased, AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt, SinkExt}; @@ -159,6 
+160,7 @@ fn init_panic_hook() { option_env!("ZED_COMMIT_SHA").unwrap_or(&env!("ZED_PKG_VERSION")) ), release_channel: release_channel::RELEASE_CHANNEL.display_name().into(), + target: env!("TARGET").to_owned().into(), os_name: telemetry::os_name(), os_version: Some(telemetry::os_version()), architecture: env::consts::ARCH.into(), @@ -434,6 +436,9 @@ pub fn execute_run( GitHostingProviderRegistry::set_global(git_hosting_provider_registry, cx); git_hosting_providers::init(cx); + extension::init(cx); + let extension_host_proxy = ExtensionHostProxy::global(cx); + let project = cx.new_model(|cx| { let fs = Arc::new(RealFs::new(Default::default(), None)); let node_settings_rx = initialize_settings(session.clone(), fs.clone(), cx); @@ -466,6 +471,7 @@ pub fn execute_run( http_client, node_runtime, languages, + extension_host_proxy, }, cx, ) diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index b170def71f..cc6cd7c1c8 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -16,15 +16,20 @@ doctest = false alacritty_terminal.workspace = true anyhow.workspace = true async-dispatcher.workspace = true +async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } base64.workspace = true client.workspace = true collections.workspace = true command_palette_hooks.workspace = true editor.workspace = true feature_flags.workspace = true +file_icons.workspace = true futures.workspace = true gpui.workspace = true +http_client.workspace = true image.workspace = true +jupyter-websocket-client.workspace = true +jupyter-protocol.workspace = true language.workspace = true log.workspace = true markdown_preview.workspace = true @@ -38,6 +43,7 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smol.workspace = true +telemetry.workspace = true terminal.workspace = true terminal_view.workspace = true theme.workspace = true @@ -47,9 +53,6 @@ uuid.workspace = true workspace.workspace = true picker.workspace = true 
-[target.'cfg(target_os = "windows")'.dependencies] -windows.workspace = true - [dev-dependencies] editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true @@ -60,6 +63,7 @@ language = { workspace = true, features = ["test-support"] } languages = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } +terminal_view = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } tree-sitter-md.workspace = true tree-sitter-typescript.workspace = true diff --git a/crates/repl/src/components/kernel_options.rs b/crates/repl/src/components/kernel_options.rs index fc0213e54e..77fdffd1e5 100644 --- a/crates/repl/src/components/kernel_options.rs +++ b/crates/repl/src/components/kernel_options.rs @@ -34,6 +34,16 @@ pub struct KernelPickerDelegate { on_select: OnSelect, } +// Helper function to truncate long paths +fn truncate_path(path: &SharedString, max_length: usize) -> SharedString { + if path.len() <= max_length { + path.to_string().into() + } else { + let truncated = path.chars().rev().take(max_length - 3).collect::(); + format!("...{}", truncated.chars().rev().collect::()).into() + } +} + impl KernelSelector { pub fn new(on_select: OnSelect, worktree_id: WorktreeId, trigger: T) -> Self { KernelSelector { @@ -88,7 +98,7 @@ impl PickerDelegate for KernelPickerDelegate { if query.is_empty() { self.filtered_kernels = all_kernels; - return Task::Ready(Some(())); + return Task::ready(()); } self.filtered_kernels = if query.is_empty() { @@ -100,7 +110,7 @@ impl PickerDelegate for KernelPickerDelegate { .collect() }; - return Task::Ready(Some(())); + return Task::ready(()); } fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext>) { @@ -116,37 +126,72 @@ impl PickerDelegate for KernelPickerDelegate { &self, ix: usize, selected: bool, - _cx: &mut ViewContext>, + cx: &mut 
ViewContext>, ) -> Option { let kernelspec = self.filtered_kernels.get(ix)?; - let is_selected = self.selected_kernelspec.as_ref() == Some(kernelspec); + let icon = kernelspec.icon(cx); + + let (name, kernel_type, path_or_url) = match kernelspec { + KernelSpecification::Jupyter(_) => (kernelspec.name(), "Jupyter", None), + KernelSpecification::PythonEnv(_) => ( + kernelspec.name(), + "Python Env", + Some(truncate_path(&kernelspec.path(), 42)), + ), + KernelSpecification::Remote(_) => ( + kernelspec.name(), + "Remote", + Some(truncate_path(&kernelspec.path(), 42)), + ), + }; Some( ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child( - v_flex() - .min_w(px(600.)) + h_flex() .w_full() - .gap_0p5() + .gap_3() + .child(icon.color(Color::Default).size(IconSize::Medium)) .child( - h_flex() - .w_full() - .gap_1() - .child(Label::new(kernelspec.name()).weight(FontWeight::MEDIUM)) + v_flex() + .flex_grow() + .gap_0p5() .child( - Label::new(kernelspec.language()) - .size(LabelSize::Small) - .color(Color::Muted), + h_flex() + .justify_between() + .child( + div().w_48().text_ellipsis().child( + Label::new(name) + .weight(FontWeight::MEDIUM) + .size(LabelSize::Default), + ), + ) + .when_some(path_or_url.clone(), |flex, path| { + flex.text_ellipsis().child( + Label::new(path) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }), + ) + .child( + h_flex() + .gap_1() + .child( + Label::new(kernelspec.language()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + Label::new(kernel_type) + .size(LabelSize::Small) + .color(Color::Muted), + ), ), - ) - .child( - Label::new(kernelspec.path()) - .size(LabelSize::XSmall) - .color(Color::Muted), ), ) .when(is_selected, |item| { @@ -199,14 +244,16 @@ impl RenderOnce for KernelSelector { }; let picker_view = cx.new_view(|cx| { - let picker = Picker::uniform_list(delegate, cx).max_height(Some(rems(20.).into())); + let picker = 
Picker::uniform_list(delegate, cx) + .width(rems(30.)) + .max_height(Some(rems(20.).into())); picker }); PopoverMenu::new("kernel-switcher") .menu(move |_cx| Some(picker_view.clone())) .trigger(self.trigger) - .attach(gpui::AnchorCorner::BottomLeft) + .attach(gpui::Corner::BottomLeft) .when_some(self.handle, |menu, handle| menu.with_handle(handle)) } } diff --git a/crates/repl/src/kernels/mod.rs b/crates/repl/src/kernels/mod.rs new file mode 100644 index 0000000000..e829b1946c --- /dev/null +++ b/crates/repl/src/kernels/mod.rs @@ -0,0 +1,240 @@ +mod native_kernel; +use std::{fmt::Debug, future::Future, path::PathBuf}; + +use futures::{ + channel::mpsc::{self, Receiver}, + future::Shared, + stream, +}; +use gpui::{AppContext, Model, Task, WindowContext}; +use language::LanguageName; +pub use native_kernel::*; + +mod remote_kernels; +use project::{Project, WorktreeId}; +pub use remote_kernels::*; + +use anyhow::Result; +use jupyter_protocol::JupyterKernelspec; +use runtimelib::{ExecutionState, JupyterMessage, KernelInfoReply}; +use ui::{Icon, IconName, SharedString}; + +pub type JupyterMessageChannel = stream::SelectAll>; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum KernelSpecification { + Remote(RemoteKernelSpecification), + Jupyter(LocalKernelSpecification), + PythonEnv(LocalKernelSpecification), +} + +impl KernelSpecification { + pub fn name(&self) -> SharedString { + match self { + Self::Jupyter(spec) => spec.name.clone().into(), + Self::PythonEnv(spec) => spec.name.clone().into(), + Self::Remote(spec) => spec.name.clone().into(), + } + } + + pub fn type_name(&self) -> SharedString { + match self { + Self::Jupyter(_) => "Jupyter".into(), + Self::PythonEnv(_) => "Python Environment".into(), + Self::Remote(_) => "Remote".into(), + } + } + + pub fn path(&self) -> SharedString { + SharedString::from(match self { + Self::Jupyter(spec) => spec.path.to_string_lossy().to_string(), + Self::PythonEnv(spec) => spec.path.to_string_lossy().to_string(), + 
Self::Remote(spec) => spec.url.to_string(), + }) + } + + pub fn language(&self) -> SharedString { + SharedString::from(match self { + Self::Jupyter(spec) => spec.kernelspec.language.clone(), + Self::PythonEnv(spec) => spec.kernelspec.language.clone(), + Self::Remote(spec) => spec.kernelspec.language.clone(), + }) + } + + pub fn icon(&self, cx: &AppContext) -> Icon { + let lang_name = match self { + Self::Jupyter(spec) => spec.kernelspec.language.clone(), + Self::PythonEnv(spec) => spec.kernelspec.language.clone(), + Self::Remote(spec) => spec.kernelspec.language.clone(), + }; + + file_icons::FileIcons::get(cx) + .get_type_icon(&lang_name.to_lowercase()) + .map(Icon::from_path) + .unwrap_or(Icon::new(IconName::ReplNeutral)) + } +} + +pub fn python_env_kernel_specifications( + project: &Model, + worktree_id: WorktreeId, + cx: &mut AppContext, +) -> impl Future>> { + let python_language = LanguageName::new("Python"); + let toolchains = project + .read(cx) + .available_toolchains(worktree_id, python_language, cx); + let background_executor = cx.background_executor().clone(); + + async move { + let toolchains = if let Some(toolchains) = toolchains.await { + toolchains + } else { + return Ok(Vec::new()); + }; + + let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| { + background_executor.spawn(async move { + let python_path = toolchain.path.to_string(); + + // Check if ipykernel is installed + let ipykernel_check = util::command::new_smol_command(&python_path) + .args(&["-c", "import ipykernel"]) + .output() + .await; + + if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() { + // Create a default kernelspec for this environment + let default_kernelspec = JupyterKernelspec { + argv: vec![ + python_path.clone(), + "-m".to_string(), + "ipykernel_launcher".to_string(), + "-f".to_string(), + "{connection_file}".to_string(), + ], + display_name: toolchain.name.to_string(), + language: "python".to_string(), + interrupt_mode: None, + metadata: 
None, + env: None, + }; + + Some(KernelSpecification::PythonEnv(LocalKernelSpecification { + name: toolchain.name.to_string(), + path: PathBuf::from(&python_path), + kernelspec: default_kernelspec, + })) + } else { + None + } + }) + }); + + let kernel_specs = futures::future::join_all(kernelspecs) + .await + .into_iter() + .flatten() + .collect(); + + anyhow::Ok(kernel_specs) + } +} + +pub trait RunningKernel: Send + Debug { + fn request_tx(&self) -> mpsc::Sender; + fn working_directory(&self) -> &PathBuf; + fn execution_state(&self) -> &ExecutionState; + fn set_execution_state(&mut self, state: ExecutionState); + fn kernel_info(&self) -> Option<&KernelInfoReply>; + fn set_kernel_info(&mut self, info: KernelInfoReply); + fn force_shutdown(&mut self, cx: &mut WindowContext) -> Task>; +} + +#[derive(Debug, Clone)] +pub enum KernelStatus { + Idle, + Busy, + Starting, + Error, + ShuttingDown, + Shutdown, + Restarting, +} + +impl KernelStatus { + pub fn is_connected(&self) -> bool { + match self { + KernelStatus::Idle | KernelStatus::Busy => true, + _ => false, + } + } +} + +impl ToString for KernelStatus { + fn to_string(&self) -> String { + match self { + KernelStatus::Idle => "Idle".to_string(), + KernelStatus::Busy => "Busy".to_string(), + KernelStatus::Starting => "Starting".to_string(), + KernelStatus::Error => "Error".to_string(), + KernelStatus::ShuttingDown => "Shutting Down".to_string(), + KernelStatus::Shutdown => "Shutdown".to_string(), + KernelStatus::Restarting => "Restarting".to_string(), + } + } +} + +#[derive(Debug)] +pub enum Kernel { + RunningKernel(Box), + StartingKernel(Shared>), + ErroredLaunch(String), + ShuttingDown, + Shutdown, + Restarting, +} + +impl From<&Kernel> for KernelStatus { + fn from(kernel: &Kernel) -> Self { + match kernel { + Kernel::RunningKernel(kernel) => match kernel.execution_state() { + ExecutionState::Idle => KernelStatus::Idle, + ExecutionState::Busy => KernelStatus::Busy, + }, + Kernel::StartingKernel(_) => 
KernelStatus::Starting, + Kernel::ErroredLaunch(_) => KernelStatus::Error, + Kernel::ShuttingDown => KernelStatus::ShuttingDown, + Kernel::Shutdown => KernelStatus::Shutdown, + Kernel::Restarting => KernelStatus::Restarting, + } + } +} + +impl Kernel { + pub fn status(&self) -> KernelStatus { + self.into() + } + + pub fn set_execution_state(&mut self, status: &ExecutionState) { + if let Kernel::RunningKernel(running_kernel) = self { + running_kernel.set_execution_state(status.clone()); + } + } + + pub fn set_kernel_info(&mut self, kernel_info: &KernelInfoReply) { + if let Kernel::RunningKernel(running_kernel) = self { + running_kernel.set_kernel_info(kernel_info.clone()); + } + } + + pub fn is_shutting_down(&self) -> bool { + match self { + Kernel::Restarting | Kernel::ShuttingDown => true, + Kernel::RunningKernel(_) + | Kernel::StartingKernel(_) + | Kernel::ErroredLaunch(_) + | Kernel::Shutdown => false, + } + } +} diff --git a/crates/repl/src/kernels.rs b/crates/repl/src/kernels/native_kernel.rs similarity index 55% rename from crates/repl/src/kernels.rs rename to crates/repl/src/kernels/native_kernel.rs index 8ad8a05648..2d796e12c6 100644 --- a/crates/repl/src/kernels.rs +++ b/crates/repl/src/kernels/native_kernel.rs @@ -1,69 +1,30 @@ use anyhow::{Context as _, Result}; use futures::{ - channel::mpsc::{self, Receiver}, - future::Shared, - stream::{self, SelectAll, StreamExt}, - SinkExt as _, + channel::mpsc::{self}, + io::BufReader, + stream::{SelectAll, StreamExt}, + AsyncBufReadExt as _, SinkExt as _, }; -use gpui::{AppContext, EntityId, Model, Task}; -use language::LanguageName; -use project::{Fs, Project, WorktreeId}; -use runtimelib::{ - dirs, ConnectionInfo, ExecutionState, JupyterKernelspec, JupyterMessage, JupyterMessageContent, - KernelInfoReply, +use gpui::{EntityId, Task, View, WindowContext}; +use jupyter_protocol::{ + connection_info::{ConnectionInfo, Transport}, + ExecutionState, JupyterKernelspec, JupyterMessage, JupyterMessageContent, 
KernelInfoReply, }; +use project::Fs; +use runtimelib::dirs; use smol::{net::TcpListener, process::Command}; use std::{ env, fmt::Debug, - future::Future, net::{IpAddr, Ipv4Addr, SocketAddr}, path::PathBuf, sync::Arc, }; -use ui::SharedString; use uuid::Uuid; -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum KernelSpecification { - Remote(RemoteKernelSpecification), - Jupyter(LocalKernelSpecification), - PythonEnv(LocalKernelSpecification), -} +use crate::Session; -impl KernelSpecification { - pub fn name(&self) -> SharedString { - match self { - Self::Jupyter(spec) => spec.name.clone().into(), - Self::PythonEnv(spec) => spec.name.clone().into(), - Self::Remote(spec) => spec.name.clone().into(), - } - } - - pub fn type_name(&self) -> SharedString { - match self { - Self::Jupyter(_) => "Jupyter".into(), - Self::PythonEnv(_) => "Python Environment".into(), - Self::Remote(_) => "Remote".into(), - } - } - - pub fn path(&self) -> SharedString { - SharedString::from(match self { - Self::Jupyter(spec) => spec.path.to_string_lossy().to_string(), - Self::PythonEnv(spec) => spec.path.to_string_lossy().to_string(), - Self::Remote(spec) => spec.url.to_string(), - }) - } - - pub fn language(&self) -> SharedString { - SharedString::from(match self { - Self::Jupyter(spec) => spec.kernelspec.language.clone(), - Self::PythonEnv(spec) => spec.kernelspec.language.clone(), - Self::Remote(spec) => spec.kernelspec.language.clone(), - }) - } -} +use super::RunningKernel; #[derive(Debug, Clone)] pub struct LocalKernelSpecification { @@ -80,22 +41,6 @@ impl PartialEq for LocalKernelSpecification { impl Eq for LocalKernelSpecification {} -#[derive(Debug, Clone)] -pub struct RemoteKernelSpecification { - pub name: String, - pub url: String, - pub token: String, - pub kernelspec: JupyterKernelspec, -} - -impl PartialEq for RemoteKernelSpecification { - fn eq(&self, other: &Self) -> bool { - self.name == other.name && self.url == other.url - } -} - -impl Eq for RemoteKernelSpecification {} - 
impl LocalKernelSpecification { #[must_use] fn command(&self, connection_path: &PathBuf) -> Result { @@ -109,7 +54,7 @@ impl LocalKernelSpecification { self.name ); - let mut cmd = Command::new(&argv[0]); + let mut cmd = util::command::new_smol_command(&argv[0]); for arg in &argv[1..] { if arg == "{connection_file}" { @@ -123,12 +68,6 @@ impl LocalKernelSpecification { cmd.envs(env); } - #[cfg(windows)] - { - use smol::process::windows::CommandExt; - cmd.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - Ok(cmd) } } @@ -147,110 +86,20 @@ async fn peek_ports(ip: IpAddr) -> Result<[u16; 5]> { Ok(ports) } -#[derive(Debug, Clone)] -pub enum KernelStatus { - Idle, - Busy, - Starting, - Error, - ShuttingDown, - Shutdown, - Restarting, -} - -impl KernelStatus { - pub fn is_connected(&self) -> bool { - match self { - KernelStatus::Idle | KernelStatus::Busy => true, - _ => false, - } - } -} - -impl ToString for KernelStatus { - fn to_string(&self) -> String { - match self { - KernelStatus::Idle => "Idle".to_string(), - KernelStatus::Busy => "Busy".to_string(), - KernelStatus::Starting => "Starting".to_string(), - KernelStatus::Error => "Error".to_string(), - KernelStatus::ShuttingDown => "Shutting Down".to_string(), - KernelStatus::Shutdown => "Shutdown".to_string(), - KernelStatus::Restarting => "Restarting".to_string(), - } - } -} - -impl From<&Kernel> for KernelStatus { - fn from(kernel: &Kernel) -> Self { - match kernel { - Kernel::RunningKernel(kernel) => match kernel.execution_state { - ExecutionState::Idle => KernelStatus::Idle, - ExecutionState::Busy => KernelStatus::Busy, - }, - Kernel::StartingKernel(_) => KernelStatus::Starting, - Kernel::ErroredLaunch(_) => KernelStatus::Error, - Kernel::ShuttingDown => KernelStatus::ShuttingDown, - Kernel::Shutdown => KernelStatus::Shutdown, - Kernel::Restarting => KernelStatus::Restarting, - } - } -} - -#[derive(Debug)] -pub enum Kernel { - RunningKernel(RunningKernel), - StartingKernel(Shared>), - 
ErroredLaunch(String), - ShuttingDown, - Shutdown, - Restarting, -} - -impl Kernel { - pub fn status(&self) -> KernelStatus { - self.into() - } - - pub fn set_execution_state(&mut self, status: &ExecutionState) { - if let Kernel::RunningKernel(running_kernel) = self { - running_kernel.execution_state = status.clone(); - } - } - - pub fn set_kernel_info(&mut self, kernel_info: &KernelInfoReply) { - if let Kernel::RunningKernel(running_kernel) = self { - running_kernel.kernel_info = Some(kernel_info.clone()); - } - } - - pub fn is_shutting_down(&self) -> bool { - match self { - Kernel::Restarting | Kernel::ShuttingDown => true, - Kernel::RunningKernel(_) - | Kernel::StartingKernel(_) - | Kernel::ErroredLaunch(_) - | Kernel::Shutdown => false, - } - } -} - -pub struct RunningKernel { +pub struct NativeRunningKernel { pub process: smol::process::Child, _shell_task: Task>, - _iopub_task: Task>, _control_task: Task>, _routing_task: Task>, connection_path: PathBuf, + _process_status_task: Option>, pub working_directory: PathBuf, pub request_tx: mpsc::Sender, pub execution_state: ExecutionState, pub kernel_info: Option, } -type JupyterMessageChannel = stream::SelectAll>; - -impl Debug for RunningKernel { +impl Debug for NativeRunningKernel { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("RunningKernel") .field("process", &self.process) @@ -258,31 +107,22 @@ impl Debug for RunningKernel { } } -impl RunningKernel { +impl NativeRunningKernel { pub fn new( - kernel_specification: KernelSpecification, + kernel_specification: LocalKernelSpecification, entity_id: EntityId, working_directory: PathBuf, fs: Arc, - cx: &mut AppContext, - ) -> Task> { - let kernel_specification = match kernel_specification { - KernelSpecification::Jupyter(spec) => spec, - KernelSpecification::PythonEnv(spec) => spec, - KernelSpecification::Remote(_spec) => { - // todo!(): Implement remote kernel specification - return Task::ready(Err(anyhow::anyhow!( - "Running 
remote kernels is not supported" - ))); - } - }; - + // todo: convert to weak view + session: View, + cx: &mut WindowContext, + ) -> Task>> { cx.spawn(|cx| async move { let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); let ports = peek_ports(ip).await?; let connection_info = ConnectionInfo { - transport: "tcp".to_string(), + transport: Transport::TCP, ip: ip.to_string(), stdin_port: ports[0], control_port: ports[1], @@ -304,7 +144,7 @@ impl RunningKernel { let mut cmd = kernel_specification.command(&connection_path)?; - let process = cmd + let mut process = cmd .current_dir(&working_directory) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) @@ -315,17 +155,13 @@ impl RunningKernel { let session_id = Uuid::new_v4().to_string(); - let mut iopub_socket = connection_info - .create_client_iopub_connection("", &session_id) - .await?; - let mut shell_socket = connection_info - .create_client_shell_connection(&session_id) - .await?; - let mut control_socket = connection_info - .create_client_control_connection(&session_id) - .await?; - - let (mut iopub, iosub) = futures::channel::mpsc::channel(100); + let mut iopub_socket = + runtimelib::create_client_iopub_connection(&connection_info, "", &session_id) + .await?; + let mut shell_socket = + runtimelib::create_client_shell_connection(&connection_info, &session_id).await?; + let mut control_socket = + runtimelib::create_client_control_connection(&connection_info, &session_id).await?; let (request_tx, mut request_rx) = futures::channel::mpsc::channel::(100); @@ -334,18 +170,41 @@ impl RunningKernel { let (mut shell_reply_tx, shell_reply_rx) = futures::channel::mpsc::channel(100); let mut messages_rx = SelectAll::new(); - messages_rx.push(iosub); messages_rx.push(control_reply_rx); messages_rx.push(shell_reply_rx); - let iopub_task = cx.background_executor().spawn({ - async move { - while let Ok(message) = iopub_socket.read().await { - iopub.send(message).await?; + cx.spawn({ + let session = 
session.clone(); + + |mut cx| async move { + while let Some(message) = messages_rx.next().await { + session + .update(&mut cx, |session, cx| { + session.route(&message, cx); + }) + .ok(); } anyhow::Ok(()) } - }); + }) + .detach(); + + // iopub task + cx.spawn({ + let session = session.clone(); + + |mut cx| async move { + while let Ok(message) = iopub_socket.read().await { + session + .update(&mut cx, |session, cx| { + session.route(&message, cx); + }) + .ok(); + } + anyhow::Ok(()) + } + }) + .detach(); let (mut control_request_tx, mut control_request_rx) = futures::channel::mpsc::channel(100); @@ -391,26 +250,118 @@ impl RunningKernel { } }); - anyhow::Ok(( - Self { - process, - request_tx, - working_directory, - _shell_task: shell_task, - _iopub_task: iopub_task, - _control_task: control_task, - _routing_task: routing_task, - connection_path, - execution_state: ExecutionState::Idle, - kernel_info: None, - }, - messages_rx, - )) + let stderr = process.stderr.take(); + + cx.spawn(|mut _cx| async move { + if stderr.is_none() { + return; + } + let reader = BufReader::new(stderr.unwrap()); + let mut lines = reader.lines(); + while let Some(Ok(line)) = lines.next().await { + log::error!("kernel: {}", line); + } + }) + .detach(); + + let stdout = process.stdout.take(); + + cx.spawn(|mut _cx| async move { + if stdout.is_none() { + return; + } + let reader = BufReader::new(stdout.unwrap()); + let mut lines = reader.lines(); + while let Some(Ok(line)) = lines.next().await { + log::info!("kernel: {}", line); + } + }) + .detach(); + + let status = process.status(); + + let process_status_task = cx.spawn(|mut cx| async move { + let error_message = match status.await { + Ok(status) => { + if status.success() { + log::info!("kernel process exited successfully"); + return; + } + + format!("kernel process exited with status: {:?}", status) + } + Err(err) => { + format!("kernel process exited with error: {:?}", err) + } + }; + + log::error!("{}", error_message); + + session + 
.update(&mut cx, |session, cx| { + session.kernel_errored(error_message, cx); + + cx.notify(); + }) + .ok(); + }); + + anyhow::Ok(Box::new(Self { + process, + request_tx, + working_directory, + _process_status_task: Some(process_status_task), + _shell_task: shell_task, + _control_task: control_task, + _routing_task: routing_task, + connection_path, + execution_state: ExecutionState::Idle, + kernel_info: None, + }) as Box) }) } } -impl Drop for RunningKernel { +impl RunningKernel for NativeRunningKernel { + fn request_tx(&self) -> mpsc::Sender { + self.request_tx.clone() + } + + fn working_directory(&self) -> &PathBuf { + &self.working_directory + } + + fn execution_state(&self) -> &ExecutionState { + &self.execution_state + } + + fn set_execution_state(&mut self, state: ExecutionState) { + self.execution_state = state; + } + + fn kernel_info(&self) -> Option<&KernelInfoReply> { + self.kernel_info.as_ref() + } + + fn set_kernel_info(&mut self, info: KernelInfoReply) { + self.kernel_info = Some(info); + } + + fn force_shutdown(&mut self, _cx: &mut WindowContext) -> Task> { + self._process_status_task.take(); + self.request_tx.close_channel(); + + Task::ready(match self.process.kill() { + Ok(_) => Ok(()), + Err(error) => Err(anyhow::anyhow!( + "Failed to kill the kernel process: {}", + error + )), + }) + } +} + +impl Drop for NativeRunningKernel { fn drop(&mut self) { std::fs::remove_file(&self.connection_path).ok(); self.request_tx.close_channel(); @@ -467,72 +418,6 @@ async fn read_kernels_dir(path: PathBuf, fs: &dyn Fs) -> Result, - worktree_id: WorktreeId, - cx: &mut AppContext, -) -> impl Future>> { - let python_language = LanguageName::new("Python"); - let toolchains = project - .read(cx) - .available_toolchains(worktree_id, python_language, cx); - let background_executor = cx.background_executor().clone(); - - async move { - let toolchains = if let Some(toolchains) = toolchains.await { - toolchains - } else { - return Ok(Vec::new()); - }; - - let kernelspecs = 
toolchains.toolchains.into_iter().map(|toolchain| { - background_executor.spawn(async move { - let python_path = toolchain.path.to_string(); - - // Check if ipykernel is installed - let ipykernel_check = Command::new(&python_path) - .args(&["-c", "import ipykernel"]) - .output() - .await; - - if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() { - // Create a default kernelspec for this environment - let default_kernelspec = JupyterKernelspec { - argv: vec![ - python_path.clone(), - "-m".to_string(), - "ipykernel_launcher".to_string(), - "-f".to_string(), - "{connection_file}".to_string(), - ], - display_name: toolchain.name.to_string(), - language: "python".to_string(), - interrupt_mode: None, - metadata: None, - env: None, - }; - - Some(KernelSpecification::PythonEnv(LocalKernelSpecification { - name: toolchain.name.to_string(), - path: PathBuf::from(&python_path), - kernelspec: default_kernelspec, - })) - } else { - None - } - }) - }); - - let kernel_specs = futures::future::join_all(kernelspecs) - .await - .into_iter() - .flatten() - .collect(); - - anyhow::Ok(kernel_specs) - } -} - pub async fn local_kernel_specifications(fs: Arc) -> Result> { let mut data_dirs = dirs::data_dirs(); @@ -544,17 +429,11 @@ pub async fn local_kernel_specifications(fs: Arc) -> Result, + kernel_name: &str, + _path: &str, +) -> Result { + // + let kernel_launch_request = KernelLaunchRequest { + name: kernel_name.to_string(), + // Note: since the path we have locally may not be the same as the one on the remote server, + // we don't send it. We'll have to evaluate this decision along the way. 
+ path: None, + }; + + let kernel_launch_request = serde_json::to_string(&kernel_launch_request)?; + + let request = Request::builder() + .method("POST") + .uri(&remote_server.api_url("/kernels")) + .header("Authorization", format!("token {}", remote_server.token)) + .body(AsyncBody::from(kernel_launch_request))?; + + let response = http_client.send(request).await?; + + if !response.status().is_success() { + let mut body = String::new(); + response.into_body().read_to_string(&mut body).await?; + return Err(anyhow::anyhow!("Failed to launch kernel: {}", body)); + } + + let mut body = String::new(); + response.into_body().read_to_string(&mut body).await?; + + let response: jupyter_websocket_client::Kernel = serde_json::from_str(&body)?; + + Ok(response.id) +} + +pub async fn list_remote_kernelspecs( + remote_server: RemoteServer, + http_client: Arc, +) -> Result> { + let url = remote_server.api_url("/kernelspecs"); + + let request = Request::builder() + .method("GET") + .uri(&url) + .header("Authorization", format!("token {}", remote_server.token)) + .body(AsyncBody::default())?; + + let response = http_client.send(request).await?; + + if response.status().is_success() { + let mut body = response.into_body(); + + let mut body_bytes = Vec::new(); + body.read_to_end(&mut body_bytes).await?; + + let kernel_specs: KernelSpecsResponse = serde_json::from_slice(&body_bytes)?; + + let remote_kernelspecs = kernel_specs + .kernelspecs + .into_iter() + .map(|(name, spec)| RemoteKernelSpecification { + name: name.clone(), + url: remote_server.base_url.clone(), + token: remote_server.token.clone(), + kernelspec: spec.spec, + }) + .collect::>(); + + if remote_kernelspecs.is_empty() { + Err(anyhow::anyhow!("No kernel specs found")) + } else { + Ok(remote_kernelspecs.clone()) + } + } else { + Err(anyhow::anyhow!( + "Failed to fetch kernel specs: {}", + response.status() + )) + } +} + +impl PartialEq for RemoteKernelSpecification { + fn eq(&self, other: &Self) -> bool { + self.name 
== other.name && self.url == other.url + } +} + +impl Eq for RemoteKernelSpecification {} + +pub struct RemoteRunningKernel { + remote_server: RemoteServer, + _receiving_task: Task>, + _routing_task: Task>, + http_client: Arc, + pub working_directory: std::path::PathBuf, + pub request_tx: mpsc::Sender, + pub execution_state: ExecutionState, + pub kernel_info: Option, + pub kernel_id: String, +} + +impl RemoteRunningKernel { + pub fn new( + kernelspec: RemoteKernelSpecification, + working_directory: std::path::PathBuf, + session: View, + cx: &mut WindowContext, + ) -> Task>> { + let remote_server = RemoteServer { + base_url: kernelspec.url, + token: kernelspec.token, + }; + + let http_client = cx.http_client(); + + cx.spawn(|cx| async move { + let kernel_id = launch_remote_kernel( + &remote_server, + http_client.clone(), + &kernelspec.name, + working_directory.to_str().unwrap_or_default(), + ) + .await?; + + let ws_url = format!( + "{}/api/kernels/{}/channels?token={}", + remote_server.base_url.replace("http", "ws"), + kernel_id, + remote_server.token + ); + + let mut req: Request<()> = ws_url.into_client_request()?; + let headers = req.headers_mut(); + + headers.insert( + "User-Agent", + HeaderValue::from_str(&format!( + "Zed/{} ({}; {})", + "repl", + std::env::consts::OS, + std::env::consts::ARCH + ))?, + ); + + let response = connect_async(req).await; + + let (ws_stream, _response) = response?; + + let kernel_socket = JupyterWebSocket { inner: ws_stream }; + + let (mut w, mut r): (JupyterWebSocketWriter, JupyterWebSocketReader) = + kernel_socket.split(); + + let (request_tx, mut request_rx) = + futures::channel::mpsc::channel::(100); + + let routing_task = cx.background_executor().spawn({ + async move { + while let Some(message) = request_rx.next().await { + w.send(message).await.ok(); + } + Ok(()) + } + }); + + let receiving_task = cx.spawn({ + let session = session.clone(); + + |mut cx| async move { + while let Some(message) = r.next().await { + match message { 
+ Ok(message) => { + session + .update(&mut cx, |session, cx| { + session.route(&message, cx); + }) + .ok(); + } + Err(e) => { + log::error!("Error receiving message: {:?}", e); + } + } + } + Ok(()) + } + }); + + anyhow::Ok(Box::new(Self { + _routing_task: routing_task, + _receiving_task: receiving_task, + remote_server, + working_directory, + request_tx, + // todo(kyle): pull this from the kernel API to start with + execution_state: ExecutionState::Idle, + kernel_info: None, + kernel_id, + http_client: http_client.clone(), + }) as Box) + }) + } +} + +impl Debug for RemoteRunningKernel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RemoteRunningKernel") + // custom debug that keeps tokens out of logs + .field("remote_server url", &self.remote_server.base_url) + .field("working_directory", &self.working_directory) + .field("request_tx", &self.request_tx) + .field("execution_state", &self.execution_state) + .field("kernel_info", &self.kernel_info) + .finish() + } +} + +impl RunningKernel for RemoteRunningKernel { + fn request_tx(&self) -> futures::channel::mpsc::Sender { + self.request_tx.clone() + } + + fn working_directory(&self) -> &std::path::PathBuf { + &self.working_directory + } + + fn execution_state(&self) -> &runtimelib::ExecutionState { + &self.execution_state + } + + fn set_execution_state(&mut self, state: runtimelib::ExecutionState) { + self.execution_state = state; + } + + fn kernel_info(&self) -> Option<&runtimelib::KernelInfoReply> { + self.kernel_info.as_ref() + } + + fn set_kernel_info(&mut self, info: runtimelib::KernelInfoReply) { + self.kernel_info = Some(info); + } + + fn force_shutdown(&mut self, cx: &mut WindowContext) -> Task> { + let url = self + .remote_server + .api_url(&format!("/kernels/{}", self.kernel_id)); + let token = self.remote_server.token.clone(); + let http_client = self.http_client.clone(); + + cx.spawn(|_| async move { + let request = Request::builder() + .method("DELETE") + 
.uri(&url) + .header("Authorization", format!("token {}", token)) + .body(AsyncBody::default())?; + + let response = http_client.send(request).await?; + + if response.status().is_success() { + Ok(()) + } else { + Err(anyhow::anyhow!( + "Failed to shutdown kernel: {}", + response.status() + )) + } + }) + } +} diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index 055e4c09f8..12d11853fb 100644 --- a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -114,7 +114,7 @@ impl Cell { id, metadata, source, - attachments: _, + .. } => { let source = source.join(""); diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index d10da13fd8..84455d0ccf 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -158,16 +158,6 @@ impl NotebookEditor { }) } - fn is_dirty(&self, cx: &AppContext) -> bool { - self.cell_map.values().any(|cell| { - if let Cell::Code(code_cell) = cell { - code_cell.read(cx).is_dirty(cx) - } else { - false - } - }) - } - fn clear_outputs(&mut self, cx: &mut ViewContext) { for cell in self.cell_map.values() { if let Cell::Code(code_cell) = cell { @@ -500,7 +490,7 @@ pub struct NotebookItem { id: ProjectEntryId, } -impl project::Item for NotebookItem { +impl project::ProjectItem for NotebookItem { fn try_open( project: &Model, path: &ProjectPath, @@ -525,7 +515,7 @@ impl project::Item for NotebookItem { Ok(nbformat::Notebook::V4(notebook)) => notebook, // 4.1 - 4.4 are converted to 4.5 Ok(nbformat::Notebook::Legacy(legacy_notebook)) => { - // todo!(): Decide if we want to mutate the notebook by including Cell IDs + // TODO: Decide if we want to mutate the notebook by including Cell IDs // and any other conversions let notebook = nbformat::upgrade_legacy_notebook(legacy_notebook)?; notebook @@ -561,6 +551,10 @@ impl project::Item for NotebookItem { fn project_path(&self, _: &AppContext) -> Option { 
Some(self.project_path.clone()) } + + fn is_dirty(&self) -> bool { + false + } } impl NotebookItem { @@ -656,7 +650,7 @@ impl Item for NotebookEditor { fn for_each_project_item( &self, cx: &AppContext, - f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item), + f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), ) { f(self.notebook_item.entity_id(), self.notebook_item.read(cx)) } @@ -679,7 +673,7 @@ impl Item for NotebookEditor { .into_any_element() } - fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + fn tab_icon(&self, _cx: &WindowContext) -> Option { Some(IconName::Book.into()) } @@ -734,8 +728,13 @@ impl Item for NotebookEditor { } fn is_dirty(&self, cx: &AppContext) -> bool { - // self.is_dirty(cx) TODO - false + self.cell_map.values().any(|cell| { + if let Cell::Code(code_cell) = cell { + code_cell.read(cx).is_dirty(cx) + } else { + false + } + }) } } diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index b705a15568..a1335f2a0d 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -334,9 +334,11 @@ impl ExecutionView { result.transient.as_ref().and_then(|t| t.display_id.clone()), cx, ), - JupyterMessageContent::DisplayData(result) => { - Output::new(&result.data, result.transient.display_id.clone(), cx) - } + JupyterMessageContent::DisplayData(result) => Output::new( + &result.data, + result.transient.as_ref().and_then(|t| t.display_id.clone()), + cx, + ), JupyterMessageContent::StreamContent(result) => { // Previous stream data will combine together, handling colors, carriage returns, etc if let Some(new_terminal) = self.apply_terminal_text(&result.text, cx) { diff --git a/crates/repl/src/outputs/markdown.rs b/crates/repl/src/outputs/markdown.rs index c472082561..0688172e68 100644 --- a/crates/repl/src/outputs/markdown.rs +++ b/crates/repl/src/outputs/markdown.rs @@ -57,7 +57,7 @@ impl OutputContent for MarkdownView { fn buffer_content(&mut self, cx: &mut WindowContext) -> Option> { let buffer = 
cx.new_model(|cx| { - // todo!(): Bring in the language registry so we can set the language to markdown + // TODO: Bring in the language registry so we can set the language to markdown let mut buffer = Buffer::local(self.raw_text.clone(), cx) .with_language(language::PLAIN_TEXT.clone(), cx); buffer.set_capability(language::Capability::ReadOnly, cx); diff --git a/crates/repl/src/repl.rs b/crates/repl/src/repl.rs index be187ff16f..f0745aa25c 100644 --- a/crates/repl/src/repl.rs +++ b/crates/repl/src/repl.rs @@ -1,6 +1,6 @@ pub mod components; mod jupyter_settings; -mod kernels; +pub mod kernels; pub mod notebook; mod outputs; mod repl_editor; @@ -24,16 +24,15 @@ pub use crate::repl_sessions_ui::{ }; use crate::repl_store::ReplStore; pub use crate::session::Session; -use client::telemetry::Telemetry; pub const KERNEL_DOCS_URL: &str = "https://zed.dev/docs/repl#changing-kernels"; -pub fn init(fs: Arc, telemetry: Arc, cx: &mut AppContext) { +pub fn init(fs: Arc, cx: &mut AppContext) { set_dispatcher(zed_dispatcher(cx)); JupyterSettings::register(cx); ::editor::init_settings(cx); repl_sessions_ui::init(cx); - ReplStore::init(fs, telemetry, cx); + ReplStore::init(fs, cx); } fn zed_dispatcher(cx: &mut AppContext) -> impl Dispatcher { diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index b032b1804a..e190dd1911 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -7,7 +7,7 @@ use anyhow::{Context, Result}; use editor::Editor; use gpui::{prelude::*, Entity, View, WeakView, WindowContext}; use language::{BufferSnapshot, Language, LanguageName, Point}; -use project::{Item as _, WorktreeId}; +use project::{ProjectItem as _, WorktreeId}; use crate::repl_store::ReplStore; use crate::session::SessionEvent; @@ -33,7 +33,6 @@ pub fn assign_kernelspec( }); let fs = store.read(cx).fs().clone(); - let telemetry = store.read(cx).telemetry().clone(); if let Some(session) = 
store.read(cx).get_session(weak_editor.entity_id()).cloned() { // Drop previous session, start new one @@ -44,8 +43,7 @@ pub fn assign_kernelspec( }); } - let session = cx - .new_view(|cx| Session::new(weak_editor.clone(), fs, telemetry, kernel_specification, cx)); + let session = cx.new_view(|cx| Session::new(weak_editor.clone(), fs, kernel_specification, cx)); weak_editor .update(cx, |_editor, cx| { @@ -105,15 +103,13 @@ pub fn run(editor: WeakView, move_down: bool, cx: &mut WindowContext) -> .ok_or_else(|| anyhow::anyhow!("No kernel found for language: {}", language.name()))?; let fs = store.read(cx).fs().clone(); - let telemetry = store.read(cx).telemetry().clone(); let session = if let Some(session) = store.read(cx).get_session(editor.entity_id()).cloned() { session } else { let weak_editor = editor.downgrade(); - let session = cx - .new_view(|cx| Session::new(weak_editor, fs, telemetry, kernel_specification, cx)); + let session = cx.new_view(|cx| Session::new(weak_editor, fs, kernel_specification, cx)); editor.update(cx, |_editor, cx| { cx.notify(); diff --git a/crates/repl/src/repl_sessions_ui.rs b/crates/repl/src/repl_sessions_ui.rs index 32b91ce28c..018a142734 100644 --- a/crates/repl/src/repl_sessions_ui.rs +++ b/crates/repl/src/repl_sessions_ui.rs @@ -3,7 +3,7 @@ use gpui::{ actions, prelude::*, AnyElement, AppContext, EventEmitter, FocusHandle, FocusableView, Subscription, View, }; -use project::Item as _; +use project::ProjectItem as _; use ui::{prelude::*, ButtonLike, ElevationIndex, KeyBinding}; use util::ResultExt as _; use workspace::item::ItemEvent; @@ -73,21 +73,27 @@ pub fn init(cx: &mut AppContext) { return; } - let project_path = editor - .buffer() - .read(cx) - .as_singleton() - .and_then(|buffer| buffer.read(cx).project_path(cx)); + let buffer = editor.buffer().read(cx).as_singleton(); + + let language = buffer + .as_ref() + .and_then(|buffer| buffer.read(cx).language()); + + let project_path = buffer.and_then(|buffer| 
buffer.read(cx).project_path(cx)); let editor_handle = cx.view().downgrade(); - if let (Some(project_path), Some(project)) = (project_path, project) { - let store = ReplStore::global(cx); - store.update(cx, |store, cx| { - store - .refresh_python_kernelspecs(project_path.worktree_id, &project, cx) - .detach_and_log_err(cx); - }); + if let Some(language) = language { + if language.name() == "Python".into() { + if let (Some(project_path), Some(project)) = (project_path, project) { + let store = ReplStore::global(cx); + store.update(cx, |store, cx| { + store + .refresh_python_kernelspecs(project_path.worktree_id, &project, cx) + .detach_and_log_err(cx); + }); + } + } } editor diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index a4863b809b..d51b478729 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -1,17 +1,19 @@ use std::sync::Arc; use anyhow::Result; -use client::telemetry::Telemetry; use collections::HashMap; use command_palette_hooks::CommandPaletteFilter; use gpui::{ prelude::*, AppContext, EntityId, Global, Model, ModelContext, Subscription, Task, View, }; +use jupyter_websocket_client::RemoteServer; use language::Language; use project::{Fs, Project, WorktreeId}; use settings::{Settings, SettingsStore}; -use crate::kernels::{local_kernel_specifications, python_env_kernel_specifications}; +use crate::kernels::{ + list_remote_kernelspecs, local_kernel_specifications, python_env_kernel_specifications, +}; use crate::{JupyterSettings, KernelSpecification, Session}; struct GlobalReplStore(Model); @@ -25,15 +27,14 @@ pub struct ReplStore { kernel_specifications: Vec, selected_kernel_for_worktree: HashMap, kernel_specifications_for_worktree: HashMap>, - telemetry: Arc, _subscriptions: Vec, } impl ReplStore { const NAMESPACE: &'static str = "repl"; - pub(crate) fn init(fs: Arc, telemetry: Arc, cx: &mut AppContext) { - let store = cx.new_model(move |cx| Self::new(fs, telemetry, cx)); + pub(crate) fn init(fs: 
Arc, cx: &mut AppContext) { + let store = cx.new_model(move |cx| Self::new(fs, cx)); store .update(cx, |store, cx| store.refresh_kernelspecs(cx)) @@ -46,14 +47,13 @@ impl ReplStore { cx.global::().0.clone() } - pub fn new(fs: Arc, telemetry: Arc, cx: &mut ModelContext) -> Self { + pub fn new(fs: Arc, cx: &mut ModelContext) -> Self { let subscriptions = vec![cx.observe_global::(move |this, cx| { this.set_enabled(JupyterSettings::enabled(cx), cx); })]; let this = Self { fs, - telemetry, enabled: JupyterSettings::enabled(cx), sessions: HashMap::default(), kernel_specifications: Vec::new(), @@ -69,10 +69,6 @@ impl ReplStore { &self.fs } - pub fn telemetry(&self) -> &Arc { - &self.telemetry - } - pub fn is_enabled(&self) -> bool { self.enabled } @@ -141,21 +137,63 @@ impl ReplStore { }) } + fn get_remote_kernel_specifications( + &self, + cx: &mut ModelContext, + ) -> Option>>> { + match ( + std::env::var("JUPYTER_SERVER"), + std::env::var("JUPYTER_TOKEN"), + ) { + (Ok(server), Ok(token)) => { + let remote_server = RemoteServer { + base_url: server, + token, + }; + let http_client = cx.http_client(); + Some(cx.spawn(|_, _| async move { + list_remote_kernelspecs(remote_server, http_client) + .await + .map(|specs| specs.into_iter().map(KernelSpecification::Remote).collect()) + })) + } + _ => None, + } + } + pub fn refresh_kernelspecs(&mut self, cx: &mut ModelContext) -> Task> { let local_kernel_specifications = local_kernel_specifications(self.fs.clone()); - cx.spawn(|this, mut cx| async move { - let local_kernel_specifications = local_kernel_specifications.await?; + let remote_kernel_specifications = self.get_remote_kernel_specifications(cx); - let mut kernel_options = Vec::new(); - for kernel_specification in local_kernel_specifications { - kernel_options.push(KernelSpecification::Jupyter(kernel_specification)); + let all_specs = cx.background_executor().spawn(async move { + let mut all_specs = local_kernel_specifications + .await? 
+ .into_iter() + .map(KernelSpecification::Jupyter) + .collect::>(); + + if let Some(remote_task) = remote_kernel_specifications { + if let Ok(remote_specs) = remote_task.await { + all_specs.extend(remote_specs); + } } - this.update(&mut cx, |this, cx| { - this.kernel_specifications = kernel_options; - cx.notify(); - }) + anyhow::Ok(all_specs) + }); + + cx.spawn(|this, mut cx| async move { + let all_specs = all_specs.await; + + if let Ok(specs) = all_specs { + this.update(&mut cx, |this, cx| { + this.kernel_specifications = specs; + cx.notify(); + }) + .ok(); + } + + anyhow::Ok(()) }) } @@ -224,8 +262,9 @@ impl ReplStore { runtime_specification.kernelspec.language.to_lowercase() == language_at_cursor.code_fence_block_name().to_lowercase() } - KernelSpecification::Remote(_) => { - unimplemented!() + KernelSpecification::Remote(remote_spec) => { + remote_spec.kernelspec.language.to_lowercase() + == language_at_cursor.code_fence_block_name().to_lowercase() } }) .cloned() diff --git a/crates/repl/src/session.rs b/crates/repl/src/session.rs index 74ce497572..b98027d430 100644 --- a/crates/repl/src/session.rs +++ b/crates/repl/src/session.rs @@ -1,11 +1,11 @@ use crate::components::KernelListItem; +use crate::kernels::RemoteRunningKernel; use crate::setup_editor_session_actions; use crate::{ - kernels::{Kernel, KernelSpecification, RunningKernel}, + kernels::{Kernel, KernelSpecification, NativeRunningKernel}, outputs::{ExecutionStatus, ExecutionView}, KernelStatus, }; -use client::telemetry::Telemetry; use collections::{HashMap, HashSet}; use editor::{ display_map::{ @@ -15,8 +15,7 @@ use editor::{ scroll::Autoscroll, Anchor, AnchorRangeExt as _, Editor, MultiBuffer, ToPoint, }; -use futures::io::BufReader; -use futures::{AsyncBufReadExt as _, FutureExt as _, StreamExt as _}; +use futures::FutureExt as _; use gpui::{ div, prelude::*, EventEmitter, Model, Render, Subscription, Task, View, ViewContext, WeakView, }; @@ -29,16 +28,14 @@ use runtimelib::{ use 
std::{env::temp_dir, ops::Range, sync::Arc, time::Duration}; use theme::ActiveTheme; use ui::{prelude::*, IconButtonShape, Tooltip}; +use util::ResultExt as _; pub struct Session { fs: Arc, editor: WeakView, pub kernel: Kernel, blocks: HashMap, - messaging_task: Option>, - process_status_task: Option>, pub kernel_specification: KernelSpecification, - telemetry: Arc, _buffer_subscription: Subscription, } @@ -195,7 +192,6 @@ impl Session { pub fn new( editor: WeakView, fs: Arc, - telemetry: Arc, kernel_specification: KernelSpecification, cx: &mut ViewContext, ) -> Self { @@ -219,12 +215,9 @@ impl Session { fs, editor, kernel: Kernel::StartingKernel(Task::ready(()).shared()), - messaging_task: None, - process_status_task: None, blocks: HashMap::default(), kernel_specification, _buffer_subscription: subscription, - telemetry, }; session.start_kernel(cx); @@ -240,138 +233,47 @@ impl Session { .and_then(|editor| editor.read(cx).working_directory(cx)) .unwrap_or_else(temp_dir); - self.telemetry.report_repl_event( - kernel_language.into(), - KernelStatus::Starting.to_string(), - cx.entity_id().to_string(), + telemetry::event!( + "Kernel Status Changed", + kernel_language, + kernel_status = KernelStatus::Starting.to_string(), + repl_session_id = cx.entity_id().to_string(), ); - let kernel = RunningKernel::new( - self.kernel_specification.clone(), - entity_id, - working_directory, - self.fs.clone(), - cx, - ); + let session_view = cx.view().clone(); + + let kernel = match self.kernel_specification.clone() { + KernelSpecification::Jupyter(kernel_specification) + | KernelSpecification::PythonEnv(kernel_specification) => NativeRunningKernel::new( + kernel_specification, + entity_id, + working_directory, + self.fs.clone(), + session_view, + cx, + ), + KernelSpecification::Remote(remote_kernel_specification) => RemoteRunningKernel::new( + remote_kernel_specification, + working_directory, + session_view, + cx, + ), + }; let pending_kernel = cx .spawn(|this, mut cx| async move { 
let kernel = kernel.await; match kernel { - Ok((mut kernel, mut messages_rx)) => { + Ok(kernel) => { this.update(&mut cx, |session, cx| { - let stderr = kernel.process.stderr.take(); - - cx.spawn(|_session, mut _cx| async move { - if stderr.is_none() { - return; - } - let reader = BufReader::new(stderr.unwrap()); - let mut lines = reader.lines(); - while let Some(Ok(line)) = lines.next().await { - // todo!(): Log stdout and stderr to something the session can show - log::error!("kernel: {}", line); - } - }) - .detach(); - - let stdout = kernel.process.stdout.take(); - - cx.spawn(|_session, mut _cx| async move { - if stdout.is_none() { - return; - } - let reader = BufReader::new(stdout.unwrap()); - let mut lines = reader.lines(); - while let Some(Ok(line)) = lines.next().await { - log::info!("kernel: {}", line); - } - }) - .detach(); - - let status = kernel.process.status(); session.kernel(Kernel::RunningKernel(kernel), cx); - - let process_status_task = cx.spawn(|session, mut cx| async move { - let error_message = match status.await { - Ok(status) => { - if status.success() { - log::info!("kernel process exited successfully"); - return; - } - - format!("kernel process exited with status: {:?}", status) - } - Err(err) => { - format!("kernel process exited with error: {:?}", err) - } - }; - - log::error!("{}", error_message); - - session - .update(&mut cx, |session, cx| { - session.kernel( - Kernel::ErroredLaunch(error_message.clone()), - cx, - ); - - session.blocks.values().for_each(|block| { - block.execution_view.update( - cx, - |execution_view, cx| { - match execution_view.status { - ExecutionStatus::Finished => { - // Do nothing when the output was good - } - _ => { - // All other cases, set the status to errored - execution_view.status = - ExecutionStatus::KernelErrored( - error_message.clone(), - ) - } - } - cx.notify(); - }, - ); - }); - - cx.notify(); - }) - .ok(); - }); - - session.process_status_task = Some(process_status_task); - - session.messaging_task 
= Some(cx.spawn(|session, mut cx| async move { - while let Some(message) = messages_rx.next().await { - session - .update(&mut cx, |session, cx| { - session.route(&message, cx); - }) - .ok(); - } - })); - - // todo!(@rgbkrk): send KernelInfoRequest once our shell channel read/writes are split - // cx.spawn(|this, mut cx| async move { - // cx.background_executor() - // .timer(Duration::from_millis(120)) - // .await; - // this.update(&mut cx, |this, cx| { - // this.send(KernelInfoRequest {}.into(), cx).ok(); - // }) - // .ok(); - // }) - // .detach(); }) .ok(); } Err(err) => { this.update(&mut cx, |session, cx| { - session.kernel(Kernel::ErroredLaunch(err.to_string()), cx); + session.kernel_errored(err.to_string(), cx); }) .ok(); } @@ -383,6 +285,26 @@ impl Session { cx.notify(); } + pub fn kernel_errored(&mut self, error_message: String, cx: &mut ViewContext) { + self.kernel(Kernel::ErroredLaunch(error_message.clone()), cx); + + self.blocks.values().for_each(|block| { + block.execution_view.update(cx, |execution_view, cx| { + match execution_view.status { + ExecutionStatus::Finished => { + // Do nothing when the output was good + } + _ => { + // All other cases, set the status to errored + execution_view.status = + ExecutionStatus::KernelErrored(error_message.clone()) + } + } + cx.notify(); + }); + }); + } + fn on_buffer_event( &mut self, buffer: Model, @@ -416,7 +338,7 @@ impl Session { fn send(&mut self, message: JupyterMessage, _cx: &mut ViewContext) -> anyhow::Result<()> { if let Kernel::RunningKernel(kernel) = &mut self.kernel { - kernel.request_tx.try_send(message).ok(); + kernel.request_tx().try_send(message).ok(); } anyhow::Ok(()) @@ -553,7 +475,7 @@ impl Session { } } - fn route(&mut self, message: &JupyterMessage, cx: &mut ViewContext) { + pub fn route(&mut self, message: &JupyterMessage, cx: &mut ViewContext) { let parent_message_id = match message.parent_header.as_ref() { Some(header) => &header.msg_id, None => return, @@ -563,10 +485,11 @@ impl Session 
{ JupyterMessageContent::Status(status) => { self.kernel.set_execution_state(&status.execution_state); - self.telemetry.report_repl_event( - self.kernel_specification.language().into(), - KernelStatus::from(&self.kernel).to_string(), - cx.entity_id().to_string(), + telemetry::event!( + "Kernel Status Changed", + kernel_language = self.kernel_specification.language(), + kernel_status = KernelStatus::from(&self.kernel).to_string(), + repl_session_id = cx.entity_id().to_string(), ); cx.notify(); @@ -615,12 +538,13 @@ impl Session { } let kernel_status = KernelStatus::from(&kernel).to_string(); - let kernel_language = self.kernel_specification.language().into(); + let kernel_language = self.kernel_specification.language(); - self.telemetry.report_repl_event( + telemetry::event!( + "Kernel Status Changed", kernel_language, kernel_status, - cx.entity_id().to_string(), + repl_session_id = cx.entity_id().to_string(), ); self.kernel = kernel; @@ -631,23 +555,19 @@ impl Session { match kernel { Kernel::RunningKernel(mut kernel) => { - let mut request_tx = kernel.request_tx.clone(); + let mut request_tx = kernel.request_tx().clone(); + + let forced = kernel.force_shutdown(cx); cx.spawn(|this, mut cx| async move { let message: JupyterMessage = ShutdownRequest { restart: false }.into(); request_tx.try_send(message).ok(); + forced.await.log_err(); + // Give the kernel a bit of time to clean up cx.background_executor().timer(Duration::from_secs(3)).await; - this.update(&mut cx, |session, _cx| { - session.messaging_task.take(); - session.process_status_task.take(); - }) - .ok(); - - kernel.process.kill().ok(); - this.update(&mut cx, |session, cx| { session.clear_outputs(cx); session.kernel(Kernel::Shutdown, cx); @@ -658,8 +578,6 @@ impl Session { .detach(); } _ => { - self.messaging_task.take(); - self.process_status_task.take(); self.kernel(Kernel::Shutdown, cx); } } @@ -674,7 +592,9 @@ impl Session { // Do nothing if already restarting } Kernel::RunningKernel(mut kernel) => { - 
let mut request_tx = kernel.request_tx.clone(); + let mut request_tx = kernel.request_tx().clone(); + + let forced = kernel.force_shutdown(cx); cx.spawn(|this, mut cx| async move { // Send shutdown request with restart flag @@ -682,21 +602,15 @@ impl Session { let message: JupyterMessage = ShutdownRequest { restart: true }.into(); request_tx.try_send(message).ok(); - this.update(&mut cx, |session, _cx| { - session.messaging_task.take(); - session.process_status_task.take(); - }) - .ok(); - // Wait for kernel to shutdown cx.background_executor().timer(Duration::from_secs(1)).await; // Force kill the kernel if it hasn't shut down - kernel.process.kill().ok(); + forced.await.log_err(); // Start a new kernel this.update(&mut cx, |session, cx| { - // todo!(): Differentiate between restart and restart+clear-outputs + // TODO: Differentiate between restart and restart+clear-outputs session.clear_outputs(cx); session.start_kernel(cx); }) @@ -705,9 +619,6 @@ impl Session { .detach(); } _ => { - // If it's not already running, we can just clean up and start a new kernel - self.messaging_task.take(); - self.process_status_task.take(); self.clear_outputs(cx); self.start_kernel(cx); } @@ -727,7 +638,7 @@ impl Render for Session { let (status_text, interrupt_button) = match &self.kernel { Kernel::RunningKernel(kernel) => ( kernel - .kernel_info + .kernel_info() .as_ref() .map(|info| info.language_info.name.clone()), Some( @@ -747,7 +658,7 @@ impl Render for Session { KernelListItem::new(self.kernel_specification.clone()) .status_color(match &self.kernel { - Kernel::RunningKernel(kernel) => match kernel.execution_state { + Kernel::RunningKernel(kernel) => match kernel.execution_state() { ExecutionState::Idle => Color::Success, ExecutionState::Busy => Color::Modified, }, diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs index 80b7786c24..df830419d3 100644 --- a/crates/rich_text/src/rich_text.rs +++ b/crates/rich_text/src/rich_text.rs @@ -310,12 
+310,7 @@ pub fn render_markdown_mut( } Event::Start(tag) => match tag { Tag::Paragraph => new_paragraph(text, &mut list_stack), - Tag::Heading { - level: _, - id: _, - classes: _, - attrs: _, - } => { + Tag::Heading { .. } => { new_paragraph(text, &mut list_stack); bold_depth += 1; } @@ -333,12 +328,7 @@ pub fn render_markdown_mut( Tag::Emphasis => italic_depth += 1, Tag::Strong => bold_depth += 1, Tag::Strikethrough => strikethrough_depth += 1, - Tag::Link { - link_type: _, - dest_url, - title: _, - id: _, - } => link_url = Some(dest_url.to_string()), + Tag::Link { dest_url, .. } => link_url = Some(dest_url.to_string()), Tag::List(number) => { list_stack.push((number, false)); } diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index c158d2429e..5c2b9b87c3 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -504,8 +504,6 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] pub fn tabs(&self) -> Tabs { Tabs { - byte_offset: 0, - char_offset: 0, tabs: self.tabs, chars: self.chars, } @@ -513,8 +511,6 @@ impl<'a> ChunkSlice<'a> { } pub struct Tabs { - byte_offset: usize, - char_offset: usize, tabs: u128, chars: u128, } @@ -536,21 +532,14 @@ impl Iterator for Tabs { let tab_offset = self.tabs.trailing_zeros() as usize; let chars_mask = (1 << tab_offset) - 1; let char_offset = (self.chars & chars_mask).count_ones() as usize; - self.byte_offset += tab_offset; - self.char_offset += char_offset; - let position = TabPosition { - byte_offset: self.byte_offset, - char_offset: self.char_offset, - }; - self.byte_offset += 1; - self.char_offset += 1; - if self.byte_offset == MAX_BASE { - self.tabs = 0; - } else { - self.tabs >>= tab_offset + 1; - self.chars >>= tab_offset + 1; - } + // Since tabs are 1 byte the tab offset is the same as the byte offset + let position = TabPosition { + byte_offset: tab_offset, + char_offset: char_offset, + }; + // Remove the tab we've just seen + self.tabs ^= 1 << tab_offset; Some(position) } diff --git 
a/crates/rpc/src/llm.rs b/crates/rpc/src/llm.rs index 0a7510d891..df48d9ed92 100644 --- a/crates/rpc/src/llm.rs +++ b/crates/rpc/src/llm.rs @@ -33,3 +33,15 @@ pub struct PerformCompletionParams { pub model: String, pub provider_request: Box, } + +#[derive(Debug, Serialize, Deserialize)] +pub struct PredictEditsParams { + pub outline: Option, + pub input_events: String, + pub input_excerpt: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct PredictEditsResponse { + pub output_excerpt: String, +} diff --git a/crates/rpc/src/notification.rs b/crates/rpc/src/notification.rs index bf76436468..52108f13dc 100644 --- a/crates/rpc/src/notification.rs +++ b/crates/rpc/src/notification.rs @@ -1,7 +1,7 @@ use crate::proto; use serde::{Deserialize, Serialize}; use serde_json::{map, Value}; -use strum::{EnumVariantNames, VariantNames as _}; +use strum::VariantNames; const KIND: &str = "kind"; const ENTITY_ID: &str = "entity_id"; @@ -15,7 +15,7 @@ const ENTITY_ID: &str = "entity_id"; /// Most notification types have a special field which is aliased to /// `entity_id`. This field is stored in its own database column, and can /// be used to query the notification. 
-#[derive(Debug, Clone, PartialEq, Eq, EnumVariantNames, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, VariantNames, Serialize, Deserialize)] #[serde(tag = "kind")] pub enum Notification { ContactRequest { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 41e5ba28df..a81ddc1a6a 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -27,7 +27,10 @@ use settings::Settings; use std::sync::Arc; use theme::ThemeSettings; -use ui::{h_flex, prelude::*, IconButton, IconButtonShape, IconName, Tooltip, BASE_REM_SIZE_IN_PX}; +use ui::{ + h_flex, prelude::*, utils::SearchInputWidth, IconButton, IconButtonShape, IconName, Tooltip, + BASE_REM_SIZE_IN_PX, +}; use util::ResultExt; use workspace::{ item::ItemHandle, @@ -38,8 +41,6 @@ use workspace::{ pub use registrar::DivRegistrar; use registrar::{ForDeployed, ForDismissed, SearchActionsRegistrar, WithResults}; -const MIN_INPUT_WIDTH_REMS: f32 = 10.; -const MAX_INPUT_WIDTH_REMS: f32 = 30.; const MAX_BUFFER_SEARCH_HISTORY_SIZE: usize = 50; #[derive(PartialEq, Clone, Deserialize)] @@ -160,12 +161,12 @@ impl Render for BufferSearchBar { query_editor.placeholder_text(cx).is_none() }) { self.query_editor.update(cx, |editor, cx| { - editor.set_placeholder_text("Search", cx); + editor.set_placeholder_text("Search…", cx); }); } self.replacement_editor.update(cx, |editor, cx| { - editor.set_placeholder_text("Replace with...", cx); + editor.set_placeholder_text("Replace with…", cx); }); let mut text_color = Color::Default; @@ -203,54 +204,67 @@ impl Render for BufferSearchBar { cx.theme().colors().border }; + let container_width = cx.viewport_size().width; + let input_width = SearchInputWidth::calc_width(container_width); + + let input_base_styles = || { + h_flex() + .min_w_32() + .w(input_width) + .h_8() + .pl_2() + .pr_1() + .py_1() + .border_1() + .border_color(editor_border) + .rounded_lg() + }; + let search_line = h_flex() .gap_2() .child( - 
h_flex() + input_base_styles() .id("editor-scroll") .track_scroll(&self.editor_scroll_handle) - .flex_1() - .h_8() - .px_2() - .py_1() - .border_1() - .border_color(editor_border) - .min_w(rems(MIN_INPUT_WIDTH_REMS)) - .max_w(rems(MAX_INPUT_WIDTH_REMS)) - .rounded_lg() .child(self.render_text_input(&self.query_editor, text_color.color(cx), cx)) .when(!hide_inline_icons, |div| { - div.children(supported_options.case.then(|| { - self.render_search_option_button( - SearchOptions::CASE_SENSITIVE, - focus_handle.clone(), - cx.listener(|this, _, cx| { - this.toggle_case_sensitive(&ToggleCaseSensitive, cx) - }), - ) - })) - .children(supported_options.word.then(|| { - self.render_search_option_button( - SearchOptions::WHOLE_WORD, - focus_handle.clone(), - cx.listener(|this, _, cx| { - this.toggle_whole_word(&ToggleWholeWord, cx) - }), - ) - })) - .children(supported_options.regex.then(|| { - self.render_search_option_button( - SearchOptions::REGEX, - focus_handle.clone(), - cx.listener(|this, _, cx| this.toggle_regex(&ToggleRegex, cx)), - ) - })) + div.child( + h_flex() + .gap_1() + .children(supported_options.case.then(|| { + self.render_search_option_button( + SearchOptions::CASE_SENSITIVE, + focus_handle.clone(), + cx.listener(|this, _, cx| { + this.toggle_case_sensitive(&ToggleCaseSensitive, cx) + }), + ) + })) + .children(supported_options.word.then(|| { + self.render_search_option_button( + SearchOptions::WHOLE_WORD, + focus_handle.clone(), + cx.listener(|this, _, cx| { + this.toggle_whole_word(&ToggleWholeWord, cx) + }), + ) + })) + .children(supported_options.regex.then(|| { + self.render_search_option_button( + SearchOptions::REGEX, + focus_handle.clone(), + cx.listener(|this, _, cx| { + this.toggle_regex(&ToggleRegex, cx) + }), + ) + })), + ) }), ) .child( h_flex() - .flex_none() - .gap_0p5() + .gap_1() + .min_w_64() .when(supported_options.replacement, |this| { this.child( IconButton::new( @@ -265,7 +279,7 @@ impl Render for BufferSearchBar { 
.on_click(cx.listener(|this, _: &ClickEvent, cx| { this.toggle_replace(&ToggleReplace, cx); })) - .selected(self.replace_enabled) + .toggle_state(self.replace_enabled) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -293,7 +307,7 @@ impl Render for BufferSearchBar { .on_click(cx.listener(|this, _: &ClickEvent, cx| { this.toggle_selection(&ToggleSelection, cx); })) - .selected(self.selection_search_enabled) + .toggle_state(self.selection_search_enabled) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -323,20 +337,27 @@ impl Render for BufferSearchBar { } }), ) - .child(render_nav_button( - ui::IconName::ChevronLeft, - self.active_match_index.is_some(), - "Select Previous Match", - &SelectPrevMatch, - focus_handle.clone(), - )) - .child(render_nav_button( - ui::IconName::ChevronRight, - self.active_match_index.is_some(), - "Select Next Match", - &SelectNextMatch, - focus_handle.clone(), - )) + .child( + h_flex() + .pl_2() + .ml_1() + .border_l_1() + .border_color(cx.theme().colors().border_variant) + .child(render_nav_button( + ui::IconName::ChevronLeft, + self.active_match_index.is_some(), + "Select Previous Match", + &SelectPrevMatch, + focus_handle.clone(), + )) + .child(render_nav_button( + ui::IconName::ChevronRight, + self.active_match_index.is_some(), + "Select Next Match", + &SelectNextMatch, + focus_handle.clone(), + )), + ) .when(!narrow_mode, |this| { this.child(h_flex().ml_2().min_w(rems_from_px(40.)).child( Label::new(match_text).size(LabelSize::Small).color( @@ -353,30 +374,15 @@ impl Render for BufferSearchBar { let replace_line = should_show_replace_input.then(|| { h_flex() .gap_2() - .flex_1() + .child(input_base_styles().child(self.render_text_input( + &self.replacement_editor, + cx.theme().colors().text, + cx, + ))) .child( h_flex() - .flex_1() - // We're giving this a fixed height to match the height of the search input, - // which has an icon inside that is increasing its height. 
- .h_8() - .px_2() - .py_1() - .border_1() - .border_color(cx.theme().colors().border) - .rounded_lg() - .min_w(rems(MIN_INPUT_WIDTH_REMS)) - .max_w(rems(MAX_INPUT_WIDTH_REMS)) - .child(self.render_text_input( - &self.replacement_editor, - cx.theme().colors().text, - cx, - )), - ) - .child( - h_flex() - .flex_none() - .gap_0p5() + .min_w_64() + .gap_1() .child( IconButton::new("search-replace-next", ui::IconName::ReplaceNext) .shape(IconButtonShape::Square) @@ -418,6 +424,8 @@ impl Render for BufferSearchBar { v_flex() .id("buffer_search") + .gap_2() + .py(px(1.0)) .track_scroll(&self.scroll_handle) .key_context(key_context) .capture_action(cx.listener(Self::tab)) @@ -446,20 +454,22 @@ impl Render for BufferSearchBar { .when(self.supported_options().selection, |this| { this.on_action(cx.listener(Self::toggle_selection)) }) - .gap_2() .child( h_flex() + .relative() .child(search_line.w_full()) .when(!narrow_mode, |div| { div.child( - IconButton::new(SharedString::from("Close"), IconName::Close) - .shape(IconButtonShape::Square) - .tooltip(move |cx| { - Tooltip::for_action("Close Search Bar", &Dismiss, cx) - }) - .on_click(cx.listener(|this, _: &ClickEvent, cx| { - this.dismiss(&Dismiss, cx) - })), + h_flex().absolute().right_0().child( + IconButton::new(SharedString::from("Close"), IconName::Close) + .shape(IconButtonShape::Square) + .tooltip(move |cx| { + Tooltip::for_action("Close Search Bar", &Dismiss, cx) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.dismiss(&Dismiss, cx) + })), + ), ) }), ) @@ -528,6 +538,11 @@ impl BufferSearchBar { this.toggle_whole_word(action, cx); } })); + registrar.register_handler(ForDeployed(|this, action: &ToggleRegex, cx| { + if this.supported_options().regex { + this.toggle_regex(action, cx); + } + })); registrar.register_handler(ForDeployed(|this, action: &ToggleSelection, cx| { if this.supported_options().selection { this.toggle_selection(action, cx); @@ -1866,6 +1881,86 @@ mod tests { .unwrap(); } + 
#[gpui::test] + async fn test_search_query_with_match_whole_word(cx: &mut TestAppContext) { + init_globals(cx); + let buffer_text = r#" + self.buffer.update(cx, |buffer, cx| { + buffer.edit( + edits, + Some(AutoindentMode::Block { + original_indent_columns, + }), + cx, + ) + }); + + this.buffer.update(cx, |buffer, cx| { + buffer.edit([(end_of_line..start_of_next_line, replace)], None, cx) + }); + "# + .unindent(); + let buffer = cx.new_model(|cx| Buffer::local(buffer_text, cx)); + let cx = cx.add_empty_window(); + + let editor = cx.new_view(|cx| Editor::for_buffer(buffer.clone(), None, cx)); + + let search_bar = cx.new_view(|cx| { + let mut search_bar = BufferSearchBar::new(cx); + search_bar.set_active_pane_item(Some(&editor), cx); + search_bar.show(cx); + search_bar + }); + + search_bar + .update(cx, |search_bar, cx| { + search_bar.search( + "edit\\(", + Some(SearchOptions::WHOLE_WORD | SearchOptions::REGEX), + cx, + ) + }) + .await + .unwrap(); + + search_bar.update(cx, |search_bar, cx| { + search_bar.select_all_matches(&SelectAllMatches, cx); + }); + search_bar.update(cx, |_, cx| { + let all_selections = + editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)); + assert_eq!( + all_selections.len(), + 2, + "Should select all `edit(` in the buffer, but got: {all_selections:?}" + ); + }); + + search_bar + .update(cx, |search_bar, cx| { + search_bar.search( + "edit(", + Some(SearchOptions::WHOLE_WORD | SearchOptions::CASE_SENSITIVE), + cx, + ) + }) + .await + .unwrap(); + + search_bar.update(cx, |search_bar, cx| { + search_bar.select_all_matches(&SelectAllMatches, cx); + }); + search_bar.update(cx, |_, cx| { + let all_selections = + editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)); + assert_eq!( + all_selections.len(), + 2, + "Should select all `edit(` in the buffer, but got: {all_selections:?}" + ); + }); + } + #[gpui::test] async fn test_search_query_history(cx: &mut TestAppContext) { init_globals(cx); diff --git 
a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 1f4492d992..321cbcc3ae 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -5,18 +5,16 @@ use crate::{ }; use collections::{HashMap, HashSet}; use editor::{ - actions::SelectAll, - items::active_match_index, - scroll::{Autoscroll, Axis}, - Anchor, Editor, EditorElement, EditorEvent, EditorSettings, EditorStyle, MultiBuffer, - MAX_TAB_TITLE_LEN, + actions::SelectAll, items::active_match_index, scroll::Autoscroll, Anchor, Editor, + EditorElement, EditorEvent, EditorSettings, EditorStyle, MultiBuffer, MAX_TAB_TITLE_LEN, }; use futures::StreamExt; use gpui::{ - actions, div, Action, AnyElement, AnyView, AppContext, Context as _, EntityId, EventEmitter, - FocusHandle, FocusableView, Global, Hsla, InteractiveElement, IntoElement, KeyContext, Model, - ModelContext, ParentElement, Point, Render, SharedString, Styled, Subscription, Task, - TextStyle, UpdateGlobal, View, ViewContext, VisualContext, WeakModel, WeakView, WindowContext, + actions, div, Action, AnyElement, AnyView, AppContext, Axis, Context as _, EntityId, + EventEmitter, FocusHandle, FocusableView, Global, Hsla, InteractiveElement, IntoElement, + KeyContext, Model, ModelContext, ParentElement, Point, Render, SharedString, Styled, + Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, VisualContext, WeakModel, + WeakView, WindowContext, }; use language::Buffer; use menu::Confirm; @@ -34,8 +32,8 @@ use std::{ }; use theme::ThemeSettings; use ui::{ - h_flex, prelude::*, v_flex, Icon, IconButton, IconButtonShape, IconName, KeyBinding, Label, - LabelCommon, LabelSize, Selectable, Tooltip, + h_flex, prelude::*, utils::SearchInputWidth, v_flex, Icon, IconButton, IconButtonShape, + IconName, KeyBinding, Label, LabelCommon, LabelSize, Toggleable, Tooltip, }; use util::paths::PathMatcher; use workspace::{ @@ -333,20 +331,20 @@ impl Render for ProjectSearchView { let model = 
self.model.read(cx); let has_no_results = model.no_results.unwrap_or(false); let is_search_underway = model.pending_search.is_some(); - let major_text = if is_search_underway { - "Searching..." + + let heading_text = if is_search_underway { + "Searching…" } else if has_no_results { - "No results" + "No Results" } else { - "Search all files" + "Search All Files" }; - let major_text = div() + let heading_text = div() .justify_center() - .max_w_96() - .child(Label::new(major_text).size(LabelSize::Large)); + .child(Label::new(heading_text).size(LabelSize::Large)); - let minor_text: Option = if let Some(no_results) = model.no_results { + let page_content: Option = if let Some(no_results) = model.no_results { if model.pending_search.is_none() && no_results { Some( Label::new("No results found in this project for the provided query") @@ -359,20 +357,24 @@ impl Render for ProjectSearchView { } else { Some(self.landing_text_minor(cx).into_any_element()) }; - let minor_text = minor_text.map(|text| div().items_center().max_w_96().child(text)); + + let page_content = page_content.map(|text| div().child(text)); + v_flex() - .flex_1() .size_full() + .items_center() .justify_center() + .overflow_hidden() .bg(cx.theme().colors().editor_background) .track_focus(&self.focus_handle(cx)) .child( - h_flex() - .size_full() - .justify_center() - .child(h_flex().flex_1()) - .child(v_flex().gap_1().child(major_text).children(minor_text)) - .child(h_flex().flex_1()), + v_flex() + .id("project-search-landing-page") + .overflow_y_scroll() + .max_w_80() + .gap_1() + .child(heading_text) + .children(page_content), ) } } @@ -445,7 +447,7 @@ impl Item for ProjectSearchView { fn for_each_project_item( &self, cx: &AppContext, - f: &mut dyn FnMut(EntityId, &dyn project::Item), + f: &mut dyn FnMut(EntityId, &dyn project::ProjectItem), ) { self.results_editor.for_each_project_item(cx, f) } @@ -536,7 +538,7 @@ impl Item for ProjectSearchView { } } - fn breadcrumb_location(&self) -> ToolbarItemLocation 
{ + fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation { if self.has_matches() { ToolbarItemLocation::Secondary } else { @@ -669,7 +671,7 @@ impl ProjectSearchView { let query_editor = cx.new_view(|cx| { let mut editor = Editor::single_line(cx); - editor.set_placeholder_text("Search all files...", cx); + editor.set_placeholder_text("Search all files…", cx); editor.set_text(query_text, cx); editor }); @@ -692,7 +694,7 @@ impl ProjectSearchView { ); let replacement_editor = cx.new_view(|cx| { let mut editor = Editor::single_line(cx); - editor.set_placeholder_text("Replace in project...", cx); + editor.set_placeholder_text("Replace in project…", cx); if let Some(text) = replacement_text { editor.set_text(text, cx); } @@ -1252,7 +1254,7 @@ impl ProjectSearchView { fn buffer_search_query( workspace: &mut Workspace, item: &dyn ItemHandle, - cx: &mut ViewContext<'_, Workspace>, + cx: &mut ViewContext, ) -> Option { let buffer_search_bar = workspace .pane_for(item) @@ -1586,11 +1588,16 @@ impl Render for ProjectSearchBar { let search = search.read(cx); let focus_handle = search.focus_handle(cx); + let container_width = cx.viewport_size().width; + let input_width = SearchInputWidth::calc_width(container_width); + let input_base_styles = || { h_flex() - .w_full() + .min_w_32() + .w(input_width) .h_8() - .px_2() + .pl_2() + .pr_1() .py_1() .border_1() .border_color(search.border_color_for(InputPanel::Query, cx)) @@ -1604,7 +1611,7 @@ impl Render for ProjectSearchBar { .child(self.render_text_input(&search.query_editor, cx)) .child( h_flex() - .gap_0p5() + .gap_1() .child(SearchOptions::CASE_SENSITIVE.as_button( self.is_option_enabled(SearchOptions::CASE_SENSITIVE, cx), focus_handle.clone(), @@ -1637,7 +1644,7 @@ impl Render for ProjectSearchBar { .on_click(cx.listener(|this, _, cx| { this.toggle_filters(cx); })) - .selected( + .toggle_state( self.active_project_search .as_ref() .map(|search| search.read(cx).filters_enabled) @@ -1661,7 +1668,7 @@ impl Render 
for ProjectSearchBar { .on_click(cx.listener(|this, _, cx| { this.toggle_replace(&ToggleReplace, cx); })) - .selected( + .toggle_state( self.active_project_search .as_ref() .map(|search| search.read(cx).replace_enabled) @@ -1701,6 +1708,10 @@ impl Render for ProjectSearchBar { .unwrap_or_else(|| "0/0".to_string()); let matches_column = h_flex() + .pl_2() + .ml_2() + .border_l_1() + .border_color(cx.theme().colors().border_variant) .child( IconButton::new("project-search-prev-match", IconName::ChevronLeft) .shape(IconButtonShape::Square) @@ -1751,13 +1762,13 @@ impl Render for ProjectSearchBar { div() .id("matches") .ml_1() - .child( - Label::new(match_text).color(if search.active_match_index.is_some() { + .child(Label::new(match_text).size(LabelSize::Small).color( + if search.active_match_index.is_some() { Color::Default } else { Color::Disabled - }), - ) + }, + )) .when(limit_reached, |el| { el.tooltip(|cx| { Tooltip::text("Search limits reached.\nTry narrowing your search.", cx) @@ -1767,9 +1778,9 @@ impl Render for ProjectSearchBar { let search_line = h_flex() .w_full() - .gap_1p5() + .gap_2() .child(query_column) - .child(h_flex().min_w_40().child(mode_column).child(matches_column)); + .child(h_flex().min_w_64().child(mode_column).child(matches_column)); let replace_line = search.replace_enabled.then(|| { let replace_column = @@ -1779,7 +1790,7 @@ impl Render for ProjectSearchBar { let replace_actions = h_flex() - .min_w_40() + .min_w_64() .gap_1() .when(search.replace_enabled, |this| { this.child( @@ -1830,7 +1841,7 @@ impl Render for ProjectSearchBar { h_flex() .w_full() - .gap_1p5() + .gap_2() .child(replace_column) .child(replace_actions) }); @@ -1838,7 +1849,7 @@ impl Render for ProjectSearchBar { let filter_line = search.filters_enabled.then(|| { h_flex() .w_full() - .gap_1p5() + .gap_2() .child( input_base_styles() .on_action( @@ -1861,13 +1872,12 @@ impl Render for ProjectSearchBar { ) .child( h_flex() - .min_w_40() + .min_w_64() .gap_1() .child( 
IconButton::new("project-search-opened-only", IconName::FileSearch) .shape(IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .selected(self.is_opened_only_enabled(cx)) + .toggle_state(self.is_opened_only_enabled(cx)) .tooltip(|cx| Tooltip::text("Only Search Open Files", cx)) .on_click(cx.listener(|this, _, cx| { this.toggle_opened_only(cx); @@ -1896,6 +1906,7 @@ impl Render for ProjectSearchBar { } v_flex() + .py(px(1.0)) .key_context(key_context) .on_action(cx.listener(|this, _: &ToggleFocus, cx| this.move_focus_to_results(cx))) .on_action(cx.listener(|this, _: &ToggleFilters, cx| { diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index 60ff80834f..adca7bd049 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -113,7 +113,7 @@ impl SearchOptions { .on_click(action) .style(ButtonStyle::Subtle) .shape(IconButtonShape::Square) - .selected(active) + .toggle_state(active) .tooltip({ let action = self.to_toggle_action(); let label = self.label(); diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 2efd94cb57..25e03f5b3a 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -25,7 +25,7 @@ fn main() { store.update_user_settings::(cx, |_| {}); }); - let clock = Arc::new(FakeSystemClock::default()); + let clock = Arc::new(FakeSystemClock::new()); let http = Arc::new(HttpClientWithUrl::new( Arc::new( diff --git a/crates/semantic_index/src/embedding_index.rs b/crates/semantic_index/src/embedding_index.rs index 0913124341..4e3d74a2ea 100644 --- a/crates/semantic_index/src/embedding_index.rs +++ b/crates/semantic_index/src/embedding_index.rs @@ -7,6 +7,7 @@ use anyhow::{anyhow, Context as _, Result}; use collections::Bound; use feature_flags::FeatureFlagAppExt; use fs::Fs; +use fs::MTime; use futures::stream::StreamExt; use futures_batch::ChunksTimeoutStreamExt; use gpui::{AppContext, Model, Task}; @@ -17,14 +18,7 @@ use 
project::{Entry, UpdatedEntriesSet, Worktree}; use serde::{Deserialize, Serialize}; use smol::channel; use smol::future::FutureExt; -use std::{ - cmp::Ordering, - future::Future, - iter, - path::Path, - sync::Arc, - time::{Duration, SystemTime}, -}; +use std::{cmp::Ordering, future::Future, iter, path::Path, sync::Arc, time::Duration}; use util::ResultExt; use worktree::Snapshot; @@ -451,7 +445,7 @@ struct ChunkFiles { pub struct ChunkedFile { pub path: Arc, - pub mtime: Option, + pub mtime: Option, pub handle: IndexingEntryHandle, pub text: String, pub chunks: Vec, @@ -465,7 +459,7 @@ pub struct EmbedFiles { #[derive(Debug, Serialize, Deserialize)] pub struct EmbeddedFile { pub path: Arc, - pub mtime: Option, + pub mtime: Option, pub chunks: Vec, } diff --git a/crates/semantic_index/src/project_index.rs b/crates/semantic_index/src/project_index.rs index 21c036d60a..bc18eccc18 100644 --- a/crates/semantic_index/src/project_index.rs +++ b/crates/semantic_index/src/project_index.rs @@ -125,7 +125,7 @@ impl ProjectIndex { cx: &mut ModelContext, ) { match event { - project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => { + project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { self.update_worktree_indices(cx); } _ => {} diff --git a/crates/semantic_index/src/project_index_debug_view.rs b/crates/semantic_index/src/project_index_debug_view.rs index d6628064ac..80563bcffa 100644 --- a/crates/semantic_index/src/project_index_debug_view.rs +++ b/crates/semantic_index/src/project_index_debug_view.rs @@ -196,7 +196,7 @@ impl ProjectIndexDebugView { } impl Render for ProjectIndexDebugView { - fn render(&mut self, cx: &mut gpui::ViewContext<'_, Self>) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { if let Some(selected_path) = self.selected_path.as_ref() { v_flex() .child( diff --git a/crates/semantic_index/src/summary_backlog.rs b/crates/semantic_index/src/summary_backlog.rs index 
c6d8e33a45..e77fa4862f 100644 --- a/crates/semantic_index/src/summary_backlog.rs +++ b/crates/semantic_index/src/summary_backlog.rs @@ -1,5 +1,6 @@ use collections::HashMap; -use std::{path::Path, sync::Arc, time::SystemTime}; +use fs::MTime; +use std::{path::Path, sync::Arc}; const MAX_FILES_BEFORE_RESUMMARIZE: usize = 4; const MAX_BYTES_BEFORE_RESUMMARIZE: u64 = 1_000_000; // 1 MB @@ -7,14 +8,14 @@ const MAX_BYTES_BEFORE_RESUMMARIZE: u64 = 1_000_000; // 1 MB #[derive(Default, Debug)] pub struct SummaryBacklog { /// Key: path to a file that needs summarization, but that we haven't summarized yet. Value: that file's size on disk, in bytes, and its mtime. - files: HashMap, (u64, Option)>, + files: HashMap, (u64, Option)>, /// Cache of the sum of all values in `files`, so we don't have to traverse the whole map to check if we're over the byte limit. total_bytes: u64, } impl SummaryBacklog { /// Store the given path in the backlog, along with how many bytes are in it. - pub fn insert(&mut self, path: Arc, bytes_on_disk: u64, mtime: Option) { + pub fn insert(&mut self, path: Arc, bytes_on_disk: u64, mtime: Option) { let (prev_bytes, _) = self .files .insert(path, (bytes_on_disk, mtime)) @@ -34,7 +35,7 @@ impl SummaryBacklog { /// Remove all the entries in the backlog and return the file paths as an iterator. 
#[allow(clippy::needless_lifetimes)] // Clippy thinks this 'a can be elided, but eliding it gives a compile error - pub fn drain<'a>(&'a mut self) -> impl Iterator, Option)> + 'a { + pub fn drain<'a>(&'a mut self) -> impl Iterator, Option)> + 'a { self.total_bytes = 0; self.files diff --git a/crates/semantic_index/src/summary_index.rs b/crates/semantic_index/src/summary_index.rs index 1cbb670397..44cac88564 100644 --- a/crates/semantic_index/src/summary_index.rs +++ b/crates/semantic_index/src/summary_index.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Context as _, Result}; use arrayvec::ArrayString; -use fs::Fs; +use fs::{Fs, MTime}; use futures::{stream::StreamExt, TryFutureExt}; use futures_batch::ChunksTimeoutStreamExt; use gpui::{AppContext, Model, Task}; @@ -21,7 +21,7 @@ use std::{ future::Future, path::Path, sync::Arc, - time::{Duration, Instant, SystemTime}, + time::{Duration, Instant}, }; use util::ResultExt; use worktree::Snapshot; @@ -39,7 +39,7 @@ struct UnsummarizedFile { // Path to the file on disk path: Arc, // The mtime of the file on disk - mtime: Option, + mtime: Option, // BLAKE3 hash of the source file's contents digest: Blake3Digest, // The source file's contents @@ -51,7 +51,7 @@ struct SummarizedFile { // Path to the file on disk path: String, // The mtime of the file on disk - mtime: Option, + mtime: Option, // BLAKE3 hash of the source file's contents digest: Blake3Digest, // The LLM's summary of the file's contents @@ -63,7 +63,7 @@ pub type Blake3Digest = ArrayString<{ blake3::OUT_LEN * 2 }>; #[derive(Debug, Serialize, Deserialize)] pub struct FileDigest { - pub mtime: Option, + pub mtime: Option, pub digest: Blake3Digest, } @@ -88,7 +88,7 @@ pub struct SummaryIndex { } struct Backlogged { - paths_to_digest: channel::Receiver, Option)>>, + paths_to_digest: channel::Receiver, Option)>>, task: Task>, } @@ -319,7 +319,7 @@ impl SummaryIndex { digest_db: heed::Database>, txn: &RoTxn<'_>, entry: &Entry, - ) -> Vec<(Arc, Option)> { + ) -> Vec<(Arc, 
Option)> { let entry_db_key = db_key_for_path(&entry.path); match digest_db.get(&txn, &entry_db_key) { @@ -414,7 +414,7 @@ impl SummaryIndex { fn digest_files( &self, - paths: channel::Receiver, Option)>>, + paths: channel::Receiver, Option)>>, worktree_abs_path: Arc, cx: &AppContext, ) -> MightNeedSummaryFiles { @@ -646,7 +646,7 @@ impl SummaryIndex { let start = Instant::now(); let backlogged = { let (tx, rx) = channel::bounded(512); - let needs_summary: Vec<(Arc, Option)> = { + let needs_summary: Vec<(Arc, Option)> = { let mut backlog = self.backlog.lock(); backlog.drain().collect() diff --git a/crates/settings/src/key_equivalents.rs b/crates/settings/src/key_equivalents.rs index 1c68f48db4..bf08de97ae 100644 --- a/crates/settings/src/key_equivalents.rs +++ b/crates/settings/src/key_equivalents.rs @@ -5,7 +5,7 @@ use collections::HashMap; // for those users. // // The way macOS solves this problem is to move shortcuts around so that they are all reachable, -// even if the mnemoic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct +// even if the mnemonic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct // // For example, cmd-> is the "switch window" shortcut because the > key is right above tab. // To ensure this doesn't cause problems for shortcuts defined for a QWERTY layout, apple moves @@ -26,157 +26,1396 @@ use collections::HashMap; // From there I used multi-cursor to produce this match statement. 
#[cfg(target_os = "macos")] pub fn get_key_equivalents(layout: &str) -> Option> { - let (from, to) = match layout { - "com.apple.keylayout.Welsh" => ("#", "£"), - "com.apple.keylayout.Turkmen" => ("qc]Q`|[XV\\^v~Cx}{", "äçöÄžŞňÜÝş№ýŽÇüÖŇ"), - "com.apple.keylayout.Turkish-QWERTY-PC" => ( - "$\\|`'[}^=.#{*+:/~;)(@<,&]>\"", - "+,;<ığÜ&.ç^Ğ(:Ş*>ş=)'Öö/üÇI", - ), - "com.apple.keylayout.Sami-PC" => ( - "}*x\"w[~^/@`]{|<)>W(\\X=Qq&':;", - "Æ(čŊšøŽ&´\"žæØĐ;=:Š)đČ`Áá/ŋÅå", - ), - "com.apple.keylayout.LatinAmerican" => { - ("[^~>`(<\\@{;*&/):]|='}\"", "{&>:<);¿\"[ñ(/'=Ñ}¡*´]¨") - } - "com.apple.keylayout.IrishExtended" => ("#", "£"), - "com.apple.keylayout.Icelandic" => ("[}=:/'){(*&;^|`\"\\>]<~@", "æ´*Ð'ö=Æ)(/ð&Þ<Öþ:´;>\""), - "com.apple.keylayout.German-DIN-2137" => { - ("}~/<^>{`:\\)&=[]@|;#'\"(*", "Ä>ß;&:Ö<Ü#=/*öä\"'ü§´`)(") - } - "com.apple.keylayout.FinnishSami-PC" => { - (")=*\"\\[@{:>';/<|~(]}^`&", "=`(ˆ@ö\"ÖÅ:¨å´;*>)äÄ& { - ("];{`:'*<~=/}\\|&[\"($^)>@", "äåÖ<Ũ(;>`´Ä'*/öˆ)€&=:\"") - } - "com.apple.keylayout.Faroese" => ("}\";/$>^@~`:&[*){|]=(\\<'", "ÐØæ´€:&\"><Æ/å(=Å*ð`)';ø"), - "com.apple.keylayout.Croatian-PC" => { - ("{@~;<=>(&*['|]\":/}^`)\\", "Š\">č;*:)/(šćŽđĆČ'Đ&<=ž") - } - "com.apple.keylayout.Croatian" => ("{@;<~=>(&*['|]\":}^)\\`", "Š\"č;>*:)'(šćŽđĆČĐ&=ž<"), - "com.apple.keylayout.Azeri" => (":{W?./\"[}<]|,>';w", "IÖÜ,ş.ƏöĞÇğ/çŞəıü"), - "com.apple.keylayout.Albanian" => ("\\'~;:|<>`\"@", "ë@>çÇË;:<'\""), - "com.apple.keylayout.SwissFrench" => ( - ":@&'~^)$;\"][\\/#={!|*+`<(>}", - "ü\"/^>&=çè`àé$'*¨ö+£(!<;):ä", - ), - "com.apple.keylayout.Swedish" => ("(]\\\"~$`^{|/>*:;<)&=[}'@", ")ä'^>€<&Ö*´:(Åå;=/`öĨ\""), - "com.apple.keylayout.Swedish-Pro" => { - ("/^*`'{|)$>&<[\\;(~\"}@]:=", "´&(<¨Ö*=€:/;ö'å)>^Ä\"äÅ`") - } - "com.apple.keylayout.Spanish" => ("|!\\<{[:;@`/~].'>}\"^", "\"¡'¿Ññº´!<.>;ç`Ç:¨/"), - "com.apple.keylayout.Spanish-ISO" => ( - "|~`]/:)(<&^>*;#}\"{.\\['@", - "\"><;.º=)¿/&Ç(´·not found¨Ñç'ñ`\"", - ), - "com.apple.keylayout.Portuguese" => 
(")`/'^\"<];>[:{@}(&*=~", "=<'´&`;~º:çªÇ\"^)/(*>"), - "com.apple.keylayout.Italian" => ( - "*7};8:!5%(1&4]^\\6)32>.à32", - ), - "com.apple.keylayout.Italian-Pro" => { - ("/:@[]'\\=){;|#<\"(*^&`}>~", "'é\"òàìù*=çè§£;^)(&/<°:>") - } - "com.apple.keylayout.Irish" => ("#", "£"), - "com.apple.keylayout.German" => ("=`#'}:)/\"^&]*{;|[<(>~@\\", "*<§´ÄÜ=ß`&/ä(Öü'ö;):>\"#"), - "com.apple.keylayout.French" => ( - "*}7;8:!5%(1&4]\\^6)32>.ç32", - ), - "com.apple.keylayout.French-numerical" => ( - "|!52;][>&@\"%'{)<~7.1/^(}*8#0$9`6\\3:4", - "£1(é)$^/72%5ù¨0.>è;&:69*8!3à4ç<§`\"°'", - ), - "com.apple.keylayout.French-PC" => ( - "!&\"_$}/72>8]#:31)*<%4;6\\-{['@(0|5.`9~^", - "17%°4£:èé/_$3§\"&08.5'!-*)¨^ù29àμ(;<ç>6", - ), - "com.apple.keylayout.Finnish" => ("/^*`)'{|$>&<[\\~;(\"}@]:=", "´&(<=¨Ö*€:/;ö'>å)^Ä\"äÅ`"), - "com.apple.keylayout.Danish" => ("=[;'`{}|>]*^(&@~)<\\/$\":", "`æå¨<ÆØ*:ø(&)/\">=;'´€^Å"), - "com.apple.keylayout.Canadian-CSA" => ("\\?']/><[{}|~`\"", "àÉèçé\"'^¨ÇÀÙùÈ"), - "com.apple.keylayout.British" => ("#", "£"), - "com.apple.keylayout.Brazilian-ABNT2" => ("\"|~?`'/^\\", "`^\"Ç'´ç¨~"), - "com.apple.keylayout.Belgian" => ( - "`3/*<\\8>7#&96@);024(|'1\":$[~5.%^}]{!", - "<\":8.`!/è37ç§20)àé'9£ù&%°4^>(;56*$¨1", - ), - "com.apple.keylayout.Austrian" => ("/^*`'{|)>&<[\\;(~\"}@]:=#", "ß&(<´Ö'=:/;ö#ü)>`Ä\"äÜ*§"), - "com.apple.keylayout.Slovak-QWERTY" => ( - "):9;63'\"]^/+@~>`? 
( - "!$`10&:#4^*~{%5')}6/\"[8]97?;<@23>(+", - "14ň+é7\"3č68ŇÚ5ť§0Äž'!úáäíýˇô?2ľš:9%", - ), - "com.apple.keylayout.Polish" => ( - "&)|?,%:;^}]_{!+#(*`/[~<\"$.>'@=\\", - ":\"$Ż.+Łł=)(ćź§]!/_<żó>śę?,ńą%[;", - ), - "com.apple.keylayout.Lithuanian" => ("+#&=!%1*@73^584$26", "ŽĘŲžĄĮąŪČųęŠįūėĖčš"), - "com.apple.keylayout.Hungarian" => ( - "}(*@\"{=/|;>'[`<~\\!$&0#:]^)+", - "Ú)(\"ÁŐóüŰé:áőíÜÍű'!=ö+Éú/ÖÓ", - ), - "com.apple.keylayout.Hungarian-QWERTY" => ( - "=]#>@/&<`0')~(\\!:*;$\"+^{|}[", - "óú+:\"ü=ÜíöáÖÍ)ű'É(é!ÁÓ/ŐŰÚő", - ), - "com.apple.keylayout.Czech-QWERTY" => ( - "9>0[2()\"}@]46%5;#8{*7^~+!3?&'<$/1`:", - "í:éúě90!(2)čž5řů3áÚ8ý6`%1šˇ7§?4'+¨\"", - ), - "com.apple.keylayout.Maltese" => ("[`}{#]~", "ġżĦĠ£ħŻ"), - "com.apple.keylayout.Turkish" => ( - "|}(#>&^-/`$%@]~*,[\"<_.{:'\\)", - "ÜI%\"Ç)/ş.<'(*ı>_öğ-ÖŞçĞ$,ü:", - ), - "com.apple.keylayout.Turkish-Standard" => { - ("|}(#>=&^`@]~*,;[\"<.{:'\\)", "ÜI)^;*'&ö\"ıÖ(.çğŞ:,ĞÇşü=") - } - "com.apple.keylayout.NorwegianSami-PC" => { - ("\"}~<`&>':{@*^|\\)=([]/;", "ˆÆ>; { - (";\\@>&'<]\"|(=}^)`[~:*{", "čž\":'ć;đĆŽ)*Đ&=<š>Č(Š") - } - "com.apple.keylayout.Slovenian" => ("]`^@)&\":'*=<{;}(~>\\|[", "đ<&\"='ĆČć(*;ŠčĐ)>:žŽš"), - "com.apple.keylayout.SwedishSami-PC" => { - ("@=<^|`>){'&\"}]~[/:*\\(;", "\"`;&*<:=Ö¨/ˆÄä>ö´Å(@)å") - } - "com.apple.keylayout.SwissGerman" => ( - "={#:\\}!(+]/<\";$'`*[>&^~@)|", - "¨é*è$à+)!ä';`üç^<(ö:/&>\"=£", - ), - "com.apple.keylayout.Hawaiian" => ("'", "ʻ"), - "com.apple.keylayout.NorthernSami" => ( - ":/[<{X\"wQx\\(;~>W}`*@])'^|=q&", - "Å´ø;ØČŊšÁčđ)åŽ:ŠÆž(\"æ=ŋ&Đ`á/", - ), - "com.apple.keylayout.USInternational-PC" => ("^~", "ˆ˜"), - "com.apple.keylayout.NorwegianExtended" => ("^~", "ˆ˜"), - "com.apple.keylayout.Norwegian" => ("`'~\"\\*|=/@)[:}&><]{(^;", "<¨>^@(*`´\"=øÅÆ/:;æØ)&å"), - "com.apple.keylayout.ABC-QWERTZ" => { - ("\"}~<`>'&#:{@*^|\\)=(]/;[", "`Ä>;<:´/§ÜÖ\"(&'#=*)äßüö") - } - "com.apple.keylayout.ABC-AZERTY" => ( - ">[$61%@7|)&8\":}593(.4^8:ùà", - ), - "com.apple.keylayout.Czech" => ( - 
"(7*#193620?/{)@~!$8+;:%4\">`^]&5}[<'", - "9ý83+íšžěéˇ'Ú02`14á%ů\"5č!:¨6)7ř(ú?§", - ), - "com.apple.keylayout.Brazilian-Pro" => ("^~", "ˆ˜"), - _ => { - return None; - } - }; - debug_assert!(from.chars().count() == to.chars().count()); + let mappings: &[(char, char)] = match layout { + "com.apple.keylayout.ABC-AZERTY" => &[ + ('!', '1'), + ('"', '%'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('.', ';'), + ('/', ':'), + ('0', 'à'), + ('1', '&'), + ('2', 'é'), + ('3', '"'), + ('4', '\''), + ('5', '('), + ('6', '§'), + ('7', 'è'), + ('8', '!'), + ('9', 'ç'), + (':', '°'), + (';', ')'), + ('<', '.'), + ('>', '/'), + ('@', '2'), + ('[', '^'), + ('\'', 'ù'), + ('\\', '`'), + (']', '$'), + ('^', '6'), + ('`', '<'), + ('{', '¨'), + ('|', '£'), + ('}', '*'), + ('~', '>'), + ], + "com.apple.keylayout.ABC-QWERTZ" => &[ + ('"', '`'), + ('#', '§'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', 'ß'), + (':', 'Ü'), + (';', 'ü'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '´'), + ('\\', '#'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '\''), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.Albanian" => &[ + ('"', '\''), + (':', 'Ç'), + (';', 'ç'), + ('<', ';'), + ('>', ':'), + ('@', '"'), + ('\'', '@'), + ('\\', 'ë'), + ('`', '<'), + ('|', 'Ë'), + ('~', '>'), + ], + "com.apple.keylayout.Austrian" => &[ + ('"', '`'), + ('#', '§'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', 'ß'), + (':', 'Ü'), + (';', 'ü'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '´'), + ('\\', '#'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '\''), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.Azeri" => &[ + ('"', 'Ə'), + (',', 'ç'), + ('.', 'ş'), + ('/', '.'), + (':', 'I'), + (';', 'ı'), + ('<', 'Ç'), + ('>', 'Ş'), + ('?', ','), + ('W', 'Ü'), + ('[', 'ö'), + ('\'', 'ə'), + (']', 'ğ'), + 
('w', 'ü'), + ('{', 'Ö'), + ('|', '/'), + ('}', 'Ğ'), + ], + "com.apple.keylayout.Belgian" => &[ + ('!', '1'), + ('"', '%'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('.', ';'), + ('/', ':'), + ('0', 'à'), + ('1', '&'), + ('2', 'é'), + ('3', '"'), + ('4', '\''), + ('5', '('), + ('6', '§'), + ('7', 'è'), + ('8', '!'), + ('9', 'ç'), + (':', '°'), + (';', ')'), + ('<', '.'), + ('>', '/'), + ('@', '2'), + ('[', '^'), + ('\'', 'ù'), + ('\\', '`'), + (']', '$'), + ('^', '6'), + ('`', '<'), + ('{', '¨'), + ('|', '£'), + ('}', '*'), + ('~', '>'), + ], + "com.apple.keylayout.Brazilian-ABNT2" => &[ + ('"', '`'), + ('/', 'ç'), + ('?', 'Ç'), + ('\'', '´'), + ('\\', '~'), + ('^', '¨'), + ('`', '\''), + ('|', '^'), + ('~', '"'), + ], + "com.apple.keylayout.Brazilian-Pro" => &[('^', 'ˆ'), ('~', '˜')], + "com.apple.keylayout.British" => &[('#', '£')], + "com.apple.keylayout.Canadian-CSA" => &[ + ('"', 'È'), + ('/', 'é'), + ('<', '\''), + ('>', '"'), + ('?', 'É'), + ('[', '^'), + ('\'', 'è'), + ('\\', 'à'), + (']', 'ç'), + ('`', 'ù'), + ('{', '¨'), + ('|', 'À'), + ('}', 'Ç'), + ('~', 'Ù'), + ], + "com.apple.keylayout.Croatian" => &[ + ('"', 'Ć'), + ('&', '\''), + ('(', ')'), + (')', '='), + ('*', '('), + (':', 'Č'), + (';', 'č'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'š'), + ('\'', 'ć'), + ('\\', 'ž'), + (']', 'đ'), + ('^', '&'), + ('`', '<'), + ('{', 'Š'), + ('|', 'Ž'), + ('}', 'Đ'), + ('~', '>'), + ], + "com.apple.keylayout.Croatian-PC" => &[ + ('"', 'Ć'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '\''), + (':', 'Č'), + (';', 'č'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'š'), + ('\'', 'ć'), + ('\\', 'ž'), + (']', 'đ'), + ('^', '&'), + ('`', '<'), + ('{', 'Š'), + ('|', 'Ž'), + ('}', 'Đ'), + ('~', '>'), + ], + "com.apple.keylayout.Czech" => &[ + ('!', '1'), + ('"', '!'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', 
'0'), + ('*', '8'), + ('+', '%'), + ('/', '\''), + ('0', 'é'), + ('1', '+'), + ('2', 'ě'), + ('3', 'š'), + ('4', 'č'), + ('5', 'ř'), + ('6', 'ž'), + ('7', 'ý'), + ('8', 'á'), + ('9', 'í'), + (':', '"'), + (';', 'ů'), + ('<', '?'), + ('>', ':'), + ('?', 'ˇ'), + ('@', '2'), + ('[', 'ú'), + ('\'', '§'), + (']', ')'), + ('^', '6'), + ('`', '¨'), + ('{', 'Ú'), + ('}', '('), + ('~', '`'), + ], + "com.apple.keylayout.Czech-QWERTY" => &[ + ('!', '1'), + ('"', '!'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('+', '%'), + ('/', '\''), + ('0', 'é'), + ('1', '+'), + ('2', 'ě'), + ('3', 'š'), + ('4', 'č'), + ('5', 'ř'), + ('6', 'ž'), + ('7', 'ý'), + ('8', 'á'), + ('9', 'í'), + (':', '"'), + (';', 'ů'), + ('<', '?'), + ('>', ':'), + ('?', 'ˇ'), + ('@', '2'), + ('[', 'ú'), + ('\'', '§'), + (']', ')'), + ('^', '6'), + ('`', '¨'), + ('{', 'Ú'), + ('}', '('), + ('~', '`'), + ], + "com.apple.keylayout.Danish" => &[ + ('"', '^'), + ('$', '€'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'æ'), + ('\'', '¨'), + ('\\', '\''), + (']', 'ø'), + ('^', '&'), + ('`', '<'), + ('{', 'Æ'), + ('|', '*'), + ('}', 'Ø'), + ('~', '>'), + ], + "com.apple.keylayout.Faroese" => &[ + ('"', 'Ø'), + ('$', '€'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Æ'), + (';', 'æ'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'å'), + ('\'', 'ø'), + ('\\', '\''), + (']', 'ð'), + ('^', '&'), + ('`', '<'), + ('{', 'Å'), + ('|', '*'), + ('}', 'Ð'), + ('~', '>'), + ], + "com.apple.keylayout.Finnish" => &[ + ('"', '^'), + ('$', '€'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '¨'), + ('\\', '\''), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', 
'*'), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.FinnishExtended" => &[ + ('"', 'ˆ'), + ('$', '€'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '¨'), + ('\\', '\''), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '*'), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.FinnishSami-PC" => &[ + ('"', 'ˆ'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '¨'), + ('\\', '@'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '*'), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.French" => &[ + ('!', '1'), + ('"', '%'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('.', ';'), + ('/', ':'), + ('0', 'à'), + ('1', '&'), + ('2', 'é'), + ('3', '"'), + ('4', '\''), + ('5', '('), + ('6', '§'), + ('7', 'è'), + ('8', '!'), + ('9', 'ç'), + (':', '°'), + (';', ')'), + ('<', '.'), + ('>', '/'), + ('@', '2'), + ('[', '^'), + ('\'', 'ù'), + ('\\', '`'), + (']', '$'), + ('^', '6'), + ('`', '<'), + ('{', '¨'), + ('|', '£'), + ('}', '*'), + ('~', '>'), + ], + "com.apple.keylayout.French-PC" => &[ + ('!', '1'), + ('"', '%'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('-', ')'), + ('.', ';'), + ('/', ':'), + ('0', 'à'), + ('1', '&'), + ('2', 'é'), + ('3', '"'), + ('4', '\''), + ('5', '('), + ('6', '-'), + ('7', 'è'), + ('8', '_'), + ('9', 'ç'), + (':', '§'), + (';', '!'), + ('<', '.'), + ('>', '/'), + ('@', '2'), + ('[', '^'), + ('\'', 'ù'), + ('\\', '*'), + (']', '$'), + ('^', '6'), + ('_', '°'), + ('`', '<'), + ('{', '¨'), + ('|', 'μ'), + ('}', '£'), + ('~', '>'), + ], + "com.apple.keylayout.French-numerical" => &[ + ('!', '1'), + ('"', '%'), + ('#', '3'), + 
('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('.', ';'), + ('/', ':'), + ('0', 'à'), + ('1', '&'), + ('2', 'é'), + ('3', '"'), + ('4', '\''), + ('5', '('), + ('6', '§'), + ('7', 'è'), + ('8', '!'), + ('9', 'ç'), + (':', '°'), + (';', ')'), + ('<', '.'), + ('>', '/'), + ('@', '2'), + ('[', '^'), + ('\'', 'ù'), + ('\\', '`'), + (']', '$'), + ('^', '6'), + ('`', '<'), + ('{', '¨'), + ('|', '£'), + ('}', '*'), + ('~', '>'), + ], + "com.apple.keylayout.German" => &[ + ('"', '`'), + ('#', '§'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', 'ß'), + (':', 'Ü'), + (';', 'ü'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '´'), + ('\\', '#'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '\''), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.German-DIN-2137" => &[ + ('"', '`'), + ('#', '§'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', 'ß'), + (':', 'Ü'), + (';', 'ü'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '´'), + ('\\', '#'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '\''), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.Hawaiian" => &[('\'', 'ʻ')], + "com.apple.keylayout.Hungarian" => &[ + ('!', '\''), + ('"', 'Á'), + ('#', '+'), + ('$', '!'), + ('&', '='), + ('(', ')'), + (')', 'Ö'), + ('*', '('), + ('+', 'Ó'), + ('/', 'ü'), + ('0', 'ö'), + (':', 'É'), + (';', 'é'), + ('<', 'Ü'), + ('=', 'ó'), + ('>', ':'), + ('@', '"'), + ('[', 'ő'), + ('\'', 'á'), + ('\\', 'ű'), + (']', 'ú'), + ('^', '/'), + ('`', 'í'), + ('{', 'Ő'), + ('|', 'Ű'), + ('}', 'Ú'), + ('~', 'Í'), + ], + "com.apple.keylayout.Hungarian-QWERTY" => &[ + ('!', '\''), + ('"', 'Á'), + ('#', '+'), + ('$', '!'), + ('&', '='), + ('(', ')'), + (')', 'Ö'), + ('*', '('), + ('+', 'Ó'), + ('/', 'ü'), + ('0', 'ö'), + (':', 'É'), + (';', 'é'), + ('<', 'Ü'), + ('=', 'ó'), + ('>', ':'), + ('@', '"'), + ('[', 'ő'), + ('\'', 
'á'), + ('\\', 'ű'), + (']', 'ú'), + ('^', '/'), + ('`', 'í'), + ('{', 'Ő'), + ('|', 'Ű'), + ('}', 'Ú'), + ('~', 'Í'), + ], + "com.apple.keylayout.Icelandic" => &[ + ('"', 'Ö'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '\''), + (':', 'Ð'), + (';', 'ð'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'æ'), + ('\'', 'ö'), + ('\\', 'þ'), + (']', '´'), + ('^', '&'), + ('`', '<'), + ('{', 'Æ'), + ('|', 'Þ'), + ('}', '´'), + ('~', '>'), + ], + "com.apple.keylayout.Irish" => &[('#', '£')], + "com.apple.keylayout.IrishExtended" => &[('#', '£')], + "com.apple.keylayout.Italian" => &[ + ('!', '1'), + ('"', '%'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + (',', ';'), + ('.', ':'), + ('/', ','), + ('0', 'é'), + ('1', '&'), + ('2', '"'), + ('3', '\''), + ('4', '('), + ('5', 'ç'), + ('6', 'è'), + ('7', ')'), + ('8', '£'), + ('9', 'à'), + (':', '!'), + (';', 'ò'), + ('<', '.'), + ('>', '/'), + ('@', '2'), + ('[', 'ì'), + ('\'', 'ù'), + ('\\', '§'), + (']', '$'), + ('^', '6'), + ('`', '<'), + ('{', '^'), + ('|', '°'), + ('}', '*'), + ('~', '>'), + ], + "com.apple.keylayout.Italian-Pro" => &[ + ('"', '^'), + ('#', '£'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '\''), + (':', 'é'), + (';', 'è'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'ò'), + ('\'', 'ì'), + ('\\', 'ù'), + (']', 'à'), + ('^', '&'), + ('`', '<'), + ('{', 'ç'), + ('|', '§'), + ('}', '°'), + ('~', '>'), + ], + "com.apple.keylayout.LatinAmerican" => &[ + ('"', '¨'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '\''), + (':', 'Ñ'), + (';', 'ñ'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', '{'), + ('\'', '´'), + ('\\', '¿'), + (']', '}'), + ('^', '&'), + ('`', '<'), + ('{', '['), + ('|', '¡'), + ('}', ']'), + ('~', '>'), + ], + "com.apple.keylayout.Lithuanian" => &[ + ('!', 'Ą'), + ('#', 'Ę'), + ('$', 'Ė'), + ('%', 'Į'), + ('&', 'Ų'), + ('*', 
'Ū'), + ('+', 'Ž'), + ('1', 'ą'), + ('2', 'č'), + ('3', 'ę'), + ('4', 'ė'), + ('5', 'į'), + ('6', 'š'), + ('7', 'ų'), + ('8', 'ū'), + ('=', 'ž'), + ('@', 'Č'), + ('^', 'Š'), + ], + "com.apple.keylayout.Maltese" => &[ + ('#', '£'), + ('[', 'ġ'), + (']', 'ħ'), + ('`', 'ż'), + ('{', 'Ġ'), + ('}', 'Ħ'), + ('~', 'Ż'), + ], + "com.apple.keylayout.NorthernSami" => &[ + ('"', 'Ŋ'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('Q', 'Á'), + ('W', 'Š'), + ('X', 'Č'), + ('[', 'ø'), + ('\'', 'ŋ'), + ('\\', 'đ'), + (']', 'æ'), + ('^', '&'), + ('`', 'ž'), + ('q', 'á'), + ('w', 'š'), + ('x', 'č'), + ('{', 'Ø'), + ('|', 'Đ'), + ('}', 'Æ'), + ('~', 'Ž'), + ], + "com.apple.keylayout.Norwegian" => &[ + ('"', '^'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ø'), + ('\'', '¨'), + ('\\', '@'), + (']', 'æ'), + ('^', '&'), + ('`', '<'), + ('{', 'Ø'), + ('|', '*'), + ('}', 'Æ'), + ('~', '>'), + ], + "com.apple.keylayout.NorwegianExtended" => &[ + ('"', 'ˆ'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ø'), + ('\\', '@'), + (']', 'æ'), + ('`', '<'), + ('}', 'Æ'), + ('~', '>'), + ], + "com.apple.keylayout.NorwegianSami-PC" => &[ + ('"', 'ˆ'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ø'), + ('\'', '¨'), + ('\\', '@'), + (']', 'æ'), + ('^', '&'), + ('`', '<'), + ('{', 'Ø'), + ('|', '*'), + ('}', 'Æ'), + ('~', '>'), + ], + "com.apple.keylayout.Polish" => &[ + ('!', '§'), + ('"', 'ę'), + ('#', '!'), + ('$', '?'), + ('%', '+'), + ('&', ':'), + ('(', '/'), + (')', '"'), + ('*', '_'), + ('+', ']'), + (',', '.'), + ('.', ','), + ('/', 
'ż'), + (':', 'Ł'), + (';', 'ł'), + ('<', 'ś'), + ('=', '['), + ('>', 'ń'), + ('?', 'Ż'), + ('@', '%'), + ('[', 'ó'), + ('\'', 'ą'), + ('\\', ';'), + (']', '('), + ('^', '='), + ('_', 'ć'), + ('`', '<'), + ('{', 'ź'), + ('|', '$'), + ('}', ')'), + ('~', '>'), + ], + "com.apple.keylayout.Portuguese" => &[ + ('"', '`'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '\''), + (':', 'ª'), + (';', 'º'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'ç'), + ('\'', '´'), + (']', '~'), + ('^', '&'), + ('`', '<'), + ('{', 'Ç'), + ('}', '^'), + ('~', '>'), + ], + "com.apple.keylayout.Sami-PC" => &[ + ('"', 'Ŋ'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('Q', 'Á'), + ('W', 'Š'), + ('X', 'Č'), + ('[', 'ø'), + ('\'', 'ŋ'), + ('\\', 'đ'), + (']', 'æ'), + ('^', '&'), + ('`', 'ž'), + ('q', 'á'), + ('w', 'š'), + ('x', 'č'), + ('{', 'Ø'), + ('|', 'Đ'), + ('}', 'Æ'), + ('~', 'Ž'), + ], + "com.apple.keylayout.Serbian-Latin" => &[ + ('"', 'Ć'), + ('&', '\''), + ('(', ')'), + (')', '='), + ('*', '('), + (':', 'Č'), + (';', 'č'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'š'), + ('\'', 'ć'), + ('\\', 'ž'), + (']', 'đ'), + ('^', '&'), + ('`', '<'), + ('{', 'Š'), + ('|', 'Ž'), + ('}', 'Đ'), + ('~', '>'), + ], + "com.apple.keylayout.Slovak" => &[ + ('!', '1'), + ('"', '!'), + ('#', '3'), + ('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('+', '%'), + ('/', '\''), + ('0', 'é'), + ('1', '+'), + ('2', 'ľ'), + ('3', 'š'), + ('4', 'č'), + ('5', 'ť'), + ('6', 'ž'), + ('7', 'ý'), + ('8', 'á'), + ('9', 'í'), + (':', '"'), + (';', 'ô'), + ('<', '?'), + ('>', ':'), + ('?', 'ˇ'), + ('@', '2'), + ('[', 'ú'), + ('\'', '§'), + (']', 'ä'), + ('^', '6'), + ('`', 'ň'), + ('{', 'Ú'), + ('}', 'Ä'), + ('~', 'Ň'), + ], + "com.apple.keylayout.Slovak-QWERTY" => &[ + ('!', '1'), + ('"', '!'), + ('#', '3'), + 
('$', '4'), + ('%', '5'), + ('&', '7'), + ('(', '9'), + (')', '0'), + ('*', '8'), + ('+', '%'), + ('/', '\''), + ('0', 'é'), + ('1', '+'), + ('2', 'ľ'), + ('3', 'š'), + ('4', 'č'), + ('5', 'ť'), + ('6', 'ž'), + ('7', 'ý'), + ('8', 'á'), + ('9', 'í'), + (':', '"'), + (';', 'ô'), + ('<', '?'), + ('>', ':'), + ('?', 'ˇ'), + ('@', '2'), + ('[', 'ú'), + ('\'', '§'), + (']', 'ä'), + ('^', '6'), + ('`', 'ň'), + ('{', 'Ú'), + ('}', 'Ä'), + ('~', 'Ň'), + ], + "com.apple.keylayout.Slovenian" => &[ + ('"', 'Ć'), + ('&', '\''), + ('(', ')'), + (')', '='), + ('*', '('), + (':', 'Č'), + (';', 'č'), + ('<', ';'), + ('=', '*'), + ('>', ':'), + ('@', '"'), + ('[', 'š'), + ('\'', 'ć'), + ('\\', 'ž'), + (']', 'đ'), + ('^', '&'), + ('`', '<'), + ('{', 'Š'), + ('|', 'Ž'), + ('}', 'Đ'), + ('~', '>'), + ], + "com.apple.keylayout.Spanish" => &[ + ('!', '¡'), + ('"', '¨'), + ('.', 'ç'), + ('/', '.'), + (':', 'º'), + (';', '´'), + ('<', '¿'), + ('>', 'Ç'), + ('@', '!'), + ('[', 'ñ'), + ('\'', '`'), + ('\\', '\''), + (']', ';'), + ('^', '/'), + ('`', '<'), + ('{', 'Ñ'), + ('|', '"'), + ('}', ':'), + ('~', '>'), + ], + "com.apple.keylayout.Spanish-ISO" => &[ + ('"', '¨'), + ('#', '·'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('.', 'ç'), + ('/', '.'), + (':', 'º'), + (';', '´'), + ('<', '¿'), + ('>', 'Ç'), + ('@', '"'), + ('[', 'ñ'), + ('\'', '`'), + ('\\', '\''), + (']', ';'), + ('^', '&'), + ('`', '<'), + ('{', 'Ñ'), + ('|', '"'), + ('}', '`'), + ('~', '>'), + ], + "com.apple.keylayout.Swedish" => &[ + ('"', '^'), + ('$', '€'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '¨'), + ('\\', '\''), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '*'), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.Swedish-Pro" => &[ + ('"', '^'), + ('$', '€'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 
'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '¨'), + ('\\', '\''), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '*'), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.SwedishSami-PC" => &[ + ('"', 'ˆ'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('/', '´'), + (':', 'Å'), + (';', 'å'), + ('<', ';'), + ('=', '`'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '¨'), + ('\\', '@'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'Ö'), + ('|', '*'), + ('}', 'Ä'), + ('~', '>'), + ], + "com.apple.keylayout.SwissFrench" => &[ + ('!', '+'), + ('"', '`'), + ('#', '*'), + ('$', 'ç'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('+', '!'), + ('/', '\''), + (':', 'ü'), + (';', 'è'), + ('<', ';'), + ('=', '¨'), + ('>', ':'), + ('@', '"'), + ('[', 'é'), + ('\'', '^'), + ('\\', '$'), + (']', 'à'), + ('^', '&'), + ('`', '<'), + ('{', 'ö'), + ('|', '£'), + ('}', 'ä'), + ('~', '>'), + ], + "com.apple.keylayout.SwissGerman" => &[ + ('!', '+'), + ('"', '`'), + ('#', '*'), + ('$', 'ç'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('+', '!'), + ('/', '\''), + (':', 'è'), + (';', 'ü'), + ('<', ';'), + ('=', '¨'), + ('>', ':'), + ('@', '"'), + ('[', 'ö'), + ('\'', '^'), + ('\\', '$'), + (']', 'ä'), + ('^', '&'), + ('`', '<'), + ('{', 'é'), + ('|', '£'), + ('}', 'à'), + ('~', '>'), + ], + "com.apple.keylayout.Turkish" => &[ + ('"', '-'), + ('#', '"'), + ('$', '\''), + ('%', '('), + ('&', ')'), + ('(', '%'), + (')', ':'), + ('*', '_'), + (',', 'ö'), + ('-', 'ş'), + ('.', 'ç'), + ('/', '.'), + (':', '$'), + ('<', 'Ö'), + ('>', 'Ç'), + ('@', '*'), + ('[', 'ğ'), + ('\'', ','), + ('\\', 'ü'), + (']', 'ı'), + ('^', '/'), + ('_', 'Ş'), + ('`', '<'), + ('{', 'Ğ'), + ('|', 'Ü'), + ('}', 'I'), + ('~', '>'), + ], + "com.apple.keylayout.Turkish-QWERTY-PC" => &[ + ('"', 'I'), + ('#', '^'), + ('$', '+'), + ('&', '/'), + ('(', ')'), + (')', '='), + ('*', '('), + ('+', ':'), + 
(',', 'ö'), + ('.', 'ç'), + ('/', '*'), + (':', 'Ş'), + (';', 'ş'), + ('<', 'Ö'), + ('=', '.'), + ('>', 'Ç'), + ('@', '\''), + ('[', 'ğ'), + ('\'', 'ı'), + ('\\', ','), + (']', 'ü'), + ('^', '&'), + ('`', '<'), + ('{', 'Ğ'), + ('|', ';'), + ('}', 'Ü'), + ('~', '>'), + ], + "com.apple.keylayout.Turkish-Standard" => &[ + ('"', 'Ş'), + ('#', '^'), + ('&', '\''), + ('(', ')'), + (')', '='), + ('*', '('), + (',', '.'), + ('.', ','), + (':', 'Ç'), + (';', 'ç'), + ('<', ':'), + ('=', '*'), + ('>', ';'), + ('@', '"'), + ('[', 'ğ'), + ('\'', 'ş'), + ('\\', 'ü'), + (']', 'ı'), + ('^', '&'), + ('`', 'ö'), + ('{', 'Ğ'), + ('|', 'Ü'), + ('}', 'I'), + ('~', 'Ö'), + ], + "com.apple.keylayout.Turkmen" => &[ + ('C', 'Ç'), + ('Q', 'Ä'), + ('V', 'Ý'), + ('X', 'Ü'), + ('[', 'ň'), + ('\\', 'ş'), + (']', 'ö'), + ('^', '№'), + ('`', 'ž'), + ('c', 'ç'), + ('q', 'ä'), + ('v', 'ý'), + ('x', 'ü'), + ('{', 'Ň'), + ('|', 'Ş'), + ('}', 'Ö'), + ('~', 'Ž'), + ], + "com.apple.keylayout.USInternational-PC" => &[('^', 'ˆ'), ('~', '˜')], + "com.apple.keylayout.Welsh" => &[('#', '£')], - Some(HashMap::from_iter(from.chars().zip(to.chars()))) + _ => return None, + }; + + Some(HashMap::from_iter(mappings.into_iter().cloned())) } #[cfg(not(target_os = "macos"))] diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index b34806405c..c2b4625ffc 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -5,7 +5,7 @@ use gpui::{Action, AppContext, KeyBinding, SharedString}; use schemars::{ gen::{SchemaGenerator, SchemaSettings}, schema::{InstanceType, Schema, SchemaObject, SingleOrVec, SubschemaValidation}, - JsonSchema, + JsonSchema, Map, }; use serde::Deserialize; use serde_json::Value; @@ -20,7 +20,7 @@ pub struct KeymapBlock { #[serde(default)] context: Option, #[serde(default)] - use_layout_keys: Option, + use_key_equivalents: Option, bindings: BTreeMap, } @@ -80,7 +80,7 @@ impl KeymapFile { for KeymapBlock { context, - use_layout_keys, + 
use_key_equivalents, bindings, } in self.0 { @@ -124,10 +124,10 @@ impl KeymapFile { &keystroke, action, context.as_deref(), - if use_layout_keys.unwrap_or_default() { - None - } else { + if use_key_equivalents.unwrap_or_default() { key_equivalents.as_ref() + } else { + None }, ) }) @@ -139,34 +139,51 @@ impl KeymapFile { Ok(()) } - pub fn generate_json_schema(action_names: &[SharedString]) -> serde_json::Value { + pub fn generate_json_schema( + action_names: &[SharedString], + deprecations: &[(SharedString, SharedString)], + ) -> serde_json::Value { let mut root_schema = SchemaSettings::draft07() .with(|settings| settings.option_add_null_type = false) .into_generator() .into_root_schema_for::(); + let mut alternatives = vec![ + Schema::Object(SchemaObject { + instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::String))), + enum_values: Some( + action_names + .iter() + .map(|name| Value::String(name.to_string())) + .collect(), + ), + ..Default::default() + }), + Schema::Object(SchemaObject { + instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Array))), + ..Default::default() + }), + Schema::Object(SchemaObject { + instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Null))), + ..Default::default() + }), + ]; + for (old, new) in deprecations { + alternatives.push(Schema::Object(SchemaObject { + instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::String))), + const_value: Some(Value::String(old.to_string())), + extensions: Map::from_iter([( + // deprecationMessage is not part of the JSON Schema spec, + // but json-language-server recognizes it. 
+ "deprecationMessage".to_owned(), + format!("Deprecated, use {new}").into(), + )]), + ..Default::default() + })); + } let action_schema = Schema::Object(SchemaObject { subschemas: Some(Box::new(SubschemaValidation { - one_of: Some(vec![ - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::String))), - enum_values: Some( - action_names - .iter() - .map(|name| Value::String(name.to_string())) - .collect(), - ), - ..Default::default() - }), - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Array))), - ..Default::default() - }), - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Null))), - ..Default::default() - }), - ]), + one_of: Some(alternatives), ..Default::default() })), ..Default::default() diff --git a/crates/settings_ui/src/appearance_settings_controls.rs b/crates/settings_ui/src/appearance_settings_controls.rs index 39bfda0816..c9360de840 100644 --- a/crates/settings_ui/src/appearance_settings_controls.rs +++ b/crates/settings_ui/src/appearance_settings_controls.rs @@ -145,7 +145,7 @@ impl RenderOnce for ThemeModeControl { ToggleButton::new("light", "Light") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(value == ThemeMode::Light) + .toggle_state(value == ThemeMode::Light) .on_click(|_, cx| Self::write(ThemeMode::Light, cx)) .first(), ) @@ -153,7 +153,7 @@ impl RenderOnce for ThemeModeControl { ToggleButton::new("system", "System") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(value == ThemeMode::System) + .toggle_state(value == ThemeMode::System) .on_click(|_, cx| Self::write(ThemeMode::System, cx)) .middle(), ) @@ -161,7 +161,7 @@ impl RenderOnce for ThemeModeControl { ToggleButton::new("dark", "Dark") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(value == ThemeMode::Dark) + .toggle_state(value == ThemeMode::Dark) .on_click(|_, cx| Self::write(ThemeMode::Dark, cx)) 
.last(), ) @@ -375,8 +375,8 @@ impl RenderOnce for UiFontLigaturesControl { |selection, cx| { Self::write( match selection { - Selection::Selected => true, - Selection::Unselected | Selection::Indeterminate => false, + ToggleState::Selected => true, + ToggleState::Unselected | ToggleState::Indeterminate => false, }, cx, ); diff --git a/crates/snippet/src/snippet.rs b/crates/snippet/src/snippet.rs index 41529939a1..3eeaff285e 100644 --- a/crates/snippet/src/snippet.rs +++ b/crates/snippet/src/snippet.rs @@ -8,7 +8,11 @@ pub struct Snippet { pub tabstops: Vec, } -type TabStop = SmallVec<[Range; 2]>; +#[derive(Clone, Debug, Default, PartialEq)] +pub struct TabStop { + pub ranges: SmallVec<[Range; 2]>, + pub choices: Option>, +} impl Snippet { pub fn parse(source: &str) -> Result { @@ -24,7 +28,11 @@ impl Snippet { if let Some(final_tabstop) = final_tabstop { tabstops.push(final_tabstop); } else { - let end_tabstop = [len..len].into_iter().collect(); + let end_tabstop = TabStop { + ranges: [len..len].into_iter().collect(), + choices: None, + }; + if !tabstops.last().map_or(false, |t| *t == end_tabstop) { tabstops.push(end_tabstop); } @@ -88,11 +96,17 @@ fn parse_tabstop<'a>( ) -> Result<&'a str> { let tabstop_start = text.len(); let tabstop_index; + let mut choices = None; + if source.starts_with('{') { let (index, rest) = parse_int(&source[1..])?; tabstop_index = index; source = rest; + if source.starts_with("|") { + (source, choices) = parse_choices(&source[1..], text)?; + } + if source.starts_with(':') { source = parse_snippet(&source[1..], true, text, tabstops)?; } @@ -110,7 +124,11 @@ fn parse_tabstop<'a>( tabstops .entry(tabstop_index) - .or_default() + .or_insert_with(|| TabStop { + ranges: Default::default(), + choices, + }) + .ranges .push(tabstop_start as isize..text.len() as isize); Ok(source) } @@ -126,6 +144,61 @@ fn parse_int(source: &str) -> Result<(usize, &str)> { Ok((prefix.parse()?, suffix)) } +fn parse_choices<'a>( + mut source: &'a str, + text: &mut 
String, +) -> Result<(&'a str, Option>)> { + let mut found_default_choice = false; + let mut current_choice = String::new(); + let mut choices = Vec::new(); + + loop { + match source.chars().next() { + None => return Ok(("", Some(choices))), + Some('\\') => { + source = &source[1..]; + + if let Some(c) = source.chars().next() { + if !found_default_choice { + current_choice.push(c); + text.push(c); + } + source = &source[c.len_utf8()..]; + } + } + Some(',') => { + found_default_choice = true; + source = &source[1..]; + choices.push(current_choice); + current_choice = String::new(); + } + Some('|') => { + source = &source[1..]; + choices.push(current_choice); + return Ok((source, Some(choices))); + } + Some(_) => { + let chunk_end = source.find([',', '|', '\\']); + + if chunk_end.is_none() { + return Err(anyhow!( + "Placeholder choice doesn't contain closing pipe-character '|'" + )); + } + + let (chunk, rest) = source.split_at(chunk_end.unwrap()); + + if !found_default_choice { + text.push_str(chunk); + } + + current_choice.push_str(chunk); + source = rest; + } + } + } +} + #[cfg(test)] mod tests { use super::*; @@ -142,11 +215,13 @@ mod tests { let snippet = Snippet::parse("one$1two").unwrap(); assert_eq!(snippet.text, "onetwo"); assert_eq!(tabstops(&snippet), &[vec![3..3], vec![6..6]]); + assert_eq!(tabstop_choices(&snippet), &[&None, &None]); // Multi-digit numbers let snippet = Snippet::parse("one$123-$99-two").unwrap(); assert_eq!(snippet.text, "one--two"); assert_eq!(tabstops(&snippet), &[vec![4..4], vec![3..3], vec![8..8]]); + assert_eq!(tabstop_choices(&snippet), &[&None, &None, &None]); } #[test] @@ -157,6 +232,7 @@ mod tests { // an additional tabstop at the end. assert_eq!(snippet.text, r#"foo."#); assert_eq!(tabstops(&snippet), &[vec![4..4]]); + assert_eq!(tabstop_choices(&snippet), &[&None]); } #[test] @@ -167,6 +243,7 @@ mod tests { // don't insert an additional tabstop at the end. assert_eq!(snippet.text, r#"
"#); assert_eq!(tabstops(&snippet), &[vec![12..12], vec![14..14]]); + assert_eq!(tabstop_choices(&snippet), &[&None, &None]); } #[test] @@ -177,6 +254,30 @@ mod tests { tabstops(&snippet), &[vec![3..6], vec![11..15], vec![15..15]] ); + assert_eq!(tabstop_choices(&snippet), &[&None, &None, &None]); + } + + #[test] + fn test_snippet_with_choice_placeholders() { + let snippet = Snippet::parse("type ${1|i32, u32|} = $2") + .expect("Should be able to unpack choice placeholders"); + + assert_eq!(snippet.text, "type i32 = "); + assert_eq!(tabstops(&snippet), &[vec![5..8], vec![11..11],]); + assert_eq!( + tabstop_choices(&snippet), + &[&Some(vec!["i32".to_string(), " u32".to_string()]), &None] + ); + + let snippet = Snippet::parse(r"${1|\$\{1\|one\,two\,tree\|\}|}") + .expect("Should be able to parse choice with escape characters"); + + assert_eq!(snippet.text, "${1|one,two,tree|}"); + assert_eq!(tabstops(&snippet), &[vec![0..18], vec![18..18]]); + assert_eq!( + tabstop_choices(&snippet), + &[&Some(vec!["${1|one,two,tree|}".to_string(),]), &None] + ); } #[test] @@ -196,6 +297,10 @@ mod tests { vec![40..40], ] ); + assert_eq!( + tabstop_choices(&snippet), + &[&None, &None, &None, &None, &None] + ); } #[test] @@ -203,10 +308,12 @@ mod tests { let snippet = Snippet::parse("\"\\$schema\": $1").unwrap(); assert_eq!(snippet.text, "\"$schema\": "); assert_eq!(tabstops(&snippet), &[vec![11..11]]); + assert_eq!(tabstop_choices(&snippet), &[&None]); let snippet = Snippet::parse("{a\\}").unwrap(); assert_eq!(snippet.text, "{a}"); assert_eq!(tabstops(&snippet), &[vec![3..3]]); + assert_eq!(tabstop_choices(&snippet), &[&None]); // backslash not functioning as an escape let snippet = Snippet::parse("a\\b").unwrap(); @@ -221,6 +328,10 @@ mod tests { } fn tabstops(snippet: &Snippet) -> Vec>> { - snippet.tabstops.iter().map(|t| t.to_vec()).collect() + snippet.tabstops.iter().map(|t| t.ranges.to_vec()).collect() + } + + fn tabstop_choices(snippet: &Snippet) -> Vec<&Option>> { + 
snippet.tabstops.iter().map(|t| &t.choices).collect() } } diff --git a/crates/snippet_provider/Cargo.toml b/crates/snippet_provider/Cargo.toml index 95ab19ebb6..aa4e1a5f84 100644 --- a/crates/snippet_provider/Cargo.toml +++ b/crates/snippet_provider/Cargo.toml @@ -11,6 +11,7 @@ workspace = true [dependencies] anyhow.workspace = true collections.workspace = true +extension.workspace = true fs.workspace = true futures.workspace = true gpui.workspace = true diff --git a/crates/snippet_provider/src/extension_snippet.rs b/crates/snippet_provider/src/extension_snippet.rs new file mode 100644 index 0000000000..41a7c886e1 --- /dev/null +++ b/crates/snippet_provider/src/extension_snippet.rs @@ -0,0 +1,26 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::Result; +use extension::{ExtensionHostProxy, ExtensionSnippetProxy}; +use gpui::AppContext; + +use crate::SnippetRegistry; + +pub fn init(cx: &mut AppContext) { + let proxy = ExtensionHostProxy::default_global(cx); + proxy.register_snippet_proxy(SnippetRegistryProxy { + snippet_registry: SnippetRegistry::global(cx), + }); +} + +struct SnippetRegistryProxy { + snippet_registry: Arc, +} + +impl ExtensionSnippetProxy for SnippetRegistryProxy { + fn register_snippet(&self, path: &PathBuf, snippet_contents: &str) -> Result<()> { + self.snippet_registry + .register_snippets(path, snippet_contents) + } +} diff --git a/crates/snippet_provider/src/lib.rs b/crates/snippet_provider/src/lib.rs index 17d60d25a0..34aa1ebefc 100644 --- a/crates/snippet_provider/src/lib.rs +++ b/crates/snippet_provider/src/lib.rs @@ -1,3 +1,4 @@ +mod extension_snippet; mod format; mod registry; @@ -18,6 +19,7 @@ use util::ResultExt; pub fn init(cx: &mut AppContext) { SnippetRegistry::init_global(cx); + extension_snippet::init(cx); } // Is `None` if the snippet file is global. 
diff --git a/crates/snippets_ui/src/snippets_ui.rs b/crates/snippets_ui/src/snippets_ui.rs index c8ab6febda..237261e999 100644 --- a/crates/snippets_ui/src/snippets_ui.rs +++ b/crates/snippets_ui/src/snippets_ui.rs @@ -96,7 +96,7 @@ impl ScopeSelectorDelegate { let candidates = candidates .chain(languages) .enumerate() - .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name)) + .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, &name)) .collect::>(); Self { @@ -219,7 +219,7 @@ impl PickerDelegate for ScopeSelectorDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child(HighlightedLabel::new(label, mat.positions.clone())), ) } diff --git a/crates/sqlez/Cargo.toml b/crates/sqlez/Cargo.toml index 43626d7747..4204a45d80 100644 --- a/crates/sqlez/Cargo.toml +++ b/crates/sqlez/Cargo.toml @@ -13,7 +13,7 @@ anyhow.workspace = true collections.workspace = true futures.workspace = true indoc.workspace = true -libsqlite3-sys = { version = "0.28", features = ["bundled"] } +libsqlite3-sys.workspace = true parking_lot.workspace = true smol.workspace = true sqlformat.workspace = true diff --git a/crates/story/Cargo.toml b/crates/story/Cargo.toml index be55f587d8..05c1f85659 100644 --- a/crates/story/Cargo.toml +++ b/crates/story/Cargo.toml @@ -13,5 +13,5 @@ workspace = true [dependencies] gpui.workspace = true -itertools = { package = "itertools", version = "0.13" } +itertools = { package = "itertools", version = "0.14" } smallvec.workspace = true diff --git a/crates/storybook/src/stories/focus.rs b/crates/storybook/src/stories/focus.rs index 4240aa6fd1..9eb715dbd9 100644 --- a/crates/storybook/src/stories/focus.rs +++ b/crates/storybook/src/stories/focus.rs @@ -56,7 +56,7 @@ impl FocusStory { } impl Render for FocusStory { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let theme 
= cx.theme(); let color_1 = theme.status().created; let color_2 = theme.status().modified; diff --git a/crates/storybook/src/stories/picker.rs b/crates/storybook/src/stories/picker.rs index 5336b63e49..19c115a6b2 100644 --- a/crates/storybook/src/stories/picker.rs +++ b/crates/storybook/src/stories/picker.rs @@ -22,11 +22,7 @@ impl Delegate { .iter() .copied() .enumerate() - .map(|(id, string)| StringMatchCandidate { - id, - char_bag: string.into(), - string: string.into(), - }) + .map(|(id, string)| StringMatchCandidate::new(id, string)) .collect(), matches: vec![], selected_ix: 0, @@ -49,7 +45,7 @@ impl PickerDelegate for Delegate { &self, ix: usize, selected: bool, - _cx: &mut gpui::ViewContext>, + _cx: &mut ViewContext>, ) -> Option { let candidate_ix = self.matches.get(ix)?; // TASK: Make StringMatchCandidate::string a SharedString @@ -59,7 +55,7 @@ impl PickerDelegate for Delegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child(Label::new(candidate)), ) } @@ -68,12 +64,12 @@ impl PickerDelegate for Delegate { self.selected_ix } - fn set_selected_index(&mut self, ix: usize, cx: &mut gpui::ViewContext>) { + fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext>) { self.selected_ix = ix; cx.notify(); } - fn confirm(&mut self, secondary: bool, _cx: &mut gpui::ViewContext>) { + fn confirm(&mut self, secondary: bool, _cx: &mut ViewContext>) { let candidate_ix = self.matches[self.selected_ix]; let candidate = self.candidates[candidate_ix].string.clone(); @@ -84,15 +80,11 @@ impl PickerDelegate for Delegate { } } - fn dismissed(&mut self, cx: &mut gpui::ViewContext>) { + fn dismissed(&mut self, cx: &mut ViewContext>) { cx.quit(); } - fn update_matches( - &mut self, - query: String, - cx: &mut gpui::ViewContext>, - ) -> Task<()> { + fn update_matches(&mut self, query: String, cx: &mut ViewContext>) -> Task<()> { let candidates = self.candidates.clone(); self.matches = cx 
.background_executor() @@ -198,7 +190,7 @@ impl PickerStory { } impl Render for PickerStory { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { div() .bg(cx.theme().styles.colors.background) .size_full() diff --git a/crates/storybook/src/stories/scroll.rs b/crates/storybook/src/stories/scroll.rs index 096afaccf6..ace2b4811f 100644 --- a/crates/storybook/src/stories/scroll.rs +++ b/crates/storybook/src/stories/scroll.rs @@ -11,7 +11,7 @@ impl ScrollStory { } impl Render for ScrollStory { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let theme = cx.theme(); let color_1 = theme.status().created; let color_2 = theme.status().modified; diff --git a/crates/storybook/src/stories/text.rs b/crates/storybook/src/stories/text.rs index 6c37b88ee6..8aa0ea701b 100644 --- a/crates/storybook/src/stories/text.rs +++ b/crates/storybook/src/stories/text.rs @@ -1,6 +1,6 @@ use gpui::{ div, green, red, HighlightStyle, InteractiveText, IntoElement, ParentElement, Render, Styled, - StyledText, View, VisualContext, WindowContext, + StyledText, View, ViewContext, VisualContext, WindowContext, }; use indoc::indoc; use story::*; @@ -14,7 +14,7 @@ impl TextStory { } impl Render for TextStory { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { Story::container() .child(Story::title("Text")) .children(vec![ diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 773e7db88b..3e33d8b43e 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -9,6 +9,15 @@ struct StackEntry<'a, T: Item, D> { position: D, } +impl<'a, T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for StackEntry<'a, T, D> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + 
f.debug_struct("StackEntry") + .field("index", &self.index) + .field("position", &self.position) + .finish() + } +} + #[derive(Clone)] pub struct Cursor<'a, T: Item, D> { tree: &'a SumTree, @@ -18,6 +27,21 @@ pub struct Cursor<'a, T: Item, D> { at_end: bool, } +impl<'a, T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'a, T, D> +where + T::Summary: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Cursor") + .field("tree", &self.tree) + .field("stack", &self.stack) + .field("position", &self.position) + .field("did_seek", &self.did_seek) + .field("at_end", &self.at_end) + .finish() + } +} + pub struct Iter<'a, T: Item> { tree: &'a SumTree, stack: ArrayVec, 16>, @@ -60,6 +84,7 @@ where } } + /// Item is None, when the list is empty, or this cursor is at the end of the list. #[track_caller] pub fn item(&self) -> Option<&'a T> { self.assert_did_seek(); diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index fbfe3b06f3..fa37c67599 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -42,6 +42,21 @@ pub trait Summary: Clone { fn add_summary(&mut self, summary: &Self, cx: &Self::Context); } +/// This type exists because we can't implement Summary for () without causing +/// type resolution errors +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct Unit; + +impl Summary for Unit { + type Context = (); + + fn zero(_: &()) -> Self { + Unit + } + + fn add_summary(&mut self, _: &Self, _: &()) {} +} + /// Each [`Summary`] type can have more than one [`Dimension`] type that it measures. 
/// /// You can use dimensions to seek to a specific location in the [`SumTree`] @@ -761,6 +776,55 @@ impl SumTree { None } } + + #[inline] + pub fn contains(&self, key: &T::Key, cx: &::Context) -> bool { + self.get(key, cx).is_some() + } + + pub fn update( + &mut self, + key: &T::Key, + cx: &::Context, + f: F, + ) -> Option + where + F: FnOnce(&mut T) -> R, + { + let mut cursor = self.cursor::(cx); + let mut new_tree = cursor.slice(key, Bias::Left, cx); + let mut result = None; + if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal { + let mut updated = cursor.item().unwrap().clone(); + result = Some(f(&mut updated)); + new_tree.push(updated, cx); + cursor.next(cx); + } + new_tree.append(cursor.suffix(cx), cx); + drop(cursor); + *self = new_tree; + result + } + + pub fn retain bool>( + &mut self, + cx: &::Context, + mut predicate: F, + ) { + let mut new_map = SumTree::new(cx); + + let mut cursor = self.cursor::(cx); + cursor.next(cx); + while let Some(item) = cursor.item() { + if predicate(&item) { + new_map.push(item.clone(), cx); + } + cursor.next(cx); + } + drop(cursor); + + *self = new_map; + } } impl Default for SumTree diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml index e04d0ef51b..5af03b1b1b 100644 --- a/crates/supermaven/Cargo.toml +++ b/crates/supermaven/Cargo.toml @@ -16,25 +16,22 @@ doctest = false anyhow.workspace = true client.workspace = true collections.workspace = true -editor.workspace = true -gpui.workspace = true futures.workspace = true +gpui.workspace = true +inline_completion.workspace = true language.workspace = true log.workspace = true postage.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -supermaven_api.workspace = true smol.workspace = true +supermaven_api.workspace = true text.workspace = true ui.workspace = true unicode-segmentation.workspace = true util.workspace = true -[target.'cfg(target_os = "windows")'.dependencies] -windows.workspace = true - 
[dev-dependencies] editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true diff --git a/crates/supermaven/src/supermaven.rs b/crates/supermaven/src/supermaven.rs index 152a41c3be..c39bef557e 100644 --- a/crates/supermaven/src/supermaven.rs +++ b/crates/supermaven/src/supermaven.rs @@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize}; use settings::SettingsStore; use smol::{ io::AsyncWriteExt, - process::{Child, ChildStdin, ChildStdout, Command}, + process::{Child, ChildStdin, ChildStdout}, }; use std::{path::PathBuf, process::Stdio, sync::Arc}; use ui::prelude::*; @@ -269,21 +269,14 @@ impl SupermavenAgent { client: Arc, cx: &mut ModelContext, ) -> Result { - let mut process = Command::new(&binary_path); - process + let mut process = util::command::new_smol_command(&binary_path) .arg("stdio") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .kill_on_drop(true); - - #[cfg(target_os = "windows")] - { - use smol::process::windows::CommandExt; - process.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - - let mut process = process.spawn().context("failed to start the binary")?; + .kill_on_drop(true) + .spawn() + .context("failed to start the binary")?; let stdin = process .stdin diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index b9185c9762..e9c2bfb37b 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -1,14 +1,12 @@ use crate::{Supermaven, SupermavenCompletionStateId}; use anyhow::Result; -use client::telemetry::Telemetry; -use editor::{CompletionProposal, Direction, InlayProposal, InlineCompletionProvider}; use futures::StreamExt as _; use gpui::{AppContext, EntityId, Model, ModelContext, Task}; +use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; use 
language::{language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot}; use std::{ ops::{AddAssign, Range}, path::Path, - sync::Arc, time::Duration, }; use text::{ToOffset, ToPoint}; @@ -21,8 +19,7 @@ pub struct SupermavenCompletionProvider { buffer_id: Option, completion_id: Option, file_extension: Option, - pending_refresh: Task>, - telemetry: Option>, + pending_refresh: Option>>, } impl SupermavenCompletionProvider { @@ -32,32 +29,26 @@ impl SupermavenCompletionProvider { buffer_id: None, completion_id: None, file_extension: None, - pending_refresh: Task::ready(Ok(())), - telemetry: None, + pending_refresh: None, } } - - pub fn with_telemetry(mut self, telemetry: Arc) -> Self { - self.telemetry = Some(telemetry); - self - } } -// Computes the completion state from the difference between the completion text. +// Computes the inline completion from the difference between the completion text. // this is defined by greedily matching the buffer text against the completion text, with any leftover buffer placed at the end. // for example, given the completion text "moo cows are cool" and the buffer text "cowsre pool", the completion state would be // the inlays "moo ", " a", and "cool" which will render as "[moo ]cows[ a]re [cool]pool" in the editor. -fn completion_state_from_diff( +fn completion_from_diff( snapshot: BufferSnapshot, completion_text: &str, position: Anchor, delete_range: Range, -) -> CompletionProposal { +) -> InlineCompletion { let buffer_text = snapshot .text_for_range(delete_range.clone()) .collect::(); - let mut inlays: Vec = Vec::new(); + let mut edits: Vec<(Range, String)> = Vec::new(); let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect(); let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect(); @@ -74,11 +65,10 @@ fn completion_state_from_diff( match k { Some(k) => { if k != 0 { + let offset = snapshot.anchor_after(offset); // the range from the current position to item is an inlay. 
- inlays.push(InlayProposal::Suggestion( - snapshot.anchor_after(offset), - completion_graphemes[i..i + k].join("").into(), - )); + let edit = (offset..offset, completion_graphemes[i..i + k].join("")); + edits.push(edit); } i += k + 1; j += 1; @@ -93,18 +83,14 @@ fn completion_state_from_diff( } if j == buffer_graphemes.len() && i < completion_graphemes.len() { + let offset = snapshot.anchor_after(offset); // there is leftover completion text, so drop it as an inlay. - inlays.push(InlayProposal::Suggestion( - snapshot.anchor_after(offset), - completion_graphemes[i..].join("").into(), - )); + let edit_range = offset..offset; + let edit_text = completion_graphemes[i..].join(""); + edits.push((edit_range, edit_text)); } - CompletionProposal { - inlays, - text: completion_text.into(), - delete_range: Some(delete_range), - } + InlineCompletion { edits } } impl InlineCompletionProvider for SupermavenCompletionProvider { @@ -112,6 +98,18 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { "supermaven" } + fn display_name() -> &'static str { + "Supermaven" + } + + fn show_completions_in_menu() -> bool { + false + } + + fn show_completions_in_normal_mode() -> bool { + false + } + fn is_enabled(&self, buffer: &Model, cursor_position: Anchor, cx: &AppContext) -> bool { if !self.supermaven.read(cx).is_enabled() { return false; @@ -124,6 +122,10 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx) } + fn is_refreshing(&self) -> bool { + self.pending_refresh.is_some() + } + fn refresh( &mut self, buffer_handle: Model, @@ -137,7 +139,7 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { return; }; - self.pending_refresh = cx.spawn(|this, mut cx| async move { + self.pending_refresh = Some(cx.spawn(|this, mut cx| async move { if debounce { cx.background_executor().timer(DEBOUNCE_TIMEOUT).await; } @@ -154,11 +156,12 @@ impl 
InlineCompletionProvider for SupermavenCompletionProvider { .to_string(), ) }); + this.pending_refresh = None; cx.notify(); })?; } Ok(()) - }); + })); } fn cycle( @@ -171,44 +174,21 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { } fn accept(&mut self, _cx: &mut ModelContext) { - if self.completion_id.is_some() { - if let Some(telemetry) = self.telemetry.as_ref() { - telemetry.report_inline_completion_event( - Self::name().to_string(), - true, - self.file_extension.clone(), - ); - } - } - self.pending_refresh = Task::ready(Ok(())); + self.pending_refresh = None; self.completion_id = None; } - fn discard( + fn discard(&mut self, _cx: &mut ModelContext) { + self.pending_refresh = None; + self.completion_id = None; + } + + fn suggest( &mut self, - should_report_inline_completion_event: bool, - _cx: &mut ModelContext, - ) { - if should_report_inline_completion_event && self.completion_id.is_some() { - if let Some(telemetry) = self.telemetry.as_ref() { - telemetry.report_inline_completion_event( - Self::name().to_string(), - false, - self.file_extension.clone(), - ); - } - } - - self.pending_refresh = Task::ready(Ok(())); - self.completion_id = None; - } - - fn active_completion_text<'a>( - &'a self, buffer: &Model, cursor_position: Anchor, - cx: &'a AppContext, - ) -> Option { + cx: &mut ModelContext, + ) -> Option { let completion_text = self .supermaven .read(cx) @@ -223,7 +203,7 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { let mut point = cursor_position.to_point(&snapshot); point.column = snapshot.line_len(point.row); let range = cursor_position..snapshot.anchor_after(point); - Some(completion_state_from_diff( + Some(completion_from_diff( snapshot, completion_text, cursor_position, diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index 8c961dc54d..f076a4f1bc 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -253,7 +253,7 @@ 
impl TabSwitcherDelegate { fn select_item( &mut self, item_id: EntityId, - cx: &mut ViewContext<'_, Picker>, + cx: &mut ViewContext>, ) { let selected_idx = self .matches @@ -263,7 +263,7 @@ impl TabSwitcherDelegate { self.set_selected_index(selected_idx, cx); } - fn close_item_at(&mut self, ix: usize, cx: &mut ViewContext<'_, Picker>) { + fn close_item_at(&mut self, ix: usize, cx: &mut ViewContext>) { let Some(tab_match) = self.matches.get(ix) else { return; }; @@ -358,13 +358,14 @@ impl PickerDelegate for TabSwitcherDelegate { .item .project_path(cx) .as_ref() - .and_then(|path| self.project.read(cx).entry_for_path(path, cx)) - .map(|entry| { - entry_git_aware_label_color( - entry.git_status, - entry.is_ignored, - selected, - ) + .and_then(|path| { + let project = self.project.read(cx); + let entry = project.entry_for_path(path, cx)?; + let git_status = project.project_path_git_status(path, cx); + Some((entry, git_status)) + }) + .map(|(entry, git_status)| { + entry_git_aware_label_color(git_status, entry.is_ignored, selected) }) }) .flatten(); @@ -407,7 +408,7 @@ impl PickerDelegate for TabSwitcherDelegate { ListItem::new(ix) .spacing(ListItemSpacing::Sparse) .inset(true) - .selected(selected) + .toggle_state(selected) .child(h_flex().w_full().child(label)) .start_slot::(icon) .map(|el| { diff --git a/crates/task/Cargo.toml b/crates/task/Cargo.toml index 43e3060a4e..6bc7489d86 100644 --- a/crates/task/Cargo.toml +++ b/crates/task/Cargo.toml @@ -21,6 +21,7 @@ serde_json_lenient.workspace = true sha2.workspace = true shellexpand.workspace = true util.workspace = true +zed_actions.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/task/src/lib.rs b/crates/task/src/lib.rs index 534b77b743..af31f56450 100644 --- a/crates/task/src/lib.rs +++ b/crates/task/src/lib.rs @@ -15,6 +15,7 @@ use std::str::FromStr; pub use task_template::{HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates}; pub use 
vscode_format::VsCodeTaskFile; +pub use zed_actions::RevealTarget; /// Task identifier, unique within the application. /// Based on it, task reruns and terminal tabs are managed. @@ -47,13 +48,19 @@ pub struct SpawnInTerminal { pub allow_concurrent_runs: bool, /// What to do with the terminal pane and tab, after the command was started. pub reveal: RevealStrategy, + /// Where to show tasks' terminal output. + pub reveal_target: RevealTarget, /// What to do with the terminal pane and tab, after the command had finished. pub hide: HideStrategy, /// Which shell to use when spawning the task. pub shell: Shell, + /// Whether to show the task summary line in the task output (success/failure). + pub show_summary: bool, + /// Whether to show the command line in the task output. + pub show_command: bool, } -/// A final form of the [`TaskTemplate`], that got resolved with a particualar [`TaskContext`] and now is ready to spawn the actual task. +/// A final form of the [`TaskTemplate`], that got resolved with a particular [`TaskContext`] and now is ready to spawn the actual task. #[derive(Clone, Debug, PartialEq, Eq)] pub struct ResolvedTask { /// A way to distinguish tasks produced by the same template, but different contexts. @@ -131,6 +138,10 @@ impl VariableName { pub fn template_value(&self) -> String { format!("${self}") } + /// Generates a `"$VARIABLE"`-like string, to be used instead of `Self::template_value` when expanded value could contain spaces or special characters. 
+ pub fn template_value_with_whitespace(&self) -> String { + format!("\"${self}\"") } } impl FromStr for VariableName { diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index b72a0d25f8..2d13a7e18b 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -1,4 +1,5 @@ use std::path::PathBuf; +use util::serde::default_true; use anyhow::{bail, Context}; use collections::{HashMap, HashSet}; @@ -8,7 +9,7 @@ use sha2::{Digest, Sha256}; use util::{truncate_and_remove_front, ResultExt}; use crate::{ - ResolvedTask, Shell, SpawnInTerminal, TaskContext, TaskId, VariableName, + ResolvedTask, RevealTarget, Shell, SpawnInTerminal, TaskContext, TaskId, VariableName, ZED_VARIABLE_NAME_PREFIX, }; @@ -41,10 +42,16 @@ pub struct TaskTemplate { #[serde(default)] pub allow_concurrent_runs: bool, /// What to do with the terminal pane and tab, after the command was started: - /// * `always` — always show the terminal pane, add and focus the corresponding task's tab in it (default) - /// * `never` — avoid changing current terminal pane focus, but still add/reuse the task's tab there + /// * `always` — always show the task's pane, and focus the corresponding tab in it (default) + /// * `no_focus` — always show the task's pane, add the task's tab in it, but don't focus it + /// * `never` — do not alter focus, but still add/reuse the task's tab in its pane #[serde(default)] pub reveal: RevealStrategy, + /// Where to place the task's terminal item after starting the task. + /// * `dock` — in the terminal dock, "regular" terminal items' place (default). + /// * `center` — in the central pane group, "main" editor area. 
+ #[serde(default)] + pub reveal_target: RevealTarget, /// What to do with the terminal pane and tab, after the command had finished: /// * `never` — do nothing when the command finishes (default) /// * `always` — always hide the terminal tab, hide the pane also if it was the last tab in it @@ -57,18 +64,24 @@ pub struct TaskTemplate { /// Which shell to use when spawning the task. #[serde(default)] pub shell: Shell, + /// Whether to show the task line in the task output. + #[serde(default = "default_true")] + pub show_summary: bool, + /// Whether to show the command line in the task output. + #[serde(default = "default_true")] + pub show_command: bool, } /// What to do with the terminal pane and tab, after the command was started. #[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum RevealStrategy { - /// Always show the terminal pane, add and focus the corresponding task's tab in it. + /// Always show the task's pane, and focus the corresponding tab in it. #[default] Always, - /// Always show the terminal pane, add the task's tab in it, but don't focus it. + /// Always show the task's pane, add the task's tab in it, but don't focus it. NoFocus, - /// Do not change terminal pane focus, but still add/reuse the task's tab there. + /// Do not alter focus, but still add/reuse the task's tab in its pane. 
Never, } @@ -130,13 +143,13 @@ impl TaskTemplate { let truncated_variables = truncate_variables(&task_variables); let cwd = match self.cwd.as_deref() { Some(cwd) => { - let substitured_cwd = substitute_all_template_variables_in_str( + let substituted_cwd = substitute_all_template_variables_in_str( cwd, &task_variables, &variable_names, &mut substituted_variables, )?; - Some(PathBuf::from(substitured_cwd)) + Some(PathBuf::from(substituted_cwd)) } None => None, } @@ -228,8 +241,11 @@ impl TaskTemplate { use_new_terminal: self.use_new_terminal, allow_concurrent_runs: self.allow_concurrent_runs, reveal: self.reveal, + reveal_target: self.reveal_target, hide: self.hide, shell: self.shell.clone(), + show_summary: self.show_summary, + show_command: self.show_command, }), }) } @@ -554,7 +570,7 @@ mod tests { spawn_in_terminal.label, format!( "test label for 1234 and …{}", - &long_value[..=MAX_DISPLAY_VARIABLE_LENGTH] + &long_value[long_value.len() - MAX_DISPLAY_VARIABLE_LENGTH..] ), "Human-readable label should have long substitutions trimmed" ); @@ -621,7 +637,7 @@ mod tests { label: "My task".into(), command: "echo".into(), args: vec!["$PATH".into()], - ..Default::default() + ..TaskTemplate::default() }; let resolved_task = task .resolve_task(TEST_ID_BASE, &TaskContext::default()) @@ -639,7 +655,7 @@ mod tests { label: "My task".into(), command: "echo".into(), args: vec!["$ZED_VARIABLE".into()], - ..Default::default() + ..TaskTemplate::default() }; assert!(task .resolve_task(TEST_ID_BASE, &TaskContext::default()) diff --git a/crates/tasks_ui/Cargo.toml b/crates/tasks_ui/Cargo.toml index 265755319b..528d238329 100644 --- a/crates/tasks_ui/Cargo.toml +++ b/crates/tasks_ui/Cargo.toml @@ -25,7 +25,7 @@ ui.workspace = true util.workspace = true workspace.workspace = true language.workspace = true - +zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/tasks_ui/src/lib.rs b/crates/tasks_ui/src/lib.rs 
index 38b15403e2..21625cbe24 100644 --- a/crates/tasks_ui/src/lib.rs +++ b/crates/tasks_ui/src/lib.rs @@ -1,8 +1,9 @@ use ::settings::Settings; use editor::{tasks::task_context, Editor}; use gpui::{AppContext, Task as AsyncTask, ViewContext, WindowContext}; -use modal::TasksModal; +use modal::{TaskOverrides, TasksModal}; use project::{Location, WorktreeId}; +use task::{RevealTarget, TaskId}; use workspace::tasks::schedule_task; use workspace::{tasks::schedule_resolved_task, Workspace}; @@ -25,9 +26,13 @@ pub fn init(cx: &mut AppContext) { .read(cx) .task_inventory() .and_then(|inventory| { - inventory - .read(cx) - .last_scheduled_task(action.task_id.as_ref()) + inventory.read(cx).last_scheduled_task( + action + .task_id + .as_ref() + .map(|id| TaskId(id.clone())) + .as_ref(), + ) }) { if action.reevaluate_context { @@ -74,7 +79,7 @@ pub fn init(cx: &mut AppContext) { ); } } else { - toggle_modal(workspace, cx).detach(); + toggle_modal(workspace, None, cx).detach(); }; }); }, @@ -83,13 +88,25 @@ pub fn init(cx: &mut AppContext) { } fn spawn_task_or_modal(workspace: &mut Workspace, action: &Spawn, cx: &mut ViewContext) { - match &action.task_name { - Some(name) => spawn_task_with_name(name.clone(), cx).detach_and_log_err(cx), - None => toggle_modal(workspace, cx).detach(), + match action { + Spawn::ByName { + task_name, + reveal_target, + } => { + let overrides = reveal_target.map(|reveal_target| TaskOverrides { + reveal_target: Some(reveal_target), + }); + spawn_task_with_name(task_name.clone(), overrides, cx).detach_and_log_err(cx) + } + Spawn::ViaModal { reveal_target } => toggle_modal(workspace, *reveal_target, cx).detach(), } } -fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) -> AsyncTask<()> { +fn toggle_modal( + workspace: &mut Workspace, + reveal_target: Option, + cx: &mut ViewContext, +) -> AsyncTask<()> { let task_store = workspace.project().read(cx).task_store().clone(); let workspace_handle = workspace.weak_handle(); let 
can_open_modal = workspace.project().update(cx, |project, cx| { @@ -102,7 +119,15 @@ fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) workspace .update(&mut cx, |workspace, cx| { workspace.toggle_modal(cx, |cx| { - TasksModal::new(task_store.clone(), task_context, workspace_handle, cx) + TasksModal::new( + task_store.clone(), + task_context, + reveal_target.map(|target| TaskOverrides { + reveal_target: Some(target), + }), + workspace_handle, + cx, + ) }) }) .ok(); @@ -114,6 +139,7 @@ fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) fn spawn_task_with_name( name: String, + overrides: Option, cx: &mut ViewContext, ) -> AsyncTask> { cx.spawn(|workspace, mut cx| async move { @@ -148,8 +174,13 @@ fn spawn_task_with_name( let did_spawn = workspace .update(&mut cx, |workspace, cx| { - let (task_source_kind, target_task) = + let (task_source_kind, mut target_task) = tasks.into_iter().find(|(_, task)| task.label == name)?; + if let Some(overrides) = &overrides { + if let Some(target_override) = overrides.reveal_target { + target_task.reveal_target = target_override; + } + } schedule_task( workspace, task_source_kind, @@ -164,7 +195,13 @@ fn spawn_task_with_name( if !did_spawn { workspace .update(&mut cx, |workspace, cx| { - spawn_task_or_modal(workspace, &Spawn::default(), cx); + spawn_task_or_modal( + workspace, + &Spawn::ViaModal { + reveal_target: overrides.and_then(|overrides| overrides.reveal_target), + }, + cx, + ); }) .ok(); } diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 3de116702a..ead9a01396 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -3,68 +3,28 @@ use std::sync::Arc; use crate::active_item_selection_properties; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - impl_actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusableView, + rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, 
FocusableView, InteractiveElement, Model, ParentElement, Render, SharedString, Styled, Subscription, Task, View, ViewContext, VisualContext, WeakView, }; use picker::{highlighted_match_with_paths::HighlightedText, Picker, PickerDelegate}; use project::{task_store::TaskStore, TaskSourceKind}; -use task::{ResolvedTask, TaskContext, TaskId, TaskTemplate}; +use task::{ResolvedTask, RevealTarget, TaskContext, TaskTemplate}; use ui::{ div, h_flex, v_flex, ActiveTheme, Button, ButtonCommon, ButtonSize, Clickable, Color, FluentBuilder as _, Icon, IconButton, IconButtonShape, IconName, IconSize, IntoElement, - KeyBinding, LabelSize, ListItem, ListItemSpacing, RenderOnce, Selectable, Tooltip, + KeyBinding, LabelSize, ListItem, ListItemSpacing, RenderOnce, Toggleable, Tooltip, WindowContext, }; use util::ResultExt; use workspace::{tasks::schedule_resolved_task, ModalView, Workspace}; - -use serde::Deserialize; - -/// Spawn a task with name or open tasks modal -#[derive(PartialEq, Clone, Deserialize, Default)] -pub struct Spawn { - #[serde(default)] - /// Name of the task to spawn. - /// If it is not set, a modal with a list of available tasks is opened instead. - /// Defaults to None. - pub task_name: Option, -} - -impl Spawn { - pub fn modal() -> Self { - Self { task_name: None } - } -} - -/// Rerun last task -#[derive(PartialEq, Clone, Deserialize, Default)] -pub struct Rerun { - /// Controls whether the task context is reevaluated prior to execution of a task. - /// If it is not, environment variables such as ZED_COLUMN, ZED_FILE are gonna be the same as in the last execution of a task - /// If it is, these variables will be updated to reflect current state of editor at the time task::Rerun is executed. - /// default: false - #[serde(default)] - pub reevaluate_context: bool, - /// Overrides `allow_concurrent_runs` property of the task being reran. 
- /// Default: null - #[serde(default)] - pub allow_concurrent_runs: Option, - /// Overrides `use_new_terminal` property of the task being reran. - /// Default: null - #[serde(default)] - pub use_new_terminal: Option, - - /// If present, rerun the task with this ID, otherwise rerun the last task. - pub task_id: Option, -} - -impl_actions!(task, [Rerun, Spawn]); +pub use zed_actions::{Rerun, Spawn}; /// A modal used to spawn new tasks. pub(crate) struct TasksModalDelegate { task_store: Model, candidates: Option>, + task_overrides: Option, last_used_candidate_index: Option, divider_index: Option, matches: Vec, @@ -75,12 +35,28 @@ pub(crate) struct TasksModalDelegate { placeholder_text: Arc, } +/// Task template amendments to do before resolving the context. +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub(crate) struct TaskOverrides { + /// See [`RevealTarget`]. + pub(crate) reveal_target: Option, +} + impl TasksModalDelegate { fn new( task_store: Model, task_context: TaskContext, + task_overrides: Option, workspace: WeakView, ) -> Self { + let placeholder_text = if let Some(TaskOverrides { + reveal_target: Some(RevealTarget::Center), + }) = &task_overrides + { + Arc::from("Find a task, or run a command in the central pane") + } else { + Arc::from("Find a task, or run a command") + }; Self { task_store, workspace, @@ -91,7 +67,8 @@ impl TasksModalDelegate { selected_index: 0, prompt: String::default(), task_context, - placeholder_text: Arc::from("Find a task, or run a command"), + task_overrides, + placeholder_text, } } @@ -102,11 +79,17 @@ impl TasksModalDelegate { let source_kind = TaskSourceKind::UserInput; let id_base = source_kind.to_id_base(); - let new_oneshot = TaskTemplate { + let mut new_oneshot = TaskTemplate { label: self.prompt.clone(), command: self.prompt.clone(), ..TaskTemplate::default() }; + if let Some(TaskOverrides { + reveal_target: Some(reveal_target), + }) = &self.task_overrides + { + new_oneshot.reveal_target = *reveal_target; + } Some(( 
source_kind, new_oneshot.resolve_task(&id_base, &self.task_context)?, @@ -141,12 +124,13 @@ impl TasksModal { pub(crate) fn new( task_store: Model, task_context: TaskContext, + task_overrides: Option, workspace: WeakView, cx: &mut ViewContext, ) -> Self { let picker = cx.new_view(|cx| { Picker::uniform_list( - TasksModalDelegate::new(task_store, task_context, workspace), + TasksModalDelegate::new(task_store, task_context, task_overrides, workspace), cx, ) }); @@ -298,9 +282,17 @@ impl PickerDelegate for TasksModalDelegate { .as_ref() .map(|candidates| candidates[ix].clone()) }); - let Some((task_source_kind, task)) = task else { + let Some((task_source_kind, mut task)) = task else { return; }; + if let Some(TaskOverrides { + reveal_target: Some(reveal_target), + }) = &self.task_overrides + { + if let Some(resolved_task) = &mut task.resolved { + resolved_task.reveal_target = *reveal_target; + } + } self.workspace .update(cx, |workspace, cx| { @@ -420,7 +412,7 @@ impl PickerDelegate for TasksModalDelegate { }; item }) - .selected(selected) + .toggle_state(selected) .child(highlighted_location.render(cx)), ) } @@ -437,9 +429,18 @@ impl PickerDelegate for TasksModalDelegate { } fn confirm_input(&mut self, omit_history_entry: bool, cx: &mut ViewContext>) { - let Some((task_source_kind, task)) = self.spawn_oneshot() else { + let Some((task_source_kind, mut task)) = self.spawn_oneshot() else { return; }; + + if let Some(TaskOverrides { + reveal_target: Some(reveal_target), + }) = self.task_overrides + { + if let Some(resolved_task) = &mut task.resolved { + resolved_task.reveal_target = reveal_target; + } + } self.workspace .update(cx, |workspace, cx| { schedule_resolved_task(workspace, task_source_kind, task, omit_history_entry, cx); @@ -554,11 +555,7 @@ fn string_match_candidates<'a>( ) -> Vec { candidates .enumerate() - .map(|(index, (_, candidate))| StringMatchCandidate { - id: index, - char_bag: candidate.resolved_label.chars().collect(), - string: 
candidate.display_label().to_owned(), - }) + .map(|(index, (_, candidate))| StringMatchCandidate::new(index, candidate.display_label())) .collect() } @@ -723,8 +720,9 @@ mod tests { "No query should be added to the list, as it was submitted with secondary action (that maps to omit_history = true)" ); - cx.dispatch_action(Spawn { - task_name: Some("example task".to_string()), + cx.dispatch_action(Spawn::ByName { + task_name: "example task".to_string(), + reveal_target: None, }); let tasks_picker = workspace.update(cx, |workspace, cx| { workspace @@ -793,7 +791,7 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "hello from …th.odd_extension:1:1".to_string(), + "hello from …h.odd_extension:1:1".to_string(), "opened now: /dir".to_string() ], "Second opened buffer should fill the context, labels should be trimmed if long enough" @@ -822,7 +820,7 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "hello from …ithout_extension:2:3".to_string(), + "hello from …thout_extension:2:3".to_string(), "opened now: /dir".to_string() ], "Opened buffer should fill the context, labels should be trimmed if long enough" @@ -1035,7 +1033,7 @@ mod tests { workspace: &View, cx: &mut VisualTestContext, ) -> View> { - cx.dispatch_action(Spawn::default()); + cx.dispatch_action(Spawn::modal()); workspace.update(cx, |workspace, cx| { workspace .active_modal::(cx) diff --git a/crates/telemetry/Cargo.toml b/crates/telemetry/Cargo.toml new file mode 100644 index 0000000000..cc524f0ceb --- /dev/null +++ b/crates/telemetry/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "telemetry" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/telemetry.rs" + +[dependencies] +serde.workspace = true +serde_json.workspace = true +telemetry_events.workspace = true +futures.workspace = true diff --git a/crates/telemetry/LICENSE-GPL b/crates/telemetry/LICENSE-GPL new file mode 120000 index 
0000000000..89e542f750 --- /dev/null +++ b/crates/telemetry/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/telemetry/src/telemetry.rs b/crates/telemetry/src/telemetry.rs new file mode 100644 index 0000000000..9fb05543a9 --- /dev/null +++ b/crates/telemetry/src/telemetry.rs @@ -0,0 +1,64 @@ +//! See [Telemetry in Zed](https://zed.dev/docs/telemetry) for additional information. +use futures::channel::mpsc; +pub use serde_json; +use std::sync::OnceLock; +pub use telemetry_events::FlexibleEvent as Event; + +/// Macro to create telemetry events and send them to the telemetry queue. +/// +/// By convention, the name should be "Noun Verbed", e.g. "Keymap Changed" +/// or "Project Diagnostics Opened". +/// +/// The properties can be any value that implements serde::Serialize. +/// +/// ``` +/// telemetry::event!("Keymap Changed", version = "1.0.0"); +/// telemetry::event!("Documentation Viewed", url, source = "Extension Upsell"); +/// ``` +#[macro_export] +macro_rules! event { + ($name:expr) => {{ + let event = $crate::Event { + event_type: $name.to_string(), + event_properties: std::collections::HashMap::new(), + }; + $crate::send_event(event); + }}; + ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) => {{ + let event = $crate::Event { + event_type: $name.to_string(), + event_properties: std::collections::HashMap::from([ + $( + (stringify!($key).to_string(), + $crate::serde_json::value::to_value(&$crate::serialize_property!($key $(= $value)?)) + .unwrap_or_else(|_| $crate::serde_json::to_value(&()).unwrap()) + ), + )+ + ]), + }; + $crate::send_event(event); + }}; +} + +#[macro_export] +macro_rules! 
serialize_property { + ($key:ident) => { + $key + }; + ($key:ident = $value:expr) => { + $value + }; +} + +pub fn send_event(event: Event) { + if let Some(queue) = TELEMETRY_QUEUE.get() { + queue.unbounded_send(event).ok(); + return; + } +} + +pub fn init(tx: mpsc::UnboundedSender) { + TELEMETRY_QUEUE.set(tx).ok(); +} + +static TELEMETRY_QUEUE: OnceLock> = OnceLock::new(); diff --git a/crates/telemetry_events/Cargo.toml b/crates/telemetry_events/Cargo.toml index 01145549b1..35a87f46ff 100644 --- a/crates/telemetry_events/Cargo.toml +++ b/crates/telemetry_events/Cargo.toml @@ -14,3 +14,4 @@ path = "src/telemetry_events.rs" [dependencies] semantic_version.workspace = true serde.workspace = true +serde_json.workspace = true diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 0c4ee8cb9e..a39c514ced 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -2,7 +2,7 @@ use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; -use std::{fmt::Display, sync::Arc, time::Duration}; +use std::{collections::HashMap, fmt::Display, sync::Arc, time::Duration}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct EventRequestBody { @@ -91,8 +91,10 @@ impl Display for AssistantPhase { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(tag = "type")] pub enum Event { + Flexible(FlexibleEvent), Editor(EditorEvent), InlineCompletion(InlineCompletionEvent), + InlineCompletionRating(InlineCompletionRatingEvent), Call(CallEvent), Assistant(AssistantEvent), Cpu(CpuEvent), @@ -105,6 +107,12 @@ pub enum Event { Repl(ReplEvent), } +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct FlexibleEvent { + pub event_type: String, + pub event_properties: HashMap, +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct EditorEvent { /// The editor operation performed (open, save) @@ -130,6 +138,21 
@@ pub struct InlineCompletionEvent { pub file_extension: Option, } +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub enum InlineCompletionRating { + Positive, + Negative, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct InlineCompletionRatingEvent { + pub rating: InlineCompletionRating, + pub input_events: Arc, + pub input_excerpt: Arc, + pub output_excerpt: Arc, + pub feedback: String, +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CallEvent { /// Operation performed: invite/join call; begin/end screenshare; share/unshare project; etc @@ -246,6 +269,7 @@ pub struct Panic { pub app_version: String, /// Zed release channel (stable, preview, dev) pub release_channel: String, + pub target: Option, pub os_name: String, pub os_version: Option, pub architecture: String, diff --git a/crates/terminal/src/mappings/keys.rs b/crates/terminal/src/mappings/keys.rs index 2d4fe4c62e..1efc1f17d2 100644 --- a/crates/terminal/src/mappings/keys.rs +++ b/crates/terminal/src/mappings/keys.rs @@ -343,7 +343,7 @@ mod test { function: false, }, key: "🖖🏻".to_string(), //2 char string - ime_key: None, + key_char: None, }; assert_eq!(to_esc_str(&ks, &TermMode::NONE, false), None); } diff --git a/crates/terminal/src/pty_info.rs b/crates/terminal/src/pty_info.rs index 559d022fda..6478cb4ad8 100644 --- a/crates/terminal/src/pty_info.rs +++ b/crates/terminal/src/pty_info.rs @@ -10,7 +10,7 @@ use windows::Win32::{Foundation::HANDLE, System::Threading::GetProcessId}; use sysinfo::{Pid, Process, ProcessRefreshKind, RefreshKind, System, UpdateKind}; -struct ProcessIdGetter { +pub struct ProcessIdGetter { handle: i32, fallback_pid: u32, } @@ -31,6 +31,10 @@ impl ProcessIdGetter { } Some(Pid::from_u32(pid as u32)) } + + pub fn fallback_pid(&self) -> u32 { + self.fallback_pid + } } #[cfg(windows)] @@ -62,6 +66,10 @@ impl ProcessIdGetter { } Some(Pid::from_u32(pid)) } + + pub fn fallback_pid(&self) -> u32 { + self.fallback_pid + } } 
#[derive(Clone, Debug)] @@ -96,6 +104,10 @@ impl PtyProcessInfo { } } + pub fn pid_getter(&self) -> &ProcessIdGetter { + &self.pid_getter + } + fn refresh(&mut self) -> Option<&Process> { let pid = self.pid_getter.pid()?; if self.system.refresh_processes_specifics( diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 5a15723cee..4ab36ae102 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -324,6 +324,7 @@ impl TerminalBuilder { #[allow(clippy::too_many_arguments)] pub fn new( working_directory: Option, + python_venv_directory: Option, task: Option, shell: Shell, mut env: HashMap, @@ -346,6 +347,7 @@ impl TerminalBuilder { env.insert("ZED_TERM".to_string(), "true".to_string()); env.insert("TERM_PROGRAM".to_string(), "zed".to_string()); + env.insert("TERM".to_string(), "xterm-256color".to_string()); env.insert( "TERM_PROGRAM_VERSION".to_string(), release_channel::AppVersion::global(cx).to_string(), @@ -471,6 +473,7 @@ impl TerminalBuilder { word_regex: RegexSearch::new(WORD_REGEX).unwrap(), vi_mode_enabled: false, is_ssh_terminal, + python_venv_directory, }; Ok(TerminalBuilder { @@ -619,6 +622,7 @@ pub struct Terminal { pub breadcrumb_text: String, pub pty_info: PtyProcessInfo, title_override: Option, + pub python_venv_directory: Option, scroll_px: Pixels, next_link_id: usize, selection_phase: SelectionPhase, @@ -639,6 +643,8 @@ pub struct TaskState { pub status: TaskStatus, pub completion_rx: Receiver<()>, pub hide: HideStrategy, + pub show_summary: bool, + pub show_command: bool, } /// A status of the current terminal tab's task. 
@@ -948,22 +954,32 @@ impl Terminal { match found_word { Some((maybe_url_or_path, is_url, url_match)) => { - if *open { - let target = if is_url { - MaybeNavigationTarget::Url(maybe_url_or_path) - } else { + let target = if is_url { + // Treat "file://" URLs like file paths to ensure + // that line numbers at the end of the path are + // handled correctly + if let Some(path) = maybe_url_or_path.strip_prefix("file://") { MaybeNavigationTarget::PathLike(PathLikeTarget { - maybe_path: maybe_url_or_path, + maybe_path: path.to_string(), terminal_dir: self.working_directory(), }) - }; + } else { + MaybeNavigationTarget::Url(maybe_url_or_path.clone()) + } + } else { + MaybeNavigationTarget::PathLike(PathLikeTarget { + maybe_path: maybe_url_or_path.clone(), + terminal_dir: self.working_directory(), + }) + }; + if *open { cx.emit(Event::Open(target)); } else { self.update_selected_word( prev_hovered_word, url_match, maybe_url_or_path, - is_url, + target, cx, ); } @@ -985,7 +1001,7 @@ impl Terminal { prev_word: Option, word_match: RangeInclusive, word: String, - is_url: bool, + navigation_target: MaybeNavigationTarget, cx: &mut ModelContext, ) { if let Some(prev_word) = prev_word { @@ -1004,14 +1020,6 @@ impl Terminal { word_match, id: self.next_link_id(), }); - let navigation_target = if is_url { - MaybeNavigationTarget::Url(word) - } else { - MaybeNavigationTarget::PathLike(PathLikeTarget { - maybe_path: word, - terminal_dir: self.working_directory(), - }) - }; cx.emit(Event::NewNavigationTarget(Some(navigation_target))); } @@ -1178,10 +1186,10 @@ impl Terminal { } let motion: Option = match key.as_str() { - "h" => Some(ViMotion::Left), - "j" => Some(ViMotion::Down), - "k" => Some(ViMotion::Up), - "l" => Some(ViMotion::Right), + "h" | "left" => Some(ViMotion::Left), + "j" | "down" => Some(ViMotion::Down), + "k" | "up" => Some(ViMotion::Up), + "l" | "right" => Some(ViMotion::Right), "w" => Some(ViMotion::WordRight), "b" if !keystroke.modifiers.control => 
Some(ViMotion::WordLeft), "e" => Some(ViMotion::WordRightEnd), @@ -1456,7 +1464,7 @@ impl Terminal { fn drag_line_delta(&self, e: &MouseMoveEvent, region: Bounds) -> Option { //TODO: Why do these need to be doubled? Probably the same problem that the IME has let top = region.origin.y + (self.last_content.size.line_height * 2.); - let bottom = region.lower_left().y - (self.last_content.size.line_height * 2.); + let bottom = region.bottom_left().y - (self.last_content.size.line_height * 2.); let scroll_delta = if e.position.y < top { (top - e.position.y).pow(1.1) } else if e.position.y > bottom { @@ -1760,11 +1768,22 @@ impl Terminal { }; let (finished_successfully, task_line, command_line) = task_summary(task, error_code); - // SAFETY: the invocation happens on non `TaskStatus::Running` tasks, once, - // after either `AlacTermEvent::Exit` or `AlacTermEvent::ChildExit` events that are spawned - // when Zed task finishes and no more output is made. - // After the task summary is output once, no more text is appended to the terminal. - unsafe { append_text_to_term(&mut self.term.lock(), &[&task_line, &command_line]) }; + let mut lines_to_show = Vec::new(); + if task.show_summary { + lines_to_show.push(task_line.as_str()); + } + if task.show_command { + lines_to_show.push(command_line.as_str()); + } + + if !lines_to_show.is_empty() { + // SAFETY: the invocation happens on non `TaskStatus::Running` tasks, once, + // after either `AlacTermEvent::Exit` or `AlacTermEvent::ChildExit` events that are spawned + // when Zed task finishes and no more output is made. + // After the task summary is output once, no more text is appended to the terminal. 
+ unsafe { append_text_to_term(&mut self.term.lock(), &lines_to_show) }; + } + match task.hide { HideStrategy::Never => {} HideStrategy::Always => { diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index e48e23b141..760eb14b21 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -21,10 +21,10 @@ pub enum TerminalDockPosition { #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct Toolbar { - pub title: bool, + pub breadcrumbs: bool, } -#[derive(Debug, Deserialize)] +#[derive(Clone, Debug, Deserialize)] pub struct TerminalSettings { pub shell: Shell, pub working_directory: WorkingDirectory, @@ -286,10 +286,14 @@ pub enum WorkingDirectory { // Toolbar related settings #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ToolbarContent { - /// Whether to display the terminal title in its toolbar. + /// Whether to display the terminal title in breadcrumbs inside the terminal pane. + /// Only shown if the terminal title is not empty. + /// + /// The shell running in the terminal needs to be configured to emit the title. 
+ /// Example: `echo -e "\e]2;New Title\007";` /// /// Default: true - pub title: Option, + pub breadcrumbs: Option, } #[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/terminal_view/Cargo.toml b/crates/terminal_view/Cargo.toml index 09b0b0d2d5..83803ff21c 100644 --- a/crates/terminal_view/Cargo.toml +++ b/crates/terminal_view/Cargo.toml @@ -8,14 +8,19 @@ license = "GPL-3.0-or-later" [lints] workspace = true +[features] +test-support = ["editor/test-support", "gpui/test-support"] + [lib] path = "src/terminal_view.rs" doctest = false [dependencies] anyhow.workspace = true -db.workspace = true +async-recursion.workspace = true +breadcrumbs.workspace = true collections.workspace = true +db.workspace = true dirs.workspace = true editor.workspace = true futures.workspace = true @@ -24,7 +29,6 @@ itertools.workspace = true language.workspace = true project.workspace = true task.workspace = true -tasks_ui.workspace = true search.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/terminal_view/src/persistence.rs b/crates/terminal_view/src/persistence.rs index b8c31e05b0..a3bb2cc522 100644 --- a/crates/terminal_view/src/persistence.rs +++ b/crates/terminal_view/src/persistence.rs @@ -1,8 +1,368 @@ use anyhow::Result; -use std::path::PathBuf; +use async_recursion::async_recursion; +use collections::HashSet; +use futures::{stream::FuturesUnordered, StreamExt as _}; +use gpui::{AsyncWindowContext, Axis, Model, Task, View, WeakView}; +use project::{terminals::TerminalKind, Project}; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; +use ui::{Pixels, ViewContext, VisualContext as _, WindowContext}; +use util::ResultExt as _; use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql}; -use workspace::{ItemId, WorkspaceDb, WorkspaceId}; +use workspace::{ + ItemHandle, ItemId, Member, Pane, PaneAxis, PaneGroup, SerializableItem as _, 
Workspace, + WorkspaceDb, WorkspaceId, +}; + +use crate::{ + default_working_directory, + terminal_panel::{new_terminal_pane, TerminalPanel}, + TerminalView, +}; + +pub(crate) fn serialize_pane_group( + pane_group: &PaneGroup, + active_pane: &View, + cx: &WindowContext, +) -> SerializedPaneGroup { + build_serialized_pane_group(&pane_group.root, active_pane, cx) +} + +fn build_serialized_pane_group( + pane_group: &Member, + active_pane: &View, + cx: &WindowContext, +) -> SerializedPaneGroup { + match pane_group { + Member::Axis(PaneAxis { + axis, + members, + flexes, + bounding_boxes: _, + }) => SerializedPaneGroup::Group { + axis: SerializedAxis(*axis), + children: members + .iter() + .map(|member| build_serialized_pane_group(member, active_pane, cx)) + .collect::>(), + flexes: Some(flexes.lock().clone()), + }, + Member::Pane(pane_handle) => { + SerializedPaneGroup::Pane(serialize_pane(pane_handle, pane_handle == active_pane, cx)) + } + } +} + +fn serialize_pane(pane: &View, active: bool, cx: &WindowContext) -> SerializedPane { + let mut items_to_serialize = HashSet::default(); + let pane = pane.read(cx); + let children = pane + .items() + .filter_map(|item| { + let terminal_view = item.act_as::(cx)?; + if terminal_view.read(cx).terminal().read(cx).task().is_some() { + None + } else { + let id = item.item_id().as_u64(); + items_to_serialize.insert(id); + Some(id) + } + }) + .collect::>(); + let active_item = pane + .active_item() + .map(|item| item.item_id().as_u64()) + .filter(|active_id| items_to_serialize.contains(active_id)); + + SerializedPane { + active, + children, + active_item, + } +} + +pub(crate) fn deserialize_terminal_panel( + workspace: WeakView, + project: Model, + database_id: WorkspaceId, + serialized_panel: SerializedTerminalPanel, + cx: &mut WindowContext, +) -> Task>> { + cx.spawn(move |mut cx| async move { + let terminal_panel = workspace.update(&mut cx, |workspace, cx| { + cx.new_view(|cx| { + let mut panel = TerminalPanel::new(workspace, cx); 
+ panel.height = serialized_panel.height.map(|h| h.round()); + panel.width = serialized_panel.width.map(|w| w.round()); + panel + }) + })?; + match &serialized_panel.items { + SerializedItems::NoSplits(item_ids) => { + let items = deserialize_terminal_views( + database_id, + project, + workspace, + item_ids.as_slice(), + &mut cx, + ) + .await; + let active_item = serialized_panel.active_item_id; + terminal_panel.update(&mut cx, |terminal_panel, cx| { + terminal_panel.active_pane.update(cx, |pane, cx| { + populate_pane_items(pane, items, active_item, cx); + }); + })?; + } + SerializedItems::WithSplits(serialized_pane_group) => { + let center_pane = deserialize_pane_group( + workspace, + project, + terminal_panel.clone(), + database_id, + serialized_pane_group, + &mut cx, + ) + .await; + if let Some((center_group, active_pane)) = center_pane { + terminal_panel.update(&mut cx, |terminal_panel, _| { + terminal_panel.center = PaneGroup::with_root(center_group); + terminal_panel.active_pane = + active_pane.unwrap_or_else(|| terminal_panel.center.first_pane()); + })?; + } + } + } + + Ok(terminal_panel) + }) +} + +fn populate_pane_items( + pane: &mut Pane, + items: Vec>, + active_item: Option, + cx: &mut ViewContext, +) { + let mut item_index = pane.items_len(); + for item in items { + let activate_item = Some(item.item_id().as_u64()) == active_item; + pane.add_item(Box::new(item), false, false, None, cx); + item_index += 1; + if activate_item { + pane.activate_item(item_index, false, false, cx); + } + } +} + +#[async_recursion(?Send)] +async fn deserialize_pane_group( + workspace: WeakView, + project: Model, + panel: View, + workspace_id: WorkspaceId, + serialized: &SerializedPaneGroup, + cx: &mut AsyncWindowContext, +) -> Option<(Member, Option>)> { + match serialized { + SerializedPaneGroup::Group { + axis, + flexes, + children, + } => { + let mut current_active_pane = None; + let mut members = Vec::new(); + for child in children { + if let Some((new_member, 
active_pane)) = deserialize_pane_group( + workspace.clone(), + project.clone(), + panel.clone(), + workspace_id, + child, + cx, + ) + .await + { + members.push(new_member); + current_active_pane = current_active_pane.or(active_pane); + } + } + + if members.is_empty() { + return None; + } + + if members.len() == 1 { + return Some((members.remove(0), current_active_pane)); + } + + Some(( + Member::Axis(PaneAxis::load(axis.0, members, flexes.clone())), + current_active_pane, + )) + } + SerializedPaneGroup::Pane(serialized_pane) => { + let active = serialized_pane.active; + let new_items = deserialize_terminal_views( + workspace_id, + project.clone(), + workspace.clone(), + serialized_pane.children.as_slice(), + cx, + ) + .await; + + let pane = panel + .update(cx, |terminal_panel, cx| { + new_terminal_pane( + workspace.clone(), + project.clone(), + terminal_panel.active_pane.read(cx).is_zoomed(), + cx, + ) + }) + .log_err()?; + let active_item = serialized_pane.active_item; + + let terminal = pane + .update(cx, |pane, cx| { + populate_pane_items(pane, new_items, active_item, cx); + // Avoid blank panes in splits + if pane.items_len() == 0 { + let working_directory = workspace + .update(cx, |workspace, cx| default_working_directory(workspace, cx)) + .ok() + .flatten(); + let kind = TerminalKind::Shell( + working_directory.as_deref().map(Path::to_path_buf), + ); + let window = cx.window_handle(); + let terminal = project + .update(cx, |project, cx| project.create_terminal(kind, window, cx)); + Some(Some(terminal)) + } else { + Some(None) + } + }) + .ok() + .flatten()?; + if let Some(terminal) = terminal { + let terminal = terminal.await.ok()?; + pane.update(cx, |pane, cx| { + let terminal_view = Box::new(cx.new_view(|cx| { + TerminalView::new( + terminal, + workspace.clone(), + Some(workspace_id), + project.downgrade(), + cx, + ) + })); + pane.add_item(terminal_view, true, false, None, cx); + }) + .ok()?; + } + Some((Member::Pane(pane.clone()), active.then_some(pane))) + 
} + } +} + +async fn deserialize_terminal_views( + workspace_id: WorkspaceId, + project: Model, + workspace: WeakView, + item_ids: &[u64], + cx: &mut AsyncWindowContext, +) -> Vec> { + let mut items = Vec::with_capacity(item_ids.len()); + let mut deserialized_items = item_ids + .iter() + .map(|item_id| { + cx.update(|cx| { + TerminalView::deserialize( + project.clone(), + workspace.clone(), + workspace_id, + *item_id, + cx, + ) + }) + .unwrap_or_else(|e| Task::ready(Err(e.context("no window present")))) + }) + .collect::>(); + while let Some(item) = deserialized_items.next().await { + if let Some(item) = item.log_err() { + items.push(item); + } + } + items +} + +#[derive(Debug, Serialize, Deserialize)] +pub(crate) struct SerializedTerminalPanel { + pub items: SerializedItems, + // A deprecated field, kept for backwards compatibility for the code before terminal splits were introduced. + pub active_item_id: Option, + pub width: Option, + pub height: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub(crate) enum SerializedItems { + // The data stored before terminal splits were introduced. 
+ NoSplits(Vec), + WithSplits(SerializedPaneGroup), +} + +#[derive(Debug, Serialize, Deserialize)] +pub(crate) enum SerializedPaneGroup { + Pane(SerializedPane), + Group { + axis: SerializedAxis, + flexes: Option>, + children: Vec, + }, +} + +#[derive(Debug, Serialize, Deserialize)] +pub(crate) struct SerializedPane { + pub active: bool, + pub children: Vec, + pub active_item: Option, +} + +#[derive(Debug)] +pub(crate) struct SerializedAxis(pub Axis); + +impl Serialize for SerializedAxis { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self.0 { + Axis::Horizontal => serializer.serialize_str("horizontal"), + Axis::Vertical => serializer.serialize_str("vertical"), + } + } +} + +impl<'de> Deserialize<'de> for SerializedAxis { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + match s.as_str() { + "horizontal" => Ok(SerializedAxis(Axis::Horizontal)), + "vertical" => Ok(SerializedAxis(Axis::Vertical)), + invalid => Err(serde::de::Error::custom(format!( + "Invalid axis value: '{invalid}'" + ))), + } + } +} define_connection! 
{ pub static ref TERMINAL_DB: TerminalDb = diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index bc4f58a5ef..b984444fbf 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -867,7 +867,7 @@ impl Element for TerminalElement { bounds: Bounds, _: &mut Self::RequestLayoutState, layout: &mut Self::PrepaintState, - cx: &mut WindowContext<'_>, + cx: &mut WindowContext, ) { cx.with_content_mask(Some(ContentMask { bounds }), |cx| { let scroll_top = self.terminal_view.read(cx).scroll_top; @@ -1001,6 +1001,7 @@ impl InputHandler for TerminalInputHandler { fn text_for_range( &mut self, _: std::ops::Range, + _: &mut Option>, _: &mut WindowContext, ) -> Option { None diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 6d64ac1a48..125c1384b5 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -1,38 +1,47 @@ -use std::{ops::ControlFlow, path::PathBuf, sync::Arc}; +use std::{cmp, ops::ControlFlow, path::PathBuf, sync::Arc, time::Duration}; -use crate::{default_working_directory, TerminalView}; -use collections::{HashMap, HashSet}; +use crate::{ + default_working_directory, + persistence::{ + deserialize_terminal_panel, serialize_pane_group, SerializedItems, SerializedTerminalPanel, + }, + TerminalView, +}; +use breadcrumbs::Breadcrumbs; +use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use futures::future::join_all; use gpui::{ - actions, Action, AnchorCorner, AnyView, AppContext, AsyncWindowContext, Entity, EventEmitter, + actions, Action, AnyView, AppContext, AsyncWindowContext, Corner, Entity, EventEmitter, ExternalPaths, FocusHandle, FocusableView, IntoElement, Model, ParentElement, Pixels, Render, - Styled, Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowContext, + Styled, Task, View, ViewContext, VisualContext, WeakView, 
WindowContext, }; use itertools::Itertools; -use project::{terminals::TerminalKind, Fs, ProjectEntryId}; +use project::{terminals::TerminalKind, Fs, Project, ProjectEntryId}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; -use serde::{Deserialize, Serialize}; use settings::Settings; -use task::{RevealStrategy, Shell, SpawnInTerminal, TaskId}; +use task::{RevealStrategy, RevealTarget, Shell, SpawnInTerminal, TaskId}; use terminal::{ terminal_settings::{TerminalDockPosition, TerminalSettings}, Terminal, }; use ui::{ - h_flex, ButtonCommon, Clickable, ContextMenu, IconButton, IconSize, PopoverMenu, Selectable, + prelude::*, ButtonCommon, Clickable, ContextMenu, FluentBuilder, PopoverMenu, Toggleable, Tooltip, }; use util::{ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, item::SerializableItem, - pane, + move_active_item, move_item, pane, ui::IconName, - DraggedTab, ItemId, NewTerminal, Pane, ToggleZoom, Workspace, + ActivateNextPane, ActivatePane, ActivatePaneInDirection, ActivatePreviousPane, DraggedTab, + ItemId, MoveItemToPane, MoveItemToPaneInDirection, NewTerminal, Pane, PaneGroup, + SplitDirection, SplitDown, SplitLeft, SplitRight, SplitUp, SwapPaneInDirection, ToggleZoom, + Workspace, }; -use anyhow::Result; +use anyhow::{anyhow, Context, Result}; use zed_actions::InlineAssist; const TERMINAL_PANEL_KEY: &str = "TerminalPanel"; @@ -45,11 +54,7 @@ pub fn init(cx: &mut AppContext) { workspace.register_action(TerminalPanel::new_terminal); workspace.register_action(TerminalPanel::open_terminal); workspace.register_action(|workspace, _: &ToggleFocus, cx| { - if workspace - .panel::(cx) - .as_ref() - .is_some_and(|panel| panel.read(cx).enabled) - { + if is_enabled_in_workspace(workspace, cx) { workspace.toggle_panel_focus::(cx); } }); @@ -59,97 +64,28 @@ pub fn init(cx: &mut AppContext) { } pub struct TerminalPanel { - pane: View, + pub(crate) active_pane: View, + pub(crate) center: PaneGroup, fs: Arc, workspace: 
WeakView, - width: Option, - height: Option, + pub(crate) width: Option, + pub(crate) height: Option, pending_serialization: Task>, pending_terminals_to_add: usize, - _subscriptions: Vec, deferred_tasks: HashMap>, - enabled: bool, assistant_enabled: bool, assistant_tab_bar_button: Option, } impl TerminalPanel { - fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { - let pane = cx.new_view(|cx| { - let mut pane = Pane::new( - workspace.weak_handle(), - workspace.project().clone(), - Default::default(), - None, - NewTerminal.boxed_clone(), - cx, - ); - pane.set_can_split(false, cx); - pane.set_can_navigate(false, cx); - pane.display_nav_history_buttons(None); - pane.set_should_display_tab_bar(|_| true); - - let is_local = workspace.project().read(cx).is_local(); - let workspace = workspace.weak_handle(); - pane.set_custom_drop_handle(cx, move |pane, dropped_item, cx| { - if let Some(tab) = dropped_item.downcast_ref::() { - let item = if &tab.pane == cx.view() { - pane.item_for_index(tab.ix) - } else { - tab.pane.read(cx).item_for_index(tab.ix) - }; - if let Some(item) = item { - if item.downcast::().is_some() { - return ControlFlow::Continue(()); - } else if let Some(project_path) = item.project_path(cx) { - if let Some(entry_path) = workspace - .update(cx, |workspace, cx| { - workspace - .project() - .read(cx) - .absolute_path(&project_path, cx) - }) - .log_err() - .flatten() - { - add_paths_to_terminal(pane, &[entry_path], cx); - } - } - } - } else if let Some(&entry_id) = dropped_item.downcast_ref::() { - if let Some(entry_path) = workspace - .update(cx, |workspace, cx| { - let project = workspace.project().read(cx); - project - .path_for_entry(entry_id, cx) - .and_then(|project_path| project.absolute_path(&project_path, cx)) - }) - .log_err() - .flatten() - { - add_paths_to_terminal(pane, &[entry_path], cx); - } - } else if is_local { - if let Some(paths) = dropped_item.downcast_ref::() { - add_paths_to_terminal(pane, paths.paths(), cx); - } - } - - 
ControlFlow::Break(()) - }); - let buffer_search_bar = cx.new_view(search::BufferSearchBar::new); - pane.toolbar() - .update(cx, |toolbar, cx| toolbar.add_item(buffer_search_bar, cx)); - pane - }); - let subscriptions = vec![ - cx.observe(&pane, |_, _, cx| cx.notify()), - cx.subscribe(&pane, Self::handle_pane_event), - ]; - let project = workspace.project().read(cx); - let enabled = project.supports_terminal(cx); - let this = Self { - pane, + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let project = workspace.project(); + let pane = new_terminal_pane(workspace.weak_handle(), project.clone(), false, cx); + let center = PaneGroup::new(pane.clone()); + cx.focus_view(&pane); + let terminal_panel = Self { + center, + active_pane: pane, fs: workspace.app_state().fs.clone(), workspace: workspace.weak_handle(), pending_serialization: Task::ready(None), @@ -157,20 +93,18 @@ impl TerminalPanel { height: None, pending_terminals_to_add: 0, deferred_tasks: HashMap::default(), - _subscriptions: subscriptions, - enabled, assistant_enabled: false, assistant_tab_bar_button: None, }; - this.apply_tab_bar_buttons(cx); - this + terminal_panel.apply_tab_bar_buttons(&terminal_panel.active_pane, cx); + terminal_panel } - pub fn asssistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext) { + pub fn set_assistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext) { self.assistant_enabled = enabled; if enabled { let focus_handle = self - .pane + .active_pane .read(cx) .active_item() .map(|item| item.focus_handle(cx)) @@ -182,28 +116,33 @@ impl TerminalPanel { } else { self.assistant_tab_bar_button = None; } - self.apply_tab_bar_buttons(cx); + for pane in self.center.panes() { + self.apply_tab_bar_buttons(pane, cx); + } } - fn apply_tab_bar_buttons(&self, cx: &mut ViewContext) { + fn apply_tab_bar_buttons(&self, terminal_pane: &View, cx: &mut ViewContext) { let assistant_tab_bar_button = self.assistant_tab_bar_button.clone(); - self.pane.update(cx, |pane, 
cx| { + terminal_pane.update(cx, |pane, cx| { pane.set_render_tab_bar_buttons(cx, move |pane, cx| { + let split_context = pane + .active_item() + .and_then(|item| item.downcast::()) + .map(|terminal_view| terminal_view.read(cx).focus_handle.clone()); if !pane.has_focus(cx) && !pane.context_menu_focused(cx) { return (None, None); } let focus_handle = pane.focus_handle(cx); let right_children = h_flex() - .gap_2() - .children(assistant_tab_bar_button.clone()) + .gap(DynamicSpacing::Base02.rems(cx)) .child( PopoverMenu::new("terminal-tab-bar-popover-menu") .trigger( IconButton::new("plus", IconName::Plus) .icon_size(IconSize::Small) - .tooltip(|cx| Tooltip::text("New...", cx)), + .tooltip(|cx| Tooltip::text("New…", cx)), ) - .anchor(AnchorCorner::TopRight) + .anchor(Corner::TopRight) .with_handle(pane.new_item_context_menu_handle.clone()) .menu(move |cx| { let focus_handle = focus_handle.clone(); @@ -218,18 +157,45 @@ impl TerminalPanel { // context menu will be gone the moment we spawn the modal. 
.action( "Spawn task", - tasks_ui::Spawn::modal().boxed_clone(), + zed_actions::Spawn::modal().boxed_clone(), ) }); Some(menu) }), ) + .children(assistant_tab_bar_button.clone()) + .child( + PopoverMenu::new("terminal-pane-tab-bar-split") + .trigger( + IconButton::new("terminal-pane-split", IconName::Split) + .icon_size(IconSize::Small) + .tooltip(|cx| Tooltip::text("Split Pane", cx)), + ) + .anchor(Corner::TopRight) + .with_handle(pane.split_item_context_menu_handle.clone()) + .menu({ + let split_context = split_context.clone(); + move |cx| { + ContextMenu::build(cx, |menu, _| { + menu.when_some( + split_context.clone(), + |menu, split_context| menu.context(split_context), + ) + .action("Split Right", SplitRight.boxed_clone()) + .action("Split Left", SplitLeft.boxed_clone()) + .action("Split Up", SplitUp.boxed_clone()) + .action("Split Down", SplitDown.boxed_clone()) + }) + .into() + } + }), + ) .child({ let zoomed = pane.is_zoomed(); IconButton::new("toggle_zoom", IconName::Maximize) .icon_size(IconSize::Small) - .selected(zoomed) + .toggle_state(zoomed) .selected_icon(IconName::Minimize) .on_click(cx.listener(|pane, _, cx| { pane.toggle_zoom(&workspace::ToggleZoom, cx); @@ -264,80 +230,48 @@ impl TerminalPanel { .log_err() .flatten(); - let (panel, pane, items) = workspace.update(&mut cx, |workspace, cx| { - let panel = cx.new_view(|cx| TerminalPanel::new(workspace, cx)); - let items = if let Some((serialized_panel, database_id)) = - serialized_panel.as_ref().zip(workspace.database_id()) - { - panel.update(cx, |panel, cx| { - cx.notify(); - panel.height = serialized_panel.height.map(|h| h.round()); - panel.width = serialized_panel.width.map(|w| w.round()); - panel.pane.update(cx, |_, cx| { - serialized_panel - .items - .iter() - .map(|item_id| { - TerminalView::deserialize( - workspace.project().clone(), - workspace.weak_handle(), - database_id, - *item_id, - cx, - ) - }) - .collect::>() - }) - }) - } else { - Vec::new() - }; - let pane = 
panel.read(cx).pane.clone(); - (panel, pane, items) - })?; + let terminal_panel = workspace + .update(&mut cx, |workspace, cx| { + match serialized_panel.zip(workspace.database_id()) { + Some((serialized_panel, database_id)) => deserialize_terminal_panel( + workspace.weak_handle(), + workspace.project().clone(), + database_id, + serialized_panel, + cx, + ), + None => Task::ready(Ok(cx.new_view(|cx| TerminalPanel::new(workspace, cx)))), + } + })? + .await?; if let Some(workspace) = workspace.upgrade() { - panel - .update(&mut cx, |panel, cx| { - panel._subscriptions.push(cx.subscribe( - &workspace, - |terminal_panel, _, e, cx| { - if let workspace::Event::SpawnTask(spawn_in_terminal) = e { - terminal_panel.spawn_task(spawn_in_terminal, cx); - }; - }, - )) + terminal_panel + .update(&mut cx, |_, cx| { + cx.subscribe(&workspace, |terminal_panel, _, e, cx| { + if let workspace::Event::SpawnTask { + action: spawn_in_terminal, + } = e + { + terminal_panel.spawn_task(spawn_in_terminal, cx); + }; + }) + .detach(); }) .ok(); } - let pane = pane.downgrade(); - let items = futures::future::join_all(items).await; - let mut alive_item_ids = Vec::new(); - pane.update(&mut cx, |pane, cx| { - let active_item_id = serialized_panel - .as_ref() - .and_then(|panel| panel.active_item_id); - let mut active_ix = None; - for item in items { - if let Some(item) = item.log_err() { - let item_id = item.entity_id().as_u64(); - pane.add_item(Box::new(item), false, false, None, cx); - alive_item_ids.push(item_id as ItemId); - if Some(item_id) == active_item_id { - active_ix = Some(pane.items_len() - 1); - } - } - } - - if let Some(active_ix) = active_ix { - pane.activate_item(active_ix, false, false, cx) - } - })?; - // Since panels/docks are loaded outside from the workspace, we cleanup here, instead of through the workspace. 
if let Some(workspace) = workspace.upgrade() { let cleanup_task = workspace.update(&mut cx, |workspace, cx| { + let alive_item_ids = terminal_panel + .read(cx) + .center + .panes() + .into_iter() + .flat_map(|pane| pane.read(cx).items()) + .map(|item| item.item_id().as_u64() as ItemId) + .collect(); workspace .database_id() .map(|workspace_id| TerminalView::cleanup(workspace_id, alive_item_ids, cx)) @@ -347,33 +281,132 @@ impl TerminalPanel { } } - Ok(panel) + Ok(terminal_panel) } fn handle_pane_event( &mut self, - _pane: View, + pane: View, event: &pane::Event, cx: &mut ViewContext, ) { match event { pane::Event::ActivateItem { .. } => self.serialize(cx), pane::Event::RemovedItem { .. } => self.serialize(cx), - pane::Event::Remove { .. } => cx.emit(PanelEvent::Close), - pane::Event::ZoomIn => cx.emit(PanelEvent::ZoomIn), - pane::Event::ZoomOut => cx.emit(PanelEvent::ZoomOut), - + pane::Event::Remove { focus_on_pane } => { + let pane_count_before_removal = self.center.panes().len(); + let _removal_result = self.center.remove(&pane); + if pane_count_before_removal == 1 { + self.center.first_pane().update(cx, |pane, cx| { + pane.set_zoomed(false, cx); + }); + cx.emit(PanelEvent::Close); + } else { + if let Some(focus_on_pane) = + focus_on_pane.as_ref().or_else(|| self.center.panes().pop()) + { + focus_on_pane.focus_handle(cx).focus(cx); + } + } + } + pane::Event::ZoomIn => { + for pane in self.center.panes() { + pane.update(cx, |pane, cx| { + pane.set_zoomed(true, cx); + }) + } + cx.emit(PanelEvent::ZoomIn); + cx.notify(); + } + pane::Event::ZoomOut => { + for pane in self.center.panes() { + pane.update(cx, |pane, cx| { + pane.set_zoomed(false, cx); + }) + } + cx.emit(PanelEvent::ZoomOut); + cx.notify(); + } pane::Event::AddItem { item } => { if let Some(workspace) = self.workspace.upgrade() { - let pane = self.pane.clone(); - workspace.update(cx, |workspace, cx| item.added_to_pane(workspace, pane, cx)) + workspace.update(cx, |workspace, cx| { + 
item.added_to_pane(workspace, pane.clone(), cx) + }) } + self.serialize(cx); + } + pane::Event::Split(direction) => { + let Some(new_pane) = self.new_pane_with_cloned_active_terminal(cx) else { + return; + }; + let pane = pane.clone(); + let direction = *direction; + self.center.split(&pane, &new_pane, direction).log_err(); + cx.focus_view(&new_pane); + } + pane::Event::Focus => { + self.active_pane = pane.clone(); } _ => {} } } + fn new_pane_with_cloned_active_terminal( + &mut self, + cx: &mut ViewContext, + ) -> Option> { + let workspace = self.workspace.upgrade()?; + let workspace = workspace.read(cx); + let database_id = workspace.database_id(); + let weak_workspace = self.workspace.clone(); + let project = workspace.project().clone(); + let (working_directory, python_venv_directory) = self + .active_pane + .read(cx) + .active_item() + .and_then(|item| item.downcast::()) + .map(|terminal_view| { + let terminal = terminal_view.read(cx).terminal().read(cx); + ( + terminal + .working_directory() + .or_else(|| default_working_directory(workspace, cx)), + terminal.python_venv_directory.clone(), + ) + }) + .unwrap_or((None, None)); + let kind = TerminalKind::Shell(working_directory); + let window = cx.window_handle(); + let terminal = project + .update(cx, |project, cx| { + project.create_terminal_with_venv(kind, python_venv_directory, window, cx) + }) + .ok()?; + + let terminal_view = Box::new(cx.new_view(|cx| { + TerminalView::new( + terminal.clone(), + weak_workspace.clone(), + database_id, + project.downgrade(), + cx, + ) + })); + let pane = new_terminal_pane( + weak_workspace, + project, + self.active_pane.read(cx).is_zoomed(), + cx, + ); + self.apply_tab_bar_buttons(&pane, cx); + pane.update(cx, |pane, cx| { + pane.add_item(terminal_view, true, true, None, cx); + }); + + Some(pane) + } + pub fn open_terminal( workspace: &mut Workspace, action: &workspace::OpenTerminal, @@ -396,31 +429,112 @@ impl TerminalPanel { fn spawn_task(&mut self, spawn_in_terminal: 
&SpawnInTerminal, cx: &mut ViewContext) { let mut spawn_task = spawn_in_terminal.clone(); - // Set up shell args unconditionally, as tasks are always spawned inside of a shell. + let Ok(is_local) = self + .workspace + .update(cx, |workspace, cx| workspace.project().read(cx).is_local()) + else { + return; + }; + if let ControlFlow::Break(_) = + Self::fill_command(is_local, spawn_in_terminal, &mut spawn_task) + { + return; + } + let spawn_task = spawn_task; + + let allow_concurrent_runs = spawn_in_terminal.allow_concurrent_runs; + let use_new_terminal = spawn_in_terminal.use_new_terminal; + + if allow_concurrent_runs && use_new_terminal { + self.spawn_in_new_terminal(spawn_task, cx) + .detach_and_log_err(cx); + return; + } + + let terminals_for_task = self.terminals_for_task(&spawn_in_terminal.full_label, cx); + if terminals_for_task.is_empty() { + self.spawn_in_new_terminal(spawn_task, cx) + .detach_and_log_err(cx); + return; + } + let (existing_item_index, task_pane, existing_terminal) = terminals_for_task + .last() + .expect("covered no terminals case above") + .clone(); + let id = spawn_in_terminal.id.clone(); + cx.spawn(move |this, mut cx| async move { + if allow_concurrent_runs { + debug_assert!( + !use_new_terminal, + "Should have handled 'allow_concurrent_runs && use_new_terminal' case above" + ); + this.update(&mut cx, |terminal_panel, cx| { + terminal_panel.replace_terminal( + spawn_task, + task_pane, + existing_item_index, + existing_terminal, + cx, + ) + })? 
+ .await; + } else { + this.update(&mut cx, |this, cx| { + this.deferred_tasks.insert( + id, + cx.spawn(|terminal_panel, mut cx| async move { + wait_for_terminals_tasks(terminals_for_task, &mut cx).await; + let Ok(Some(new_terminal_task)) = + terminal_panel.update(&mut cx, |terminal_panel, cx| { + if use_new_terminal { + terminal_panel + .spawn_in_new_terminal(spawn_task, cx) + .detach_and_log_err(cx); + None + } else { + Some(terminal_panel.replace_terminal( + spawn_task, + task_pane, + existing_item_index, + existing_terminal, + cx, + )) + } + }) + else { + return; + }; + new_terminal_task.await; + }), + ); + }) + .ok(); + } + anyhow::Result::<_, anyhow::Error>::Ok(()) + }) + .detach() + } + + pub fn fill_command( + is_local: bool, + spawn_in_terminal: &SpawnInTerminal, + spawn_task: &mut SpawnInTerminal, + ) -> ControlFlow<()> { let Some((shell, mut user_args)) = (match spawn_in_terminal.shell.clone() { Shell::System => { - match self - .workspace - .update(cx, |workspace, cx| workspace.project().read(cx).is_local()) - { - Ok(local) => { - if local { - retrieve_system_shell().map(|shell| (shell, Vec::new())) - } else { - Some(("\"${SHELL:-sh}\"".to_string(), Vec::new())) - } - } - Err(_no_window_e) => return, + if is_local { + retrieve_system_shell().map(|shell| (shell, Vec::new())) + } else { + Some(("\"${SHELL:-sh}\"".to_string(), Vec::new())) } } Shell::Program(shell) => Some((shell, Vec::new())), Shell::WithArguments { program, args, .. 
} => Some((program, args)), }) else { - return; + return ControlFlow::Break(()); }; #[cfg(target_os = "windows")] let windows_shell_type = to_windows_shell_type(&shell); - #[cfg(not(target_os = "windows"))] { spawn_task.command_label = format!("{shell} -i -c '{}'", spawn_task.command_label); @@ -442,7 +556,6 @@ impl TerminalPanel { } } } - let task_command = std::mem::replace(&mut spawn_task.command, shell); let task_args = std::mem::take(&mut spawn_task.args); let combined_command = task_args @@ -455,7 +568,6 @@ impl TerminalPanel { command.push_str(&to_windows_shell_variable(windows_shell_type, arg)); command }); - #[cfg(not(target_os = "windows"))] user_args.extend(["-i".to_owned(), "-c".to_owned(), combined_command]); #[cfg(target_os = "windows")] @@ -473,57 +585,9 @@ impl TerminalPanel { } } spawn_task.args = user_args; - let spawn_task = spawn_task; + // Set up shell args unconditionally, as tasks are always spawned inside of a shell. - let allow_concurrent_runs = spawn_in_terminal.allow_concurrent_runs; - let use_new_terminal = spawn_in_terminal.use_new_terminal; - - if allow_concurrent_runs && use_new_terminal { - self.spawn_in_new_terminal(spawn_task, cx) - .detach_and_log_err(cx); - return; - } - - let terminals_for_task = self.terminals_for_task(&spawn_in_terminal.full_label, cx); - if terminals_for_task.is_empty() { - self.spawn_in_new_terminal(spawn_task, cx) - .detach_and_log_err(cx); - return; - } - let (existing_item_index, existing_terminal) = terminals_for_task - .last() - .expect("covered no terminals case above") - .clone(); - if allow_concurrent_runs { - debug_assert!( - !use_new_terminal, - "Should have handled 'allow_concurrent_runs && use_new_terminal' case above" - ); - self.replace_terminal(spawn_task, existing_item_index, existing_terminal, cx); - } else { - self.deferred_tasks.insert( - spawn_in_terminal.id.clone(), - cx.spawn(|terminal_panel, mut cx| async move { - wait_for_terminals_tasks(terminals_for_task, &mut cx).await; - 
terminal_panel - .update(&mut cx, |terminal_panel, cx| { - if use_new_terminal { - terminal_panel - .spawn_in_new_terminal(spawn_task, cx) - .detach_and_log_err(cx); - } else { - terminal_panel.replace_terminal( - spawn_task, - existing_item_index, - existing_terminal, - cx, - ); - } - }) - .ok(); - }), - ); - } + ControlFlow::Continue(()) } pub fn spawn_in_new_terminal( @@ -532,7 +596,17 @@ impl TerminalPanel { cx: &mut ViewContext, ) -> Task>> { let reveal = spawn_task.reveal; - self.add_terminal(TerminalKind::Task(spawn_task), reveal, cx) + let reveal_target = spawn_task.reveal_target; + let kind = TerminalKind::Task(spawn_task); + match reveal_target { + RevealTarget::Center => self + .workspace + .update(cx, |workspace, cx| { + Self::add_center_terminal(workspace, kind, cx) + }) + .unwrap_or_else(|e| Task::ready(Err(e))), + RevealTarget::Dock => self.add_terminal(kind, reveal, cx), + } } /// Create a new Terminal in the current working directory or the user's home directory @@ -558,63 +632,125 @@ impl TerminalPanel { &self, label: &str, cx: &mut AppContext, - ) -> Vec<(usize, View)> { - self.pane - .read(cx) - .items() - .enumerate() - .filter_map(|(index, item)| Some((index, item.act_as::(cx)?))) - .filter_map(|(index, terminal_view)| { - let task_state = terminal_view.read(cx).terminal().read(cx).task()?; - if &task_state.full_label == label { - Some((index, terminal_view)) - } else { - None - } - }) + ) -> Vec<(usize, View, View)> { + let Some(workspace) = self.workspace.upgrade() else { + return Vec::new(); + }; + + let pane_terminal_views = |pane: View| { + pane.read(cx) + .items() + .enumerate() + .filter_map(|(index, item)| Some((index, item.act_as::(cx)?))) + .filter_map(|(index, terminal_view)| { + let task_state = terminal_view.read(cx).terminal().read(cx).task()?; + if &task_state.full_label == label { + Some((index, terminal_view)) + } else { + None + } + }) + .map(move |(index, terminal_view)| (index, pane.clone(), terminal_view)) + }; + + 
self.center + .panes() + .into_iter() + .cloned() + .flat_map(pane_terminal_views) + .chain( + workspace + .read(cx) + .panes() + .into_iter() + .cloned() + .flat_map(pane_terminal_views), + ) + .sorted_by_key(|(_, _, terminal_view)| terminal_view.entity_id()) .collect() } - fn activate_terminal_view(&self, item_index: usize, focus: bool, cx: &mut WindowContext) { - self.pane.update(cx, |pane, cx| { + fn activate_terminal_view( + &self, + pane: &View, + item_index: usize, + focus: bool, + cx: &mut WindowContext, + ) { + pane.update(cx, |pane, cx| { pane.activate_item(item_index, true, focus, cx) }) } + pub fn add_center_terminal( + workspace: &mut Workspace, + kind: TerminalKind, + cx: &mut ViewContext, + ) -> Task>> { + if !is_enabled_in_workspace(workspace, cx) { + return Task::ready(Err(anyhow!( + "terminal not yet supported for remote projects" + ))); + } + let window = cx.window_handle(); + let project = workspace.project().downgrade(); + cx.spawn(move |workspace, mut cx| async move { + let terminal = project + .update(&mut cx, |project, cx| { + project.create_terminal(kind, window, cx) + })? + .await?; + + workspace.update(&mut cx, |workspace, cx| { + let view = cx.new_view(|cx| { + TerminalView::new( + terminal.clone(), + workspace.weak_handle(), + workspace.database_id(), + workspace.project().downgrade(), + cx, + ) + }); + workspace.add_item_to_active_pane(Box::new(view), None, true, cx); + })?; + Ok(terminal) + }) + } + fn add_terminal( &mut self, kind: TerminalKind, reveal_strategy: RevealStrategy, cx: &mut ViewContext, ) -> Task>> { - if !self.enabled { - return Task::ready(Err(anyhow::anyhow!( - "terminal not yet supported for remote projects" - ))); - } - let workspace = self.workspace.clone(); - self.pending_terminals_to_add += 1; - cx.spawn(|terminal_panel, mut cx| async move { - let pane = terminal_panel.update(&mut cx, |this, _| this.pane.clone())?; + if workspace.update(&mut cx, |workspace, cx| { + !is_enabled_in_workspace(workspace, cx) + })? 
{ + anyhow::bail!("terminal not yet supported for remote projects"); + } + let pane = terminal_panel.update(&mut cx, |terminal_panel, _| { + terminal_panel.pending_terminals_to_add += 1; + terminal_panel.active_pane.clone() + })?; + let project = workspace.update(&mut cx, |workspace, _| workspace.project().clone())?; + let window = cx.window_handle(); + let terminal = project + .update(&mut cx, |project, cx| { + project.create_terminal(kind, window, cx) + })? + .await?; let result = workspace.update(&mut cx, |workspace, cx| { - let window = cx.window_handle(); - let terminal = workspace - .project() - .update(cx, |project, cx| project.create_terminal(kind, window, cx))?; let terminal_view = Box::new(cx.new_view(|cx| { TerminalView::new( terminal.clone(), workspace.weak_handle(), workspace.database_id(), + workspace.project().downgrade(), cx, ) })); - pane.update(cx, |pane, cx| { - let focus = pane.has_focus(cx); - pane.add_item(terminal_view, true, focus, None, cx); - }); match reveal_strategy { RevealStrategy::Always => { @@ -625,6 +761,13 @@ impl TerminalPanel { } RevealStrategy::Never => {} } + + pane.update(cx, |pane, cx| { + let focus = + pane.has_focus(cx) || matches!(reveal_strategy, RevealStrategy::Always); + pane.add_item(terminal_view, true, focus, None, cx); + }); + Ok(terminal) })?; terminal_panel.update(&mut cx, |this, cx| { @@ -636,113 +779,338 @@ impl TerminalPanel { } fn serialize(&mut self, cx: &mut ViewContext) { - let mut items_to_serialize = HashSet::default(); - let items = self - .pane - .read(cx) - .items() - .filter_map(|item| { - let terminal_view = item.act_as::(cx)?; - if terminal_view.read(cx).terminal().read(cx).task().is_some() { - None - } else { - let id = item.item_id().as_u64(); - items_to_serialize.insert(id); - Some(id) - } - }) - .collect::>(); - let active_item_id = self - .pane - .read(cx) - .active_item() - .map(|item| item.item_id().as_u64()) - .filter(|active_id| items_to_serialize.contains(active_id)); let height = 
self.height; let width = self.width; - self.pending_serialization = cx.background_executor().spawn( - async move { - KEY_VALUE_STORE - .write_kvp( - TERMINAL_PANEL_KEY.into(), - serde_json::to_string(&SerializedTerminalPanel { - items, - active_item_id, - height, - width, - })?, - ) - .await?; - anyhow::Ok(()) - } - .log_err(), - ); + self.pending_serialization = cx.spawn(|terminal_panel, mut cx| async move { + cx.background_executor() + .timer(Duration::from_millis(50)) + .await; + let terminal_panel = terminal_panel.upgrade()?; + let items = terminal_panel + .update(&mut cx, |terminal_panel, cx| { + SerializedItems::WithSplits(serialize_pane_group( + &terminal_panel.center, + &terminal_panel.active_pane, + cx, + )) + }) + .ok()?; + cx.background_executor() + .spawn( + async move { + KEY_VALUE_STORE + .write_kvp( + TERMINAL_PANEL_KEY.into(), + serde_json::to_string(&SerializedTerminalPanel { + items, + active_item_id: None, + height, + width, + })?, + ) + .await?; + anyhow::Ok(()) + } + .log_err(), + ) + .await; + Some(()) + }); } fn replace_terminal( &self, spawn_task: SpawnInTerminal, + task_pane: View, terminal_item_index: usize, terminal_to_replace: View, - cx: &mut ViewContext<'_, Self>, - ) -> Option<()> { - let project = self - .workspace - .update(cx, |workspace, _| workspace.project().clone()) - .ok()?; - + cx: &mut ViewContext, + ) -> Task> { let reveal = spawn_task.reveal; + let reveal_target = spawn_task.reveal_target; let window = cx.window_handle(); - let new_terminal = project.update(cx, |project, cx| { - project - .create_terminal(TerminalKind::Task(spawn_task), window, cx) - .log_err() - })?; - terminal_to_replace.update(cx, |terminal_to_replace, cx| { - terminal_to_replace.set_terminal(new_terminal, cx); - }); - - match reveal { - RevealStrategy::Always => { - self.activate_terminal_view(terminal_item_index, true, cx); - let task_workspace = self.workspace.clone(); - cx.spawn(|_, mut cx| async move { - task_workspace - .update(&mut cx, 
|workspace, cx| workspace.focus_panel::(cx)) + let task_workspace = self.workspace.clone(); + cx.spawn(move |terminal_panel, mut cx| async move { + let project = terminal_panel + .update(&mut cx, |this, cx| { + this.workspace + .update(cx, |workspace, _| workspace.project().clone()) .ok() }) - .detach(); - } - RevealStrategy::NoFocus => { - self.activate_terminal_view(terminal_item_index, false, cx); - let task_workspace = self.workspace.clone(); - cx.spawn(|_, mut cx| async move { - task_workspace - .update(&mut cx, |workspace, cx| workspace.open_panel::(cx)) - .ok() + .ok() + .flatten()?; + let new_terminal = project + .update(&mut cx, |project, cx| { + project.create_terminal(TerminalKind::Task(spawn_task), window, cx) }) - .detach(); - } - RevealStrategy::Never => {} - } + .ok()? + .await + .log_err()?; + terminal_to_replace + .update(&mut cx, |terminal_to_replace, cx| { + terminal_to_replace.set_terminal(new_terminal, cx); + }) + .ok()?; - Some(()) + match reveal { + RevealStrategy::Always => match reveal_target { + RevealTarget::Center => { + task_workspace + .update(&mut cx, |workspace, cx| { + workspace + .active_item(cx) + .context("retrieving active terminal item in the workspace") + .log_err()? 
+ .focus_handle(cx) + .focus(cx); + Some(()) + }) + .ok()??; + } + RevealTarget::Dock => { + terminal_panel + .update(&mut cx, |terminal_panel, cx| { + terminal_panel.activate_terminal_view( + &task_pane, + terminal_item_index, + true, + cx, + ) + }) + .ok()?; + + cx.spawn(|mut cx| async move { + task_workspace + .update(&mut cx, |workspace, cx| workspace.focus_panel::(cx)) + .ok() + }) + .detach(); + } + }, + RevealStrategy::NoFocus => match reveal_target { + RevealTarget::Center => { + task_workspace + .update(&mut cx, |workspace, cx| { + workspace.active_pane().focus_handle(cx).focus(cx); + }) + .ok()?; + } + RevealTarget::Dock => { + terminal_panel + .update(&mut cx, |terminal_panel, cx| { + terminal_panel.activate_terminal_view( + &task_pane, + terminal_item_index, + false, + cx, + ) + }) + .ok()?; + + cx.spawn(|mut cx| async move { + task_workspace + .update(&mut cx, |workspace, cx| workspace.open_panel::(cx)) + .ok() + }) + .detach(); + } + }, + RevealStrategy::Never => {} + } + + Some(()) + }) } fn has_no_terminals(&self, cx: &WindowContext) -> bool { - self.pane.read(cx).items_len() == 0 && self.pending_terminals_to_add == 0 + self.active_pane.read(cx).items_len() == 0 && self.pending_terminals_to_add == 0 } pub fn assistant_enabled(&self) -> bool { self.assistant_enabled } + + fn is_enabled(&self, cx: &WindowContext) -> bool { + self.workspace.upgrade().map_or(false, |workspace| { + is_enabled_in_workspace(workspace.read(cx), cx) + }) + } +} + +fn is_enabled_in_workspace(workspace: &Workspace, cx: &WindowContext) -> bool { + workspace.project().read(cx).supports_terminal(cx) +} + +pub fn new_terminal_pane( + workspace: WeakView, + project: Model, + zoomed: bool, + cx: &mut ViewContext, +) -> View { + let is_local = project.read(cx).is_local(); + let terminal_panel = cx.view().clone(); + let pane = cx.new_view(|cx| { + let mut pane = Pane::new( + workspace.clone(), + project.clone(), + Default::default(), + None, + NewTerminal.boxed_clone(), + cx, + ); + 
pane.set_zoomed(zoomed, cx); + pane.set_can_navigate(false, cx); + pane.display_nav_history_buttons(None); + pane.set_should_display_tab_bar(|_| true); + pane.set_zoom_out_on_close(false); + + let split_closure_terminal_panel = terminal_panel.downgrade(); + pane.set_can_split(Some(Arc::new(move |pane, dragged_item, cx| { + if let Some(tab) = dragged_item.downcast_ref::() { + let is_current_pane = &tab.pane == cx.view(); + let Some(can_drag_away) = split_closure_terminal_panel + .update(cx, |terminal_panel, _| { + let current_panes = terminal_panel.center.panes(); + !current_panes.contains(&&tab.pane) + || current_panes.len() > 1 + || (!is_current_pane || pane.items_len() > 1) + }) + .ok() + else { + return false; + }; + if can_drag_away { + let item = if is_current_pane { + pane.item_for_index(tab.ix) + } else { + tab.pane.read(cx).item_for_index(tab.ix) + }; + if let Some(item) = item { + return item.downcast::().is_some(); + } + } + } + false + }))); + + let buffer_search_bar = cx.new_view(search::BufferSearchBar::new); + let breadcrumbs = cx.new_view(|_| Breadcrumbs::new()); + pane.toolbar().update(cx, |toolbar, cx| { + toolbar.add_item(buffer_search_bar, cx); + toolbar.add_item(breadcrumbs, cx); + }); + + let drop_closure_project = project.downgrade(); + let drop_closure_terminal_panel = terminal_panel.downgrade(); + pane.set_custom_drop_handle(cx, move |pane, dropped_item, cx| { + let Some(project) = drop_closure_project.upgrade() else { + return ControlFlow::Break(()); + }; + if let Some(tab) = dropped_item.downcast_ref::() { + let this_pane = cx.view().clone(); + let item = if tab.pane == this_pane { + pane.item_for_index(tab.ix) + } else { + tab.pane.read(cx).item_for_index(tab.ix) + }; + if let Some(item) = item { + if item.downcast::().is_some() { + let source = tab.pane.clone(); + let item_id_to_move = item.item_id(); + + let Ok(new_split_pane) = pane + .drag_split_direction() + .map(|split_direction| { + drop_closure_terminal_panel.update(cx, 
|terminal_panel, cx| { + let is_zoomed = if terminal_panel.active_pane == this_pane { + pane.is_zoomed() + } else { + terminal_panel.active_pane.read(cx).is_zoomed() + }; + let new_pane = new_terminal_pane( + workspace.clone(), + project.clone(), + is_zoomed, + cx, + ); + terminal_panel.apply_tab_bar_buttons(&new_pane, cx); + terminal_panel.center.split( + &this_pane, + &new_pane, + split_direction, + )?; + anyhow::Ok(new_pane) + }) + }) + .transpose() + else { + return ControlFlow::Break(()); + }; + + match new_split_pane.transpose() { + // Source pane may be the one currently updated, so defer the move. + Ok(Some(new_pane)) => cx + .spawn(|_, mut cx| async move { + cx.update(|cx| { + move_item( + &source, + &new_pane, + item_id_to_move, + new_pane.read(cx).active_item_index(), + cx, + ); + }) + .ok(); + }) + .detach(), + // If we drop into existing pane or current pane, + // regular pane drop handler will take care of it, + // using the right tab index for the operation. + Ok(None) => return ControlFlow::Continue(()), + err @ Err(_) => { + err.log_err(); + return ControlFlow::Break(()); + } + }; + } else if let Some(project_path) = item.project_path(cx) { + if let Some(entry_path) = project.read(cx).absolute_path(&project_path, cx) + { + add_paths_to_terminal(pane, &[entry_path], cx); + } + } + } + } else if let Some(&entry_id) = dropped_item.downcast_ref::() { + if let Some(entry_path) = project + .read(cx) + .path_for_entry(entry_id, cx) + .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)) + { + add_paths_to_terminal(pane, &[entry_path], cx); + } + } else if is_local { + if let Some(paths) = dropped_item.downcast_ref::() { + add_paths_to_terminal(pane, paths.paths(), cx); + } + } + + ControlFlow::Break(()) + }); + + pane + }); + + cx.subscribe(&pane, TerminalPanel::handle_pane_event) + .detach(); + cx.observe(&pane, |_, _, cx| cx.notify()).detach(); + + pane } async fn wait_for_terminals_tasks( - terminals_for_task: Vec<(usize, View)>, 
+ terminals_for_task: Vec<(usize, View, View)>, cx: &mut AsyncWindowContext, ) { - let pending_tasks = terminals_for_task.iter().filter_map(|(_, terminal)| { + let pending_tasks = terminals_for_task.iter().filter_map(|(_, _, terminal)| { terminal .update(cx, |terminal_view, cx| { terminal_view @@ -754,7 +1122,7 @@ async fn wait_for_terminals_tasks( let _: Vec<()> = join_all(pending_tasks).await; } -fn add_paths_to_terminal(pane: &mut Pane, paths: &[PathBuf], cx: &mut ViewContext<'_, Pane>) { +fn add_paths_to_terminal(pane: &mut Pane, paths: &[PathBuf], cx: &mut ViewContext) { if let Some(terminal_view) = pane .active_item() .and_then(|item| item.downcast::()) @@ -777,7 +1145,7 @@ impl Render for TerminalPanel { let mut registrar = DivRegistrar::new( |panel, cx| { panel - .pane + .active_pane .read(cx) .toolbar() .read(cx) @@ -786,13 +1154,127 @@ impl Render for TerminalPanel { cx, ); BufferSearchBar::register(&mut registrar); - registrar.into_div().size_full().child(self.pane.clone()) + let registrar = registrar.into_div(); + self.workspace + .update(cx, |workspace, cx| { + registrar.size_full().child(self.center.render( + workspace.project(), + &HashMap::default(), + None, + &self.active_pane, + workspace.zoomed_item(), + workspace.app_state(), + cx, + )) + }) + .ok() + .map(|div| { + div.on_action({ + cx.listener(|terminal_panel, action: &ActivatePaneInDirection, cx| { + if let Some(pane) = terminal_panel.center.find_pane_in_direction( + &terminal_panel.active_pane, + action.0, + cx, + ) { + cx.focus_view(&pane); + } else { + terminal_panel + .workspace + .update(cx, |workspace, cx| { + workspace.activate_pane_in_direction(action.0, cx) + }) + .ok(); + } + }) + }) + .on_action( + cx.listener(|terminal_panel, _action: &ActivateNextPane, cx| { + let panes = terminal_panel.center.panes(); + if let Some(ix) = panes + .iter() + .position(|pane| **pane == terminal_panel.active_pane) + { + let next_ix = (ix + 1) % panes.len(); + cx.focus_view(&panes[next_ix]); + } + }), 
+ ) + .on_action( + cx.listener(|terminal_panel, _action: &ActivatePreviousPane, cx| { + let panes = terminal_panel.center.panes(); + if let Some(ix) = panes + .iter() + .position(|pane| **pane == terminal_panel.active_pane) + { + let prev_ix = cmp::min(ix.wrapping_sub(1), panes.len() - 1); + cx.focus_view(&panes[prev_ix]); + } + }), + ) + .on_action(cx.listener(|terminal_panel, action: &ActivatePane, cx| { + let panes = terminal_panel.center.panes(); + if let Some(&pane) = panes.get(action.0) { + cx.focus_view(pane); + } else { + if let Some(new_pane) = + terminal_panel.new_pane_with_cloned_active_terminal(cx) + { + terminal_panel + .center + .split( + &terminal_panel.active_pane, + &new_pane, + SplitDirection::Right, + ) + .log_err(); + cx.focus_view(&new_pane); + } + } + })) + .on_action( + cx.listener(|terminal_panel, action: &SwapPaneInDirection, cx| { + if let Some(to) = terminal_panel + .center + .find_pane_in_direction(&terminal_panel.active_pane, action.0, cx) + .cloned() + { + terminal_panel.center.swap(&terminal_panel.active_pane, &to); + cx.notify(); + } + }), + ) + .on_action(cx.listener(|terminal_panel, action: &MoveItemToPane, cx| { + let Some(&target_pane) = terminal_panel.center.panes().get(action.destination) + else { + return; + }; + move_active_item( + &terminal_panel.active_pane, + target_pane, + action.focus, + true, + cx, + ); + })) + .on_action(cx.listener( + |terminal_panel, action: &MoveItemToPaneInDirection, cx| { + let source_pane = &terminal_panel.active_pane; + if let Some(destination_pane) = terminal_panel + .center + .find_pane_in_direction(source_pane, action.direction, cx) + { + move_active_item(source_pane, destination_pane, action.focus, true, cx); + }; + }, + )) + }) + .unwrap_or_else(|| div()) } } impl FocusableView for TerminalPanel { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { - self.pane.focus_handle(cx) + self.active_pane.focus_handle(cx) } } @@ -844,11 +1326,16 @@ impl Panel for TerminalPanel { } fn 
is_zoomed(&self, cx: &WindowContext) -> bool { - self.pane.read(cx).is_zoomed() + self.active_pane.read(cx).is_zoomed() } fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext) { - self.pane.update(cx, |pane, cx| pane.set_zoomed(zoomed, cx)); + for pane in self.center.panes() { + pane.update(cx, |pane, cx| { + pane.set_zoomed(zoomed, cx); + }) + } + cx.notify(); } fn set_active(&mut self, active: bool, cx: &mut ViewContext) { @@ -862,13 +1349,18 @@ impl Panel for TerminalPanel { return; }; - this.add_terminal(kind, RevealStrategy::Never, cx) + this.add_terminal(kind, RevealStrategy::Always, cx) .detach_and_log_err(cx) }) } fn icon_label(&self, cx: &WindowContext) -> Option { - let count = self.pane.read(cx).items_len(); + let count = self + .center + .panes() + .into_iter() + .map(|pane| pane.read(cx).items_len()) + .sum::(); if count == 0 { None } else { @@ -881,7 +1373,9 @@ impl Panel for TerminalPanel { } fn icon(&self, cx: &WindowContext) -> Option { - if (self.enabled || !self.has_no_terminals(cx)) && TerminalSettings::get_global(cx).button { + if (self.is_enabled(cx) || !self.has_no_terminals(cx)) + && TerminalSettings::get_global(cx).button + { Some(IconName::Terminal) } else { None @@ -897,7 +1391,11 @@ impl Panel for TerminalPanel { } fn pane(&self) -> Option> { - Some(self.pane.clone()) + Some(self.active_pane.clone()) + } + + fn activation_priority(&self) -> u32 { + 1 } } @@ -919,14 +1417,6 @@ impl Render for InlineAssistTabBarButton { } } -#[derive(Serialize, Deserialize)] -struct SerializedTerminalPanel { - items: Vec, - active_item_id: Option, - width: Option, - height: Option, -} - fn retrieve_system_shell() -> Option { #[cfg(not(target_os = "windows"))] { diff --git a/crates/terminal_view/src/terminal_tab_tooltip.rs b/crates/terminal_view/src/terminal_tab_tooltip.rs new file mode 100644 index 0000000000..69150f0b9a --- /dev/null +++ b/crates/terminal_view/src/terminal_tab_tooltip.rs @@ -0,0 +1,36 @@ +use gpui::{IntoElement, Render, 
ViewContext}; +use ui::{prelude::*, tooltip_container, Divider}; + +pub struct TerminalTooltip { + title: SharedString, + pid: u32, +} + +impl TerminalTooltip { + pub fn new(title: impl Into, pid: u32) -> Self { + Self { + title: title.into(), + pid, + } + } +} + +impl Render for TerminalTooltip { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + tooltip_container(cx, move |this, _cx| { + this.occlude() + .on_mouse_move(|_, cx| cx.stop_propagation()) + .child( + v_flex() + .gap_1() + .child(Label::new(self.title.clone())) + .child(Divider::horizontal()) + .child( + Label::new(format!("Process ID (PID): {}", self.pid)) + .color(Color::Muted) + .size(LabelSize::Small), + ), + ) + }) + } +} diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 6a23e45f54..63958bd839 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1,6 +1,7 @@ mod persistence; pub mod terminal_element; pub mod terminal_panel; +pub mod terminal_tab_tooltip; use collections::HashSet; use editor::{actions::SelectAll, scroll::Autoscroll, Editor}; @@ -9,7 +10,7 @@ use gpui::{ anchored, deferred, div, impl_actions, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, KeyContext, KeyDownEvent, Keystroke, Model, MouseButton, MouseDownEvent, Pixels, Render, ScrollWheelEvent, Styled, Subscription, Task, View, - VisualContext, WeakView, + VisualContext, WeakModel, WeakView, }; use language::Bias; use persistence::TERMINAL_DB; @@ -26,15 +27,20 @@ use terminal::{ }; use terminal_element::{is_blank, TerminalElement}; use terminal_panel::TerminalPanel; +use terminal_tab_tooltip::TerminalTooltip; use ui::{h_flex, prelude::*, ContextMenu, Icon, IconName, Label, Tooltip}; -use util::{paths::PathWithPosition, ResultExt}; +use util::{ + paths::{PathWithPosition, SanitizedPath}, + ResultExt, +}; use workspace::{ - item::{BreadcrumbText, Item, ItemEvent, SerializableItem, 
TabContentParams}, - notifications::NotifyResultExt, + item::{ + BreadcrumbText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent, + }, register_serializable_item, searchable::{SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle}, - CloseActiveItem, NewCenterTerminal, NewTerminal, OpenVisible, Pane, ToolbarItemLocation, - Workspace, WorkspaceId, + CloseActiveItem, NewCenterTerminal, NewTerminal, OpenVisible, ToolbarItemLocation, Workspace, + WorkspaceId, }; use anyhow::Context; @@ -78,7 +84,7 @@ pub fn init(cx: &mut AppContext) { register_serializable_item::(cx); - cx.observe_new_views(|workspace: &mut Workspace, _| { + cx.observe_new_views(|workspace: &mut Workspace, _cx| { workspace.register_action(TerminalView::deploy); }) .detach(); @@ -98,6 +104,7 @@ pub struct BlockContext<'a, 'b> { pub struct TerminalView { terminal: Model, workspace: WeakView, + project: WeakModel, focus_handle: FocusHandle, //Currently using iTerm bell, show bell emoji in tab until input is received has_bell: bool, @@ -109,7 +116,7 @@ pub struct TerminalView { blink_epoch: usize, can_navigate_to_selected_word: bool, workspace_id: Option, - show_title: bool, + show_breadcrumbs: bool, block_below_cursor: Option>, scroll_top: Pixels, _subscriptions: Vec, @@ -134,32 +141,15 @@ impl TerminalView { cx: &mut ViewContext, ) { let working_directory = default_working_directory(workspace, cx); - - let window = cx.window_handle(); - let terminal = workspace - .project() - .update(cx, |project, cx| { - project.create_terminal(TerminalKind::Shell(working_directory), window, cx) - }) - .notify_err(workspace, cx); - - if let Some(terminal) = terminal { - let view = cx.new_view(|cx| { - TerminalView::new( - terminal, - workspace.weak_handle(), - workspace.database_id(), - cx, - ) - }); - workspace.add_item_to_active_pane(Box::new(view), None, true, cx); - } + TerminalPanel::add_center_terminal(workspace, TerminalKind::Shell(working_directory), cx) + 
.detach_and_log_err(cx); } pub fn new( terminal: Model, workspace: WeakView, workspace_id: Option, + project: WeakModel, cx: &mut ViewContext, ) -> Self { let workspace_handle = workspace.clone(); @@ -179,6 +169,7 @@ impl TerminalView { Self { terminal, workspace: workspace_handle, + project, has_bell: false, focus_handle, context_menu: None, @@ -189,7 +180,7 @@ impl TerminalView { blink_epoch: 0, can_navigate_to_selected_word: false, workspace_id, - show_title: TerminalSettings::get_global(cx).toolbar.title, + show_breadcrumbs: TerminalSettings::get_global(cx).toolbar.breadcrumbs, block_below_cursor: None, scroll_top: Pixels::ZERO, _subscriptions: vec![ @@ -259,7 +250,7 @@ impl TerminalView { fn settings_changed(&mut self, cx: &mut ViewContext) { let settings = TerminalSettings::get_global(cx); - self.show_title = settings.toolbar.title; + self.show_breadcrumbs = settings.toolbar.breadcrumbs; let new_cursor_shape = settings.cursor_shape.unwrap_or_default(); let old_cursor_shape = self.cursor_shape; @@ -436,7 +427,7 @@ impl TerminalView { cx.notify(); } - pub fn should_show_cursor(&self, focused: bool, cx: &mut gpui::ViewContext) -> bool { + pub fn should_show_cursor(&self, focused: bool, cx: &mut ViewContext) -> bool { //Don't blink the cursor when not focused, blinking is disabled, or paused if !focused || self.blinking_paused @@ -622,7 +613,7 @@ impl TerminalView { dispatch_context } - fn set_terminal(&mut self, terminal: Model, cx: &mut ViewContext<'_, TerminalView>) { + fn set_terminal(&mut self, terminal: Model, cx: &mut ViewContext) { self._terminal_subscriptions = subscribe_for_terminal_events(&terminal, self.workspace.clone(), cx); self.terminal = terminal; @@ -632,7 +623,7 @@ impl TerminalView { fn subscribe_for_terminal_events( terminal: &Model, workspace: WeakView, - cx: &mut ViewContext<'_, TerminalView>, + cx: &mut ViewContext, ) -> Vec { let terminal_subscription = cx.observe(terminal, |_, _, cx| cx.notify()); let terminal_events_subscription = @@ 
-796,10 +787,19 @@ fn possible_open_paths_metadata( cx: &mut ViewContext, ) -> Task> { cx.background_executor().spawn(async move { - let mut paths_with_metadata = Vec::with_capacity(potential_paths.len()); + let mut canonical_paths = HashSet::default(); + for path in potential_paths { + if let Ok(canonical) = fs.canonicalize(&path).await { + let sanitized = SanitizedPath::from(canonical); + canonical_paths.insert(sanitized.as_path().to_path_buf()); + } else { + canonical_paths.insert(path); + } + } - #[cfg(not(target_os = "windows"))] - let mut fetch_metadata_tasks = potential_paths + let mut paths_with_metadata = Vec::with_capacity(canonical_paths.len()); + + let mut fetch_metadata_tasks = canonical_paths .into_iter() .map(|potential_path| async { let metadata = fs.metadata(&potential_path).await.ok().flatten(); @@ -814,20 +814,6 @@ fn possible_open_paths_metadata( }) .collect::>(); - #[cfg(target_os = "windows")] - let mut fetch_metadata_tasks = potential_paths - .iter() - .map(|potential_path| async { - let metadata = fs.metadata(potential_path).await.ok().flatten(); - let path = PathBuf::from( - potential_path - .to_string_lossy() - .trim_start_matches("\\\\?\\"), - ); - (PathWithPosition { path, row, column }, metadata) - }) - .collect::>(); - while let Some((path, metadata)) = fetch_metadata_tasks.next().await { if let Some(metadata) = metadata { paths_with_metadata.push((path, metadata)); @@ -850,19 +836,19 @@ fn possible_open_targets( let column = path_position.column; let maybe_path = path_position.path; - let abs_path = if maybe_path.is_absolute() { - Some(maybe_path) + let potential_paths = if maybe_path.is_absolute() { + HashSet::from_iter([maybe_path]) } else if maybe_path.starts_with("~") { maybe_path .strip_prefix("~") .ok() .and_then(|maybe_path| Some(dirs::home_dir()?.join(maybe_path))) + .map_or_else(HashSet::default, |p| HashSet::from_iter([p])) } else { let mut potential_cwd_and_workspace_paths = HashSet::default(); if let Some(cwd) = cwd { let 
abs_path = Path::join(cwd, &maybe_path); - let canonicalized_path = abs_path.canonicalize().unwrap_or(abs_path); - potential_cwd_and_workspace_paths.insert(canonicalized_path); + potential_cwd_and_workspace_paths.insert(abs_path); } if let Some(workspace) = workspace.upgrade() { workspace.update(cx, |workspace, cx| { @@ -887,25 +873,10 @@ fn possible_open_targets( } }); } - - return possible_open_paths_metadata( - fs, - row, - column, - potential_cwd_and_workspace_paths, - cx, - ); + potential_cwd_and_workspace_paths }; - let canonicalized_paths = match abs_path { - Some(abs_path) => match abs_path.canonicalize() { - Ok(path) => HashSet::from_iter([path]), - Err(_) => HashSet::default(), - }, - None => HashSet::default(), - }; - - possible_open_paths_metadata(fs, row, column, canonicalized_paths, cx) + possible_open_paths_metadata(fs, row, column, potential_paths, cx) } fn regex_to_literal(regex: &str) -> String { @@ -1018,7 +989,7 @@ impl Render for TerminalView { deferred( anchored() .position(*position) - .anchor(gpui::AnchorCorner::TopLeft) + .anchor(gpui::Corner::TopLeft) .child(menu.clone()), ) .with_priority(1) @@ -1029,8 +1000,17 @@ impl Render for TerminalView { impl Item for TerminalView { type Event = ItemEvent; - fn tab_tooltip_text(&self, cx: &AppContext) -> Option { - Some(self.terminal().read(cx).title(false).into()) + fn tab_tooltip_content(&self, cx: &AppContext) -> Option { + let terminal = self.terminal().read(cx); + let title = terminal.title(false); + let pid = terminal.pty_info.pid_getter().fallback_pid(); + + Some(TabTooltipContent::Custom(Box::new( + move |cx: &mut WindowContext| { + cx.new_view(|_| TerminalTooltip::new(title.clone(), pid)) + .into() + }, + ))) } fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { @@ -1044,8 +1024,8 @@ impl Item for TerminalView { .shape(ui::IconButtonShape::Square) .tooltip(|cx| Tooltip::text("Rerun task", cx)) .on_click(move |_, cx| { - 
cx.dispatch_action(Box::new(tasks_ui::Rerun { - task_id: Some(task_id.clone()), + cx.dispatch_action(Box::new(zed_actions::Rerun { + task_id: Some(task_id.0.clone()), allow_concurrent_runs: Some(true), use_new_terminal: Some(false), reevaluate_context: false, @@ -1109,21 +1089,37 @@ impl Item for TerminalView { fn clone_on_split( &self, - _workspace_id: Option, - _cx: &mut ViewContext, + workspace_id: Option, + cx: &mut ViewContext, ) -> Option> { - //From what I can tell, there's no way to tell the current working - //Directory of the terminal from outside the shell. There might be - //solutions to this, but they are non-trivial and require more IPC + let window = cx.window_handle(); + let terminal = self + .project + .update(cx, |project, cx| { + let terminal = self.terminal().read(cx); + let working_directory = terminal + .working_directory() + .or_else(|| Some(project.active_project_directory(cx)?.to_path_buf())); + let python_venv_directory = terminal.python_venv_directory.clone(); + project.create_terminal_with_venv( + TerminalKind::Shell(working_directory), + python_venv_directory, + window, + cx, + ) + }) + .ok()? 
+ .log_err()?; - // Some(TerminalContainer::new( - // Err(anyhow::anyhow!("failed to instantiate terminal")), - // workspace_id, - // cx, - // )) - - // TODO - None + Some(cx.new_view(|cx| { + TerminalView::new( + terminal, + self.workspace.clone(), + workspace_id, + self.project.clone(), + cx, + ) + })) } fn is_dirty(&self, cx: &gpui::AppContext) -> bool { @@ -1145,8 +1141,8 @@ impl Item for TerminalView { Some(Box::new(handle.clone())) } - fn breadcrumb_location(&self) -> ToolbarItemLocation { - if self.show_title { + fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation { + if self.show_breadcrumbs && !self.terminal().read(cx).breadcrumb_text.trim().is_empty() { ToolbarItemLocation::PrimaryLeft } else { ToolbarItemLocation::Hidden @@ -1222,10 +1218,10 @@ impl SerializableItem for TerminalView { workspace: WeakView, workspace_id: workspace::WorkspaceId, item_id: workspace::ItemId, - cx: &mut ViewContext, + cx: &mut WindowContext, ) -> Task>> { let window = cx.window_handle(); - cx.spawn(|pane, mut cx| async move { + cx.spawn(|mut cx| async move { let cwd = cx .update(|cx| { let from_db = TERMINAL_DB @@ -1246,11 +1242,21 @@ impl SerializableItem for TerminalView { .ok() .flatten(); - let terminal = project.update(&mut cx, |project, cx| { - project.create_terminal(TerminalKind::Shell(cwd), window, cx) - })??; - pane.update(&mut cx, |_, cx| { - cx.new_view(|cx| TerminalView::new(terminal, workspace, Some(workspace_id), cx)) + let terminal = project + .update(&mut cx, |project, cx| { + project.create_terminal(TerminalKind::Shell(cwd), window, cx) + })? + .await?; + cx.update(|cx| { + cx.new_view(|cx| { + TerminalView::new( + terminal, + workspace, + Some(workspace_id), + project.downgrade(), + cx, + ) + }) }) }) } @@ -1377,11 +1383,14 @@ impl SearchableItem for TerminalView { ///Gets the working directory for the given workspace, respecting the user's settings. /// None implies "~" on whichever machine we end up on. 
-pub fn default_working_directory(workspace: &Workspace, cx: &AppContext) -> Option { +pub(crate) fn default_working_directory(workspace: &Workspace, cx: &AppContext) -> Option { match &TerminalSettings::get_global(cx).working_directory { - WorkingDirectory::CurrentProjectDirectory => { - workspace.project().read(cx).active_project_directory(cx) - } + WorkingDirectory::CurrentProjectDirectory => workspace + .project() + .read(cx) + .active_project_directory(cx) + .as_deref() + .map(Path::to_path_buf), WorkingDirectory::FirstProjectDirectory => first_project_directory(workspace, cx), WorkingDirectory::AlwaysHome => None, WorkingDirectory::Always { directory } => { diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index 94c373d630..fffece26b2 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -84,6 +84,31 @@ impl Selection { } self.goal = new_goal; } + + pub fn set_tail(&mut self, tail: T, new_goal: SelectionGoal) { + if tail.cmp(&self.head()) <= Ordering::Equal { + if self.reversed { + self.end = self.start; + self.reversed = false; + } + self.start = tail; + } else { + if !self.reversed { + self.start = self.end; + self.reversed = true; + } + self.end = tail; + } + self.goal = new_goal; + } + + pub fn swap_head_tail(&mut self) { + if self.reversed { + self.reversed = false; + } else { + std::mem::swap(&mut self.start, &mut self.end); + } + } } impl Selection { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b037327f7e..d869791864 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -3047,6 +3047,12 @@ pub trait FromAnchor { fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self; } +impl FromAnchor for Anchor { + fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self { + *anchor + } +} + impl FromAnchor for Point { fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { snapshot.summary_for_anchor(anchor) diff --git 
a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index c3e3a197cb..3f2930c9ff 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -28,7 +28,7 @@ log.workspace = true palette = { workspace = true, default-features = false, features = ["std"] } parking_lot.workspace = true refineable.workspace = true -schemars = { workspace = true, features = ["indexmap"] } +schemars = { workspace = true, features = ["indexmap2"] } serde.workspace = true serde_derive.workspace = true serde_json.workspace = true diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 05dd6cd1e7..b9780a304a 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -58,7 +58,7 @@ impl ThemeColors { tab_active_background: neutral().light().step_1(), search_match_background: neutral().light().step_5(), panel_background: neutral().light().step_2(), - panel_focused_border: blue().light().step_5(), + panel_focused_border: blue().light().step_10(), panel_indent_guide: neutral().light_alpha().step_5(), panel_indent_guide_hover: neutral().light_alpha().step_6(), panel_indent_guide_active: neutral().light_alpha().step_6(), @@ -164,7 +164,7 @@ impl ThemeColors { tab_active_background: neutral().dark().step_1(), search_match_background: neutral().dark().step_5(), panel_background: neutral().dark().step_2(), - panel_focused_border: blue().dark().step_5(), + panel_focused_border: blue().dark().step_12(), panel_indent_guide: neutral().dark_alpha().step_4(), panel_indent_guide_hover: neutral().dark_alpha().step_6(), panel_indent_guide_active: neutral().dark_alpha().step_6(), diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 440ac332c4..3338665366 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -270,7 +270,7 @@ pub struct ThemeColorsContent { /// Fill Color. Used for the muted or deemphasized fill color of an icon. 
/// - /// This might be used to show an icon in an inactive pane, or to demphasize a series of icons to give them less visual weight. + /// This might be used to show an icon in an inactive pane, or to deemphasize a series of icons to give them less visual weight. #[serde(rename = "icon.muted")] pub icon_muted: Option, diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 99c1656215..62ab46610a 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -97,7 +97,7 @@ pub struct ThemeColors { pub icon: Hsla, /// Fill Color. Used for the muted or deemphasized fill color of an icon. /// - /// This might be used to show an icon in an inactive pane, or to demphasize a series of icons to give them less visual weight. + /// This might be used to show an icon in an inactive pane, or to deemphasize a series of icons to give them less visual weight. pub icon_muted: Hsla, /// Fill Color. Used for the disabled fill color of an icon. /// diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index e0d4fb4244..2a4802b4eb 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -71,7 +71,7 @@ impl From for Appearance { } } -/// Which themes should be loaded. This is used primarlily for testing. +/// Which themes should be loaded. This is used primarily for testing. pub enum LoadThemes { /// Only load the base theme. /// @@ -315,6 +315,22 @@ impl Theme { pub fn window_background_appearance(&self) -> WindowBackgroundAppearance { self.styles.window_background_appearance } + + /// Darkens the color by reducing its lightness. + /// The resulting lightness is clamped to ensure it doesn't go below 0.0. + /// + /// The first value darkens light appearance mode, the second darkens appearance dark mode. + /// + /// Note: This is a tentative solution and may be replaced with a more robust color system. 
+ pub fn darken(&self, color: Hsla, light_amount: f32, dark_amount: f32) -> Hsla { + let amount = match self.appearance { + Appearance::Light => light_amount, + Appearance::Dark => dark_amount, + }; + let mut hsla = color; + hsla.l = (hsla.l - amount).max(0.0); + hsla + } } /// Compounds a color with an alpha value. diff --git a/crates/theme_extension/Cargo.toml b/crates/theme_extension/Cargo.toml new file mode 100644 index 0000000000..1e12f037b9 --- /dev/null +++ b/crates/theme_extension/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "theme_extension" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/theme_extension.rs" + +[dependencies] +anyhow.workspace = true +extension.workspace = true +fs.workspace = true +gpui.workspace = true +theme.workspace = true diff --git a/crates/theme_extension/LICENSE-GPL b/crates/theme_extension/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/theme_extension/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/theme_extension/src/theme_extension.rs b/crates/theme_extension/src/theme_extension.rs new file mode 100644 index 0000000000..0266db324b --- /dev/null +++ b/crates/theme_extension/src/theme_extension.rs @@ -0,0 +1,47 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::Result; +use extension::{ExtensionHostProxy, ExtensionThemeProxy}; +use fs::Fs; +use gpui::{AppContext, BackgroundExecutor, SharedString, Task}; +use theme::{ThemeRegistry, ThemeSettings}; + +pub fn init( + extension_host_proxy: Arc, + theme_registry: Arc, + executor: BackgroundExecutor, +) { + extension_host_proxy.register_theme_proxy(ThemeRegistryProxy { + theme_registry, + executor, + }); +} + +struct ThemeRegistryProxy { + theme_registry: Arc, + executor: BackgroundExecutor, +} + +impl ExtensionThemeProxy for ThemeRegistryProxy { + fn list_theme_names(&self, theme_path: PathBuf, fs: 
Arc) -> Task>> { + self.executor.spawn(async move { + let themes = theme::read_user_theme(&theme_path, fs).await?; + Ok(themes.themes.into_iter().map(|theme| theme.name).collect()) + }) + } + + fn remove_user_themes(&self, themes: Vec) { + self.theme_registry.remove_user_themes(&themes); + } + + fn load_user_theme(&self, theme_path: PathBuf, fs: Arc) -> Task> { + let theme_registry = self.theme_registry.clone(); + self.executor + .spawn(async move { theme_registry.load_user_theme(&theme_path, fs).await }) + } + + fn reload_current_theme(&self, cx: &mut AppContext) { + ThemeSettings::reload_current_theme(cx) + } +} diff --git a/crates/theme_importer/Cargo.toml b/crates/theme_importer/Cargo.toml index 9140ffceb7..2b33791559 100644 --- a/crates/theme_importer/Cargo.toml +++ b/crates/theme_importer/Cargo.toml @@ -16,7 +16,7 @@ indexmap.workspace = true log.workspace = true palette.workspace = true rust-embed.workspace = true -schemars = { workspace = true, features = ["indexmap"] } +schemars = { workspace = true, features = ["indexmap2"] } serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true diff --git a/crates/theme_importer/src/main.rs b/crates/theme_importer/src/main.rs index d92966ae24..db287956c5 100644 --- a/crates/theme_importer/src/main.rs +++ b/crates/theme_importer/src/main.rs @@ -19,6 +19,8 @@ use theme::{Appearance, AppearanceContent, ThemeFamilyContent}; use crate::vscode::VsCodeTheme; use crate::vscode::VsCodeThemeConverter; +const ZED_THEME_SCHEMA_URL: &str = "https://zed.dev/public/schema/themes/v0.2.0.json"; + #[derive(Debug, Deserialize)] struct FamilyMetadata { pub name: String, @@ -69,34 +71,53 @@ pub struct ThemeMetadata { #[derive(Parser)] #[command(author, version, about, long_about = None)] struct Args { - /// The path to the theme to import. - theme_path: PathBuf, - - /// Whether to warn when values are missing from the theme. - #[arg(long)] - warn_on_missing: bool, - - /// The path to write the output to. 
- #[arg(long, short)] - output: Option, - #[command(subcommand)] - command: Option, + command: Command, } -#[derive(Subcommand)] +#[derive(PartialEq, Subcommand)] enum Command { /// Prints the JSON schema for a theme. PrintSchema, + /// Converts a VSCode theme to Zed format [default] + Convert { + /// The path to the theme to import. + theme_path: PathBuf, + + /// Whether to warn when values are missing from the theme. + #[arg(long)] + warn_on_missing: bool, + + /// The path to write the output to. + #[arg(long, short)] + output: Option, + }, } fn main() -> Result<()> { let args = Args::parse(); + match args.command { + Command::PrintSchema => { + let theme_family_schema = schema_for!(ThemeFamilyContent); + println!( + "{}", + serde_json::to_string_pretty(&theme_family_schema).unwrap() + ); + Ok(()) + } + Command::Convert { + theme_path, + warn_on_missing, + output, + } => convert(theme_path, output, warn_on_missing), + } +} + +fn convert(theme_file_path: PathBuf, output: Option, warn_on_missing: bool) -> Result<()> { let log_config = { let mut config = simplelog::ConfigBuilder::new(); - - if !args.warn_on_missing { + if !warn_on_missing { config.add_filter_ignore_str("theme_printer"); } @@ -111,28 +132,11 @@ fn main() -> Result<()> { ) .expect("could not initialize logger"); - if let Some(command) = args.command { - match command { - Command::PrintSchema => { - let theme_family_schema = schema_for!(ThemeFamilyContent); - - println!( - "{}", - serde_json::to_string_pretty(&theme_family_schema).unwrap() - ); - - return Ok(()); - } - } - } - - let theme_file_path = args.theme_path; - let theme_file = match File::open(&theme_file_path) { Ok(file) => file, Err(err) => { log::info!("Failed to open file at path: {:?}", theme_file_path); - return Err(err)?; + return Err(err.into()); } }; @@ -148,10 +152,14 @@ fn main() -> Result<()> { let converter = VsCodeThemeConverter::new(vscode_theme, theme_metadata, IndexMap::new()); let theme = converter.convert()?; - + let mut 
theme = serde_json::to_value(theme).unwrap(); + theme.as_object_mut().unwrap().insert( + "$schema".to_string(), + serde_json::Value::String(ZED_THEME_SCHEMA_URL.to_string()), + ); let theme_json = serde_json::to_string_pretty(&theme).unwrap(); - if let Some(output) = args.output { + if let Some(output) = output { let mut file = File::create(output)?; file.write_all(theme_json.as_bytes())?; } else { diff --git a/crates/theme_importer/src/vscode/converter.rs b/crates/theme_importer/src/vscode/converter.rs index cca4b56321..a1a6c7a27c 100644 --- a/crates/theme_importer/src/vscode/converter.rs +++ b/crates/theme_importer/src/vscode/converter.rs @@ -159,7 +159,9 @@ impl VsCodeThemeConverter { .active_background .clone() .or(vscode_tab_inactive_background.clone()), + search_match_background: vscode_colors.editor.find_match_background.clone(), panel_background: vscode_colors.panel.background.clone(), + pane_group_border: vscode_colors.editor_group.border.clone(), scrollbar_thumb_background: vscode_scrollbar_slider_background.clone(), scrollbar_thumb_hover_background: vscode_colors .scrollbar_slider @@ -168,7 +170,6 @@ impl VsCodeThemeConverter { scrollbar_thumb_border: vscode_scrollbar_slider_background.clone(), scrollbar_track_background: vscode_editor_background.clone(), scrollbar_track_border: vscode_colors.editor_overview_ruler.border.clone(), - pane_group_border: vscode_colors.editor_group.border.clone(), editor_foreground: vscode_editor_foreground .clone() .or(vscode_token_colors_foreground.clone()), @@ -179,6 +180,10 @@ impl VsCodeThemeConverter { editor_active_line_number: vscode_colors.editor.foreground.clone(), editor_wrap_guide: vscode_panel_border.clone(), editor_active_wrap_guide: vscode_panel_border.clone(), + editor_document_highlight_bracket_background: vscode_colors + .editor_bracket_match + .background + .clone(), terminal_background: vscode_colors.terminal.background.clone(), terminal_ansi_black: vscode_colors.terminal.ansi_black.clone(), 
terminal_ansi_bright_black: vscode_colors.terminal.ansi_bright_black.clone(), diff --git a/crates/theme_selector/Cargo.toml b/crates/theme_selector/Cargo.toml index ec7e9aa877..4125ed0f34 100644 --- a/crates/theme_selector/Cargo.toml +++ b/crates/theme_selector/Cargo.toml @@ -13,7 +13,6 @@ path = "src/theme_selector.rs" doctest = false [dependencies] -client.workspace = true fs.workspace = true fuzzy.workspace = true gpui.workspace = true @@ -21,9 +20,11 @@ log.workspace = true picker.workspace = true serde.workspace = true settings.workspace = true +telemetry.workspace = true theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] diff --git a/crates/theme_selector/src/theme_selector.rs b/crates/theme_selector/src/theme_selector.rs index d0763c2793..3502afaa74 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -1,26 +1,18 @@ -use client::telemetry::Telemetry; use fs::Fs; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, impl_actions, AppContext, DismissEvent, EventEmitter, FocusableView, Render, - UpdateGlobal, View, ViewContext, VisualContext, WeakView, + actions, AppContext, DismissEvent, EventEmitter, FocusableView, Render, UpdateGlobal, View, + ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; -use serde::Deserialize; use settings::{update_settings_file, SettingsStore}; use std::sync::Arc; use theme::{Appearance, Theme, ThemeMeta, ThemeRegistry, ThemeSettings}; use ui::{prelude::*, v_flex, ListItem, ListItemSpacing}; use util::ResultExt; use workspace::{ui::HighlightedLabel, ModalView, Workspace}; +use zed_actions::theme_selector::Toggle; -#[derive(PartialEq, Clone, Default, Debug, Deserialize)] -pub struct Toggle { - /// A list of theme names to filter the theme selector down to. 
- pub themes_filter: Option>, -} - -impl_actions!(theme_selector, [Toggle]); actions!(theme_selector, [Reload]); pub fn init(cx: &mut AppContext) { @@ -34,12 +26,10 @@ pub fn init(cx: &mut AppContext) { pub fn toggle(workspace: &mut Workspace, toggle: &Toggle, cx: &mut ViewContext) { let fs = workspace.app_state().fs.clone(); - let telemetry = workspace.client().telemetry().clone(); workspace.toggle_modal(cx, |cx| { let delegate = ThemeSelectorDelegate::new( cx.view().downgrade(), fs, - telemetry, toggle.themes_filter.as_ref(), cx, ); @@ -81,7 +71,6 @@ pub struct ThemeSelectorDelegate { original_theme: Arc, selection_completed: bool, selected_index: usize, - telemetry: Arc, view: WeakView, } @@ -89,7 +78,6 @@ impl ThemeSelectorDelegate { fn new( weak_view: WeakView, fs: Arc, - telemetry: Arc, themes_filter: Option<&Vec>, cx: &mut ViewContext, ) -> Self { @@ -130,7 +118,6 @@ impl ThemeSelectorDelegate { original_theme: original_theme.clone(), selected_index: 0, selection_completed: false, - telemetry, view: weak_view, }; @@ -187,8 +174,7 @@ impl PickerDelegate for ThemeSelectorDelegate { let theme_name = cx.theme().name.clone(); - self.telemetry - .report_setting_event("theme", theme_name.to_string()); + telemetry::event!("Settings Changed", setting = "theme", value = theme_name); let appearance = Appearance::from(cx.appearance()); @@ -237,11 +223,7 @@ impl PickerDelegate for ThemeSelectorDelegate { .themes .iter() .enumerate() - .map(|(id, meta)| StringMatchCandidate { - id, - char_bag: meta.name.as_ref().into(), - string: meta.name.to_string(), - }) + .map(|(id, meta)| StringMatchCandidate::new(id, &meta.name)) .collect::>(); cx.spawn(|this, mut cx| async move { @@ -292,7 +274,7 @@ impl PickerDelegate for ThemeSelectorDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child(HighlightedLabel::new( theme_match.string.clone(), theme_match.positions.clone(), diff --git 
a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index df991613ae..df6e0afd17 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -19,7 +19,6 @@ test-support = [ "call/test-support", "client/test-support", "collections/test-support", - "editor/test-support", "gpui/test-support", "http_client/test-support", "project/test-support", @@ -31,24 +30,20 @@ test-support = [ auto_update.workspace = true call.workspace = true client.workspace = true -command_palette.workspace = true -extensions_ui.workspace = true -feedback.workspace = true feature_flags.workspace = true gpui.workspace = true notifications.workspace = true project.workspace = true -recent_projects.workspace = true remote.workspace = true rpc.workspace = true serde.workspace = true +settings.workspace = true smallvec.workspace = true story = { workspace = true, optional = true } theme.workspace = true -theme_selector.workspace = true ui.workspace = true util.workspace = true -vcs_menu.workspace = true +telemetry.workspace = true workspace.workspace = true zed_actions.workspace = true @@ -59,7 +54,6 @@ windows.workspace = true call = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } -editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } notifications = { workspace = true, features = ["test-support"] } diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index 13ee10c141..1c3e67c095 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -1,139 +1,270 @@ -use ui::{prelude::*, ContextMenu, NumericStepper, PopoverMenu, PopoverMenuHandle, Tooltip}; +use gpui::{impl_actions, OwnedMenu, OwnedMenuItem, View}; +use serde::Deserialize; +use 
smallvec::SmallVec; +use ui::{prelude::*, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip}; + +impl_actions!( + app_menu, + [OpenApplicationMenu, NavigateApplicationMenuInDirection,] +); + +#[derive(Clone, Deserialize, PartialEq, Default)] +pub struct OpenApplicationMenu(String); + +#[derive(Clone, Deserialize, PartialEq, Default)] +pub struct NavigateApplicationMenuInDirection(String); + +#[derive(Clone)] +struct MenuEntry { + menu: OwnedMenu, + handle: PopoverMenuHandle, +} pub struct ApplicationMenu { - context_menu_handle: PopoverMenuHandle, + entries: SmallVec<[MenuEntry; 8]>, + pending_menu_open: Option, } impl ApplicationMenu { - pub fn new(_: &mut ViewContext) -> Self { + pub fn new(cx: &mut ViewContext) -> Self { + let menus = cx.get_menus().unwrap_or_default(); Self { - context_menu_handle: PopoverMenuHandle::default(), + entries: menus + .into_iter() + .map(|menu| MenuEntry { + menu, + handle: PopoverMenuHandle::default(), + }) + .collect(), + pending_menu_open: None, } } + + fn sanitize_menu_items(items: Vec) -> Vec { + let mut cleaned = Vec::new(); + let mut last_was_separator = false; + + for item in items { + match item { + OwnedMenuItem::Separator => { + if !last_was_separator { + cleaned.push(item); + last_was_separator = true; + } + } + OwnedMenuItem::Submenu(submenu) => { + // Skip empty submenus + if !submenu.items.is_empty() { + cleaned.push(OwnedMenuItem::Submenu(submenu)); + last_was_separator = false; + } + } + item => { + cleaned.push(item); + last_was_separator = false; + } + } + } + + // Remove trailing separator + if let Some(OwnedMenuItem::Separator) = cleaned.last() { + cleaned.pop(); + } + + cleaned + } + + fn build_menu_from_items(entry: MenuEntry, cx: &mut WindowContext) -> View { + ContextMenu::build(cx, |menu, cx| { + // Grab current focus handle so menu can shown items in context with the focused element + let menu = menu.when_some(cx.focused(), |menu, focused| menu.context(focused)); + let sanitized_items = 
Self::sanitize_menu_items(entry.menu.items); + + sanitized_items + .into_iter() + .fold(menu, |menu, item| match item { + OwnedMenuItem::Separator => menu.separator(), + OwnedMenuItem::Action { name, action, .. } => menu.action(name, action), + OwnedMenuItem::Submenu(submenu) => { + submenu + .items + .into_iter() + .fold(menu, |menu, item| match item { + OwnedMenuItem::Separator => menu.separator(), + OwnedMenuItem::Action { name, action, .. } => { + menu.action(name, action) + } + OwnedMenuItem::Submenu(_) => menu, + }) + } + }) + }) + } + + fn render_application_menu(&self, entry: &MenuEntry) -> impl IntoElement { + let handle = entry.handle.clone(); + + let menu_name = entry.menu.name.clone(); + let entry = entry.clone(); + + // Application menu must have same ids as first menu item in standard menu + div() + .id(SharedString::from(format!("{}-menu-item", menu_name))) + .occlude() + .child( + PopoverMenu::new(SharedString::from(format!("{}-menu-popover", menu_name))) + .menu(move |cx| Self::build_menu_from_items(entry.clone(), cx).into()) + .trigger( + IconButton::new( + SharedString::from(format!("{}-menu-trigger", menu_name)), + ui::IconName::Menu, + ) + .style(ButtonStyle::Subtle) + .icon_size(IconSize::Small) + .when(!handle.is_deployed(), |this| { + this.tooltip(|cx| Tooltip::text("Open Application Menu", cx)) + }), + ) + .with_handle(handle), + ) + } + + fn render_standard_menu(&self, entry: &MenuEntry) -> impl IntoElement { + let current_handle = entry.handle.clone(); + + let menu_name = entry.menu.name.clone(); + let entry = entry.clone(); + + let all_handles: Vec<_> = self + .entries + .iter() + .map(|entry| entry.handle.clone()) + .collect(); + + div() + .id(SharedString::from(format!("{}-menu-item", menu_name))) + .occlude() + .child( + PopoverMenu::new(SharedString::from(format!("{}-menu-popover", menu_name))) + .menu(move |cx| Self::build_menu_from_items(entry.clone(), cx).into()) + .trigger( + Button::new( + 
SharedString::from(format!("{}-menu-trigger", menu_name)), + menu_name.clone(), + ) + .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small), + ) + .with_handle(current_handle.clone()), + ) + .on_hover(move |hover_enter, cx| { + if *hover_enter && !current_handle.is_deployed() { + all_handles.iter().for_each(|h| h.hide(cx)); + + // We need to defer this so that this menu handle can take focus from the previous menu + let handle = current_handle.clone(); + cx.defer(move |cx| handle.show(cx)); + } + }) + } + + #[cfg(not(target_os = "macos"))] + pub fn open_menu(&mut self, action: &OpenApplicationMenu, _cx: &mut ViewContext) { + self.pending_menu_open = Some(action.0.clone()); + } + + #[cfg(not(target_os = "macos"))] + pub fn navigate_menus_in_direction( + &mut self, + action: &NavigateApplicationMenuInDirection, + cx: &mut ViewContext, + ) { + let current_index = self + .entries + .iter() + .position(|entry| entry.handle.is_deployed()); + let Some(current_index) = current_index else { + return; + }; + + let next_index = match action.0.as_str() { + "Left" => { + if current_index == 0 { + self.entries.len() - 1 + } else { + current_index - 1 + } + } + "Right" => { + if current_index == self.entries.len() - 1 { + 0 + } else { + current_index + 1 + } + } + _ => return, + }; + + self.entries[current_index].handle.hide(cx); + + // We need to defer this so that this menu handle can take focus from the previous menu + let next_handle = self.entries[next_index].handle.clone(); + cx.defer(move |_, cx| next_handle.show(cx)); + } + + pub fn all_menus_shown(&self) -> bool { + self.entries.iter().any(|entry| entry.handle.is_deployed()) + || self.pending_menu_open.is_some() + } } impl Render for ApplicationMenu { - fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - PopoverMenu::new("application-menu") - .menu(move |cx| { - ContextMenu::build(cx, move |menu, cx| { - menu.header("Workspace") - .action("Open Command Palette", Box::new(command_palette::Toggle)) - 
.when_some(cx.focused(), |menu, focused| menu.context(focused)) - .custom_row(move |cx| { - h_flex() - .gap_2() - .w_full() - .justify_between() - .cursor(gpui::CursorStyle::Arrow) - .child(Label::new("Buffer Font Size")) - .child( - NumericStepper::new( - "buffer-font-size", - theme::get_buffer_font_size(cx).to_string(), - |_, cx| { - cx.dispatch_action(Box::new( - zed_actions::DecreaseBufferFontSize, - )) - }, - |_, cx| { - cx.dispatch_action(Box::new( - zed_actions::IncreaseBufferFontSize, - )) - }, - ) - .reserve_space_for_reset(true) - .when( - theme::has_adjusted_buffer_font_size(cx), - |stepper| { - stepper.on_reset(|_, cx| { - cx.dispatch_action(Box::new( - zed_actions::ResetBufferFontSize, - )) - }) - }, - ), - ) - .into_any_element() - }) - .custom_row(move |cx| { - h_flex() - .gap_2() - .w_full() - .justify_between() - .cursor(gpui::CursorStyle::Arrow) - .child(Label::new("UI Font Size")) - .child( - NumericStepper::new( - "ui-font-size", - theme::get_ui_font_size(cx).to_string(), - |_, cx| { - cx.dispatch_action(Box::new( - zed_actions::DecreaseUiFontSize, - )) - }, - |_, cx| { - cx.dispatch_action(Box::new( - zed_actions::IncreaseUiFontSize, - )) - }, - ) - .reserve_space_for_reset(true) - .when( - theme::has_adjusted_ui_font_size(cx), - |stepper| { - stepper.on_reset(|_, cx| { - cx.dispatch_action(Box::new( - zed_actions::ResetUiFontSize, - )) - }) - }, - ), - ) - .into_any_element() - }) - .header("Project") - .action( - "Add Folder to Project...", - Box::new(workspace::AddFolderToProject), - ) - .action("Open a new Project...", Box::new(workspace::Open)) - .action( - "Open Recent Projects...", - Box::new(recent_projects::OpenRecent { - create_new_window: false, - }), - ) - .header("Help") - .action("About Zed", Box::new(zed_actions::About)) - .action("Welcome", Box::new(workspace::Welcome)) - .link( - "Documentation", - Box::new(zed_actions::OpenBrowser { - url: "https://zed.dev/docs".into(), - }), - ) - .action("Give Feedback", 
Box::new(feedback::GiveFeedback)) - .action("Check for Updates", Box::new(auto_update::Check)) - .action("View Telemetry", Box::new(zed_actions::OpenTelemetryLog)) - .action( - "View Dependency Licenses", - Box::new(zed_actions::OpenLicenses), - ) - .separator() - .action("Quit", Box::new(zed_actions::Quit)) - }) - .into() + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let all_menus_shown = self.all_menus_shown(); + + if let Some(pending_menu_open) = self.pending_menu_open.take() { + if let Some(entry) = self + .entries + .iter() + .find(|entry| entry.menu.name == pending_menu_open && !entry.handle.is_deployed()) + { + let handle_to_show = entry.handle.clone(); + let handles_to_hide: Vec<_> = self + .entries + .iter() + .filter(|e| e.menu.name != pending_menu_open && e.handle.is_deployed()) + .map(|e| e.handle.clone()) + .collect(); + + if handles_to_hide.is_empty() { + // We need to wait for the next frame to show all menus first, + // before we can handle show/hide operations + cx.window_context().on_next_frame(move |cx| { + handles_to_hide.iter().for_each(|handle| handle.hide(cx)); + cx.defer(move |cx| handle_to_show.show(cx)); + }); + } else { + // Since menus are already shown, we can directly handle show/hide operations + handles_to_hide.iter().for_each(|handle| handle.hide(cx)); + cx.defer(move |_, cx| handle_to_show.show(cx)); + } + } + } + + div() + .key_context("ApplicationMenu") + .flex() + .flex_row() + .gap_x_1() + .when(!all_menus_shown && !self.entries.is_empty(), |this| { + this.child(self.render_application_menu(&self.entries[0])) + }) + .when(all_menus_shown, |this| { + this.children( + self.entries + .iter() + .map(|entry| self.render_standard_menu(entry)), + ) }) - .trigger( - IconButton::new("application-menu", ui::IconName::Menu) - .style(ButtonStyle::Subtle) - .icon_size(IconSize::Small) - .when(!self.context_menu_handle.is_deployed(), |this| { - this.tooltip(|cx| Tooltip::text("Open Application Menu", cx)) - }), - ) - 
.with_handle(self.context_menu_handle.clone()) - .into_any_element() } } diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 805c0e7202..8639e48c5e 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use call::{report_call_event_for_room, ActiveCall, ParticipantLocation, Room}; +use call::{ActiveCall, ParticipantLocation, Room}; use client::{proto::PeerId, User}; use gpui::{actions, AppContext, Task, WindowContext}; use gpui::{canvas, point, AnyElement, Hsla, IntoElement, MouseButton, Path, Styled}; @@ -19,22 +19,19 @@ actions!( fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut WindowContext) { let call = ActiveCall::global(cx).read(cx); if let Some(room) = call.room().cloned() { - let client = call.client(); let toggle_screen_sharing = room.update(cx, |room, cx| { if room.is_screen_sharing() { - report_call_event_for_room( - "disable screen share", - room.id(), - room.channel_id(), - &client, + telemetry::event!( + "Screen Share Disabled", + room_id = room.id(), + channel_id = room.channel_id(), ); Task::ready(room.unshare_screen(cx)) } else { - report_call_event_for_room( - "enable screen share", - room.id(), - room.channel_id(), - &client, + telemetry::event!( + "Screen Share Enabled", + room_id = room.id(), + channel_id = room.channel_id(), ); room.share_screen(cx) } @@ -46,14 +43,17 @@ fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut WindowContext) { fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) { let call = ActiveCall::global(cx).read(cx); if let Some(room) = call.room().cloned() { - let client = call.client(); room.update(cx, |room, cx| { let operation = if room.is_muted() { - "enable microphone" + "Microphone Enabled" } else { - "disable microphone" + "Microphone Disabled" }; - report_call_event_for_room(operation, room.id(), room.channel_id(), &client); + telemetry::event!( + operation, + room_id = room.id(), + channel_id = 
room.channel_id(), + ); room.toggle_mute(cx) }); @@ -73,17 +73,17 @@ fn render_color_ribbon(color: Hsla) -> impl Element { let height = bounds.size.height; let horizontal_offset = height; let vertical_offset = px(height.0 / 2.0); - let mut path = Path::new(bounds.lower_left()); + let mut path = Path::new(bounds.bottom_left()); path.curve_to( bounds.origin + point(horizontal_offset, vertical_offset), bounds.origin + point(px(0.0), vertical_offset), ); - path.line_to(bounds.upper_right() + point(-horizontal_offset, vertical_offset)); + path.line_to(bounds.top_right() + point(-horizontal_offset, vertical_offset)); path.curve_to( - bounds.lower_right(), - bounds.upper_right() + point(px(0.0), vertical_offset), + bounds.bottom_right(), + bounds.top_right() + point(px(0.0), vertical_offset), ); - path.line_to(bounds.lower_left()); + path.line_to(bounds.bottom_left()); cx.paint_path(path, color); }, ) @@ -284,9 +284,7 @@ impl TitleBar { let is_connecting_to_project = self .workspace - .update(cx, |workspace, cx| { - recent_projects::is_connecting_over_ssh(workspace, cx) - }) + .update(cx, |workspace, cx| workspace.has_active_modal(cx)) .unwrap_or(false); let room = room.read(cx); @@ -294,11 +292,12 @@ impl TitleBar { let is_local = project.is_local() || project.is_via_ssh(); let is_shared = is_local && project.is_shared(); let is_muted = room.is_muted(); + let muted_by_user = room.muted_by_user(); let is_deafened = room.is_deafened().unwrap_or(false); let is_screen_sharing = room.is_screen_sharing(); - let can_use_microphone = room.can_use_microphone(); + let can_use_microphone = room.can_use_microphone(cx); let can_share_projects = room.can_share_projects(); - let platform_supported = match self.platform_style { + let screen_sharing_supported = match self.platform_style { PlatformStyle::Mac => true, PlatformStyle::Linux | PlatformStyle::Windows => false, }; @@ -323,7 +322,7 @@ impl TitleBar { }) .style(ButtonStyle::Subtle) 
.selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .selected(is_shared) + .toggle_state(is_shared) .label_size(LabelSize::Small) .on_click(cx.listener(move |this, _, cx| { if is_shared { @@ -364,69 +363,78 @@ impl TitleBar { }, ) .tooltip(move |cx| { - Tooltip::text( - if !platform_supported { - "Cannot share microphone" - } else if is_muted { - "Unmute microphone" + if is_muted { + if is_deafened { + Tooltip::with_meta( + "Unmute Microphone", + None, + "Audio will be unmuted", + cx, + ) } else { - "Mute microphone" - }, - cx, - ) + Tooltip::text("Unmute Microphone", cx) + } + } else { + Tooltip::text("Mute Microphone", cx) + } }) .style(ButtonStyle::Subtle) .icon_size(IconSize::Small) - .selected(platform_supported && is_muted) - .disabled(!platform_supported) - .selected_style(ButtonStyle::Tinted(TintColor::Negative)) + .toggle_state(is_muted) + .selected_style(ButtonStyle::Tinted(TintColor::Error)) .on_click(move |_, cx| { toggle_mute(&Default::default(), cx); }) .into_any_element(), ); + + children.push( + IconButton::new( + "mute-sound", + if is_deafened { + ui::IconName::AudioOff + } else { + ui::IconName::AudioOn + }, + ) + .style(ButtonStyle::Subtle) + .selected_style(ButtonStyle::Tinted(TintColor::Error)) + .icon_size(IconSize::Small) + .toggle_state(is_deafened) + .tooltip(move |cx| { + if is_deafened { + let label = "Unmute Audio"; + + if !muted_by_user { + Tooltip::with_meta(label, None, "Microphone will be unmuted", cx) + } else { + Tooltip::text(label, cx) + } + } else { + let label = "Mute Audio"; + + if !muted_by_user { + Tooltip::with_meta(label, None, "Microphone will be muted", cx) + } else { + Tooltip::text(label, cx) + } + } + }) + .on_click(move |_, cx| toggle_deafen(&Default::default(), cx)) + .into_any_element(), + ); } - children.push( - IconButton::new( - "mute-sound", - if is_deafened { - ui::IconName::AudioOff - } else { - ui::IconName::AudioOn - }, - ) - .style(ButtonStyle::Subtle) - 
.selected_style(ButtonStyle::Tinted(TintColor::Negative)) - .icon_size(IconSize::Small) - .selected(is_deafened) - .disabled(!platform_supported) - .tooltip(move |cx| { - if !platform_supported { - Tooltip::text("Cannot share microphone", cx) - } else if can_use_microphone { - Tooltip::with_meta("Deafen Audio", None, "Mic will be muted", cx) - } else { - Tooltip::text("Deafen Audio", cx) - } - }) - .on_click(move |_, cx| toggle_deafen(&Default::default(), cx)) - .into_any_element(), - ); - - if can_share_projects { + if screen_sharing_supported { children.push( IconButton::new("screen-share", ui::IconName::Screen) .style(ButtonStyle::Subtle) .icon_size(IconSize::Small) - .selected(is_screen_sharing) - .disabled(!platform_supported) + .toggle_state(is_screen_sharing) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .tooltip(move |cx| { Tooltip::text( - if !platform_supported { - "Cannot share screen" - } else if is_screen_sharing { + if is_screen_sharing { "Stop Sharing Screen" } else { "Share Screen" diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 2ea9ddafd7..cab85f6f19 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -7,6 +7,10 @@ mod window_controls; mod stories; use crate::application_menu::ApplicationMenu; + +#[cfg(not(target_os = "macos"))] +use crate::application_menu::{NavigateApplicationMenuInDirection, OpenApplicationMenu}; + use crate::platforms::{platform_linux, platform_mac, platform_windows}; use auto_update::AutoUpdateStatus; use call::ActiveCall; @@ -17,9 +21,9 @@ use gpui::{ Interactivity, IntoElement, Model, MouseButton, ParentElement, Render, Stateful, StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; -use project::{Project, RepositoryEntry}; -use recent_projects::{OpenRemote, RecentProjects}; +use project::Project; use rpc::proto; +use settings::Settings as _; use smallvec::SmallVec; use std::sync::Arc; use 
theme::ActiveTheme; @@ -28,9 +32,8 @@ use ui::{ IconSize, IconWithIndicator, Indicator, PopoverMenu, Tooltip, }; use util::ResultExt; -use vcs_menu::{BranchList, OpenRecent as ToggleVcsMenu}; use workspace::{notifications::NotifyResultExt, Workspace}; -use zed_actions::OpenBrowser; +use zed_actions::{OpenBrowser, OpenRecent, OpenRemote}; #[cfg(feature = "stories")] pub use stories::*; @@ -54,7 +57,39 @@ actions!( pub fn init(cx: &mut AppContext) { cx.observe_new_views(|workspace: &mut Workspace, cx| { let item = cx.new_view(|cx| TitleBar::new("title-bar", workspace, cx)); - workspace.set_titlebar_item(item.into(), cx) + workspace.set_titlebar_item(item.into(), cx); + + #[cfg(not(target_os = "macos"))] + workspace.register_action(|workspace, action: &OpenApplicationMenu, cx| { + if let Some(titlebar) = workspace + .titlebar_item() + .and_then(|item| item.downcast::().ok()) + { + titlebar.update(cx, |titlebar, cx| { + if let Some(ref menu) = titlebar.application_menu { + menu.update(cx, |menu, cx| menu.open_menu(action, cx)); + } + }); + } + }); + + #[cfg(not(target_os = "macos"))] + workspace.register_action( + |workspace, action: &NavigateApplicationMenuInDirection, cx| { + if let Some(titlebar) = workspace + .titlebar_item() + .and_then(|item| item.downcast::().ok()) + { + titlebar.update(cx, |titlebar, cx| { + if let Some(ref menu) = titlebar.application_menu { + menu.update(cx, |menu, cx| { + menu.navigate_menus_in_direction(action, cx) + }); + } + }); + } + }, + ); }) .detach(); } @@ -135,10 +170,20 @@ impl Render for TitleBar { .child( h_flex() .gap_1() - .when_some(self.application_menu.clone(), |this, menu| this.child(menu)) - .children(self.render_project_host(cx)) - .child(self.render_project_name(cx)) - .children(self.render_project_branch(cx)) + .map(|title_bar| { + let mut render_project_items = true; + title_bar + .when_some(self.application_menu.clone(), |title_bar, menu| { + render_project_items = !menu.read(cx).all_menus_shown(); + 
title_bar.child(menu) + }) + .when(render_project_items, |title_bar| { + title_bar + .children(self.render_project_host(cx)) + .child(self.render_project_name(cx)) + .children(self.render_project_branch(cx)) + }) + }) .on_mouse_down(MouseButton::Left, |_, cx| cx.stop_propagation()), ) .child(self.render_collaborator_list(cx)) @@ -217,7 +262,13 @@ impl TitleBar { let platform_style = PlatformStyle::platform(); let application_menu = match platform_style { - PlatformStyle::Mac => None, + PlatformStyle::Mac => { + if option_env!("ZED_USE_CROSS_PLATFORM_MENU").is_some() { + Some(cx.new_view(ApplicationMenu::new)) + } else { + None + } + } PlatformStyle::Linux | PlatformStyle::Windows => { Some(cx.new_view(ApplicationMenu::new)) } @@ -304,21 +355,24 @@ impl TitleBar { Some( ButtonLike::new("ssh-server-icon") .child( - IconWithIndicator::new( - Icon::new(IconName::Server) - .size(IconSize::XSmall) - .color(icon_color), - Some(Indicator::dot().color(indicator_color)), - ) - .indicator_border_color(Some(cx.theme().colors().title_bar_background)) - .into_any_element(), - ) - .child( - div() + h_flex() + .gap_2() .max_w_32() - .overflow_hidden() - .text_ellipsis() - .child(Label::new(nickname.clone()).size(LabelSize::Small)), + .child( + IconWithIndicator::new( + Icon::new(IconName::Server) + .size(IconSize::XSmall) + .color(icon_color), + Some(Indicator::dot().color(indicator_color)), + ) + .indicator_border_color(Some(cx.theme().colors().title_bar_background)) + .into_any_element(), + ) + .child( + Label::new(nickname.clone()) + .size(LabelSize::Small) + .text_ellipsis(), + ), ) .tooltip(move |cx| { Tooltip::with_meta("Remote Project", Some(&OpenRemote), meta.clone(), cx) @@ -397,7 +451,6 @@ impl TitleBar { "Open recent project".to_string() }; - let workspace = self.workspace.clone(); Button::new("project_name_trigger", name) .when(!is_project_selected, |b| b.color(Color::Muted)) .style(ButtonStyle::Subtle) @@ -405,18 +458,19 @@ impl TitleBar { .tooltip(move |cx| { 
Tooltip::for_action( "Recent Projects", - &recent_projects::OpenRecent { + &zed_actions::OpenRecent { create_new_window: false, }, cx, ) }) .on_click(cx.listener(move |_, _, cx| { - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - RecentProjects::open(workspace, false, cx); - }) - } + cx.dispatch_action( + OpenRecent { + create_new_window: false, + } + .boxed_clone(), + ); })) } @@ -433,7 +487,7 @@ impl TitleBar { let workspace = self.workspace.upgrade()?; let branch_name = entry .as_ref() - .and_then(RepositoryEntry::branch) + .and_then(|entry| entry.branch()) .map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?; Some( Button::new("project_branch_trigger", branch_name) @@ -443,14 +497,14 @@ impl TitleBar { .tooltip(move |cx| { Tooltip::with_meta( "Recent Branches", - Some(&ToggleVcsMenu), + Some(&zed_actions::branches::OpenRecent), "Local branches only", cx, ) }) .on_click(move |_, cx| { - let _ = workspace.update(cx, |this, cx| { - BranchList::open(this, &Default::default(), cx); + let _ = workspace.update(cx, |_this, cx| { + cx.dispatch_action(zed_actions::branches::OpenRecent.boxed_clone()); }); }), ) @@ -580,8 +634,11 @@ impl TitleBar { }) .action("Settings", zed_actions::OpenSettings.boxed_clone()) .action("Key Bindings", Box::new(zed_actions::OpenKeymap)) - .action("Themes…", theme_selector::Toggle::default().boxed_clone()) - .action("Extensions", extensions_ui::Extensions.boxed_clone()) + .action( + "Themes…", + zed_actions::theme_selector::Toggle::default().boxed_clone(), + ) + .action("Extensions", zed_actions::Extensions.boxed_clone()) .separator() .link( "Book Onboarding", @@ -599,7 +656,11 @@ impl TitleBar { .child( h_flex() .gap_0p5() - .child(Avatar::new(user.avatar_uri.clone())) + .children( + workspace::WorkspaceSettings::get_global(cx) + .show_user_picture + .then(|| Avatar::new(user.avatar_uri.clone())), + ) .child( Icon::new(IconName::ChevronDown) .size(IconSize::Small) @@ -609,15 
+670,18 @@ impl TitleBar { .style(ButtonStyle::Subtle) .tooltip(move |cx| Tooltip::text("Toggle User Menu", cx)), ) - .anchor(gpui::AnchorCorner::TopRight) + .anchor(gpui::Corner::TopRight) } else { PopoverMenu::new("user-menu") .menu(|cx| { ContextMenu::build(cx, |menu, _| { menu.action("Settings", zed_actions::OpenSettings.boxed_clone()) .action("Key Bindings", Box::new(zed_actions::OpenKeymap)) - .action("Themes…", theme_selector::Toggle::default().boxed_clone()) - .action("Extensions", extensions_ui::Extensions.boxed_clone()) + .action( + "Themes…", + zed_actions::theme_selector::Toggle::default().boxed_clone(), + ) + .action("Extensions", zed_actions::Extensions.boxed_clone()) .separator() .link( "Book Onboarding", diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index e2d0b2c808..c49deed02c 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -4,14 +4,15 @@ use gpui::{ ViewContext, WeakModel, WeakView, }; use language::{Buffer, BufferEvent, LanguageName, Toolchain}; -use project::WorktreeId; -use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; +use project::{Project, WorktreeId}; +use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, SharedString, Tooltip}; use workspace::{item::ItemHandle, StatusItemView, Workspace}; use crate::ToolchainSelector; pub struct ActiveToolchain { active_toolchain: Option, + term: SharedString, workspace: WeakView, active_buffer: Option<(WorktreeId, WeakModel, Subscription)>, _update_toolchain_task: Task>, @@ -22,6 +23,7 @@ impl ActiveToolchain { Self { active_toolchain: None, active_buffer: None, + term: SharedString::new_static("Toolchain"), workspace: workspace.weak_handle(), _update_toolchain_task: Self::spawn_tracker_task(cx), @@ -44,7 +46,17 @@ impl ActiveToolchain { .update(&mut cx, |this, _| Some(this.language()?.name())) .ok() .flatten()?; - + let 
term = workspace + .update(&mut cx, |workspace, cx| { + let languages = workspace.project().read(cx).languages(); + Project::toolchain_term(languages.clone(), language_name.clone()) + }) + .ok()? + .await?; + let _ = this.update(&mut cx, |this, cx| { + this.term = term; + cx.notify(); + }); let worktree_id = active_file .update(&mut cx, |this, cx| Some(this.file()?.worktree_id(cx))) .ok() @@ -133,6 +145,7 @@ impl ActiveToolchain { impl Render for ActiveToolchain { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { div().when_some(self.active_toolchain.as_ref(), |el, active_toolchain| { + let term = self.term.clone(); el.child( Button::new("change-toolchain", active_toolchain.name.clone()) .label_size(LabelSize::Small) @@ -143,7 +156,7 @@ impl Render for ActiveToolchain { }); } })) - .tooltip(|cx| Tooltip::text("Select Toolchain", cx)), + .tooltip(move |cx| Tooltip::text(format!("Select {}", &term), cx)), ) }) } diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index 8a3368f816..2c886bb8c4 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -126,6 +126,7 @@ pub struct ToolchainSelectorDelegate { workspace: WeakView, worktree_id: WorktreeId, worktree_abs_path_root: Arc, + placeholder_text: Arc, _fetch_candidates_task: Task>, } @@ -144,6 +145,17 @@ impl ToolchainSelectorDelegate { let _fetch_candidates_task = cx.spawn({ let project = project.clone(); move |this, mut cx| async move { + let term = project + .update(&mut cx, |this, _| { + Project::toolchain_term(this.languages().clone(), language_name.clone()) + }) + .ok()? 
+ .await?; + let placeholder_text = format!("Select a {}…", term.to_lowercase()).into(); + let _ = this.update(&mut cx, move |this, cx| { + this.delegate.placeholder_text = placeholder_text; + this.refresh_placeholder(cx); + }); let available_toolchains = project .update(&mut cx, |this, cx| { this.available_toolchains(worktree_id, language_name, cx) @@ -153,6 +165,7 @@ impl ToolchainSelectorDelegate { let _ = this.update(&mut cx, move |this, cx| { this.delegate.candidates = available_toolchains; + if let Some(active_toolchain) = active_toolchain { if let Some(position) = this .delegate @@ -170,7 +183,7 @@ impl ToolchainSelectorDelegate { Some(()) } }); - + let placeholder_text = "Select a toolchain…".to_string().into(); Self { toolchain_selector: language_selector, candidates: Default::default(), @@ -179,6 +192,7 @@ impl ToolchainSelectorDelegate { workspace, worktree_id, worktree_abs_path_root, + placeholder_text, _fetch_candidates_task, } } @@ -196,7 +210,7 @@ impl PickerDelegate for ToolchainSelectorDelegate { type ListItem = ListItem; fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { - "Select a toolchain...".into() + self.placeholder_text.clone() } fn match_count(&self) -> usize { @@ -282,7 +296,7 @@ impl PickerDelegate for ToolchainSelectorDelegate { .map(|(candidate_id, toolchain)| { let path = Self::relativize_path(toolchain.path, &worktree_root_path); let string = format!("{}{}", toolchain.name, path); - StringMatchCandidate::new(candidate_id, string) + StringMatchCandidate::new(candidate_id, &string) }) .collect::>(); match_strings( @@ -331,7 +345,7 @@ impl PickerDelegate for ToolchainSelectorDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child(HighlightedLabel::new(label, name_highlights)) .child( HighlightedLabel::new(path, path_highlights) diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 6d13e8a71a..f6626c745b 100644 --- 
a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -1,6 +1,5 @@ mod avatar; mod button; -mod checkbox; mod content_group; mod context_menu; mod disclosure; @@ -28,6 +27,7 @@ mod stack; mod tab; mod tab_bar; mod table; +mod toggle; mod tool_strip; mod tooltip; @@ -36,7 +36,6 @@ mod stories; pub use avatar::*; pub use button::*; -pub use checkbox::*; pub use content_group::*; pub use context_menu::*; pub use disclosure::*; @@ -64,6 +63,7 @@ pub use stack::*; pub use tab::*; pub use tab_bar::*; pub use table::*; +pub use toggle::*; pub use tool_strip::*; pub use tooltip::*; diff --git a/crates/ui/src/components/button/button.rs b/crates/ui/src/components/button/button.rs index fdf9b537bc..45f185a550 100644 --- a/crates/ui/src/components/button/button.rs +++ b/crates/ui/src/components/button/button.rs @@ -194,7 +194,7 @@ impl Button { } } -impl Selectable for Button { +impl Toggleable for Button { /// Sets the selected state of the button. /// /// This method allows the selection state of the button to be specified. @@ -213,8 +213,8 @@ impl Selectable for Button { /// ``` /// /// Use [`selected_style`](Button::selected_style) to change the style of the button when it is selected. 
- fn selected(mut self, selected: bool) -> Self { - self.base = self.base.selected(selected); + fn toggle_state(mut self, selected: bool) -> Self { + self.base = self.base.toggle_state(selected); self } } @@ -405,7 +405,7 @@ impl RenderOnce for Button { this.children(self.icon.map(|icon| { ButtonIcon::new(icon) .disabled(is_disabled) - .selected(is_selected) + .toggle_state(is_selected) .selected_icon(self.selected_icon) .selected_icon_color(self.selected_icon_color) .size(self.icon_size) @@ -429,7 +429,7 @@ impl RenderOnce for Button { this.children(self.icon.map(|icon| { ButtonIcon::new(icon) .disabled(is_disabled) - .selected(is_selected) + .toggle_state(is_selected) .selected_icon(self.selected_icon) .selected_icon_color(self.selected_icon_color) .size(self.icon_size) @@ -445,7 +445,7 @@ impl ComponentPreview for Button { "A button allows users to take actions, and make choices, with a single tap." } - fn examples(_: &WindowContext) -> Vec> { + fn examples(_: &mut WindowContext) -> Vec> { vec![ example_group_with_title( "Styles", @@ -474,9 +474,9 @@ impl ComponentPreview for Button { .style(ButtonStyle::Tinted(TintColor::Accent)), ), single_example( - "Negative", - Button::new("tinted_negative", "Negative") - .style(ButtonStyle::Tinted(TintColor::Negative)), + "Error", + Button::new("tinted_negative", "Error") + .style(ButtonStyle::Tinted(TintColor::Error)), ), single_example( "Warning", @@ -484,9 +484,9 @@ impl ComponentPreview for Button { .style(ButtonStyle::Tinted(TintColor::Warning)), ), single_example( - "Positive", - Button::new("tinted_positive", "Positive") - .style(ButtonStyle::Tinted(TintColor::Positive)), + "Success", + Button::new("tinted_positive", "Success") + .style(ButtonStyle::Tinted(TintColor::Success)), ), ], ), @@ -500,7 +500,7 @@ impl ComponentPreview for Button { ), single_example( "Selected", - Button::new("selected", "Selected").selected(true), + Button::new("selected", "Selected").toggle_state(true), ), ], ), @@ -527,8 +527,8 @@ impl 
ComponentPreview for Button { ), single_example( "Tinted Icons", - Button::new("icon_color", "Delete") - .style(ButtonStyle::Tinted(TintColor::Negative)) + Button::new("icon_color", "Error") + .style(ButtonStyle::Tinted(TintColor::Error)) .color(Color::Error) .icon_color(Color::Error) .icon(IconName::Trash) diff --git a/crates/ui/src/components/button/button_icon.rs b/crates/ui/src/components/button/button_icon.rs index f3aebe7f76..bb85f7d10a 100644 --- a/crates/ui/src/components/button/button_icon.rs +++ b/crates/ui/src/components/button/button_icon.rs @@ -65,8 +65,8 @@ impl Disableable for ButtonIcon { } } -impl Selectable for ButtonIcon { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for ButtonIcon { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } diff --git a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index a285388cbd..26ef2fc19f 100644 --- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -6,7 +6,7 @@ use smallvec::SmallVec; use crate::{prelude::*, DynamicSpacing, ElevationIndex}; /// A trait for buttons that can be Selected. Enables setting the [`ButtonStyle`] of a button when it is selected. 
-pub trait SelectableButton: Selectable { +pub trait SelectableButton: Toggleable { fn selected_style(self, style: ButtonStyle) -> Self; } @@ -49,9 +49,9 @@ pub enum IconPosition { pub enum TintColor { #[default] Accent, - Negative, + Error, Warning, - Positive, + Success, } impl TintColor { @@ -63,7 +63,7 @@ impl TintColor { label_color: cx.theme().colors().text, icon_color: cx.theme().colors().text, }, - TintColor::Negative => ButtonLikeStyles { + TintColor::Error => ButtonLikeStyles { background: cx.theme().status().error_background, border_color: cx.theme().status().error_border, label_color: cx.theme().colors().text, @@ -75,7 +75,7 @@ impl TintColor { label_color: cx.theme().colors().text, icon_color: cx.theme().colors().text, }, - TintColor::Positive => ButtonLikeStyles { + TintColor::Success => ButtonLikeStyles { background: cx.theme().status().success_background, border_color: cx.theme().status().success_border, label_color: cx.theme().colors().text, @@ -89,9 +89,9 @@ impl From for Color { fn from(tint: TintColor) -> Self { match tint { TintColor::Accent => Color::Accent, - TintColor::Negative => Color::Error, + TintColor::Error => Color::Error, TintColor::Warning => Color::Warning, - TintColor::Positive => Color::Success, + TintColor::Success => Color::Success, } } } @@ -202,7 +202,12 @@ impl ButtonStyle { icon_color: Color::Default.color(cx), } } - ButtonStyle::Tinted(tint) => tint.button_like_style(cx), + ButtonStyle::Tinted(tint) => { + let mut styles = tint.button_like_style(cx); + let theme = cx.theme(); + styles.background = theme.darken(styles.background, 0.05, 0.2); + styles + } ButtonStyle::Subtle => ButtonLikeStyles { background: cx.theme().colors().ghost_element_hover, border_color: transparent_black(), @@ -395,8 +400,8 @@ impl Disableable for ButtonLike { } } -impl Selectable for ButtonLike { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for ButtonLike { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = 
selected; self } diff --git a/crates/ui/src/components/button/icon_button.rs b/crates/ui/src/components/button/icon_button.rs index bad10d6fb4..1c37140e29 100644 --- a/crates/ui/src/components/button/icon_button.rs +++ b/crates/ui/src/components/button/icon_button.rs @@ -22,6 +22,7 @@ pub struct IconButton { icon_size: IconSize, icon_color: Color, selected_icon: Option, + alpha: Option, } impl IconButton { @@ -33,6 +34,7 @@ impl IconButton { icon_size: IconSize::default(), icon_color: Color::Default, selected_icon: None, + alpha: None, }; this.base.base = this.base.base.debug_selector(|| format!("ICON-{:?}", icon)); this @@ -53,6 +55,11 @@ impl IconButton { self } + pub fn alpha(mut self, alpha: f32) -> Self { + self.alpha = Some(alpha); + self + } + pub fn selected_icon(mut self, icon: impl Into>) -> Self { self.selected_icon = icon.into(); self @@ -66,9 +73,9 @@ impl Disableable for IconButton { } } -impl Selectable for IconButton { - fn selected(mut self, selected: bool) -> Self { - self.base = self.base.selected(selected); +impl Toggleable for IconButton { + fn toggle_state(mut self, selected: bool) -> Self { + self.base = self.base.toggle_state(selected); self } } @@ -146,6 +153,7 @@ impl RenderOnce for IconButton { let is_selected = self.base.selected; let selected_style = self.base.selected_style; + let color = self.icon_color.color(cx).opacity(self.alpha.unwrap_or(1.0)); self.base .map(|this| match self.shape { IconButtonShape::Square => { @@ -157,11 +165,11 @@ impl RenderOnce for IconButton { .child( ButtonIcon::new(self.icon) .disabled(is_disabled) - .selected(is_selected) + .toggle_state(is_selected) .selected_icon(self.selected_icon) .when_some(selected_style, |this, style| this.selected_style(style)) .size(self.icon_size) - .color(self.icon_color), + .color(Color::Custom(color)), ) } } diff --git a/crates/ui/src/components/button/toggle_button.rs b/crates/ui/src/components/button/toggle_button.rs index 33577fc4e8..96bb37670b 100644 --- 
a/crates/ui/src/components/button/toggle_button.rs +++ b/crates/ui/src/components/button/toggle_button.rs @@ -57,9 +57,9 @@ impl ToggleButton { } } -impl Selectable for ToggleButton { - fn selected(mut self, selected: bool) -> Self { - self.base = self.base.selected(selected); +impl Toggleable for ToggleButton { + fn toggle_state(mut self, selected: bool) -> Self { + self.base = self.base.toggle_state(selected); self } } diff --git a/crates/ui/src/components/checkbox.rs b/crates/ui/src/components/checkbox.rs deleted file mode 100644 index 0a3fc6f650..0000000000 --- a/crates/ui/src/components/checkbox.rs +++ /dev/null @@ -1,248 +0,0 @@ -#![allow(missing_docs)] - -use gpui::{div, prelude::*, ElementId, IntoElement, Styled, WindowContext}; - -use crate::prelude::*; -use crate::{Color, Icon, IconName, Selection}; - -/// # Checkbox -/// -/// Checkboxes are used for multiple choices, not for mutually exclusive choices. -/// Each checkbox works independently from other checkboxes in the list, -/// therefore checking an additional box does not affect any other selections. 
-#[derive(IntoElement)] -pub struct Checkbox { - id: ElementId, - checked: Selection, - disabled: bool, - on_click: Option>, -} - -impl Checkbox { - pub fn new(id: impl Into, checked: Selection) -> Self { - Self { - id: id.into(), - checked, - disabled: false, - on_click: None, - } - } - - pub fn disabled(mut self, disabled: bool) -> Self { - self.disabled = disabled; - self - } - - pub fn on_click(mut self, handler: impl Fn(&Selection, &mut WindowContext) + 'static) -> Self { - self.on_click = Some(Box::new(handler)); - self - } -} - -impl RenderOnce for Checkbox { - fn render(self, cx: &mut WindowContext) -> impl IntoElement { - let group_id = format!("checkbox_group_{:?}", self.id); - - let icon = match self.checked { - Selection::Selected => Some(Icon::new(IconName::Check).size(IconSize::Small).color( - if self.disabled { - Color::Disabled - } else { - Color::Selected - }, - )), - Selection::Indeterminate => Some( - Icon::new(IconName::Dash) - .size(IconSize::Small) - .color(if self.disabled { - Color::Disabled - } else { - Color::Selected - }), - ), - Selection::Unselected => None, - }; - - let selected = - self.checked == Selection::Selected || self.checked == Selection::Indeterminate; - - let (bg_color, border_color) = match (self.disabled, selected) { - (true, _) => ( - cx.theme().colors().ghost_element_disabled, - cx.theme().colors().border_disabled, - ), - (false, true) => ( - cx.theme().colors().element_selected, - cx.theme().colors().border, - ), - (false, false) => ( - cx.theme().colors().element_background, - cx.theme().colors().border, - ), - }; - - h_flex() - .id(self.id) - .justify_center() - .items_center() - .size(DynamicSpacing::Base20.rems(cx)) - .group(group_id.clone()) - .child( - div() - .flex() - .flex_none() - .justify_center() - .items_center() - .m(DynamicSpacing::Base04.px(cx)) - .size(DynamicSpacing::Base16.rems(cx)) - .rounded_sm() - .bg(bg_color) - .border_1() - .border_color(border_color) - .when(!self.disabled, |this| { - 
this.group_hover(group_id.clone(), |el| { - el.bg(cx.theme().colors().element_hover) - }) - }) - .children(icon), - ) - .when_some( - self.on_click.filter(|_| !self.disabled), - |this, on_click| this.on_click(move |_, cx| on_click(&self.checked.inverse(), cx)), - ) - } -} - -impl ComponentPreview for Checkbox { - fn description() -> impl Into> { - "A checkbox lets people choose between a pair of opposing states, like enabled and disabled, using a different appearance to indicate each state." - } - - fn examples(_: &WindowContext) -> Vec> { - vec![ - example_group_with_title( - "Default", - vec![ - single_example( - "Unselected", - Checkbox::new("checkbox_unselected", Selection::Unselected), - ), - single_example( - "Indeterminate", - Checkbox::new("checkbox_indeterminate", Selection::Indeterminate), - ), - single_example( - "Selected", - Checkbox::new("checkbox_selected", Selection::Selected), - ), - ], - ), - example_group_with_title( - "Disabled", - vec![ - single_example( - "Unselected", - Checkbox::new("checkbox_disabled_unselected", Selection::Unselected) - .disabled(true), - ), - single_example( - "Indeterminate", - Checkbox::new("checkbox_disabled_indeterminate", Selection::Indeterminate) - .disabled(true), - ), - single_example( - "Selected", - Checkbox::new("checkbox_disabled_selected", Selection::Selected) - .disabled(true), - ), - ], - ), - ] - } -} - -use std::sync::Arc; - -/// A [`Checkbox`] that has a [`Label`]. 
-#[derive(IntoElement)] -pub struct CheckboxWithLabel { - id: ElementId, - label: Label, - checked: Selection, - on_click: Arc, -} - -impl CheckboxWithLabel { - pub fn new( - id: impl Into, - label: Label, - checked: Selection, - on_click: impl Fn(&Selection, &mut WindowContext) + 'static, - ) -> Self { - Self { - id: id.into(), - label, - checked, - on_click: Arc::new(on_click), - } - } -} - -impl RenderOnce for CheckboxWithLabel { - fn render(self, cx: &mut WindowContext) -> impl IntoElement { - h_flex() - .gap(DynamicSpacing::Base08.rems(cx)) - .child(Checkbox::new(self.id.clone(), self.checked).on_click({ - let on_click = self.on_click.clone(); - move |checked, cx| { - (on_click)(checked, cx); - } - })) - .child( - div() - .id(SharedString::from(format!("{}-label", self.id))) - .on_click(move |_event, cx| { - (self.on_click)(&self.checked.inverse(), cx); - }) - .child(self.label), - ) - } -} - -impl ComponentPreview for CheckboxWithLabel { - fn description() -> impl Into> { - "A checkbox with an associated label, allowing users to select an option while providing a descriptive text." 
- } - - fn examples(_: &WindowContext) -> Vec> { - vec![example_group(vec![ - single_example( - "Unselected", - CheckboxWithLabel::new( - "checkbox_with_label_unselected", - Label::new("Always save on quit"), - Selection::Unselected, - |_, _| {}, - ), - ), - single_example( - "Indeterminate", - CheckboxWithLabel::new( - "checkbox_with_label_indeterminate", - Label::new("Always save on quit"), - Selection::Indeterminate, - |_, _| {}, - ), - ), - single_example( - "Selected", - CheckboxWithLabel::new( - "checkbox_with_label_selected", - Label::new("Always save on quit"), - Selection::Selected, - |_, _| {}, - ), - ), - ])] - } -} diff --git a/crates/ui/src/components/content_group.rs b/crates/ui/src/components/content_group.rs index b8ba5b8860..d9d71c3eeb 100644 --- a/crates/ui/src/components/content_group.rs +++ b/crates/ui/src/components/content_group.rs @@ -95,7 +95,7 @@ impl ComponentPreview for ContentGroup { ExampleLabelSide::Bottom } - fn examples(_: &WindowContext) -> Vec> { + fn examples(_: &mut WindowContext) -> Vec> { vec![example_group(vec![ single_example( "Default", diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index 2f83953329..52d28306ac 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -434,7 +434,7 @@ impl Render for ContextMenu { ListItem::new(ix) .inset(true) .disabled(*disabled) - .selected(Some(ix) == self.selected_index) + .toggle_state(Some(ix) == self.selected_index) .when_some(*toggle, |list_item, (position, toggled)| { let contents = if toggled { v_flex().flex_none().child( @@ -495,7 +495,7 @@ impl Render for ContextMenu { let selectable = *selectable; ListItem::new(ix) .inset(true) - .selected(if selectable { + .toggle_state(if selectable { Some(ix) == self.selected_index } else { false diff --git a/crates/ui/src/components/disclosure.rs b/crates/ui/src/components/disclosure.rs index 9e8ab48221..7f04cc3528 100644 --- 
a/crates/ui/src/components/disclosure.rs +++ b/crates/ui/src/components/disclosure.rs @@ -34,8 +34,8 @@ impl Disclosure { } } -impl Selectable for Disclosure { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for Disclosure { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } @@ -65,7 +65,7 @@ impl RenderOnce for Disclosure { .shape(IconButtonShape::Square) .icon_color(Color::Muted) .icon_size(IconSize::Small) - .selected(self.selected) + .toggle_state(self.selected) .when_some(self.on_toggle, move |this, on_toggle| { this.on_click(move |event, cx| on_toggle(event, cx)) }) diff --git a/crates/ui/src/components/divider.rs b/crates/ui/src/components/divider.rs index 71234057b2..aa93350da4 100644 --- a/crates/ui/src/components/divider.rs +++ b/crates/ui/src/components/divider.rs @@ -3,6 +3,13 @@ use gpui::{Hsla, IntoElement}; use crate::prelude::*; +#[derive(Clone, Copy, PartialEq)] +enum DividerStyle { + Solid, + Dashed, +} + +#[derive(Clone, Copy, PartialEq)] enum DividerDirection { Horizontal, Vertical, @@ -27,6 +34,7 @@ impl DividerColor { #[derive(IntoElement)] pub struct Divider { + style: DividerStyle, direction: DividerDirection, color: DividerColor, inset: bool, @@ -34,22 +42,17 @@ pub struct Divider { impl RenderOnce for Divider { fn render(self, cx: &mut WindowContext) -> impl IntoElement { - div() - .map(|this| match self.direction { - DividerDirection::Horizontal => { - this.h_px().w_full().when(self.inset, |this| this.mx_1p5()) - } - DividerDirection::Vertical => { - this.w_px().h_full().when(self.inset, |this| this.my_1p5()) - } - }) - .bg(self.color.hsla(cx)) + match self.style { + DividerStyle::Solid => self.render_solid(cx).into_any_element(), + DividerStyle::Dashed => self.render_dashed(cx).into_any_element(), + } } } impl Divider { pub fn horizontal() -> Self { Self { + style: DividerStyle::Solid, direction: DividerDirection::Horizontal, color: DividerColor::default(), inset: false, @@ -58,6 
+61,25 @@ impl Divider { pub fn vertical() -> Self { Self { + style: DividerStyle::Solid, + direction: DividerDirection::Vertical, + color: DividerColor::default(), + inset: false, + } + } + + pub fn horizontal_dashed() -> Self { + Self { + style: DividerStyle::Dashed, + direction: DividerDirection::Horizontal, + color: DividerColor::default(), + inset: false, + } + } + + pub fn vertical_dashed() -> Self { + Self { + style: DividerStyle::Dashed, direction: DividerDirection::Vertical, color: DividerColor::default(), inset: false, @@ -73,4 +95,49 @@ impl Divider { self.color = color; self } + + pub fn render_solid(self, cx: &WindowContext) -> impl IntoElement { + div() + .map(|this| match self.direction { + DividerDirection::Horizontal => { + this.h_px().w_full().when(self.inset, |this| this.mx_1p5()) + } + DividerDirection::Vertical => { + this.w_px().h_full().when(self.inset, |this| this.my_1p5()) + } + }) + .bg(self.color.hsla(cx)) + } + + // TODO: Use canvas or a shader here + // This obviously is a short term approach + pub fn render_dashed(self, cx: &WindowContext) -> impl IntoElement { + let segment_count = 128; + let segment_count_f = segment_count as f32; + let segment_min_w = 6.; + let base = match self.direction { + DividerDirection::Horizontal => h_flex(), + DividerDirection::Vertical => v_flex(), + }; + let (w, h) = match self.direction { + DividerDirection::Horizontal => (px(segment_min_w), px(1.)), + DividerDirection::Vertical => (px(1.), px(segment_min_w)), + }; + let color = self.color.hsla(cx); + let total_min_w = segment_min_w * segment_count_f * 2.; // * 2 because of the gap + + base.min_w(px(total_min_w)) + .map(|this| { + if self.direction == DividerDirection::Horizontal { + this.w_full().h_px() + } else { + this.w_px().h_full() + } + }) + .gap(px(segment_min_w)) + .overflow_hidden() + .children( + (0..segment_count).map(|_| div().flex_grow().flex_shrink_0().w(w).h(h).bg(color)), + ) + } } diff --git a/crates/ui/src/components/dropdown_menu.rs 
b/crates/ui/src/components/dropdown_menu.rs index 8d930a63ac..5d53795472 100644 --- a/crates/ui/src/components/dropdown_menu.rs +++ b/crates/ui/src/components/dropdown_menu.rs @@ -1,5 +1,5 @@ #![allow(missing_docs)] -use gpui::{AnchorCorner, ClickEvent, CursorStyle, MouseButton, View}; +use gpui::{ClickEvent, Corner, CursorStyle, MouseButton, View}; use crate::{prelude::*, ContextMenu, PopoverMenu}; @@ -46,7 +46,7 @@ impl RenderOnce for DropdownMenu { .full_width(self.full_width) .menu(move |_cx| Some(self.menu.clone())) .trigger(DropdownMenuTrigger::new(self.label).full_width(self.full_width)) - .attach(AnchorCorner::BottomLeft) + .attach(Corner::BottomLeft) } } @@ -85,8 +85,8 @@ impl Disableable for DropdownMenuTrigger { } } -impl Selectable for DropdownMenuTrigger { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for DropdownMenuTrigger { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } diff --git a/crates/ui/src/components/facepile.rs b/crates/ui/src/components/facepile.rs index eb4dd8a98e..e3b799efe5 100644 --- a/crates/ui/src/components/facepile.rs +++ b/crates/ui/src/components/facepile.rs @@ -67,7 +67,7 @@ impl ComponentPreview for Facepile { \n\nFacepiles are used to display a group of people or things,\ such as a list of participants in a collaboration session." 
} - fn examples(_: &WindowContext) -> Vec> { + fn examples(_: &mut WindowContext) -> Vec> { let few_faces: [&'static str; 3] = [ "https://avatars.githubusercontent.com/u/1714999?s=60&v=4", "https://avatars.githubusercontent.com/u/67129314?s=60&v=4", diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index 161f4c60b7..b27edf4d82 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -160,7 +160,6 @@ pub enum IconName { Copy, CountdownTimer, CursorIBeam, - TextSnippet, Dash, DatabaseZap, Delete, @@ -170,14 +169,16 @@ pub enum IconName { Ellipsis, EllipsisVertical, Envelope, + Eraser, Escape, - Exit, ExpandVertical, + Exit, ExternalLink, Eye, File, FileCode, FileDoc, + FileDiff, FileGeneric, FileGit, FileLock, @@ -198,10 +199,13 @@ pub enum IconName { GenericMinimize, GenericRestore, Github, + Globe, + GitBranch, Hash, HistoryRerun, Indicator, IndicatorX, + Info, InlayHint, Keyboard, Library, @@ -214,6 +218,7 @@ pub enum IconName { Maximize, Menu, MessageBubbles, + MessageCircle, Mic, MicMute, Microscope, @@ -221,15 +226,17 @@ pub enum IconName { Option, PageDown, PageUp, + PanelLeft, + PanelRight, Pencil, Person, + PhoneIncoming, Pin, Play, Plus, PocketKnife, Public, PullRequest, - PhoneIncoming, Quote, RefreshTitle, Regex, @@ -264,6 +271,9 @@ pub enum IconName { SparkleFilled, Spinner, Split, + SquareDot, + SquareMinus, + SquarePlus, Star, StarFilled, Stop, @@ -275,6 +285,9 @@ pub enum IconName { SwatchBook, Tab, Terminal, + TextSnippet, + ThumbsUp, + ThumbsDown, Trash, TrashAlt, Triangle, @@ -287,11 +300,13 @@ pub enum IconName { Wand, Warning, WholeWord, + X, XCircle, ZedAssistant, + ZedAssistant2, ZedAssistantFilled, + ZedPredict, ZedXCopilot, - X, } impl From for Icon { @@ -414,7 +429,9 @@ pub struct IconDecoration { kind: IconDecorationKind, color: Hsla, knockout_color: Hsla, + knockout_hover_color: Hsla, position: Point, + group_name: Option, } impl IconDecoration { @@ -427,7 +444,9 @@ impl 
IconDecoration { kind, color, knockout_color, + knockout_hover_color: knockout_color, position, + group_name: None, } } @@ -452,11 +471,23 @@ impl IconDecoration { self } + /// Sets the color of the decoration that is used on hover + pub fn knockout_hover_color(mut self, color: Hsla) -> Self { + self.knockout_hover_color = color; + self + } + /// Sets the position of the decoration pub fn position(mut self, position: Point) -> Self { self.position = position; self } + + /// Sets the name of the group the decoration belongs to + pub fn group_name(mut self, name: Option) -> Self { + self.group_name = name; + self + } } impl RenderOnce for IconDecoration { @@ -485,13 +516,21 @@ impl RenderOnce for IconDecoration { .right_0() .size(px(ICON_DECORATION_SIZE)) .path(self.kind.bg().path()) - .text_color(self.knockout_color), + .text_color(self.knockout_color) + .when(self.group_name.is_none(), |this| { + this.hover(|style| style.text_color(self.knockout_hover_color)) + }) + .when_some(self.group_name.clone(), |this, group_name| { + this.group_hover(group_name, |style| { + style.text_color(self.knockout_hover_color) + }) + }), ) } } impl ComponentPreview for IconDecoration { - fn examples(cx: &WindowContext) -> Vec> { + fn examples(cx: &mut WindowContext) -> Vec> { let all_kinds = IconDecorationKind::iter().collect::>(); let examples = all_kinds @@ -533,7 +572,7 @@ impl RenderOnce for DecoratedIcon { } impl ComponentPreview for DecoratedIcon { - fn examples(cx: &WindowContext) -> Vec> { + fn examples(cx: &mut WindowContext) -> Vec> { let icon_1 = Icon::new(IconName::FileDoc); let icon_2 = Icon::new(IconName::FileDoc); let icon_3 = Icon::new(IconName::FileDoc); @@ -652,7 +691,7 @@ impl RenderOnce for IconWithIndicator { } impl ComponentPreview for Icon { - fn examples(_cx: &WindowContext) -> Vec> { + fn examples(_cx: &mut WindowContext) -> Vec> { let arrow_icons = vec![ IconName::ArrowDown, IconName::ArrowLeft, diff --git a/crates/ui/src/components/indicator.rs 
b/crates/ui/src/components/indicator.rs index b0d5b0d2da..26eebb8568 100644 --- a/crates/ui/src/components/indicator.rs +++ b/crates/ui/src/components/indicator.rs @@ -89,7 +89,7 @@ impl ComponentPreview for Indicator { "An indicator visually represents a status or state." } - fn examples(_: &WindowContext) -> Vec> { + fn examples(_: &mut WindowContext) -> Vec> { vec![ example_group_with_title( "Types", diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index 770e46eafd..328481de6e 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -3,7 +3,7 @@ use crate::PlatformStyle; use crate::{h_flex, prelude::*, Icon, IconName, IconSize}; use gpui::{relative, Action, FocusHandle, IntoElement, Keystroke, WindowContext}; -#[derive(IntoElement, Clone)] +#[derive(Debug, IntoElement, Clone)] pub struct KeyBinding { /// A keybinding consists of a key and a set of modifier keys. /// More then one keybinding produces a chord. 
diff --git a/crates/ui/src/components/label/highlighted_label.rs b/crates/ui/src/components/label/highlighted_label.rs index f961713956..c6e5b444cc 100644 --- a/crates/ui/src/components/label/highlighted_label.rs +++ b/crates/ui/src/components/label/highlighted_label.rs @@ -65,6 +65,16 @@ impl LabelCommon for HighlightedLabel { self.base = self.base.underline(underline); self } + + fn text_ellipsis(mut self) -> Self { + self.base = self.base.text_ellipsis(); + self + } + + fn single_line(mut self) -> Self { + self.base = self.base.single_line(); + self + } } pub fn highlight_ranges( diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index f655961841..5ec557d7e6 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -56,20 +56,6 @@ impl Label { single_line: false, } } - - /// Make the label display in a single line mode - /// - /// # Examples - /// - /// ``` - /// use ui::prelude::*; - /// - /// let my_label = Label::new("Hello, World!").single_line(); - /// ``` - pub fn single_line(mut self) -> Self { - self.single_line = true; - self - } } // Style methods. @@ -177,6 +163,17 @@ impl LabelCommon for Label { self.base = self.base.underline(underline); self } + + fn text_ellipsis(mut self) -> Self { + self.base = self.base.text_ellipsis(); + self + } + + fn single_line(mut self) -> Self { + self.single_line = true; + self.base = self.base.single_line(); + self + } } impl RenderOnce for Label { diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index fd7303082a..683db552d6 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -49,6 +49,12 @@ pub trait LabelCommon { /// Sets the alpha property of the label, overwriting the alpha value of the color. fn alpha(self, alpha: f32) -> Self; + + /// Truncates overflowing text with an ellipsis (`…`) if needed. 
+ fn text_ellipsis(self) -> Self; + + /// Sets the label to render as a single line. + fn single_line(self) -> Self; } #[derive(IntoElement)] @@ -63,6 +69,8 @@ pub struct LabelLike { children: SmallVec<[AnyElement; 2]>, alpha: Option, underline: bool, + single_line: bool, + text_ellipsis: bool, } impl Default for LabelLike { @@ -84,6 +92,8 @@ impl LabelLike { children: SmallVec::new(), alpha: None, underline: false, + single_line: false, + text_ellipsis: false, } } } @@ -139,6 +149,16 @@ impl LabelCommon for LabelLike { self.alpha = Some(alpha); self } + + fn text_ellipsis(mut self) -> Self { + self.text_ellipsis = true; + self + } + + fn single_line(mut self) -> Self { + self.single_line = true; + self + } } impl ParentElement for LabelLike { @@ -178,6 +198,10 @@ impl RenderOnce for LabelLike { this }) .when(self.strikethrough, |this| this.line_through()) + .when(self.single_line, |this| this.whitespace_nowrap()) + .when(self.text_ellipsis, |this| { + this.overflow_x_hidden().text_ellipsis() + }) .text_color(color) .font_weight(self.weight.unwrap_or(settings.ui_font.weight)) .children(self.children) diff --git a/crates/ui/src/components/list/list_header.rs b/crates/ui/src/components/list/list_header.rs index 0d1411e70a..70af7c2abc 100644 --- a/crates/ui/src/components/list/list_header.rs +++ b/crates/ui/src/components/list/list_header.rs @@ -73,8 +73,8 @@ impl ListHeader { } } -impl Selectable for ListHeader { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for ListHeader { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index c65832d3e9..e9fb9f9243 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -11,6 +11,7 @@ use crate::{prelude::*, Disclosure}; pub enum ListItemSpacing { #[default] Dense, + ExtraDense, Sparse, } @@ -38,7 +39,9 @@ pub struct 
ListItem { on_secondary_mouse_down: Option>, children: SmallVec<[AnyElement; 2]>, selectable: bool, + outlined: bool, overflow_x: bool, + focused: Option, } impl ListItem { @@ -61,7 +64,9 @@ impl ListItem { tooltip: None, children: SmallVec::new(), selectable: true, + outlined: false, overflow_x: false, + focused: None, } } @@ -136,10 +141,20 @@ impl ListItem { self } + pub fn outlined(mut self) -> Self { + self.outlined = true; + self + } + pub fn overflow_x(mut self) -> Self { self.overflow_x = true; self } + + pub fn focused(mut self, focused: bool) -> Self { + self.focused = Some(focused); + self + } } impl Disableable for ListItem { @@ -149,8 +164,8 @@ impl Disableable for ListItem { } } -impl Selectable for ListItem { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for ListItem { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } @@ -177,12 +192,18 @@ impl RenderOnce for ListItem { this // TODO: Add focus state // .when(self.state == InteractionState::Focused, |this| { - // this.border_1() - // .border_color(cx.theme().colors().border_focused) - // }) + .when_some(self.focused, |this, focused| { + if focused { + this.border_1() + .border_color(cx.theme().colors().border_focused) + } else { + this.border_1() + } + }) .when(self.selectable, |this| { this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) .active(|style| style.bg(cx.theme().colors().ghost_element_active)) + .when(self.outlined, |this| this.rounded_md()) .when(self.selected, |this| { this.bg(cx.theme().colors().ghost_element_selected) }) @@ -191,6 +212,7 @@ impl RenderOnce for ListItem { .child( h_flex() .id("inner_list_item") + .group("list_item") .w_full() .relative() .items_center() @@ -198,16 +220,21 @@ impl RenderOnce for ListItem { .px(DynamicSpacing::Base06.rems(cx)) .map(|this| match self.spacing { ListItemSpacing::Dense => this, + ListItemSpacing::ExtraDense => this.py_neg_px(), ListItemSpacing::Sparse => this.py_1(), 
}) - .group("list_item") .when(self.inset && !self.disabled, |this| { this // TODO: Add focus state - // .when(self.state == InteractionState::Focused, |this| { - // this.border_1() - // .border_color(cx.theme().colors().border_focused) - // }) + //.when(self.state == InteractionState::Focused, |this| { + .when_some(self.focused, |this, focused| { + if focused { + this.border_1() + .border_color(cx.theme().colors().border_focused) + } else { + this.border_1() + } + }) .when(self.selectable, |this| { this.hover(|style| { style.bg(cx.theme().colors().ghost_element_hover) @@ -221,6 +248,12 @@ impl RenderOnce for ListItem { .when_some(self.on_click, |this, on_click| { this.cursor_pointer().on_click(on_click) }) + .when(self.outlined, |this| { + this.border_1() + .border_color(cx.theme().colors().border) + .rounded_md() + .overflow_hidden() + }) .when_some(self.on_secondary_mouse_down, |this, on_mouse_down| { this.on_mouse_down(MouseButton::Right, move |event, cx| { (on_mouse_down)(event, cx) diff --git a/crates/ui/src/components/list/list_sub_header.rs b/crates/ui/src/components/list/list_sub_header.rs index 62d2ca0091..35e72f4ba7 100644 --- a/crates/ui/src/components/list/list_sub_header.rs +++ b/crates/ui/src/components/list/list_sub_header.rs @@ -32,8 +32,8 @@ impl ListSubHeader { } } -impl Selectable for ListSubHeader { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for ListSubHeader { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } diff --git a/crates/ui/src/components/navigable.rs b/crates/ui/src/components/navigable.rs index fadd6d597e..d4f10fd63a 100644 --- a/crates/ui/src/components/navigable.rs +++ b/crates/ui/src/components/navigable.rs @@ -18,14 +18,14 @@ pub struct NavigableEntry { impl NavigableEntry { /// Creates a new [NavigableEntry] for a given scroll handle. 
- pub fn new(scroll_handle: &ScrollHandle, cx: &WindowContext<'_>) -> Self { + pub fn new(scroll_handle: &ScrollHandle, cx: &WindowContext) -> Self { Self { focus_handle: cx.focus_handle(), scroll_anchor: Some(ScrollAnchor::for_handle(scroll_handle.clone())), } } /// Create a new [NavigableEntry] that cannot be scrolled to. - pub fn focusable(cx: &WindowContext<'_>) -> Self { + pub fn focusable(cx: &WindowContext) -> Self { Self { focus_handle: cx.focus_handle(), scroll_anchor: None, @@ -51,7 +51,7 @@ impl Navigable { fn find_focused( selectable_children: &[NavigableEntry], - cx: &mut WindowContext<'_>, + cx: &mut WindowContext, ) -> Option { selectable_children .iter() @@ -59,7 +59,7 @@ impl Navigable { } } impl RenderOnce for Navigable { - fn render(self, _: &mut WindowContext<'_>) -> impl crate::IntoElement { + fn render(self, _: &mut WindowContext) -> impl crate::IntoElement { div() .on_action({ let children = self.selectable_children.clone(); diff --git a/crates/ui/src/components/popover.rs b/crates/ui/src/components/popover.rs index 5bd6c1ed7c..3b16b0ccac 100644 --- a/crates/ui/src/components/popover.rs +++ b/crates/ui/src/components/popover.rs @@ -3,10 +3,13 @@ use crate::prelude::*; use crate::v_flex; use gpui::{ - div, AnyElement, Element, IntoElement, ParentElement, RenderOnce, Styled, WindowContext, + div, AnyElement, Element, IntoElement, ParentElement, Pixels, RenderOnce, Styled, WindowContext, }; use smallvec::SmallVec; +/// Y height added beyond the size of the contents. +pub const POPOVER_Y_PADDING: Pixels = px(8.); + /// A popover is used to display a menu or show some options. /// /// Clicking the element that launches the popover should not change the current view, @@ -45,7 +48,12 @@ impl RenderOnce for Popover { div() .flex() .gap_1() - .child(v_flex().elevation_2(cx).py_1().children(self.children)) + .child( + v_flex() + .elevation_2(cx) + .py(POPOVER_Y_PADDING / 2.) 
+ .children(self.children), + ) .when_some(self.aside, |this, aside| { this.child( v_flex() diff --git a/crates/ui/src/components/popover_menu.rs b/crates/ui/src/components/popover_menu.rs index 547b8f5a83..9594960d9f 100644 --- a/crates/ui/src/components/popover_menu.rs +++ b/crates/ui/src/components/popover_menu.rs @@ -3,17 +3,39 @@ use std::{cell::RefCell, rc::Rc}; use gpui::{ - anchored, deferred, div, point, prelude::FluentBuilder, px, size, AnchorCorner, AnyElement, - Bounds, DismissEvent, DispatchPhase, Element, ElementId, GlobalElementId, HitboxId, - InteractiveElement, IntoElement, LayoutId, Length, ManagedView, MouseDownEvent, ParentElement, - Pixels, Point, Style, View, VisualContext, WindowContext, + anchored, deferred, div, point, prelude::FluentBuilder, px, size, AnyElement, Bounds, Corner, + DismissEvent, DispatchPhase, Element, ElementId, GlobalElementId, HitboxId, InteractiveElement, + IntoElement, LayoutId, Length, ManagedView, MouseDownEvent, ParentElement, Pixels, Point, + Style, View, VisualContext, WindowContext, }; use crate::prelude::*; -pub trait PopoverTrigger: IntoElement + Clickable + Selectable + 'static {} +pub trait PopoverTrigger: IntoElement + Clickable + Toggleable + 'static {} -impl PopoverTrigger for T {} +impl PopoverTrigger for T {} + +impl Clickable for gpui::AnimationElement +where + T: Clickable + 'static, +{ + fn on_click(self, handler: impl Fn(&gpui::ClickEvent, &mut WindowContext) + 'static) -> Self { + self.map_element(|e| e.on_click(handler)) + } + + fn cursor_style(self, cursor_style: gpui::CursorStyle) -> Self { + self.map_element(|e| e.cursor_style(cursor_style)) + } +} + +impl Toggleable for gpui::AnimationElement +where + T: Toggleable + 'static, +{ + fn toggle_state(self, selected: bool) -> Self { + self.map_element(|e| e.toggle_state(selected)) + } +} pub struct PopoverMenuHandle(Rc>>>); @@ -89,8 +111,8 @@ pub struct PopoverMenu { >, >, menu_builder: Option Option> + 'static>>, - anchor: AnchorCorner, - attach: 
Option, + anchor: Corner, + attach: Option, offset: Option>, trigger_handle: Option>, full_width: bool, @@ -103,7 +125,7 @@ impl PopoverMenu { id: id.into(), child_builder: None, menu_builder: None, - anchor: AnchorCorner::TopLeft, + anchor: Corner::TopLeft, attach: None, offset: None, trigger_handle: None, @@ -129,7 +151,7 @@ impl PopoverMenu { pub fn trigger(mut self, t: T) -> Self { self.child_builder = Some(Box::new(|menu, builder| { let open = menu.borrow().is_some(); - t.selected(open) + t.toggle_state(open) .when_some(builder, |el, builder| { el.on_click(move |_, cx| show_menu(&builder, &menu, cx)) }) @@ -140,13 +162,13 @@ impl PopoverMenu { /// anchor defines which corner of the menu to anchor to the attachment point /// (by default the cursor position, but see attach) - pub fn anchor(mut self, anchor: AnchorCorner) -> Self { + pub fn anchor(mut self, anchor: Corner) -> Self { self.anchor = anchor; self } /// attach defines which corner of the handle to attach the menu's anchor to - pub fn attach(mut self, attach: AnchorCorner) -> Self { + pub fn attach(mut self, attach: Corner) -> Self { self.attach = Some(attach); self } @@ -157,12 +179,12 @@ impl PopoverMenu { self } - fn resolved_attach(&self) -> AnchorCorner { + fn resolved_attach(&self) -> Corner { self.attach.unwrap_or(match self.anchor { - AnchorCorner::TopLeft => AnchorCorner::BottomLeft, - AnchorCorner::TopRight => AnchorCorner::BottomRight, - AnchorCorner::BottomLeft => AnchorCorner::TopLeft, - AnchorCorner::BottomRight => AnchorCorner::TopRight, + Corner::TopLeft => Corner::BottomLeft, + Corner::TopRight => Corner::BottomRight, + Corner::BottomLeft => Corner::TopLeft, + Corner::BottomRight => Corner::TopRight, }) } @@ -171,8 +193,8 @@ impl PopoverMenu { // Default offset = 4px padding + 1px border let offset = rems_from_px(5.) 
* cx.rem_size(); match self.anchor { - AnchorCorner::TopRight | AnchorCorner::BottomRight => point(offset, px(0.)), - AnchorCorner::TopLeft | AnchorCorner::BottomLeft => point(-offset, px(0.)), + Corner::TopRight | Corner::BottomRight => point(offset, px(0.)), + Corner::TopLeft | Corner::BottomLeft => point(-offset, px(0.)), } }) } @@ -254,13 +276,14 @@ impl Element for PopoverMenu { let mut menu_layout_id = None; let menu_element = element_state.menu.borrow_mut().as_mut().map(|menu| { + let offset = self.resolved_offset(cx); let mut anchored = anchored() .snap_to_window_with_margin(px(8.)) - .anchor(self.anchor); + .anchor(self.anchor) + .offset(offset); if let Some(child_bounds) = element_state.child_bounds { - anchored = anchored.position( - self.resolved_attach().corner(child_bounds) + self.resolved_offset(cx), - ); + anchored = + anchored.position(child_bounds.corner(self.resolved_attach()) + offset); } let mut element = deferred(anchored.child(div().occlude().child(menu.clone()))) .with_priority(1) diff --git a/crates/ui/src/components/right_click_menu.rs b/crates/ui/src/components/right_click_menu.rs index f68b85016c..03e168cbca 100644 --- a/crates/ui/src/components/right_click_menu.rs +++ b/crates/ui/src/components/right_click_menu.rs @@ -3,18 +3,17 @@ use std::{cell::RefCell, rc::Rc}; use gpui::{ - anchored, deferred, div, px, AnchorCorner, AnyElement, Bounds, DismissEvent, DispatchPhase, - Element, ElementId, GlobalElementId, Hitbox, InteractiveElement, IntoElement, LayoutId, - ManagedView, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, View, VisualContext, - WindowContext, + anchored, deferred, div, px, AnyElement, Bounds, Corner, DismissEvent, DispatchPhase, Element, + ElementId, GlobalElementId, Hitbox, InteractiveElement, IntoElement, LayoutId, ManagedView, + MouseButton, MouseDownEvent, ParentElement, Pixels, Point, View, VisualContext, WindowContext, }; pub struct RightClickMenu { id: ElementId, child_builder: Option AnyElement + 
'static>>, menu_builder: Option View + 'static>>, - anchor: Option, - attach: Option, + anchor: Option, + attach: Option, } impl RightClickMenu { @@ -30,13 +29,13 @@ impl RightClickMenu { /// anchor defines which corner of the menu to anchor to the attachment point /// (by default the cursor position, but see attach) - pub fn anchor(mut self, anchor: AnchorCorner) -> Self { + pub fn anchor(mut self, anchor: Corner) -> Self { self.anchor = Some(anchor); self } /// attach defines which corner of the handle to attach the menu's anchor to - pub fn attach(mut self, attach: AnchorCorner) -> Self { + pub fn attach(mut self, attach: Corner) -> Self { self.attach = Some(attach); self } @@ -238,7 +237,7 @@ impl Element for RightClickMenu { *menu.borrow_mut() = Some(new_menu); *position.borrow_mut() = if let Some(child_bounds) = child_bounds { if let Some(attach) = attach { - attach.corner(child_bounds) + child_bounds.corner(attach) } else { cx.mouse_position() } diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index 7dbda4b3fb..dea8b3a396 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -236,12 +236,12 @@ impl Element for Scrollbar { let padded_bounds = if is_vertical { Bounds::from_corners( bounds.origin + point(Pixels::ZERO, extra_padding), - bounds.lower_right() - point(Pixels::ZERO, extra_padding * 3), + bounds.bottom_right() - point(Pixels::ZERO, extra_padding * 3), ) } else { Bounds::from_corners( bounds.origin + point(extra_padding, Pixels::ZERO), - bounds.lower_right() - point(extra_padding * 3, Pixels::ZERO), + bounds.bottom_right() - point(extra_padding * 3, Pixels::ZERO), ) }; diff --git a/crates/ui/src/components/stories/button.rs b/crates/ui/src/components/stories/button.rs index c3fcdc5ae9..7701a278c9 100644 --- a/crates/ui/src/components/stories/button.rs +++ b/crates/ui/src/components/stories/button.rs @@ -13,11 +13,11 @@ impl Render for ButtonStory { 
.child(Story::label("Default")) .child(Button::new("default_filled", "Click me")) .child(Story::label("Selected")) - .child(Button::new("selected_filled", "Click me").selected(true)) + .child(Button::new("selected_filled", "Click me").toggle_state(true)) .child(Story::label("Selected with `selected_label`")) .child( Button::new("selected_label_filled", "Click me") - .selected(true) + .toggle_state(true) .selected_label("I have been selected"), ) .child(Story::label("With `label_color`")) @@ -27,7 +27,7 @@ impl Render for ButtonStory { .child(Story::label("Selected with `icon`")) .child( Button::new("filled_and_selected_with_icon", "Click me") - .selected(true) + .toggle_state(true) .icon(IconName::FileGit), ) .child(Story::label("Default (Subtle)")) diff --git a/crates/ui/src/components/stories/context_menu.rs b/crates/ui/src/components/stories/context_menu.rs index e7218a6867..5b4f88457b 100644 --- a/crates/ui/src/components/stories/context_menu.rs +++ b/crates/ui/src/components/stories/context_menu.rs @@ -1,4 +1,4 @@ -use gpui::{actions, AnchorCorner, Render, View}; +use gpui::{actions, Corner, Render, View}; use story::Story; use crate::prelude::*; @@ -47,8 +47,8 @@ impl Render for ContextMenuStory { .child( right_click_menu("test1") .trigger(Label::new("BOTTOM LEFT")) - .anchor(AnchorCorner::BottomLeft) - .attach(AnchorCorner::TopLeft) + .anchor(Corner::BottomLeft) + .attach(Corner::TopLeft) .menu(move |cx| build_menu(cx, "bottom left")), ), ) @@ -60,14 +60,14 @@ impl Render for ContextMenuStory { .child( right_click_menu("test3") .trigger(Label::new("TOP RIGHT")) - .anchor(AnchorCorner::TopRight) + .anchor(Corner::TopRight) .menu(move |cx| build_menu(cx, "top right")), ) .child( right_click_menu("test4") .trigger(Label::new("BOTTOM RIGHT")) - .anchor(AnchorCorner::BottomRight) - .attach(AnchorCorner::TopRight) + .anchor(Corner::BottomRight) + .attach(Corner::TopRight) .menu(move |cx| build_menu(cx, "bottom right")), ), ) diff --git 
a/crates/ui/src/components/stories/icon_button.rs b/crates/ui/src/components/stories/icon_button.rs index 6d787e80b1..fb9b33e12b 100644 --- a/crates/ui/src/components/stories/icon_button.rs +++ b/crates/ui/src/components/stories/icon_button.rs @@ -21,7 +21,7 @@ impl Render for IconButtonStory { let selected_button = StoryItem::new( "Selected", - IconButton::new("selected_icon_button", IconName::Hash).selected(true), + IconButton::new("selected_icon_button", IconName::Hash).toggle_state(true), ) .description("Displays an icon button that is selected.") .usage( @@ -33,7 +33,7 @@ impl Render for IconButtonStory { let selected_with_selected_icon = StoryItem::new( "Selected with `selected_icon`", IconButton::new("selected_with_selected_icon_button", IconName::AudioOn) - .selected(true) + .toggle_state(true) .selected_icon(IconName::AudioOff), ) .description( @@ -89,7 +89,7 @@ impl Render for IconButtonStory { let selected_with_tooltip_button = StoryItem::new( "Selected with `tooltip`", IconButton::new("selected_with_tooltip_button", IconName::InlayHint) - .selected(true) + .toggle_state(true) .tooltip(|cx| Tooltip::text("Toggle inlay hints", cx)), ) .description("Displays a selected icon button with tooltip.") diff --git a/crates/ui/src/components/stories/tab.rs b/crates/ui/src/components/stories/tab.rs index eb0dd084b9..4f90268e6e 100644 --- a/crates/ui/src/components/stories/tab.rs +++ b/crates/ui/src/components/stories/tab.rs @@ -48,7 +48,7 @@ impl Render for TabStory { h_flex() .child( Tab::new("tab_1") - .selected(true) + .toggle_state(true) .position(TabPosition::First) .child("Tab 1"), ) @@ -85,7 +85,7 @@ impl Render for TabStory { .child( Tab::new("tab_4") .position(TabPosition::Last) - .selected(true) + .toggle_state(true) .child("Tab 4"), ), ) @@ -100,7 +100,7 @@ impl Render for TabStory { .child( Tab::new("tab_2") .position(TabPosition::Middle(Ordering::Equal)) - .selected(true) + .toggle_state(true) .child("Tab 2"), ) .child( diff --git 
a/crates/ui/src/components/stories/tab_bar.rs b/crates/ui/src/components/stories/tab_bar.rs index d6d42fa5e0..dbce893641 100644 --- a/crates/ui/src/components/stories/tab_bar.rs +++ b/crates/ui/src/components/stories/tab_bar.rs @@ -13,7 +13,7 @@ impl Render for TabBarStory { let tabs = (0..tab_count) .map(|index| { Tab::new(index) - .selected(index == selected_tab_index) + .toggle_state(index == selected_tab_index) .position(if index == 0 { TabPosition::First } else if index == tab_count - 1 { diff --git a/crates/ui/src/components/stories/toggle_button.rs b/crates/ui/src/components/stories/toggle_button.rs index 86f19d34c4..88523d2ce5 100644 --- a/crates/ui/src/components/stories/toggle_button.rs +++ b/crates/ui/src/components/stories/toggle_button.rs @@ -68,7 +68,7 @@ impl Render for ToggleButtonStory { ToggleButton::new(2, "Banana") .style(ButtonStyle::Filled) .size(ButtonSize::Large) - .selected(true) + .toggle_state(true) .middle(), ) .child( diff --git a/crates/ui/src/components/tab.rs b/crates/ui/src/components/tab.rs index e33fc732da..17962cdbc5 100644 --- a/crates/ui/src/components/tab.rs +++ b/crates/ui/src/components/tab.rs @@ -91,8 +91,8 @@ impl InteractiveElement for Tab { impl StatefulInteractiveElement for Tab {} -impl Selectable for Tab { - fn selected(mut self, selected: bool) -> Self { +impl Toggleable for Tab { + fn toggle_state(mut self, selected: bool) -> Self { self.selected = selected; self } diff --git a/crates/ui/src/components/table.rs b/crates/ui/src/components/table.rs index 0ef5eda7b7..796250947f 100644 --- a/crates/ui/src/components/table.rs +++ b/crates/ui/src/components/table.rs @@ -160,7 +160,7 @@ impl ComponentPreview for Table { ExampleLabelSide::Top } - fn examples(_: &WindowContext) -> Vec> { + fn examples(_: &mut WindowContext) -> Vec> { vec![ example_group(vec![ single_example( diff --git a/crates/ui/src/components/toggle.rs b/crates/ui/src/components/toggle.rs new file mode 100644 index 0000000000..45d0cb5bf7 --- /dev/null +++ 
b/crates/ui/src/components/toggle.rs @@ -0,0 +1,708 @@ +use gpui::{div, hsla, prelude::*, ElementId, Hsla, IntoElement, Styled, WindowContext}; +use std::sync::Arc; + +use crate::utils::is_light; +use crate::{prelude::*, ElevationIndex}; +use crate::{Color, Icon, IconName, ToggleState}; + +// TODO: Checkbox, CheckboxWithLabel, Switch, SwitchWithLabel all could be +// restructured to use a ToggleLike, similar to Button/Buttonlike, Label/Labellike + +/// Creates a new checkbox. +pub fn checkbox(id: impl Into, toggle_state: ToggleState) -> Checkbox { + Checkbox::new(id, toggle_state) +} + +/// Creates a new switch. +pub fn switch(id: impl Into, toggle_state: ToggleState) -> Switch { + Switch::new(id, toggle_state) +} + +/// The visual style of a toggle. +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub enum ToggleStyle { + /// Toggle has a transparent background + #[default] + Ghost, + /// Toggle has a filled background based on the + /// elevation index of the parent container + ElevationBased(ElevationIndex), + /// A custom style using a color to tint the toggle + Custom(Hsla), +} + +/// # Checkbox +/// +/// Checkboxes are used for multiple choices, not for mutually exclusive choices. +/// Each checkbox works independently from other checkboxes in the list, +/// therefore checking an additional box does not affect any other selections. +#[derive(IntoElement)] +pub struct Checkbox { + id: ElementId, + toggle_state: ToggleState, + disabled: bool, + on_click: Option>, + filled: bool, + style: ToggleStyle, +} + +impl Checkbox { + /// Creates a new [`Checkbox`]. + pub fn new(id: impl Into, checked: ToggleState) -> Self { + Self { + id: id.into(), + toggle_state: checked, + disabled: false, + on_click: None, + filled: false, + style: ToggleStyle::default(), + } + } + + /// Sets the disabled state of the [`Checkbox`]. 
+ pub fn disabled(mut self, disabled: bool) -> Self { + self.disabled = disabled; + self + } + + /// Binds a handler to the [`Checkbox`] that will be called when clicked. + pub fn on_click( + mut self, + handler: impl Fn(&ToggleState, &mut WindowContext) + 'static, + ) -> Self { + self.on_click = Some(Box::new(handler)); + self + } + + /// Sets the `fill` setting of the checkbox, indicating whether it should be filled. + pub fn fill(mut self) -> Self { + self.filled = true; + self + } + + /// Sets the style of the checkbox using the specified [`ToggleStyle`]. + pub fn style(mut self, style: ToggleStyle) -> Self { + self.style = style; + self + } + + /// Match the style of the checkbox to the current elevation using [`ToggleStyle::ElevationBased`]. + pub fn elevation(mut self, elevation: ElevationIndex) -> Self { + self.style = ToggleStyle::ElevationBased(elevation); + self + } +} + +impl Checkbox { + fn bg_color(&self, cx: &WindowContext) -> Hsla { + let style = self.style.clone(); + match (style, self.filled) { + (ToggleStyle::Ghost, false) => cx.theme().colors().ghost_element_background, + (ToggleStyle::Ghost, true) => cx.theme().colors().element_background, + (ToggleStyle::ElevationBased(_), false) => gpui::transparent_black(), + (ToggleStyle::ElevationBased(elevation), true) => elevation.darker_bg(cx), + (ToggleStyle::Custom(_), false) => gpui::transparent_black(), + (ToggleStyle::Custom(color), true) => color.opacity(0.2), + } + } + + fn border_color(&self, cx: &WindowContext) -> Hsla { + if self.disabled { + return cx.theme().colors().border_disabled; + } + + match self.style.clone() { + ToggleStyle::Ghost => cx.theme().colors().border_variant, + ToggleStyle::ElevationBased(elevation) => elevation.on_elevation_bg(cx), + ToggleStyle::Custom(color) => color.opacity(0.3), + } + } +} + +impl RenderOnce for Checkbox { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let group_id = format!("checkbox_group_{:?}", self.id); + let icon = match 
self.toggle_state { + ToggleState::Selected => Some(Icon::new(IconName::Check).size(IconSize::Small).color( + if self.disabled { + Color::Disabled + } else { + Color::Selected + }, + )), + ToggleState::Indeterminate => Some( + Icon::new(IconName::Dash) + .size(IconSize::Small) + .color(if self.disabled { + Color::Disabled + } else { + Color::Selected + }), + ), + ToggleState::Unselected => None, + }; + + let bg_color = self.bg_color(cx); + let border_color = self.border_color(cx); + + h_flex() + .id(self.id) + .justify_center() + .items_center() + .size(DynamicSpacing::Base20.rems(cx)) + .group(group_id.clone()) + .child( + div() + .flex() + .flex_none() + .justify_center() + .items_center() + .m(DynamicSpacing::Base04.px(cx)) + .size(DynamicSpacing::Base16.rems(cx)) + .rounded_sm() + .bg(bg_color) + .border_1() + .border_color(border_color) + .when(!self.disabled, |this| { + this.group_hover(group_id.clone(), |el| { + el.bg(cx.theme().colors().element_hover) + }) + }) + .children(icon), + ) + .when_some( + self.on_click.filter(|_| !self.disabled), + |this, on_click| { + this.on_click(move |_, cx| on_click(&self.toggle_state.inverse(), cx)) + }, + ) + } +} + +/// A [`Checkbox`] that has a [`Label`]. +#[derive(IntoElement)] +pub struct CheckboxWithLabel { + id: ElementId, + label: Label, + checked: ToggleState, + on_click: Arc, + filled: bool, + style: ToggleStyle, +} + +impl CheckboxWithLabel { + /// Creates a checkbox with an attached label. + pub fn new( + id: impl Into, + label: Label, + checked: ToggleState, + on_click: impl Fn(&ToggleState, &mut WindowContext) + 'static, + ) -> Self { + Self { + id: id.into(), + label, + checked, + on_click: Arc::new(on_click), + filled: false, + style: ToggleStyle::default(), + } + } + + /// Sets the style of the checkbox using the specified [`ToggleStyle`]. 
+ pub fn style(mut self, style: ToggleStyle) -> Self { + self.style = style; + self + } + + /// Match the style of the checkbox to the current elevation using [`ToggleStyle::ElevationBased`]. + pub fn elevation(mut self, elevation: ElevationIndex) -> Self { + self.style = ToggleStyle::ElevationBased(elevation); + self + } + + /// Sets the `fill` setting of the checkbox, indicating whether it should be filled. + pub fn fill(mut self) -> Self { + self.filled = true; + self + } +} + +impl RenderOnce for CheckboxWithLabel { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + h_flex() + .gap(DynamicSpacing::Base08.rems(cx)) + .child( + Checkbox::new(self.id.clone(), self.checked) + .style(self.style) + .when(self.filled, Checkbox::fill) + .on_click({ + let on_click = self.on_click.clone(); + move |checked, cx| { + (on_click)(checked, cx); + } + }), + ) + .child( + div() + .id(SharedString::from(format!("{}-label", self.id))) + .on_click(move |_event, cx| { + (self.on_click)(&self.checked.inverse(), cx); + }) + .child(self.label), + ) + } +} + +/// # Switch +/// +/// Switches are used to represent opposite states, such as enabled or disabled. +#[derive(IntoElement)] +pub struct Switch { + id: ElementId, + toggle_state: ToggleState, + disabled: bool, + on_click: Option>, +} + +impl Switch { + /// Creates a new [`Switch`]. + pub fn new(id: impl Into, state: ToggleState) -> Self { + Self { + id: id.into(), + toggle_state: state, + disabled: false, + on_click: None, + } + } + + /// Sets the disabled state of the [`Switch`]. + pub fn disabled(mut self, disabled: bool) -> Self { + self.disabled = disabled; + self + } + + /// Binds a handler to the [`Switch`] that will be called when clicked. 
+ pub fn on_click( + mut self, + handler: impl Fn(&ToggleState, &mut WindowContext) + 'static, + ) -> Self { + self.on_click = Some(Box::new(handler)); + self + } +} + +impl RenderOnce for Switch { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let is_on = self.toggle_state == ToggleState::Selected; + let adjust_ratio = if is_light(cx) { 1.5 } else { 1.0 }; + let base_color = cx.theme().colors().text; + + let bg_color = if is_on { + cx.theme() + .colors() + .element_background + .blend(base_color.opacity(0.08)) + } else { + cx.theme().colors().element_background + }; + let thumb_color = base_color.opacity(0.8); + let thumb_hover_color = base_color; + let border_color = cx.theme().colors().border_variant; + // Lighter themes need higher contrast borders + let border_hover_color = if is_on { + border_color.blend(base_color.opacity(0.16 * adjust_ratio)) + } else { + border_color.blend(base_color.opacity(0.05 * adjust_ratio)) + }; + let thumb_opacity = match (is_on, self.disabled) { + (_, true) => 0.2, + (true, false) => 1.0, + (false, false) => 0.5, + }; + + let group_id = format!("switch_group_{:?}", self.id); + + h_flex() + .id(self.id) + .items_center() + .w(DynamicSpacing::Base32.rems(cx)) + .h(DynamicSpacing::Base20.rems(cx)) + .group(group_id.clone()) + .child( + h_flex() + .when(is_on, |on| on.justify_end()) + .when(!is_on, |off| off.justify_start()) + .items_center() + .size_full() + .rounded_full() + .px(DynamicSpacing::Base02.px(cx)) + .bg(bg_color) + .border_1() + .border_color(border_color) + .when(!self.disabled, |this| { + this.group_hover(group_id.clone(), |el| el.border_color(border_hover_color)) + }) + .child( + div() + .size(DynamicSpacing::Base12.rems(cx)) + .rounded_full() + .bg(thumb_color) + .when(!self.disabled, |this| { + this.group_hover(group_id.clone(), |el| el.bg(thumb_hover_color)) + }) + .opacity(thumb_opacity), + ), + ) + .when_some( + self.on_click.filter(|_| !self.disabled), + |this, on_click| { + this.on_click(move 
|_, cx| on_click(&self.toggle_state.inverse(), cx)) + }, + ) + } +} + +/// A [`Switch`] that has a [`Label`]. +#[derive(IntoElement)] +pub struct SwitchWithLabel { + id: ElementId, + label: Label, + checked: ToggleState, + on_click: Arc, +} + +impl SwitchWithLabel { + /// Creates a switch with an attached label. + pub fn new( + id: impl Into, + label: Label, + checked: ToggleState, + on_click: impl Fn(&ToggleState, &mut WindowContext) + 'static, + ) -> Self { + Self { + id: id.into(), + label, + checked, + on_click: Arc::new(on_click), + } + } +} + +impl RenderOnce for SwitchWithLabel { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + h_flex() + .gap(DynamicSpacing::Base08.rems(cx)) + .child(Switch::new(self.id.clone(), self.checked).on_click({ + let on_click = self.on_click.clone(); + move |checked, cx| { + (on_click)(checked, cx); + } + })) + .child( + div() + .id(SharedString::from(format!("{}-label", self.id))) + .on_click(move |_event, cx| { + (self.on_click)(&self.checked.inverse(), cx); + }) + .child(self.label), + ) + } +} + +impl ComponentPreview for Checkbox { + fn description() -> impl Into> { + "A checkbox lets people choose between a pair of opposing states, like enabled and disabled, using a different appearance to indicate each state." 
+ } + + fn examples(_: &mut WindowContext) -> Vec> { + vec![ + example_group_with_title( + "Default", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_unselected", ToggleState::Unselected), + ), + single_example( + "Indeterminate", + Checkbox::new("checkbox_indeterminate", ToggleState::Indeterminate), + ), + single_example( + "Selected", + Checkbox::new("checkbox_selected", ToggleState::Selected), + ), + ], + ), + example_group_with_title( + "Default (Filled)", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_unselected", ToggleState::Unselected).fill(), + ), + single_example( + "Indeterminate", + Checkbox::new("checkbox_indeterminate", ToggleState::Indeterminate).fill(), + ), + single_example( + "Selected", + Checkbox::new("checkbox_selected", ToggleState::Selected).fill(), + ), + ], + ), + example_group_with_title( + "ElevationBased", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_unfilled_unselected", ToggleState::Unselected) + .style(ToggleStyle::ElevationBased(ElevationIndex::EditorSurface)), + ), + single_example( + "Indeterminate", + Checkbox::new( + "checkbox_unfilled_indeterminate", + ToggleState::Indeterminate, + ) + .style(ToggleStyle::ElevationBased(ElevationIndex::EditorSurface)), + ), + single_example( + "Selected", + Checkbox::new("checkbox_unfilled_selected", ToggleState::Selected) + .style(ToggleStyle::ElevationBased(ElevationIndex::EditorSurface)), + ), + ], + ), + example_group_with_title( + "ElevationBased (Filled)", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_filled_unselected", ToggleState::Unselected) + .fill() + .style(ToggleStyle::ElevationBased(ElevationIndex::EditorSurface)), + ), + single_example( + "Indeterminate", + Checkbox::new("checkbox_filled_indeterminate", ToggleState::Indeterminate) + .fill() + .style(ToggleStyle::ElevationBased(ElevationIndex::EditorSurface)), + ), + single_example( + "Selected", + Checkbox::new("checkbox_filled_selected", 
ToggleState::Selected) + .fill() + .style(ToggleStyle::ElevationBased(ElevationIndex::EditorSurface)), + ), + ], + ), + example_group_with_title( + "Custom Color", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_custom_unselected", ToggleState::Unselected) + .style(ToggleStyle::Custom(hsla(142.0 / 360., 0.68, 0.45, 0.7))), + ), + single_example( + "Indeterminate", + Checkbox::new("checkbox_custom_indeterminate", ToggleState::Indeterminate) + .style(ToggleStyle::Custom(hsla(142.0 / 360., 0.68, 0.45, 0.7))), + ), + single_example( + "Selected", + Checkbox::new("checkbox_custom_selected", ToggleState::Selected) + .style(ToggleStyle::Custom(hsla(142.0 / 360., 0.68, 0.45, 0.7))), + ), + ], + ), + example_group_with_title( + "Custom Color (Filled)", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_custom_filled_unselected", ToggleState::Unselected) + .fill() + .style(ToggleStyle::Custom(hsla(142.0 / 360., 0.68, 0.45, 0.7))), + ), + single_example( + "Indeterminate", + Checkbox::new( + "checkbox_custom_filled_indeterminate", + ToggleState::Indeterminate, + ) + .fill() + .style(ToggleStyle::Custom(hsla( + 142.0 / 360., + 0.68, + 0.45, + 0.7, + ))), + ), + single_example( + "Selected", + Checkbox::new("checkbox_custom_filled_selected", ToggleState::Selected) + .fill() + .style(ToggleStyle::Custom(hsla(142.0 / 360., 0.68, 0.45, 0.7))), + ), + ], + ), + example_group_with_title( + "Disabled", + vec![ + single_example( + "Unselected", + Checkbox::new("checkbox_disabled_unselected", ToggleState::Unselected) + .disabled(true), + ), + single_example( + "Indeterminate", + Checkbox::new( + "checkbox_disabled_indeterminate", + ToggleState::Indeterminate, + ) + .disabled(true), + ), + single_example( + "Selected", + Checkbox::new("checkbox_disabled_selected", ToggleState::Selected) + .disabled(true), + ), + ], + ), + example_group_with_title( + "Disabled (Filled)", + vec![ + single_example( + "Unselected", + Checkbox::new( + 
"checkbox_disabled_filled_unselected", + ToggleState::Unselected, + ) + .fill() + .disabled(true), + ), + single_example( + "Indeterminate", + Checkbox::new( + "checkbox_disabled_filled_indeterminate", + ToggleState::Indeterminate, + ) + .fill() + .disabled(true), + ), + single_example( + "Selected", + Checkbox::new("checkbox_disabled_filled_selected", ToggleState::Selected) + .fill() + .disabled(true), + ), + ], + ), + ] + } +} + +impl ComponentPreview for Switch { + fn description() -> impl Into> { + "A switch toggles between two mutually exclusive states, typically used for enabling or disabling a setting." + } + + fn examples(_cx: &mut WindowContext) -> Vec> { + vec![ + example_group_with_title( + "Default", + vec![ + single_example( + "Off", + Switch::new("switch_off", ToggleState::Unselected).on_click(|_, _cx| {}), + ), + single_example( + "On", + Switch::new("switch_on", ToggleState::Selected).on_click(|_, _cx| {}), + ), + ], + ), + example_group_with_title( + "Disabled", + vec![ + single_example( + "Off", + Switch::new("switch_disabled_off", ToggleState::Unselected).disabled(true), + ), + single_example( + "On", + Switch::new("switch_disabled_on", ToggleState::Selected).disabled(true), + ), + ], + ), + ] + } +} + +impl ComponentPreview for CheckboxWithLabel { + fn description() -> impl Into> { + "A checkbox with an associated label, allowing users to select an option while providing a descriptive text." 
+ } + + fn examples(_: &mut WindowContext) -> Vec> { + vec![example_group(vec![ + single_example( + "Unselected", + CheckboxWithLabel::new( + "checkbox_with_label_unselected", + Label::new("Always save on quit"), + ToggleState::Unselected, + |_, _| {}, + ), + ), + single_example( + "Indeterminate", + CheckboxWithLabel::new( + "checkbox_with_label_indeterminate", + Label::new("Always save on quit"), + ToggleState::Indeterminate, + |_, _| {}, + ), + ), + single_example( + "Selected", + CheckboxWithLabel::new( + "checkbox_with_label_selected", + Label::new("Always save on quit"), + ToggleState::Selected, + |_, _| {}, + ), + ), + ])] + } +} + +impl ComponentPreview for SwitchWithLabel { + fn description() -> impl Into> { + "A switch with an associated label, allowing users to select an option while providing a descriptive text." + } + + fn examples(_: &mut WindowContext) -> Vec> { + vec![example_group(vec![ + single_example( + "Off", + SwitchWithLabel::new( + "switch_with_label_unselected", + Label::new("Always save on quit"), + ToggleState::Unselected, + |_, _| {}, + ), + ), + single_example( + "On", + SwitchWithLabel::new( + "switch_with_label_selected", + Label::new("Always save on quit"), + ToggleState::Selected, + |_, _| {}, + ), + ), + ])] + } +} diff --git a/crates/ui/src/components/tool_strip.rs b/crates/ui/src/components/tool_strip.rs index 707c5ed06d..ff08fda445 100644 --- a/crates/ui/src/components/tool_strip.rs +++ b/crates/ui/src/components/tool_strip.rs @@ -1,7 +1,8 @@ #![allow(missing_docs)] +use gpui::Axis; + use crate::prelude::*; -use gpui::*; #[derive(IntoElement)] pub struct ToolStrip { diff --git a/crates/ui/src/components/tooltip.rs b/crates/ui/src/components/tooltip.rs index 89b89786b0..e8bb8cfb2c 100644 --- a/crates/ui/src/components/tooltip.rs +++ b/crates/ui/src/components/tooltip.rs @@ -49,6 +49,7 @@ impl Tooltip { }) .into() } + pub fn with_meta( title: impl Into, action: Option<&dyn Action>, @@ -63,6 +64,22 @@ impl Tooltip { .into() } + pub 
fn with_meta_in( + title: impl Into, + action: Option<&dyn Action>, + meta: impl Into, + focus_handle: &FocusHandle, + cx: &mut WindowContext, + ) -> AnyView { + cx.new_view(|cx| Self { + title: title.into(), + meta: Some(meta.into()), + key_binding: action + .and_then(|action| KeyBinding::for_action_in(action, focus_handle, cx)), + }) + .into() + } + pub fn new(title: impl Into) -> Self { Self { title: title.into(), @@ -88,7 +105,7 @@ impl Render for Tooltip { el.child( h_flex() .gap_4() - .child(self.title.clone()) + .child(div().max_w_72().child(self.title.clone())) .when_some(self.key_binding.clone(), |this, key_binding| { this.justify_between().child(key_binding) }), diff --git a/crates/ui/src/prelude.rs b/crates/ui/src/prelude.rs index e763d0b663..887070f50a 100644 --- a/crates/ui/src/prelude.rs +++ b/crates/ui/src/prelude.rs @@ -12,8 +12,8 @@ pub use crate::traits::clickable::*; pub use crate::traits::component_preview::*; pub use crate::traits::disableable::*; pub use crate::traits::fixed::*; -pub use crate::traits::selectable::*; pub use crate::traits::styled_ext::*; +pub use crate::traits::toggleable::*; pub use crate::traits::visible_on_hover::*; pub use crate::DynamicSpacing; pub use crate::{h_flex, h_group, v_flex, v_group}; diff --git a/crates/ui/src/styles/elevation.rs b/crates/ui/src/styles/elevation.rs index 932fd3a944..913d0e812e 100644 --- a/crates/ui/src/styles/elevation.rs +++ b/crates/ui/src/styles/elevation.rs @@ -22,12 +22,8 @@ pub enum ElevationIndex { EditorSurface, /// A surface that is elevated above the primary surface. but below washes, models, and dragged elements. ElevatedSurface, - /// A surface that is above all non-modal surfaces, and separates the app from focused intents, like dialogs, alerts, modals, etc. - Wash, /// A surface above the [ElevationIndex::Wash] that is used for dialogs, alerts, modals, etc. 
ModalSurface, - /// A surface above all other surfaces, reserved exclusively for dragged elements, like a dragged file, tab or other draggable element. - DraggedElement, } impl Display for ElevationIndex { @@ -37,9 +33,7 @@ impl Display for ElevationIndex { ElevationIndex::Surface => write!(f, "Surface"), ElevationIndex::EditorSurface => write!(f, "Editor Surface"), ElevationIndex::ElevatedSurface => write!(f, "Elevated Surface"), - ElevationIndex::Wash => write!(f, "Wash"), ElevationIndex::ModalSurface => write!(f, "Modal Surface"), - ElevationIndex::DraggedElement => write!(f, "Dragged Element"), } } } @@ -90,9 +84,31 @@ impl ElevationIndex { ElevationIndex::Surface => cx.theme().colors().surface_background, ElevationIndex::EditorSurface => cx.theme().colors().editor_background, ElevationIndex::ElevatedSurface => cx.theme().colors().elevated_surface_background, - ElevationIndex::Wash => gpui::transparent_black(), ElevationIndex::ModalSurface => cx.theme().colors().elevated_surface_background, - ElevationIndex::DraggedElement => gpui::transparent_black(), + } + } + + /// Returns a color that is appropriate for a filled element on this elevation + pub fn on_elevation_bg(&self, cx: &WindowContext) -> Hsla { + match self { + ElevationIndex::Background => cx.theme().colors().surface_background, + ElevationIndex::Surface => cx.theme().colors().background, + ElevationIndex::EditorSurface => cx.theme().colors().surface_background, + ElevationIndex::ElevatedSurface => cx.theme().colors().background, + ElevationIndex::ModalSurface => cx.theme().colors().background, + } + } + + /// Attempts to return a darker background color than the current elevation index's background. + /// + /// If the current background color is already dark, it will return a lighter color instead.
+ pub fn darker_bg(&self, cx: &WindowContext) -> Hsla { + match self { + ElevationIndex::Background => cx.theme().colors().surface_background, + ElevationIndex::Surface => cx.theme().colors().editor_background, + ElevationIndex::EditorSurface => cx.theme().colors().surface_background, + ElevationIndex::ElevatedSurface => cx.theme().colors().editor_background, + ElevationIndex::ModalSurface => cx.theme().colors().editor_background, } } } diff --git a/crates/ui/src/traits.rs b/crates/ui/src/traits.rs index 0898375e96..1b4d761711 100644 --- a/crates/ui/src/traits.rs +++ b/crates/ui/src/traits.rs @@ -2,6 +2,6 @@ pub mod clickable; pub mod component_preview; pub mod disableable; pub mod fixed; -pub mod selectable; pub mod styled_ext; +pub mod toggleable; pub mod visible_on_hover; diff --git a/crates/ui/src/traits/component_preview.rs b/crates/ui/src/traits/component_preview.rs index eefc1e8228..aab01355a1 100644 --- a/crates/ui/src/traits/component_preview.rs +++ b/crates/ui/src/traits/component_preview.rs @@ -30,20 +30,20 @@ pub trait ComponentPreview: IntoElement { ExampleLabelSide::default() } - fn examples(_cx: &WindowContext) -> Vec>; + fn examples(_cx: &mut WindowContext) -> Vec>; fn custom_example(_cx: &WindowContext) -> impl Into> { None:: } - fn component_previews(cx: &WindowContext) -> Vec { + fn component_previews(cx: &mut WindowContext) -> Vec { Self::examples(cx) .into_iter() .map(|example| Self::render_example_group(example)) .collect() } - fn render_component_previews(cx: &WindowContext) -> AnyElement { + fn render_component_previews(cx: &mut WindowContext) -> AnyElement { let title = Self::title(); let (source, title) = title .rsplit_once("::") diff --git a/crates/ui/src/traits/selectable.rs b/crates/ui/src/traits/toggleable.rs similarity index 74% rename from crates/ui/src/traits/selectable.rs rename to crates/ui/src/traits/toggleable.rs index 342a16a89e..2da5a64ad4 100644 --- a/crates/ui/src/traits/selectable.rs +++ b/crates/ui/src/traits/toggleable.rs 
@@ -1,14 +1,15 @@ -/// A trait for elements that can be selected. +/// A trait for elements that can be toggled. /// -/// Generally used to enable "toggle" or "active" behavior and styles on an element through the [`Selection`] status. -pub trait Selectable { +/// Implement this for elements that are visually distinct +/// when in two opposing states, like checkboxes or switches. +pub trait Toggleable { /// Sets whether the element is selected. - fn selected(self, selected: bool) -> Self; + fn toggle_state(self, selected: bool) -> Self; } /// Represents the selection status of an element. #[derive(Debug, Default, PartialEq, Eq, Hash, Clone, Copy)] -pub enum Selection { +pub enum ToggleState { /// The element is not selected. #[default] Unselected, @@ -18,7 +19,7 @@ pub enum Selection { Selected, } -impl Selection { +impl ToggleState { /// Returns the inverse of the current selection status. /// /// Indeterminate states become selected if inverted. @@ -30,7 +31,7 @@ impl Selection { } } -impl From for Selection { +impl From for ToggleState { fn from(selected: bool) -> Self { if selected { Self::Selected @@ -40,7 +41,7 @@ impl From for Selection { } } -impl From> for Selection { +impl From> for ToggleState { fn from(selected: Option) -> Self { match selected { Some(true) => Self::Selected, diff --git a/crates/ui/src/utils.rs b/crates/ui/src/utils.rs index 25477194dc..604ff7f4bf 100644 --- a/crates/ui/src/utils.rs +++ b/crates/ui/src/utils.rs @@ -1,9 +1,19 @@ //! UI-related utilities +use gpui::WindowContext; +use theme::ActiveTheme; + mod color_contrast; mod format_distance; +mod search_input; mod with_rem_size; pub use color_contrast::*; pub use format_distance::*; +pub use search_input::*; pub use with_rem_size::*; + +/// Returns true if the current theme is light or vibrant light. 
+pub fn is_light(cx: &WindowContext) -> bool { + cx.theme().appearance.is_light() +} diff --git a/crates/ui/src/utils/search_input.rs b/crates/ui/src/utils/search_input.rs new file mode 100644 index 0000000000..3a507f9a5a --- /dev/null +++ b/crates/ui/src/utils/search_input.rs @@ -0,0 +1,22 @@ +#![allow(missing_docs)] + +use gpui::Pixels; + +pub struct SearchInputWidth; + +impl SearchInputWidth { + /// The container size in which the input stops filling the whole width. + pub const THRESHOLD_WIDTH: f32 = 1200.0; + + /// The maximum width for the search input when the container is larger than the threshold. + pub const MAX_WIDTH: f32 = 1200.0; + + /// Calculates the actual width in pixels based on the container width. + pub fn calc_width(container_width: Pixels) -> Pixels { + if container_width.0 < Self::THRESHOLD_WIDTH { + container_width + } else { + Pixels(container_width.0.min(Self::MAX_WIDTH)) + } + } +} diff --git a/crates/ui_input/src/ui_input.rs b/crates/ui_input/src/ui_input.rs index b182e617e6..04e2ccb858 100644 --- a/crates/ui_input/src/ui_input.rs +++ b/crates/ui_input/src/ui_input.rs @@ -5,11 +5,11 @@ //! It can't be located in the `ui` crate because it depends on `editor`. //!
-use editor::*; -use gpui::*; +use editor::{Editor, EditorElement, EditorStyle}; +use gpui::{AppContext, FocusHandle, FocusableView, FontStyle, Hsla, TextStyle, View}; use settings::Settings; use theme::ThemeSettings; -use ui::*; +use ui::prelude::*; #[derive(Debug, Clone, Copy, PartialEq)] pub enum FieldLabelLayout { diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 58c4686bf9..f993bb6eff 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -24,18 +24,24 @@ futures-lite.workspace = true futures.workspace = true git2 = { workspace = true, optional = true } globset.workspace = true +itertools.workspace = true log.workspace = true rand = { workspace = true, optional = true } regex.workspace = true rust-embed.workspace = true serde.workspace = true serde_json.workspace = true +smol.workspace = true take-until = "0.2.0" tempfile = { workspace = true, optional = true } unicase.workspace = true +[target.'cfg(unix)'.dependencies] +libc.workspace = true + [target.'cfg(windows)'.dependencies] tendril = "0.4.3" +dunce = "1.0" [dev-dependencies] git2.workspace = true diff --git a/crates/util/src/command.rs b/crates/util/src/command.rs new file mode 100644 index 0000000000..85e2234991 --- /dev/null +++ b/crates/util/src/command.rs @@ -0,0 +1,32 @@ +use std::ffi::OsStr; + +#[cfg(target_os = "windows")] +const CREATE_NO_WINDOW: u32 = 0x0800_0000_u32; + +#[cfg(target_os = "windows")] +pub fn new_std_command(program: impl AsRef) -> std::process::Command { + use std::os::windows::process::CommandExt; + + let mut command = std::process::Command::new(program); + command.creation_flags(CREATE_NO_WINDOW); + command +} + +#[cfg(not(target_os = "windows"))] +pub fn new_std_command(program: impl AsRef) -> std::process::Command { + std::process::Command::new(program) +} + +#[cfg(target_os = "windows")] +pub fn new_smol_command(program: impl AsRef) -> smol::process::Command { + use smol::process::windows::CommandExt; + + let mut command = 
smol::process::Command::new(program); + command.creation_flags(CREATE_NO_WINDOW); + command +} + +#[cfg(not(target_os = "windows"))] +pub fn new_smol_command(program: impl AsRef) -> smol::process::Command { + smol::process::Command::new(program) +} diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index d629c8facc..e3b0af1fdb 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -1,5 +1,5 @@ use std::cmp; -use std::sync::OnceLock; +use std::sync::{Arc, OnceLock}; use std::{ ffi::OsStr, path::{Path, PathBuf}, @@ -95,6 +95,46 @@ impl> PathExt for T { } } +/// Due to the issue of UNC paths on Windows, which can cause bugs in various parts of Zed, introducing this `SanitizedPath` +/// leverages Rust's type system to ensure that all paths entering Zed are always "sanitized" by removing the `\\\\?\\` prefix. +/// On non-Windows operating systems, this struct is effectively a no-op. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct SanitizedPath(Arc); + +impl SanitizedPath { + pub fn starts_with(&self, prefix: &SanitizedPath) -> bool { + self.0.starts_with(&prefix.0) + } + + pub fn as_path(&self) -> &Arc { + &self.0 + } + + pub fn to_string(&self) -> String { + self.0.to_string_lossy().to_string() + } +} + +impl From for Arc { + fn from(sanitized_path: SanitizedPath) -> Self { + sanitized_path.0 + } +} + +impl> From for SanitizedPath { + #[cfg(not(target_os = "windows"))] + fn from(path: T) -> Self { + let path = path.as_ref(); + SanitizedPath(path.into()) + } + + #[cfg(target_os = "windows")] + fn from(path: T) -> Self { + let path = path.as_ref(); + SanitizedPath(dunce::simplified(path).into()) + } +} + /// A delimiter to use in `path_query:row_number:column_number` strings parsing. 
pub const FILE_ROW_COLUMN_DELIMITER: char = ':'; @@ -378,7 +418,15 @@ pub fn compare_paths( .as_deref() .map(NumericPrefixWithSuffix::from_numeric_prefixed_str); - num_and_remainder_a.cmp(&num_and_remainder_b) + num_and_remainder_a.cmp(&num_and_remainder_b).then_with(|| { + if a_is_file && b_is_file { + let ext_a = path_a.extension().unwrap_or_default(); + let ext_b = path_b.extension().unwrap_or_default(); + ext_a.cmp(ext_b) + } else { + cmp::Ordering::Equal + } + }) }); if !ordering.is_eq() { return ordering; @@ -433,6 +481,28 @@ mod tests { ); } + #[test] + fn compare_paths_with_same_name_different_extensions() { + let mut paths = vec![ + (Path::new("test_dirs/file.rs"), true), + (Path::new("test_dirs/file.txt"), true), + (Path::new("test_dirs/file.md"), true), + (Path::new("test_dirs/file"), true), + (Path::new("test_dirs/file.a"), true), + ]; + paths.sort_by(|&a, &b| compare_paths(a, b)); + assert_eq!( + paths, + vec![ + (Path::new("test_dirs/file"), true), + (Path::new("test_dirs/file.a"), true), + (Path::new("test_dirs/file.md"), true), + (Path::new("test_dirs/file.rs"), true), + (Path::new("test_dirs/file.txt"), true), + ] + ); + } + #[test] fn compare_paths_case_semi_sensitive() { let mut paths = vec![ @@ -775,4 +845,22 @@ mod tests { "Path matcher should match {path:?}" ); } + + #[test] + #[cfg(target_os = "windows")] + fn test_sanitized_path() { + let path = Path::new("C:\\Users\\someone\\test_file.rs"); + let sanitized_path = SanitizedPath::from(path); + assert_eq!( + sanitized_path.to_string(), + "C:\\Users\\someone\\test_file.rs" + ); + + let path = Path::new("\\\\?\\C:\\Users\\someone\\test_file.rs"); + let sanitized_path = SanitizedPath::from(path); + assert_eq!( + sanitized_path.to_string(), + "C:\\Users\\someone\\test_file.rs" + ); + } } diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index e27fd65ac7..6c6196756c 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -1,14 +1,16 @@ pub mod arc_cow; +pub mod command; 
pub mod fs; pub mod paths; pub mod serde; #[cfg(any(test, feature = "test-support"))] pub mod test; +use anyhow::Result; use futures::Future; - +use itertools::Either; use regex::Regex; -use std::sync::OnceLock; +use std::sync::{LazyLock, OnceLock}; use std::{ borrow::Cow, cmp::{self, Ordering}, @@ -21,6 +23,9 @@ use std::{ }; use unicase::UniCase; +#[cfg(unix)] +use anyhow::{anyhow, Context as _}; + pub use take_until::*; #[macro_export] @@ -47,10 +52,15 @@ pub fn truncate(s: &str, max_chars: usize) -> &str { pub fn truncate_and_trailoff(s: &str, max_chars: usize) -> String { debug_assert!(max_chars >= 5); + // If the string's byte length is <= max_chars, walking the string can be skipped since the + // number of chars is <= the number of bytes. + if s.len() <= max_chars { + return s.to_string(); + } let truncation_ix = s.char_indices().map(|(i, _)| i).nth(max_chars); match truncation_ix { - Some(length) => s[..length].to_string() + "…", - None => s.to_string(), + Some(index) => s[..index].to_string() + "…", + _ => s.to_string(), } } @@ -59,10 +69,19 @@ pub fn truncate_and_trailoff(s: &str, max_chars: usize) -> String { pub fn truncate_and_remove_front(s: &str, max_chars: usize) -> String { debug_assert!(max_chars >= 5); - let truncation_ix = s.char_indices().map(|(i, _)| i).nth_back(max_chars); + // If the string's byte length is <= max_chars, walking the string can be skipped since the + // number of chars is <= the number of bytes. 
+ if s.len() <= max_chars { + return s.to_string(); + } + let suffix_char_length = max_chars.saturating_sub(1); + let truncation_ix = s + .char_indices() + .map(|(i, _)| i) + .nth_back(suffix_char_length); match truncation_ix { - Some(length) => "…".to_string() + &s[length..], - None => s.to_string(), + Some(index) if index > 0 => "…".to_string() + &s[index..], + _ => s.to_string(), } } @@ -108,6 +127,129 @@ where } } +pub fn truncate_to_bottom_n_sorted_by(items: &mut Vec, limit: usize, compare: &F) +where + F: Fn(&T, &T) -> Ordering, +{ + if limit == 0 { + items.truncate(0); + } + if items.len() <= limit { + items.sort_by(compare); + return; + } + // When limit is near to items.len() it may be more efficient to sort the whole list and + // truncate, rather than always doing selection first as is done below. It's hard to analyze + // where the threshold for this should be since the quickselect style algorithm used by + // `select_nth_unstable_by` makes the prefix partially sorted, and so its work is not wasted - + // the expected number of comparisons needed by `sort_by` is less than it is for some arbitrary + // unsorted input. + items.select_nth_unstable_by(limit, compare); + items.truncate(limit); + items.sort_by(compare); +} + +#[cfg(unix)] +pub fn load_shell_from_passwd() -> Result<()> { + let buflen = match unsafe { libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) } { + n if n < 0 => 1024, + n => n as usize, + }; + let mut buffer = Vec::with_capacity(buflen); + + let mut pwd: std::mem::MaybeUninit = std::mem::MaybeUninit::uninit(); + let mut result: *mut libc::passwd = std::ptr::null_mut(); + + let uid = unsafe { libc::getuid() }; + let status = unsafe { + libc::getpwuid_r( + uid, + pwd.as_mut_ptr(), + buffer.as_mut_ptr() as *mut libc::c_char, + buflen, + &mut result, + ) + }; + let entry = unsafe { pwd.assume_init() }; + + anyhow::ensure!( + status == 0, + "call to getpwuid_r failed. 
uid: {}, status: {}", + uid, + status + ); + anyhow::ensure!(!result.is_null(), "passwd entry for uid {} not found", uid); + anyhow::ensure!( + entry.pw_uid == uid, + "passwd entry has different uid ({}) than getuid ({}) returned", + entry.pw_uid, + uid, + ); + + let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() }; + if env::var("SHELL").map_or(true, |shell_env| shell_env != shell) { + log::info!( + "updating SHELL environment variable to value from passwd entry: {:?}", + shell, + ); + env::set_var("SHELL", shell); + } + + Ok(()) +} + +#[cfg(unix)] +pub fn load_login_shell_environment() -> Result<()> { + let marker = "ZED_LOGIN_SHELL_START"; + let shell = env::var("SHELL").context( + "SHELL environment variable is not assigned so we can't source login environment variables", + )?; + + // If possible, we want to `cd` in the user's `$HOME` to trigger programs + // such as direnv, asdf, mise, ... to adjust the PATH. These tools often hook + // into shell's `cd` command (and hooks) to manipulate env. + // We do this so that we get the env a user would have when spawning a shell + // in home directory. + let shell_cmd_prefix = std::env::var_os("HOME") + .and_then(|home| home.into_string().ok()) + .map(|home| format!("cd '{home}';")); + + // The `exit 0` is the result of hours of debugging, trying to find out + // why running this command here, without `exit 0`, would mess + // up signal process for our process so that `ctrl-c` doesn't work + // anymore. + // We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would + // do that, but it does, and `exit 0` helps. 
+ let shell_cmd = format!( + "{}printf '%s' {marker}; /usr/bin/env; exit 0;", + shell_cmd_prefix.as_deref().unwrap_or("") + ); + + let output = std::process::Command::new(&shell) + .args(["-l", "-i", "-c", &shell_cmd]) + .output() + .context("failed to spawn login shell to source login environment variables")?; + if !output.status.success() { + Err(anyhow!("login shell exited with error"))?; + } + + let stdout = String::from_utf8_lossy(&output.stdout); + + if let Some(env_output_start) = stdout.find(marker) { + let env_output = &stdout[env_output_start + marker.len()..]; + + parse_env_output(env_output, |key, value| env::set_var(key, value)); + + log::info!( + "set environment variables from shell:{}, path:{}", + shell, + env::var("PATH").unwrap_or_default(), + ); + } + + Ok(()) +} + /// Parse the result of calling `usr/bin/env` with no arguments pub fn parse_env_output(env: &str, mut f: impl FnMut(String, String)) { let mut current_key: Option = None; @@ -148,6 +290,12 @@ pub fn merge_json_value_into(source: serde_json::Value, target: &mut serde_json: } } + (Value::Array(source), Value::Array(target)) => { + for value in source { + target.push(value); + } + } + (source, target) => *target = source, } } @@ -192,6 +340,35 @@ pub fn measure(label: &str, f: impl FnOnce() -> R) -> R { } } +pub fn iterate_expanded_and_wrapped_usize_range( + range: Range, + additional_before: usize, + additional_after: usize, + wrap_length: usize, +) -> impl Iterator { + let start_wraps = range.start < additional_before; + let end_wraps = wrap_length < range.end + additional_after; + if start_wraps && end_wraps { + Either::Left(0..wrap_length) + } else if start_wraps { + let wrapped_start = (range.start + wrap_length).saturating_sub(additional_before); + if wrapped_start <= range.end { + Either::Left(0..wrap_length) + } else { + Either::Right((0..range.end + additional_after).chain(wrapped_start..wrap_length)) + } + } else if end_wraps { + let wrapped_end = range.end + additional_after - 
wrap_length; + if range.start <= wrapped_end { + Either::Left(0..wrap_length) + } else { + Either::Right((0..wrapped_end).chain(range.start - additional_before..wrap_length)) + } + } else { + Either::Left((range.start - additional_before)..(range.end + additional_after)) + } +} + pub trait ResultExt { type Ok; @@ -199,6 +376,9 @@ pub trait ResultExt { /// Assert that this result should never be an error in development or tests. fn debug_assert_ok(self, reason: &str) -> Self; fn warn_on_err(self) -> Option; + fn anyhow(self) -> anyhow::Result + where + E: Into; } impl ResultExt for Result @@ -236,6 +416,13 @@ where } } } + + fn anyhow(self) -> anyhow::Result + where + E: Into, + { + self.map_err(Into::into) + } } fn log_error_with_caller(caller: core::panic::Location<'_>, error: E, level: log::Level) @@ -262,6 +449,10 @@ where ); } +pub fn log_err(error: &E) { + log_error_with_caller(*Location::caller(), error, log::Level::Warn); +} + pub trait TryFutureExt { fn log_err(self) -> LogErrorFuture where @@ -550,8 +741,9 @@ impl<'a> PartialOrd for NumericPrefixWithSuffix<'a> { } fn emoji_regex() -> &'static Regex { - static EMOJI_REGEX: OnceLock = OnceLock::new(); - EMOJI_REGEX.get_or_init(|| Regex::new("(\\p{Emoji}|\u{200D})").unwrap()) + static EMOJI_REGEX: LazyLock = + LazyLock::new(|| Regex::new("(\\p{Emoji}|\u{200D})").unwrap()); + &EMOJI_REGEX } /// Returns true if the given string consists of emojis only. 
@@ -585,6 +777,29 @@ mod tests { assert_eq!(vec, &[1000, 101, 21, 19, 17, 13, 9, 8]); } + #[test] + fn test_truncate_to_bottom_n_sorted_by() { + let mut vec: Vec = vec![5, 2, 3, 4, 1]; + truncate_to_bottom_n_sorted_by(&mut vec, 10, &u32::cmp); + assert_eq!(vec, &[1, 2, 3, 4, 5]); + + vec = vec![5, 2, 3, 4, 1]; + truncate_to_bottom_n_sorted_by(&mut vec, 5, &u32::cmp); + assert_eq!(vec, &[1, 2, 3, 4, 5]); + + vec = vec![5, 2, 3, 4, 1]; + truncate_to_bottom_n_sorted_by(&mut vec, 4, &u32::cmp); + assert_eq!(vec, &[1, 2, 3, 4]); + + vec = vec![5, 2, 3, 4, 1]; + truncate_to_bottom_n_sorted_by(&mut vec, 1, &u32::cmp); + assert_eq!(vec, &[1]); + + vec = vec![5, 2, 3, 4, 1]; + truncate_to_bottom_n_sorted_by(&mut vec, 0, &u32::cmp); + assert!(vec.is_empty()); + } + #[test] fn test_iife() { fn option_returning_function() -> Option<()> { @@ -602,11 +817,25 @@ mod tests { #[test] fn test_truncate_and_trailoff() { assert_eq!(truncate_and_trailoff("", 5), ""); + assert_eq!(truncate_and_trailoff("aaaaaa", 7), "aaaaaa"); + assert_eq!(truncate_and_trailoff("aaaaaa", 6), "aaaaaa"); + assert_eq!(truncate_and_trailoff("aaaaaa", 5), "aaaaa…"); assert_eq!(truncate_and_trailoff("èèèèèè", 7), "èèèèèè"); assert_eq!(truncate_and_trailoff("èèèèèè", 6), "èèèèèè"); assert_eq!(truncate_and_trailoff("èèèèèè", 5), "èèèèè…"); } + #[test] + fn test_truncate_and_remove_front() { + assert_eq!(truncate_and_remove_front("", 5), ""); + assert_eq!(truncate_and_remove_front("aaaaaa", 7), "aaaaaa"); + assert_eq!(truncate_and_remove_front("aaaaaa", 6), "aaaaaa"); + assert_eq!(truncate_and_remove_front("aaaaaa", 5), "…aaaaa"); + assert_eq!(truncate_and_remove_front("èèèèèè", 7), "èèèèèè"); + assert_eq!(truncate_and_remove_front("èèèèèè", 6), "èèèèèè"); + assert_eq!(truncate_and_remove_front("èèèèèè", 5), "…èèèèè"); + } + #[test] fn test_numeric_prefix_str_method() { let target = "1a"; @@ -716,4 +945,48 @@ Line 2 Line 3"# ); } + + #[test] + fn test_iterate_expanded_and_wrapped_usize_range() { + // Neither wrap 
+ assert_eq!( + iterate_expanded_and_wrapped_usize_range(2..4, 1, 1, 8).collect::>(), + (1..5).collect::>() + ); + // Start wraps + assert_eq!( + iterate_expanded_and_wrapped_usize_range(2..4, 3, 1, 8).collect::>(), + ((0..5).chain(7..8)).collect::>() + ); + // Start wraps all the way around + assert_eq!( + iterate_expanded_and_wrapped_usize_range(2..4, 5, 1, 8).collect::>(), + (0..8).collect::>() + ); + // Start wraps all the way around and past 0 + assert_eq!( + iterate_expanded_and_wrapped_usize_range(2..4, 10, 1, 8).collect::>(), + (0..8).collect::>() + ); + // End wraps + assert_eq!( + iterate_expanded_and_wrapped_usize_range(3..5, 1, 4, 8).collect::>(), + (0..1).chain(2..8).collect::>() + ); + // End wraps all the way around + assert_eq!( + iterate_expanded_and_wrapped_usize_range(3..5, 1, 5, 8).collect::>(), + (0..8).collect::>() + ); + // End wraps all the way around and past the end + assert_eq!( + iterate_expanded_and_wrapped_usize_range(3..5, 1, 10, 8).collect::>(), + (0..8).collect::>() + ); + // Both start and end wrap + assert_eq!( + iterate_expanded_and_wrapped_usize_range(3..5, 4, 4, 8).collect::>(), + (0..8).collect::>() + ); + } } diff --git a/crates/vcs_menu/Cargo.toml b/crates/vcs_menu/Cargo.toml index 11de371868..47bf3d8984 100644 --- a/crates/vcs_menu/Cargo.toml +++ b/crates/vcs_menu/Cargo.toml @@ -18,3 +18,4 @@ project.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true +zed_actions.workspace = true diff --git a/crates/vcs_menu/src/lib.rs b/crates/vcs_menu/src/lib.rs index f165c91bfe..5dc02cc7c2 100644 --- a/crates/vcs_menu/src/lib.rs +++ b/crates/vcs_menu/src/lib.rs @@ -2,10 +2,9 @@ use anyhow::{anyhow, Context, Result}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::repository::Branch; use gpui::{ - actions, rems, AnyElement, AppContext, AsyncAppContext, DismissEvent, EventEmitter, - FocusHandle, FocusableView, InteractiveElement, IntoElement, ParentElement, Render, - SharedString, Styled, 
Subscription, Task, View, ViewContext, VisualContext, WeakView, - WindowContext, + rems, AnyElement, AppContext, AsyncAppContext, DismissEvent, EventEmitter, FocusHandle, + FocusableView, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, + Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowContext, }; use picker::{Picker, PickerDelegate}; use project::ProjectPath; @@ -14,8 +13,7 @@ use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::ResultExt; use workspace::notifications::DetachAndPromptErr; use workspace::{ModalView, Workspace}; - -actions!(branches, [OpenRecent]); +use zed_actions::branches::OpenRecent; pub fn init(cx: &mut AppContext) { cx.observe_new_views(|workspace: &mut Workspace, _| { @@ -174,11 +172,7 @@ impl PickerDelegate for BranchListDelegate { branches .into_iter() .enumerate() - .map(|(ix, command)| StringMatchCandidate { - id: ix, - char_bag: command.name.chars().collect(), - string: command.name.into(), - }) + .map(|(ix, command)| StringMatchCandidate::new(ix, &command.name)) .collect::>() }); let Some(candidates) = candidates.log_err() else { @@ -286,7 +280,7 @@ impl PickerDelegate for BranchListDelegate { ListItem::new(SharedString::from(format!("vcs-menu-{ix}"))) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .map(|parent| match hit { BranchEntry::Branch(branch) => { let highlights: Vec<_> = branch diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index fddb607c1f..02d4136faa 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -36,9 +36,11 @@ serde.workspace = true serde_derive.workspace = true serde_json.workspace = true settings.workspace = true +theme.workspace = true tokio = { version = "1.15", features = ["full"], optional = true } ui.workspace = true util.workspace = true +vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git 
a/crates/vim/src/change_list.rs b/crates/vim/src/change_list.rs index 69fcdd8319..adf553983b 100644 --- a/crates/vim/src/change_list.rs +++ b/crates/vim/src/change_list.rs @@ -16,7 +16,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { impl Vim { fn move_to_change(&mut self, direction: Direction, cx: &mut ViewContext) { - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); if self.change_list.is_empty() { return; } diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 2fa75c8579..632835e133 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -3,19 +3,22 @@ use std::{ ops::{Deref, Range}, str::Chars, sync::OnceLock, + time::Instant, }; use anyhow::{anyhow, Result}; use command_palette_hooks::CommandInterceptResult; use editor::{ actions::{SortLinesCaseInsensitive, SortLinesCaseSensitive}, - Editor, ToPoint, + display_map::ToDisplayPoint, + Bias, Editor, ToPoint, }; -use gpui::{actions, impl_actions, Action, AppContext, Global, ViewContext}; +use gpui::{actions, impl_actions, Action, AppContext, Global, ViewContext, WindowContext}; use language::Point; use multi_buffer::MultiBufferRow; +use regex::Regex; +use search::{BufferSearchBar, SearchOptions}; use serde::Deserialize; -use ui::WindowContext; use util::ResultExt; use workspace::{notifications::NotifyResultExt, SaveIntent}; @@ -57,7 +60,10 @@ pub struct WithCount { struct WrappedAction(Box); actions!(vim, [VisualCommand, CountCommand]); -impl_actions!(vim, [GoToLine, YankCommand, WithRange, WithCount]); +impl_actions!( + vim, + [GoToLine, YankCommand, WithRange, WithCount, OnMatchingLines] +); impl<'de> Deserialize<'de> for WrappedAction { fn deserialize(_: D) -> Result @@ -101,7 +107,7 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { let Some(workspace) = vim.workspace(cx) else { return; }; - let count = vim.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); 
workspace.update(cx, |workspace, cx| { command_palette::CommandPalette::toggle( workspace, @@ -136,7 +142,7 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.update_editor(cx, |vim, editor, cx| { let snapshot = editor.snapshot(cx); if let Ok(range) = action.range.buffer_range(vim, editor, cx) { - let end = if range.end < snapshot.max_buffer_row() { + let end = if range.end < snapshot.buffer_snapshot.max_row() { Point::new(range.end.0 + 1, 0) } else { snapshot.buffer_snapshot.max_point() @@ -204,6 +210,10 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { }); }); }); + + Vim::action(editor, cx, |vim, action: &OnMatchingLines, cx| { + action.run(vim, cx) + }) } #[derive(Default)] @@ -436,9 +446,11 @@ impl Position { .row .saturating_add_signed(*offset) } - Position::LastLine { offset } => { - snapshot.max_buffer_row().0.saturating_add_signed(*offset) - } + Position::LastLine { offset } => snapshot + .buffer_snapshot + .max_row() + .0 + .saturating_add_signed(*offset), Position::CurrentLine { offset } => editor .selections .newest_anchor() @@ -448,7 +460,7 @@ impl Position { .saturating_add_signed(*offset), }; - Ok(MultiBufferRow(target).min(snapshot.max_buffer_row())) + Ok(MultiBufferRow(target).min(snapshot.buffer_snapshot.max_row())) } } @@ -784,6 +796,31 @@ pub fn command_interceptor(mut input: &str, cx: &AppContext) -> Option Vec { positions } +#[derive(Debug, PartialEq, Deserialize, Clone)] +pub(crate) struct OnMatchingLines { + range: CommandRange, + search: String, + action: WrappedAction, + invert: bool, +} + +impl OnMatchingLines { + // convert a vim query into something more usable by zed. + // we don't attempt to fully convert between the two regex syntaxes, + // but we do flip \( and \) to ( and ) (and vice-versa) in the pattern, + // and convert \0..\9 to $0..$9 in the replacement so that common idioms work. 
+ pub(crate) fn parse( + mut chars: Peekable, + invert: bool, + range: CommandRange, + cx: &AppContext, + ) -> Option { + let delimiter = chars.next().filter(|c| { + !c.is_alphanumeric() && *c != '"' && *c != '|' && *c != '\'' && *c != '!' + })?; + + let mut search = String::new(); + let mut escaped = false; + + while let Some(c) = chars.next() { + if escaped { + escaped = false; + // unescape escaped parens + if c != '(' && c != ')' && c != delimiter { + search.push('\\') + } + search.push(c) + } else if c == '\\' { + escaped = true; + } else if c == delimiter { + break; + } else { + // escape unescaped parens + if c == '(' || c == ')' { + search.push('\\') + } + search.push(c) + } + } + + let command: String = chars.collect(); + + let action = WrappedAction(command_interceptor(&command, cx)?.action); + + Some(Self { + range, + search, + invert, + action, + }) + } + + pub fn run(&self, vim: &mut Vim, cx: &mut ViewContext) { + let result = vim.update_editor(cx, |vim, editor, cx| { + self.range.buffer_range(vim, editor, cx) + }); + + let range = match result { + None => return, + Some(e @ Err(_)) => { + let Some(workspace) = vim.workspace(cx) else { + return; + }; + workspace.update(cx, |workspace, cx| { + e.notify_err(workspace, cx); + }); + return; + } + Some(Ok(result)) => result, + }; + + let mut action = self.action.boxed_clone(); + let mut last_pattern = self.search.clone(); + + let mut regexes = match Regex::new(&self.search) { + Ok(regex) => vec![(regex, !self.invert)], + e @ Err(_) => { + let Some(workspace) = vim.workspace(cx) else { + return; + }; + workspace.update(cx, |workspace, cx| { + e.notify_err(workspace, cx); + }); + return; + } + }; + while let Some(inner) = action + .boxed_clone() + .as_any() + .downcast_ref::() + { + let Some(regex) = Regex::new(&inner.search).ok() else { + break; + }; + last_pattern = inner.search.clone(); + action = inner.action.boxed_clone(); + regexes.push((regex, !inner.invert)) + } + + if let Some(pane) = vim.pane(cx) { 
+ pane.update(cx, |pane, cx| { + if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() + { + search_bar.update(cx, |search_bar, cx| { + if search_bar.show(cx) { + let _ = search_bar.search( + &last_pattern, + Some(SearchOptions::REGEX | SearchOptions::CASE_SENSITIVE), + cx, + ); + } + }); + } + }); + }; + + vim.update_editor(cx, |_, editor, cx| { + let snapshot = editor.snapshot(cx); + let mut row = range.start.0; + + let point_range = Point::new(range.start.0, 0) + ..snapshot + .buffer_snapshot + .clip_point(Point::new(range.end.0 + 1, 0), Bias::Left); + cx.spawn(|editor, mut cx| async move { + let new_selections = cx + .background_executor() + .spawn(async move { + let mut line = String::new(); + let mut new_selections = Vec::new(); + let chunks = snapshot + .buffer_snapshot + .text_for_range(point_range) + .chain(["\n"]); + + for chunk in chunks { + for (newline_ix, text) in chunk.split('\n').enumerate() { + if newline_ix > 0 { + if regexes.iter().all(|(regex, should_match)| { + regex.is_match(&line) == *should_match + }) { + new_selections + .push(Point::new(row, 0).to_display_point(&snapshot)) + } + row += 1; + line.clear(); + } + line.push_str(text) + } + } + + new_selections + }) + .await; + + if new_selections.is_empty() { + return; + } + editor + .update(&mut cx, |editor, cx| { + editor.start_transaction_at(Instant::now(), cx); + editor.change_selections(None, cx, |s| { + s.replace_cursors_with(|_| new_selections); + }); + cx.dispatch_action(action); + cx.defer(move |editor, cx| { + let newest = editor.selections.newest::(cx).clone(); + editor.change_selections(None, cx, |s| { + s.select(vec![newest]); + }); + editor.end_transaction_at(Instant::now(), cx); + }) + }) + .ok(); + }) + .detach(); + }); + } +} + #[cfg(test)] mod test { use std::path::Path; @@ -1107,4 +1331,46 @@ mod test { assert_active_item(workspace, "/root/dir/file3.rs", "go to file3", cx); }); } + + #[gpui::test] + async fn test_command_matching_lines(cx: &mut TestAppContext) { 
+ let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! {" + ˇa + b + a + b + a + "}) + .await; + + cx.simulate_shared_keystrokes(":").await; + cx.simulate_shared_keystrokes("g / a / d").await; + cx.simulate_shared_keystrokes("enter").await; + + cx.shared_state().await.assert_eq(indoc! {" + b + b + ˇ"}); + + cx.simulate_shared_keystrokes("u").await; + + cx.shared_state().await.assert_eq(indoc! {" + ˇa + b + a + b + a + "}); + + cx.simulate_shared_keystrokes(":").await; + cx.simulate_shared_keystrokes("v / a / d").await; + cx.simulate_shared_keystrokes("enter").await; + + cx.shared_state().await.assert_eq(indoc! {" + a + a + ˇa"}); + } } diff --git a/crates/vim/src/digraph.rs b/crates/vim/src/digraph.rs index 4c09dd3e33..dcccc8b5cd 100644 --- a/crates/vim/src/digraph.rs +++ b/crates/vim/src/digraph.rs @@ -83,7 +83,7 @@ impl Vim { cx: &mut ViewContext, ) { // handled by handle_literal_input - if keystroke_event.keystroke.ime_key.is_some() { + if keystroke_event.keystroke.key_char.is_some() { return; }; diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs new file mode 100644 index 0000000000..3358538991 --- /dev/null +++ b/crates/vim/src/helix.rs @@ -0,0 +1,373 @@ +use editor::{movement, scroll::Autoscroll, DisplayPoint, Editor}; +use gpui::{actions, Action}; +use language::{CharClassifier, CharKind}; +use ui::ViewContext; + +use crate::{motion::Motion, state::Mode, Vim}; + +actions!(vim, [HelixNormalAfter, HelixDelete]); + +pub fn register(editor: &mut Editor, cx: &mut ViewContext) { + Vim::action(editor, cx, Vim::helix_normal_after); + Vim::action(editor, cx, Vim::helix_delete); +} + +impl Vim { + pub fn helix_normal_after(&mut self, action: &HelixNormalAfter, cx: &mut ViewContext) { + if self.active_operator().is_some() { + self.operator_stack.clear(); + self.sync_vim_settings(cx); + return; + } + self.stop_recording_immediately(action.boxed_clone(), cx); + self.switch_mode(Mode::HelixNormal, false, cx); + return; + } + + 
pub fn helix_normal_motion( + &mut self, + motion: Motion, + times: Option, + cx: &mut ViewContext, + ) { + self.helix_move_cursor(motion, times, cx); + } + + fn helix_find_range_forward( + &mut self, + times: Option, + cx: &mut ViewContext, + mut is_boundary: impl FnMut(char, char, &CharClassifier) -> bool, + ) { + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + let times = times.unwrap_or(1); + + if selection.head() == map.max_point() { + return; + } + + // collapse to block cursor + if selection.tail() < selection.head() { + selection.set_tail(movement::left(map, selection.head()), selection.goal); + } else { + selection.set_tail(selection.head(), selection.goal); + selection.set_head(movement::right(map, selection.head()), selection.goal); + } + + // create a classifier + let classifier = map + .buffer_snapshot + .char_classifier_at(selection.head().to_point(map)); + + let mut last_selection = selection.clone(); + for _ in 0..times { + let (new_tail, new_head) = + movement::find_boundary_trail(map, selection.head(), |left, right| { + is_boundary(left, right, &classifier) + }); + + selection.set_head(new_head, selection.goal); + if let Some(new_tail) = new_tail { + selection.set_tail(new_tail, selection.goal); + } + + if selection.head() == last_selection.head() + && selection.tail() == last_selection.tail() + { + break; + } + last_selection = selection.clone(); + } + }); + }); + }); + } + + fn helix_find_range_backward( + &mut self, + times: Option, + cx: &mut ViewContext, + mut is_boundary: impl FnMut(char, char, &CharClassifier) -> bool, + ) { + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + let times = times.unwrap_or(1); + + if selection.head() == DisplayPoint::zero() { + return; + } + + // collapse to block cursor + if selection.tail() < selection.head() { + 
selection.set_tail(movement::left(map, selection.head()), selection.goal); + } else { + selection.set_tail(selection.head(), selection.goal); + selection.set_head(movement::right(map, selection.head()), selection.goal); + } + + // flip the selection + selection.swap_head_tail(); + + // create a classifier + let classifier = map + .buffer_snapshot + .char_classifier_at(selection.head().to_point(map)); + + let mut last_selection = selection.clone(); + for _ in 0..times { + let (new_tail, new_head) = movement::find_preceding_boundary_trail( + map, + selection.head(), + |left, right| is_boundary(left, right, &classifier), + ); + + selection.set_head(new_head, selection.goal); + if let Some(new_tail) = new_tail { + selection.set_tail(new_tail, selection.goal); + } + + if selection.head() == last_selection.head() + && selection.tail() == last_selection.tail() + { + break; + } + last_selection = selection.clone(); + } + }); + }) + }); + } + + pub fn helix_move_and_collapse( + &mut self, + motion: Motion, + times: Option, + cx: &mut ViewContext, + ) { + self.update_editor(cx, |_, editor, cx| { + let text_layout_details = editor.text_layout_details(cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + let goal = selection.goal; + let cursor = if selection.is_empty() || selection.reversed { + selection.head() + } else { + movement::left(map, selection.head()) + }; + + let (point, goal) = motion + .move_point(map, cursor, selection.goal, times, &text_layout_details) + .unwrap_or((cursor, goal)); + + selection.collapse_to(point, goal) + }) + }); + }); + } + + pub fn helix_move_cursor( + &mut self, + motion: Motion, + times: Option, + cx: &mut ViewContext, + ) { + match motion { + Motion::NextWordStart { ignore_punctuation } => { + self.helix_find_range_forward(times, cx, |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); 
+ let at_newline = right == '\n'; + + let found = + left_kind != right_kind && right_kind != CharKind::Whitespace || at_newline; + + found + }) + } + Motion::NextWordEnd { ignore_punctuation } => { + self.helix_find_range_forward(times, cx, |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = right == '\n'; + + let found = left_kind != right_kind + && (left_kind != CharKind::Whitespace || at_newline); + + found + }) + } + Motion::PreviousWordStart { ignore_punctuation } => { + self.helix_find_range_backward(times, cx, |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = right == '\n'; + + let found = left_kind != right_kind + && (left_kind != CharKind::Whitespace || at_newline); + + found + }) + } + Motion::PreviousWordEnd { ignore_punctuation } => { + self.helix_find_range_backward(times, cx, |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = right == '\n'; + + let found = left_kind != right_kind + && right_kind != CharKind::Whitespace + && !at_newline; + + found + }) + } + _ => self.helix_move_and_collapse(motion, times, cx), + } + } + + pub fn helix_delete(&mut self, _: &HelixDelete, cx: &mut ViewContext) { + self.store_visual_marks(cx); + self.update_editor(cx, |vim, editor, cx| { + // Fixup selections so they have helix's semantics. 
+ // Specifically: + // - Make sure that each cursor acts as a 1 character wide selection + editor.transact(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() && !selection.reversed { + selection.end = movement::right(map, selection.end); + } + }); + }); + }); + + vim.copy_selections_content(editor, false, cx); + editor.insert("", cx); + }); + } +} + +#[cfg(test)] +mod test { + use indoc::indoc; + + use crate::{state::Mode, test::VimTestContext}; + + #[gpui::test] + async fn test_next_word_start(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + // « + // ˇ + // » + cx.set_state( + indoc! {" + The quˇick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("w"); + + cx.assert_state( + indoc! {" + The qu«ick ˇ»brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("w"); + + cx.assert_state( + indoc! {" + The quick «brownˇ» + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_delete(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // test delete a selection + cx.set_state( + indoc! {" + The qu«ick ˇ»brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("d"); + + cx.assert_state( + indoc! {" + The quˇbrown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // test deleting a single character + cx.simulate_keystrokes("d"); + + cx.assert_state( + indoc! {" + The quˇrown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_delete_character_end_of_line(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! 
{" + The quick brownˇ + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("d"); + + cx.assert_state( + indoc! {" + The quick brownˇfox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_delete_character_end_of_buffer(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! {" + The quick brown + fox jumps over + the lazy dog.ˇ"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("d"); + + cx.assert_state( + indoc! {" + The quick brown + fox jumps over + the lazy dog.ˇ"}, + Mode::HelixNormal, + ); + } +} diff --git a/crates/vim/src/indent.rs b/crates/vim/src/indent.rs index b6ca2de34c..6d5ce78f5c 100644 --- a/crates/vim/src/indent.rs +++ b/crates/vim/src/indent.rs @@ -9,14 +9,15 @@ use ui::ViewContext; pub(crate) enum IndentDirection { In, Out, + Auto, } -actions!(vim, [Indent, Outdent,]); +actions!(vim, [Indent, Outdent, AutoIndent]); pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &Indent, cx| { vim.record_current_action(cx); - let count = vim.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { @@ -34,7 +35,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &Outdent, cx| { vim.record_current_action(cx); - let count = vim.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { @@ -49,6 +50,24 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.switch_mode(Mode::Normal, true, cx) } }); + + Vim::action(editor, cx, |vim, _: &AutoIndent, cx| { + vim.record_current_action(cx); + let count = Vim::take_count(cx).unwrap_or(1); + vim.store_visual_marks(cx); + 
vim.update_editor(cx, |vim, editor, cx| { + editor.transact(cx, |editor, cx| { + let original_positions = vim.save_selection_starts(editor, cx); + for _ in 0..count { + editor.autoindent(&Default::default(), cx); + } + vim.restore_selection_cursors(editor, cx, original_positions); + }); + }); + if vim.mode.is_visual() { + vim.switch_mode(Mode::Normal, true, cx) + } + }); } impl Vim { @@ -71,10 +90,10 @@ impl Vim { motion.expand_selection(map, selection, times, false, &text_layout_details); }); }); - if dir == IndentDirection::In { - editor.indent(&Default::default(), cx); - } else { - editor.outdent(&Default::default(), cx); + match dir { + IndentDirection::In => editor.indent(&Default::default(), cx), + IndentDirection::Out => editor.outdent(&Default::default(), cx), + IndentDirection::Auto => editor.autoindent(&Default::default(), cx), } editor.change_selections(None, cx, |s| { s.move_with(|map, selection| { @@ -104,10 +123,10 @@ impl Vim { object.expand_selection(map, selection, around); }); }); - if dir == IndentDirection::In { - editor.indent(&Default::default(), cx); - } else { - editor.outdent(&Default::default(), cx); + match dir { + IndentDirection::In => editor.indent(&Default::default(), cx), + IndentDirection::Out => editor.outdent(&Default::default(), cx), + IndentDirection::Auto => editor.autoindent(&Default::default(), cx), } editor.change_selections(None, cx, |s| { s.move_with(|map, selection| { @@ -122,7 +141,11 @@ impl Vim { #[cfg(test)] mod test { - use crate::test::NeovimBackedTestContext; + use crate::{ + state::Mode, + test::{NeovimBackedTestContext, VimTestContext}, + }; + use indoc::indoc; #[gpui::test] async fn test_indent_gv(cx: &mut gpui::TestAppContext) { @@ -135,4 +158,46 @@ mod test { .await .assert_eq("« hello\n ˇ» world\n"); } + + #[gpui::test] + async fn test_autoindent_op(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc!( + " + fn a() { + b(); + c(); + + d(); + ˇe(); + 
f(); + + g(); + } + " + ), + Mode::Normal, + ); + + cx.simulate_keystrokes("= a p"); + cx.assert_state( + indoc!( + " + fn a() { + b(); + c(); + + d(); + ˇe(); + f(); + + g(); + } + " + ), + Mode::Normal, + ); + } } diff --git a/crates/vim/src/insert.rs b/crates/vim/src/insert.rs index ba83e2125b..b1e7af9b10 100644 --- a/crates/vim/src/insert.rs +++ b/crates/vim/src/insert.rs @@ -17,7 +17,7 @@ impl Vim { self.sync_vim_settings(cx); return; } - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); self.stop_recording_immediately(action.boxed_clone(), cx); if count <= 1 || Vim::globals(cx).dot_replaying { self.create_mark("^".into(), false, cx); diff --git a/crates/vim/src/mode_indicator.rs b/crates/vim/src/mode_indicator.rs index 619bb6e1f4..8b608fdfe3 100644 --- a/crates/vim/src/mode_indicator.rs +++ b/crates/vim/src/mode_indicator.rs @@ -2,7 +2,7 @@ use gpui::{div, Element, Render, Subscription, View, ViewContext, WeakView}; use itertools::Itertools; use workspace::{item::ItemHandle, ui::prelude::*, StatusItemView}; -use crate::{Vim, VimEvent}; +use crate::{Vim, VimEvent, VimGlobals}; /// The ModeIndicator displays the current mode in the status bar. 
pub struct ModeIndicator { @@ -68,14 +68,22 @@ impl ModeIndicator { let vim = vim.read(cx); recording - .chain(vim.pre_count.map(|count| format!("{}", count))) + .chain( + cx.global::() + .pre_count + .map(|count| format!("{}", count)), + ) .chain(vim.selected_register.map(|reg| format!("\"{reg}"))) .chain( vim.operator_stack .iter() .map(|item| item.status().to_string()), ) - .chain(vim.post_count.map(|count| format!("{}", count))) + .chain( + cx.global::() + .post_count + .map(|count| format!("{}", count)), + ) .collect::>() .join("") } diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 9f7a30afe9..acce32b24d 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -11,6 +11,7 @@ use language::{CharKind, Point, Selection, SelectionGoal}; use multi_buffer::MultiBufferRow; use serde::Deserialize; use std::ops::Range; +use workspace::searchable::Direction; use crate::{ normal::mark, @@ -72,6 +73,12 @@ pub enum Motion { StartOfDocument, EndOfDocument, Matching, + UnmatchedForward { + char: char, + }, + UnmatchedBackward { + char: char, + }, FindForward { before: bool, char: char, @@ -98,6 +105,16 @@ pub enum Motion { WindowTop, WindowMiddle, WindowBottom, + NextSectionStart, + NextSectionEnd, + PreviousSectionStart, + PreviousSectionEnd, + NextMethodStart, + NextMethodEnd, + PreviousMethodStart, + PreviousMethodEnd, + NextComment, + PreviousComment, // we don't have a good way to run a search synchronously, so // we handle search motions by running the search async and then @@ -203,6 +220,20 @@ pub struct StartOfLine { pub(crate) display_lines: bool, } +#[derive(Clone, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +struct UnmatchedForward { + #[serde(default)] + char: char, +} + +#[derive(Clone, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +struct UnmatchedBackward { + #[serde(default)] + char: char, +} + impl_actions!( vim, [ @@ -219,6 +250,8 @@ impl_actions!( NextSubwordEnd, PreviousSubwordStart, 
PreviousSubwordEnd, + UnmatchedForward, + UnmatchedBackward ] ); @@ -247,6 +280,16 @@ actions!( WindowTop, WindowMiddle, WindowBottom, + NextSectionStart, + NextSectionEnd, + PreviousSectionStart, + PreviousSectionEnd, + NextMethodStart, + NextMethodEnd, + PreviousMethodStart, + PreviousMethodEnd, + NextComment, + PreviousComment, ] ); @@ -326,7 +369,20 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &Matching, cx| { vim.motion(Motion::Matching, cx) }); - + Vim::action( + editor, + cx, + |vim, &UnmatchedForward { char }: &UnmatchedForward, cx| { + vim.motion(Motion::UnmatchedForward { char }, cx) + }, + ); + Vim::action( + editor, + cx, + |vim, &UnmatchedBackward { char }: &UnmatchedBackward, cx| { + vim.motion(Motion::UnmatchedBackward { char }, cx) + }, + ); Vim::action( editor, cx, @@ -419,6 +475,37 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, &WindowBottom, cx| { vim.motion(Motion::WindowBottom, cx) }); + + Vim::action(editor, cx, |vim, &PreviousSectionStart, cx| { + vim.motion(Motion::PreviousSectionStart, cx) + }); + Vim::action(editor, cx, |vim, &NextSectionStart, cx| { + vim.motion(Motion::NextSectionStart, cx) + }); + Vim::action(editor, cx, |vim, &PreviousSectionEnd, cx| { + vim.motion(Motion::PreviousSectionEnd, cx) + }); + Vim::action(editor, cx, |vim, &NextSectionEnd, cx| { + vim.motion(Motion::NextSectionEnd, cx) + }); + Vim::action(editor, cx, |vim, &PreviousMethodStart, cx| { + vim.motion(Motion::PreviousMethodStart, cx) + }); + Vim::action(editor, cx, |vim, &NextMethodStart, cx| { + vim.motion(Motion::NextMethodStart, cx) + }); + Vim::action(editor, cx, |vim, &PreviousMethodEnd, cx| { + vim.motion(Motion::PreviousMethodEnd, cx) + }); + Vim::action(editor, cx, |vim, &NextMethodEnd, cx| { + vim.motion(Motion::NextMethodEnd, cx) + }); + Vim::action(editor, cx, |vim, &NextComment, cx| { + vim.motion(Motion::NextComment, cx) + }); + Vim::action(editor, cx, 
|vim, &PreviousComment, cx| { + vim.motion(Motion::PreviousComment, cx) + }); } impl Vim { @@ -442,6 +529,8 @@ impl Vim { return; } } + + Mode::HelixNormal => {} } } @@ -455,7 +544,7 @@ impl Vim { self.pop_operator(cx); } - let count = self.take_count(cx); + let count = Vim::take_count(cx); let active_operator = self.active_operator(); let mut waiting_operator: Option = None; match self.mode { @@ -471,11 +560,13 @@ impl Vim { Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { self.visual_motion(motion.clone(), count, cx) } + + Mode::HelixNormal => self.helix_normal_motion(motion.clone(), count, cx), } self.clear_operator(cx); if let Some(operator) = waiting_operator { self.push_operator(operator, cx); - self.pre_count = count + Vim::globals(cx).pre_count = count } } } @@ -494,20 +585,32 @@ impl Motion { | NextLineStart | PreviousLineStart | StartOfLineDownward - | SentenceBackward - | SentenceForward | StartOfParagraph | EndOfParagraph | WindowTop | WindowMiddle | WindowBottom + | NextSectionStart + | NextSectionEnd + | PreviousSectionStart + | PreviousSectionEnd + | NextMethodStart + | NextMethodEnd + | PreviousMethodStart + | PreviousMethodEnd + | NextComment + | PreviousComment | Jump { line: true, .. } => true, EndOfLine { .. } | Matching + | UnmatchedForward { .. } + | UnmatchedBackward { .. } | FindForward { .. } | Left | Backspace | Right + | SentenceBackward + | SentenceForward | Space | StartOfLine { .. } | EndOfLineDownward @@ -537,6 +640,8 @@ impl Motion { | Up { .. } | EndOfLine { .. } | Matching + | UnmatchedForward { .. } + | UnmatchedBackward { .. } | FindForward { .. } | RepeatFind { .. } | Left @@ -568,6 +673,16 @@ impl Motion { | NextLineStart | PreviousLineStart | ZedSearchResult { .. } + | NextSectionStart + | NextSectionEnd + | PreviousSectionStart + | PreviousSectionEnd + | NextMethodStart + | NextMethodEnd + | PreviousMethodStart + | PreviousMethodEnd + | NextComment + | PreviousComment | Jump { .. 
} => false, } } @@ -583,6 +698,8 @@ impl Motion { | EndOfLine { .. } | EndOfLineDownward | Matching + | UnmatchedForward { .. } + | UnmatchedBackward { .. } | FindForward { .. } | WindowTop | WindowMiddle @@ -611,6 +728,16 @@ impl Motion { | FirstNonWhitespace { .. } | FindBackward { .. } | Jump { .. } + | NextSectionStart + | NextSectionEnd + | PreviousSectionStart + | PreviousSectionEnd + | NextMethodStart + | NextMethodEnd + | PreviousMethodStart + | PreviousMethodEnd + | NextComment + | PreviousComment | ZedSearchResult { .. } => false, RepeatFind { last_find: motion } | RepeatFindReversed { last_find: motion } => { motion.inclusive() @@ -707,6 +834,14 @@ impl Motion { SelectionGoal::None, ), Matching => (matching(map, point), SelectionGoal::None), + UnmatchedForward { char } => ( + unmatched_forward(map, point, *char, times), + SelectionGoal::None, + ), + UnmatchedBackward { char } => ( + unmatched_backward(map, point, *char, times), + SelectionGoal::None, + ), // t f FindForward { before, @@ -818,6 +953,47 @@ impl Motion { return None; } } + NextSectionStart => ( + section_motion(map, point, times, Direction::Next, true), + SelectionGoal::None, + ), + NextSectionEnd => ( + section_motion(map, point, times, Direction::Next, false), + SelectionGoal::None, + ), + PreviousSectionStart => ( + section_motion(map, point, times, Direction::Prev, true), + SelectionGoal::None, + ), + PreviousSectionEnd => ( + section_motion(map, point, times, Direction::Prev, false), + SelectionGoal::None, + ), + + NextMethodStart => ( + method_motion(map, point, times, Direction::Next, true), + SelectionGoal::None, + ), + NextMethodEnd => ( + method_motion(map, point, times, Direction::Next, false), + SelectionGoal::None, + ), + PreviousMethodStart => ( + method_motion(map, point, times, Direction::Prev, true), + SelectionGoal::None, + ), + PreviousMethodEnd => ( + method_motion(map, point, times, Direction::Prev, false), + SelectionGoal::None, + ), + NextComment => ( + 
comment_motion(map, point, times, Direction::Next), + SelectionGoal::None, + ), + PreviousComment => ( + comment_motion(map, point, times, Direction::Prev), + SelectionGoal::None, + ), }; (new_point != point || infallible).then_some((new_point, goal)) @@ -1030,6 +1206,7 @@ fn up_down_buffer_rows( times: isize, text_layout_details: &TextLayoutDetails, ) -> (DisplayPoint, SelectionGoal) { + let bias = if times < 0 { Bias::Left } else { Bias::Right }; let start = map.display_point_to_fold_point(point, Bias::Left); let begin_folded_line = map.fold_point_to_display_point( map.fold_snapshot @@ -1053,14 +1230,14 @@ fn up_down_buffer_rows( let mut begin_folded_line = map.fold_point_to_display_point( map.fold_snapshot - .clip_point(FoldPoint::new(new_row, 0), Bias::Left), + .clip_point(FoldPoint::new(new_row, 0), bias), ); let mut i = 0; while i < goal_wrap && begin_folded_line.row() < map.max_point().row() { let next_folded_line = DisplayPoint::new(begin_folded_line.row().next_row(), 0); if map - .display_point_to_fold_point(next_folded_line, Bias::Right) + .display_point_to_fold_point(next_folded_line, bias) .row() == new_row { @@ -1078,10 +1255,7 @@ fn up_down_buffer_rows( }; ( - map.clip_point( - DisplayPoint::new(begin_folded_line.row(), new_col), - Bias::Left, - ), + map.clip_point(DisplayPoint::new(begin_folded_line.row(), new_col), bias), goal, ) } @@ -1690,7 +1864,7 @@ fn end_of_document( let new_row = if let Some(line) = line { (line - 1) as u32 } else { - map.max_buffer_row().0 + map.buffer_snapshot.max_row().0 }; let new_point = Point::new(new_row, point.column()); @@ -1792,6 +1966,92 @@ fn matching(map: &DisplaySnapshot, display_point: DisplayPoint) -> DisplayPoint } } +fn unmatched_forward( + map: &DisplaySnapshot, + mut display_point: DisplayPoint, + char: char, + times: usize, +) -> DisplayPoint { + for _ in 0..times { + // https://github.com/vim/vim/blob/1d87e11a1ef201b26ed87585fba70182ad0c468a/runtime/doc/motion.txt#L1245 + let point = 
display_point.to_point(map); + let offset = point.to_offset(&map.buffer_snapshot); + + let ranges = map.buffer_snapshot.enclosing_bracket_ranges(point..point); + let Some(ranges) = ranges else { break }; + let mut closest_closing_destination = None; + let mut closest_distance = usize::MAX; + + for (_, close_range) in ranges { + if close_range.start > offset { + let mut chars = map.buffer_snapshot.chars_at(close_range.start); + if Some(char) == chars.next() { + let distance = close_range.start - offset; + if distance < closest_distance { + closest_closing_destination = Some(close_range.start); + closest_distance = distance; + continue; + } + } + } + } + + let new_point = closest_closing_destination + .map(|destination| destination.to_display_point(map)) + .unwrap_or(display_point); + if new_point == display_point { + break; + } + display_point = new_point; + } + return display_point; +} + +fn unmatched_backward( + map: &DisplaySnapshot, + mut display_point: DisplayPoint, + char: char, + times: usize, +) -> DisplayPoint { + for _ in 0..times { + // https://github.com/vim/vim/blob/1d87e11a1ef201b26ed87585fba70182ad0c468a/runtime/doc/motion.txt#L1239 + let point = display_point.to_point(map); + let offset = point.to_offset(&map.buffer_snapshot); + + let ranges = map.buffer_snapshot.enclosing_bracket_ranges(point..point); + let Some(ranges) = ranges else { + break; + }; + + let mut closest_starting_destination = None; + let mut closest_distance = usize::MAX; + + for (start_range, _) in ranges { + if start_range.start < offset { + let mut chars = map.buffer_snapshot.chars_at(start_range.start); + if Some(char) == chars.next() { + let distance = offset - start_range.start; + if distance < closest_distance { + closest_starting_destination = Some(start_range.start); + closest_distance = distance; + continue; + } + } + } + } + + let new_point = closest_starting_destination + .map(|destination| destination.to_display_point(map)) + .unwrap_or(display_point); + if new_point == 
display_point { + break; + } else { + display_point = new_point; + } + } + display_point +} + fn find_forward( map: &DisplaySnapshot, from: DisplayPoint, @@ -1994,10 +2254,239 @@ fn window_bottom( } } +fn method_motion( + map: &DisplaySnapshot, + mut display_point: DisplayPoint, + times: usize, + direction: Direction, + is_start: bool, +) -> DisplayPoint { + let Some((_, _, buffer)) = map.buffer_snapshot.as_singleton() else { + return display_point; + }; + + for _ in 0..times { + let point = map.display_point_to_point(display_point, Bias::Left); + let offset = point.to_offset(&map.buffer_snapshot); + let range = if direction == Direction::Prev { + 0..offset + } else { + offset..buffer.len() + }; + + let possibilities = buffer + .text_object_ranges(range, language::TreeSitterOptions::max_start_depth(4)) + .filter_map(|(range, object)| { + if !matches!(object, language::TextObject::AroundFunction) { + return None; + } + + let relevant = if is_start { range.start } else { range.end }; + if direction == Direction::Prev && relevant < offset { + Some(relevant) + } else if direction == Direction::Next && relevant > offset + 1 { + Some(relevant) + } else { + None + } + }); + + let dest = if direction == Direction::Prev { + possibilities.max().unwrap_or(offset) + } else { + possibilities.min().unwrap_or(offset) + }; + let new_point = map.clip_point(dest.to_display_point(&map), Bias::Left); + if new_point == display_point { + break; + } + display_point = new_point; + } + display_point +} + +fn comment_motion( + map: &DisplaySnapshot, + mut display_point: DisplayPoint, + times: usize, + direction: Direction, +) -> DisplayPoint { + let Some((_, _, buffer)) = map.buffer_snapshot.as_singleton() else { + return display_point; + }; + + for _ in 0..times { + let point = map.display_point_to_point(display_point, Bias::Left); + let offset = point.to_offset(&map.buffer_snapshot); + let range = if direction == Direction::Prev { + 0..offset + } else { + offset..buffer.len() + }; + + let 
possibilities = buffer + .text_object_ranges(range, language::TreeSitterOptions::max_start_depth(6)) + .filter_map(|(range, object)| { + if !matches!(object, language::TextObject::AroundComment) { + return None; + } + + let relevant = if direction == Direction::Prev { + range.start + } else { + range.end + }; + if direction == Direction::Prev && relevant < offset { + Some(relevant) + } else if direction == Direction::Next && relevant > offset + 1 { + Some(relevant) + } else { + None + } + }); + + let dest = if direction == Direction::Prev { + possibilities.max().unwrap_or(offset) + } else { + possibilities.min().unwrap_or(offset) + }; + let new_point = map.clip_point(dest.to_display_point(&map), Bias::Left); + if new_point == display_point { + break; + } + display_point = new_point; + } + + display_point +} + +fn section_motion( + map: &DisplaySnapshot, + mut display_point: DisplayPoint, + times: usize, + direction: Direction, + is_start: bool, +) -> DisplayPoint { + if let Some((_, _, buffer)) = map.buffer_snapshot.as_singleton() { + for _ in 0..times { + let offset = map + .display_point_to_point(display_point, Bias::Left) + .to_offset(&map.buffer_snapshot); + let range = if direction == Direction::Prev { + 0..offset + } else { + offset..buffer.len() + }; + + // we set a max start depth here because we want a section to only be "top level" + // similar to vim's default of '{' in the first column. 
+ // (and without it, ]] at the start of editor.rs is -very- slow) + let mut possibilities = buffer + .text_object_ranges(range, language::TreeSitterOptions::max_start_depth(3)) + .filter(|(_, object)| { + matches!( + object, + language::TextObject::AroundClass | language::TextObject::AroundFunction + ) + }) + .collect::>(); + possibilities.sort_by_key(|(range_a, _)| range_a.start); + let mut prev_end = None; + let possibilities = possibilities.into_iter().filter_map(|(range, t)| { + if t == language::TextObject::AroundFunction + && prev_end.is_some_and(|prev_end| prev_end > range.start) + { + return None; + } + prev_end = Some(range.end); + + let relevant = if is_start { range.start } else { range.end }; + if direction == Direction::Prev && relevant < offset { + Some(relevant) + } else if direction == Direction::Next && relevant > offset + 1 { + Some(relevant) + } else { + None + } + }); + + let offset = if direction == Direction::Prev { + possibilities.max().unwrap_or(0) + } else { + possibilities.min().unwrap_or(buffer.len()) + }; + + let new_point = map.clip_point(offset.to_display_point(&map), Bias::Left); + if new_point == display_point { + break; + } + display_point = new_point; + } + return display_point; + }; + + for _ in 0..times { + let point = map.display_point_to_point(display_point, Bias::Left); + let Some(excerpt) = map.buffer_snapshot.excerpt_containing(point..point) else { + return display_point; + }; + let next_point = match (direction, is_start) { + (Direction::Prev, true) => { + let mut start = excerpt.start_anchor().to_display_point(&map); + if start >= display_point && start.row() > DisplayRow(0) { + let Some(excerpt) = map.buffer_snapshot.excerpt_before(excerpt.id()) else { + return display_point; + }; + start = excerpt.start_anchor().to_display_point(&map); + } + start + } + (Direction::Prev, false) => { + let mut start = excerpt.start_anchor().to_display_point(&map); + if start.row() > DisplayRow(0) { + *start.row_mut() -= 1; + } + 
map.clip_point(start, Bias::Left) + } + (Direction::Next, true) => { + let mut end = excerpt.end_anchor().to_display_point(&map); + *end.row_mut() += 1; + map.clip_point(end, Bias::Right) + } + (Direction::Next, false) => { + let mut end = excerpt.end_anchor().to_display_point(&map); + *end.column_mut() = 0; + if end <= display_point { + *end.row_mut() += 1; + let point_end = map.display_point_to_point(end, Bias::Right); + let Some(excerpt) = + map.buffer_snapshot.excerpt_containing(point_end..point_end) + else { + return display_point; + }; + end = excerpt.end_anchor().to_display_point(&map); + *end.column_mut() = 0; + } + end + } + }; + if next_point == display_point { + break; + } + display_point = next_point; + } + + display_point +} + #[cfg(test)] mod test { - use crate::test::NeovimBackedTestContext; + use crate::{ + state::Mode, + test::{NeovimBackedTestContext, VimTestContext}, + }; + use editor::display_map::Inlay; use indoc::indoc; #[gpui::test] @@ -2118,6 +2607,103 @@ mod test { cx.shared_state().await.assert_eq("func boop(ˇ) {\n}"); } + #[gpui::test] + async fn test_unmatched_forward(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + // test it works with curly braces + cx.set_shared_state(indoc! {r"func (a string) { + do(something(with.anˇd_arrays[0, 2])) + }"}) + .await; + cx.simulate_shared_keystrokes("] }").await; + cx.shared_state() + .await + .assert_eq(indoc! {r"func (a string) { + do(something(with.and_arrays[0, 2])) + ˇ}"}); + + // test it works with brackets + cx.set_shared_state(indoc! {r"func (a string) { + do(somethiˇng(with.and_arrays[0, 2])) + }"}) + .await; + cx.simulate_shared_keystrokes("] )").await; + cx.shared_state() + .await + .assert_eq(indoc! {r"func (a string) { + do(something(with.and_arrays[0, 2])ˇ) + }"}); + + cx.set_shared_state(indoc! {r"func (a string) { a((b, cˇ))}"}) + .await; + cx.simulate_shared_keystrokes("] )").await; + cx.shared_state() + .await + .assert_eq(indoc! 
{r"func (a string) { a((b, c)ˇ)}"}); + + // test it works on immediate nesting + cx.set_shared_state("{ˇ {}{}}").await; + cx.simulate_shared_keystrokes("] }").await; + cx.shared_state().await.assert_eq("{ {}{}ˇ}"); + cx.set_shared_state("(ˇ ()())").await; + cx.simulate_shared_keystrokes("] )").await; + cx.shared_state().await.assert_eq("( ()()ˇ)"); + + // test it works on immediate nesting inside braces + cx.set_shared_state("{\n ˇ {()}\n}").await; + cx.simulate_shared_keystrokes("] }").await; + cx.shared_state().await.assert_eq("{\n {()}\nˇ}"); + cx.set_shared_state("(\n ˇ {()}\n)").await; + cx.simulate_shared_keystrokes("] )").await; + cx.shared_state().await.assert_eq("(\n {()}\nˇ)"); + } + + #[gpui::test] + async fn test_unmatched_backward(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + // test it works with curly braces + cx.set_shared_state(indoc! {r"func (a string) { + do(something(with.anˇd_arrays[0, 2])) + }"}) + .await; + cx.simulate_shared_keystrokes("[ {").await; + cx.shared_state() + .await + .assert_eq(indoc! {r"func (a string) ˇ{ + do(something(with.and_arrays[0, 2])) + }"}); + + // test it works with brackets + cx.set_shared_state(indoc! {r"func (a string) { + do(somethiˇng(with.and_arrays[0, 2])) + }"}) + .await; + cx.simulate_shared_keystrokes("[ (").await; + cx.shared_state() + .await + .assert_eq(indoc! 
{r"func (a string) { + doˇ(something(with.and_arrays[0, 2])) + }"}); + + // test it works on immediate nesting + cx.set_shared_state("{{}{} ˇ }").await; + cx.simulate_shared_keystrokes("[ {").await; + cx.shared_state().await.assert_eq("ˇ{{}{} }"); + cx.set_shared_state("(()() ˇ )").await; + cx.simulate_shared_keystrokes("[ (").await; + cx.shared_state().await.assert_eq("ˇ(()() )"); + + // test it works on immediate nesting inside braces + cx.set_shared_state("{\n {()} ˇ\n}").await; + cx.simulate_shared_keystrokes("[ {").await; + cx.shared_state().await.assert_eq("ˇ{\n {()} \n}"); + cx.set_shared_state("(\n {()} ˇ\n)").await; + cx.simulate_shared_keystrokes("[ (").await; + cx.shared_state().await.assert_eq("ˇ(\n {()} \n)"); + } + #[gpui::test] async fn test_matching_tags(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new_html(cx).await; @@ -2562,4 +3148,35 @@ mod test { }ˇ» "}); } + + #[gpui::test] + async fn test_clipping_with_inlay_hints(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! {" + struct Foo { + ˇ + } + "}, + Mode::Normal, + ); + + cx.update_editor(|editor, cx| { + let range = editor.selections.newest_anchor().range(); + let inlay_text = " field: int,\n field2: string\n field3: float"; + let inlay = Inlay::inline_completion(1, range.start, inlay_text); + editor.splice_inlays(vec![], vec![inlay], cx); + }); + + cx.simulate_keystrokes("j"); + cx.assert_state( + indoc! 
{" + struct Foo { + + ˇ} + "}, + Mode::Normal, + ); + } } diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 37a8115e33..df01f2affc 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -44,6 +44,8 @@ actions!( InsertLineAbove, InsertLineBelow, InsertAtPrevious, + JoinLines, + JoinLinesNoWhitespace, DeleteLeft, DeleteRight, ChangeToEndOfLine, @@ -53,7 +55,6 @@ actions!( ChangeCase, ConvertToUpperCase, ConvertToLowerCase, - JoinLines, ToggleComments, Undo, Redo, @@ -77,17 +78,17 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &DeleteLeft, cx| { vim.record_current_action(cx); - let times = vim.take_count(cx); + let times = Vim::take_count(cx); vim.delete_motion(Motion::Left, times, cx); }); Vim::action(editor, cx, |vim, _: &DeleteRight, cx| { vim.record_current_action(cx); - let times = vim.take_count(cx); + let times = Vim::take_count(cx); vim.delete_motion(Motion::Right, times, cx); }); Vim::action(editor, cx, |vim, _: &ChangeToEndOfLine, cx| { vim.start_recording(cx); - let times = vim.take_count(cx); + let times = Vim::take_count(cx); vim.change_motion( Motion::EndOfLine { display_lines: false, @@ -98,7 +99,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { }); Vim::action(editor, cx, |vim, _: &DeleteToEndOfLine, cx| { vim.record_current_action(cx); - let times = vim.take_count(cx); + let times = Vim::take_count(cx); vim.delete_motion( Motion::EndOfLine { display_lines: false, @@ -108,29 +109,15 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { ); }); Vim::action(editor, cx, |vim, _: &JoinLines, cx| { - vim.record_current_action(cx); - let mut times = vim.take_count(cx).unwrap_or(1); - if vim.mode.is_visual() { - times = 1; - } else if times > 1 { - // 2J joins two lines together (same as J or 1J) - times -= 1; - } + vim.join_lines_impl(true, cx); + }); - vim.update_editor(cx, |_, editor, cx| { - editor.transact(cx, |editor, cx| 
{ - for _ in 0..times { - editor.join_lines(&Default::default(), cx) - } - }) - }); - if vim.mode.is_visual() { - vim.switch_mode(Mode::Normal, true, cx) - } + Vim::action(editor, cx, |vim, _: &JoinLinesNoWhitespace, cx| { + vim.join_lines_impl(false, cx); }); Vim::action(editor, cx, |vim, _: &Undo, cx| { - let times = vim.take_count(cx); + let times = Vim::take_count(cx); vim.update_editor(cx, |_, editor, cx| { for _ in 0..times.unwrap_or(1) { editor.undo(&editor::actions::Undo, cx); @@ -138,7 +125,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { }); }); Vim::action(editor, cx, |vim, _: &Redo, cx| { - let times = vim.take_count(cx); + let times = Vim::take_count(cx); vim.update_editor(cx, |_, editor, cx| { for _ in 0..times.unwrap_or(1) { editor.redo(&editor::actions::Redo, cx); @@ -170,6 +157,9 @@ impl Vim { Some(Operator::Indent) => self.indent_motion(motion, times, IndentDirection::In, cx), Some(Operator::Rewrap) => self.rewrap_motion(motion, times, cx), Some(Operator::Outdent) => self.indent_motion(motion, times, IndentDirection::Out, cx), + Some(Operator::AutoIndent) => { + self.indent_motion(motion, times, IndentDirection::Auto, cx) + } Some(Operator::Lowercase) => { self.change_case_motion(motion, times, CaseTarget::Lowercase, cx) } @@ -202,6 +192,9 @@ impl Vim { Some(Operator::Outdent) => { self.indent_object(object, around, IndentDirection::Out, cx) } + Some(Operator::AutoIndent) => { + self.indent_object(object, around, IndentDirection::Auto, cx) + } Some(Operator::Rewrap) => self.rewrap_object(object, around, cx), Some(Operator::Lowercase) => { self.change_case_object(object, around, CaseTarget::Lowercase, cx) @@ -395,8 +388,30 @@ impl Vim { }); } + fn join_lines_impl(&mut self, insert_whitespace: bool, cx: &mut ViewContext) { + self.record_current_action(cx); + let mut times = Vim::take_count(cx).unwrap_or(1); + if self.mode.is_visual() { + times = 1; + } else if times > 1 { + // 2J joins two lines together (same as J or 1J) + 
times -= 1; + } + + self.update_editor(cx, |_, editor, cx| { + editor.transact(cx, |editor, cx| { + for _ in 0..times { + editor.join_lines_impl(insert_whitespace, cx) + } + }) + }); + if self.mode.is_visual() { + self.switch_mode(Mode::Normal, true, cx) + } + } + fn yank_line(&mut self, _: &YankLine, cx: &mut ViewContext) { - let count = self.take_count(cx); + let count = Vim::take_count(cx); self.yank_motion(motion::Motion::CurrentLine, count, cx) } @@ -416,7 +431,7 @@ impl Vim { } pub(crate) fn normal_replace(&mut self, text: Arc, cx: &mut ViewContext) { - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); self.stop_recording(cx); self.update_editor(cx, |_, editor, cx| { editor.transact(cx, |editor, cx| { diff --git a/crates/vim/src/normal/case.rs b/crates/vim/src/normal/case.rs index 2c591a1f1f..405185adf5 100644 --- a/crates/vim/src/normal/case.rs +++ b/crates/vim/src/normal/case.rs @@ -118,7 +118,7 @@ impl Vim { { self.record_current_action(cx); self.store_visual_marks(cx); - let count = self.take_count(cx).unwrap_or(1) as u32; + let count = Vim::take_count(cx).unwrap_or(1) as u32; self.update_editor(cx, |vim, editor, cx| { let mut ranges = Vec::new(); @@ -145,6 +145,8 @@ impl Vim { cursor_positions.push(selection.start..selection.start); } } + + Mode::HelixNormal => {} Mode::Insert | Mode::Normal | Mode::Replace => { let start = selection.start; let mut end = start; diff --git a/crates/vim/src/normal/change.rs b/crates/vim/src/normal/change.rs index 59b5d3cb3d..ffa0ec8b96 100644 --- a/crates/vim/src/normal/change.rs +++ b/crates/vim/src/normal/change.rs @@ -77,6 +77,7 @@ impl Vim { }); vim.copy_selections_content(editor, motion.linewise(), cx); editor.insert("", cx); + editor.refresh_inline_completion(true, false, cx); }); }); @@ -101,6 +102,7 @@ impl Vim { if objects_found { vim.copy_selections_content(editor, false, cx); editor.insert("", cx); + editor.refresh_inline_completion(true, false, cx); } }); }); diff 
--git a/crates/vim/src/normal/delete.rs b/crates/vim/src/normal/delete.rs index fee2ef56e1..0b9d3b6fcc 100644 --- a/crates/vim/src/normal/delete.rs +++ b/crates/vim/src/normal/delete.rs @@ -28,23 +28,27 @@ impl Vim { original_columns.insert(selection.id, original_head.column()); motion.expand_selection(map, selection, times, true, &text_layout_details); + let start_point = selection.start.to_point(map); + let next_line = map + .buffer_snapshot + .clip_point(Point::new(start_point.row + 1, 0), Bias::Left) + .to_display_point(map); match motion { // Motion::NextWordStart on an empty line should delete it. - Motion::NextWordStart { .. } => { + Motion::NextWordStart { .. } if selection.is_empty() && map .buffer_snapshot - .line_len(MultiBufferRow(selection.start.to_point(map).row)) - == 0 - { - selection.end = map - .buffer_snapshot - .clip_point( - Point::new(selection.start.to_point(map).row + 1, 0), - Bias::Left, - ) - .to_display_point(map) - } + .line_len(MultiBufferRow(start_point.row)) + == 0 => + { + selection.end = next_line + } + // Sentence motions, when done from start of line, include the newline + Motion::SentenceForward | Motion::SentenceBackward + if selection.start.column() == 0 => + { + selection.end = next_line } Motion::EndOfDocument {} => { // Deleting until the end of the document includes the last line, including @@ -72,6 +76,7 @@ impl Vim { selection.collapse_to(cursor, selection.goal) }); }); + editor.refresh_inline_completion(true, false, cx); }); }); } @@ -151,6 +156,7 @@ impl Vim { selection.collapse_to(cursor, selection.goal) }); }); + editor.refresh_inline_completion(true, false, cx); }); }); } @@ -602,4 +608,62 @@ mod test { cx.simulate("d t x", "ˇax").await.assert_matches(); cx.simulate("d t x", "aˇx").await.assert_matches(); } + + #[gpui::test] + async fn test_delete_sentence(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.simulate( + "d )", + indoc! {" + Fiˇrst. Second. Third. + Fourth. 
+ "}, + ) + .await + .assert_matches(); + + cx.simulate( + "d )", + indoc! {" + First. Secˇond. Third. + Fourth. + "}, + ) + .await + .assert_matches(); + + // Two deletes + cx.simulate( + "d ) d )", + indoc! {" + First. Second. Thirˇd. + Fourth. + "}, + ) + .await + .assert_matches(); + + // Should delete whole line if done on first column + cx.simulate( + "d )", + indoc! {" + ˇFirst. + Fourth. + "}, + ) + .await + .assert_matches(); + + // Backwards it should also delete the whole first line + cx.simulate( + "d (", + indoc! {" + First. + ˇSecond. + Fourth. + "}, + ) + .await + .assert_matches(); + } } diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index ec24064b31..ca300fc1be 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -26,13 +26,13 @@ impl_actions!(vim, [Increment, Decrement]); pub fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, action: &Increment, cx| { vim.record_current_action(cx); - let count = vim.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); let step = if action.step { 1 } else { 0 }; vim.increment(count as i64, step, cx) }); Vim::action(editor, cx, |vim, action: &Decrement, cx| { vim.record_current_action(cx); - let count = vim.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); let step = if action.step { -1 } else { 0 }; vim.increment(-(count as i64), step, cx) }); diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index feb060d594..8d49a6802c 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -25,7 +25,7 @@ impl Vim { pub fn paste(&mut self, action: &Paste, cx: &mut ViewContext) { self.record_current_action(cx); self.store_visual_marks(cx); - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); self.update_editor(cx, |vim, editor, cx| { let text_layout_details = 
editor.text_layout_details(cx); diff --git a/crates/vim/src/normal/repeat.rs b/crates/vim/src/normal/repeat.rs index c89b63ecc6..6dceca4f8b 100644 --- a/crates/vim/src/normal/repeat.rs +++ b/crates/vim/src/normal/repeat.rs @@ -134,7 +134,11 @@ impl Replayer { let Ok(workspace) = handle.downcast::() else { return; }; - let Some(editor) = workspace.read(cx).active_item_as::(cx) else { + let Some(editor) = workspace + .read(cx) + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + else { return; }; editor.update(cx, |editor, cx| { @@ -158,7 +162,7 @@ impl Vim { } pub(crate) fn replay_register(&mut self, mut register: char, cx: &mut ViewContext) { - let mut count = self.take_count(cx).unwrap_or(1); + let mut count = Vim::take_count(cx).unwrap_or(1); self.clear_operator(cx); let globals = Vim::globals(cx); @@ -184,7 +188,7 @@ impl Vim { } pub(crate) fn repeat(&mut self, from_insert_mode: bool, cx: &mut ViewContext) { - let count = self.take_count(cx); + let count = Vim::take_count(cx); let Some((mut actions, selection, mode)) = Vim::update_globals(cx, |globals, _| { let actions = globals.recorded_actions.clone(); if actions.is_empty() { diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 8d1443e633..3f71401e2e 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -53,7 +53,7 @@ impl Vim { cx: &mut ViewContext, by: fn(c: Option) -> ScrollAmount, ) { - let amount = by(self.take_count(cx).map(|c| c as f32)); + let amount = by(Vim::take_count(cx).map(|c| c as f32)); self.update_editor(cx, |_, editor, cx| { scroll_editor(editor, move_cursor, &amount, cx) }); diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 5d78c8937e..103d33f8af 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -120,7 +120,7 @@ impl Vim { } else { Direction::Next }; - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); let 
prior_selections = self.editor_selections(cx); pane.update(cx, |pane, cx| { if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() { @@ -226,7 +226,7 @@ impl Vim { pub fn move_to_match_internal(&mut self, direction: Direction, cx: &mut ViewContext) { let Some(pane) = self.pane(cx) else { return }; - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); let prior_selections = self.editor_selections(cx); let success = pane.update(cx, |pane, cx| { @@ -264,7 +264,7 @@ impl Vim { cx: &mut ViewContext, ) { let Some(pane) = self.pane(cx) else { return }; - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); let prior_selections = self.editor_selections(cx); let vim = cx.view().clone(); diff --git a/crates/vim/src/normal/substitute.rs b/crates/vim/src/normal/substitute.rs index dc27e2b219..c2b27227ca 100644 --- a/crates/vim/src/normal/substitute.rs +++ b/crates/vim/src/normal/substitute.rs @@ -9,7 +9,7 @@ actions!(vim, [Substitute, SubstituteLine]); pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &Substitute, cx| { vim.start_recording(cx); - let count = vim.take_count(cx); + let count = Vim::take_count(cx); vim.substitute(count, vim.mode == Mode::VisualLine, cx); }); @@ -18,7 +18,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { if matches!(vim.mode, Mode::VisualBlock | Mode::Visual) { vim.switch_mode(Mode::VisualLine, false, cx) } - let count = vim.take_count(cx); + let count = Vim::take_count(cx); vim.substitute(count, true, cx) }); } diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index 763f1a3d16..d23dc2f9b0 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -4,13 +4,14 @@ use crate::{ motion::Motion, object::Object, state::{Mode, Register}, - Vim, + Vim, VimSettings, }; use collections::HashMap; use editor::{ClipboardSelection, Editor}; use 
gpui::ViewContext; use language::Point; use multi_buffer::MultiBufferRow; +use settings::Settings; struct HighlightOnYank; @@ -154,9 +155,9 @@ impl Vim { // contains a newline (so that delete works as expected). We undo that change // here. let is_last_line = linewise - && end.row == buffer.max_buffer_row().0 + && end.row == buffer.max_row().0 && buffer.max_point().column > 0 - && start.row < buffer.max_buffer_row().0 + && start.row < buffer.max_row().0 && start == Point::new(start.row, buffer.line_len(MultiBufferRow(start.row))); if is_last_line { @@ -195,7 +196,8 @@ impl Vim { ) }); - if !is_yank || self.mode == Mode::Visual { + let highlight_duration = VimSettings::get_global(cx).highlight_on_yank_duration; + if !is_yank || self.mode == Mode::Visual || highlight_duration == 0 { return; } @@ -206,7 +208,7 @@ impl Vim { ); cx.spawn(|this, mut cx| async move { cx.background_executor() - .timer(Duration::from_millis(200)) + .timer(Duration::from_millis(highlight_duration)) .await; this.update(&mut cx, |editor, cx| { editor.clear_background_highlights::(cx) diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 7c1f2fdb4c..745d1adb78 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -1,6 +1,10 @@ use std::ops::Range; -use crate::{motion::right, state::Mode, Vim}; +use crate::{ + motion::right, + state::{Mode, Operator}, + Vim, +}; use editor::{ display_map::{DisplaySnapshot, ToDisplayPoint}, movement::{self, FindRange}, @@ -10,7 +14,7 @@ use editor::{ use itertools::Itertools; use gpui::{actions, impl_actions, ViewContext}; -use language::{BufferSnapshot, CharKind, Point, Selection}; +use language::{BufferSnapshot, CharKind, Point, Selection, TextObject, TreeSitterOptions}; use multi_buffer::MultiBufferRow; use serde::Deserialize; @@ -21,6 +25,7 @@ pub enum Object { Paragraph, Quotes, BackQuotes, + AnyQuotes, DoubleQuotes, VerticalBars, Parentheses, @@ -28,7 +33,11 @@ pub enum Object { CurlyBrackets, AngleBrackets, Argument, + 
IndentObj { include_below: bool }, Tag, + Method, + Class, + Comment, } #[derive(Clone, Deserialize, PartialEq)] @@ -37,8 +46,14 @@ struct Word { #[serde(default)] ignore_punctuation: bool, } +#[derive(Clone, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +struct IndentObj { + #[serde(default)] + include_below: bool, +} -impl_actions!(vim, [Word]); +impl_actions!(vim, [Word, IndentObj]); actions!( vim, @@ -47,6 +62,7 @@ actions!( Paragraph, Quotes, BackQuotes, + AnyQuotes, DoubleQuotes, VerticalBars, Parentheses, @@ -54,7 +70,10 @@ actions!( CurlyBrackets, AngleBrackets, Argument, - Tag + Tag, + Method, + Class, + Comment ] ); @@ -79,6 +98,9 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &BackQuotes, cx| { vim.object(Object::BackQuotes, cx) }); + Vim::action(editor, cx, |vim, _: &AnyQuotes, cx| { + vim.object(Object::AnyQuotes, cx) + }); Vim::action(editor, cx, |vim, _: &DoubleQuotes, cx| { vim.object(Object::DoubleQuotes, cx) }); @@ -100,6 +122,25 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, |vim, _: &Argument, cx| { vim.object(Object::Argument, cx) }); + Vim::action(editor, cx, |vim, _: &Method, cx| { + vim.object(Object::Method, cx) + }); + Vim::action(editor, cx, |vim, _: &Class, cx| { + vim.object(Object::Class, cx) + }); + Vim::action(editor, cx, |vim, _: &Comment, cx| { + if !matches!(vim.active_operator(), Some(Operator::Object { .. 
})) { + vim.push_operator(Operator::Object { around: true }, cx); + } + vim.object(Object::Comment, cx) + }); + Vim::action( + editor, + cx, + |vim, &IndentObj { include_below }: &IndentObj, cx| { + vim.object(Object::IndentObj { include_below }, cx) + }, + ); } impl Vim { @@ -107,7 +148,7 @@ impl Vim { match self.mode { Mode::Normal => self.normal_object(object, cx), Mode::Visual | Mode::VisualLine | Mode::VisualBlock => self.visual_object(object, cx), - Mode::Insert | Mode::Replace => { + Mode::Insert | Mode::Replace | Mode::HelixNormal => { // Shouldn't execute a text object in insert mode. Ignoring } } @@ -120,6 +161,7 @@ impl Object { Object::Word { .. } | Object::Quotes | Object::BackQuotes + | Object::AnyQuotes | Object::VerticalBars | Object::DoubleQuotes => false, Object::Sentence @@ -129,30 +171,43 @@ impl Object { | Object::AngleBrackets | Object::CurlyBrackets | Object::SquareBrackets - | Object::Argument => true, + | Object::Argument + | Object::Method + | Object::Class + | Object::Comment + | Object::IndentObj { .. } => true, } } pub fn always_expands_both_ways(self) -> bool { match self { - Object::Word { .. } | Object::Sentence | Object::Paragraph | Object::Argument => false, + Object::Word { .. } + | Object::Sentence + | Object::Paragraph + | Object::Argument + | Object::IndentObj { .. } => false, Object::Quotes | Object::BackQuotes + | Object::AnyQuotes | Object::DoubleQuotes | Object::VerticalBars | Object::Parentheses | Object::SquareBrackets | Object::Tag + | Object::Method + | Object::Class + | Object::Comment | Object::CurlyBrackets | Object::AngleBrackets => true, } } - pub fn target_visual_mode(self, current_mode: Mode) -> Mode { + pub fn target_visual_mode(self, current_mode: Mode, around: bool) -> Mode { match self { Object::Word { .. 
} | Object::Sentence | Object::Quotes + | Object::AnyQuotes | Object::BackQuotes | Object::DoubleQuotes => { if current_mode == Mode::VisualBlock { @@ -167,7 +222,16 @@ impl Object { | Object::AngleBrackets | Object::VerticalBars | Object::Tag - | Object::Argument => Mode::Visual, + | Object::Comment + | Object::Argument + | Object::IndentObj { .. } => Mode::Visual, + Object::Method | Object::Class => { + if around { + Mode::VisualLine + } else { + Mode::Visual + } + } Object::Paragraph => Mode::VisualLine, } } @@ -195,6 +259,35 @@ impl Object { Object::BackQuotes => { surrounding_markers(map, relative_to, around, self.is_multiline(), '`', '`') } + Object::AnyQuotes => { + let quote_types = ['\'', '"', '`']; // Types of quotes to handle + let relative_offset = relative_to.to_offset(map, Bias::Left) as isize; + + // Find the closest matching quote range + quote_types + .iter() + .flat_map(|"e| { + // Get ranges for each quote type + surrounding_markers( + map, + relative_to, + around, + self.is_multiline(), + quote, + quote, + ) + }) + .min_by_key(|range| { + // Calculate proximity of ranges to the cursor + let start_distance = (relative_offset + - range.start.to_offset(map, Bias::Left) as isize) + .abs(); + let end_distance = (relative_offset + - range.end.to_offset(map, Bias::Right) as isize) + .abs(); + start_distance + end_distance + }) + } Object::DoubleQuotes => { surrounding_markers(map, relative_to, around, self.is_multiline(), '"', '"') } @@ -218,7 +311,35 @@ impl Object { Object::AngleBrackets => { surrounding_markers(map, relative_to, around, self.is_multiline(), '<', '>') } + Object::Method => text_object( + map, + relative_to, + if around { + TextObject::AroundFunction + } else { + TextObject::InsideFunction + }, + ), + Object::Comment => text_object( + map, + relative_to, + if around { + TextObject::AroundComment + } else { + TextObject::InsideComment + }, + ), + Object::Class => text_object( + map, + relative_to, + if around { + 
TextObject::AroundClass + } else { + TextObject::InsideClass + }, + ), Object::Argument => argument(map, relative_to, around), + Object::IndentObj { include_below } => indent(map, relative_to, around, include_below), } } @@ -420,6 +541,51 @@ fn around_next_word( Some(start..end) } +fn text_object( + map: &DisplaySnapshot, + relative_to: DisplayPoint, + target: TextObject, +) -> Option> { + let snapshot = &map.buffer_snapshot; + let offset = relative_to.to_offset(map, Bias::Left); + + let excerpt = snapshot.excerpt_containing(offset..offset)?; + let buffer = excerpt.buffer(); + let offset = excerpt.map_offset_to_buffer(offset); + + let mut matches: Vec> = buffer + .text_object_ranges(offset..offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| (r.end - r.start)); + if let Some(buffer_range) = matches.first() { + let range = excerpt.map_range_from_buffer(buffer_range.clone()); + return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); + } + + let around = target.around()?; + let mut matches: Vec> = buffer + .text_object_ranges(offset..offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == around { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| (r.end - r.start)); + let around_range = matches.first()?; + + let mut matches: Vec> = buffer + .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.start); + if let Some(buffer_range) = matches.first() { + if !buffer_range.is_empty() { + let range = excerpt.map_range_from_buffer(buffer_range.clone()); + return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); + } + } + let buffer_range = excerpt.map_range_from_buffer(around_range.clone()); + return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map)); 
+} + fn argument( map: &DisplaySnapshot, relative_to: DisplayPoint, @@ -569,6 +735,58 @@ fn argument( } } +fn indent( + map: &DisplaySnapshot, + relative_to: DisplayPoint, + around: bool, + include_below: bool, +) -> Option> { + let point = relative_to.to_point(map); + let row = point.row; + + let desired_indent = map.line_indent_for_buffer_row(MultiBufferRow(row)); + + // Loop backwards until we find a non-blank line with less indent + let mut start_row = row; + for prev_row in (0..row).rev() { + let indent = map.line_indent_for_buffer_row(MultiBufferRow(prev_row)); + if indent.is_line_empty() { + continue; + } + if indent.spaces < desired_indent.spaces || indent.tabs < desired_indent.tabs { + if around { + // When around is true, include the first line with less indent + start_row = prev_row; + } + break; + } + start_row = prev_row; + } + + // Loop forwards until we find a non-blank line with less indent + let mut end_row = row; + let max_rows = map.buffer_snapshot.max_row().0; + for next_row in (row + 1)..=max_rows { + let indent = map.line_indent_for_buffer_row(MultiBufferRow(next_row)); + if indent.is_line_empty() { + continue; + } + if indent.spaces < desired_indent.spaces || indent.tabs < desired_indent.tabs { + if around && include_below { + // When around is true and including below, include this line + end_row = next_row; + } + break; + } + end_row = next_row; + } + + let end_len = map.buffer_snapshot.line_len(MultiBufferRow(end_row)); + let start = map.point_to_display_point(Point::new(start_row, 0), Bias::Right); + let end = map.point_to_display_point(Point::new(end_row, end_len), Bias::Left); + Some(start..end) +} + fn sentence( map: &DisplaySnapshot, relative_to: DisplayPoint, @@ -781,13 +999,13 @@ pub fn start_of_paragraph(map: &DisplaySnapshot, display_point: DisplayPoint) -> /// The trailing newline is excluded from the paragraph. 
pub fn end_of_paragraph(map: &DisplaySnapshot, display_point: DisplayPoint) -> DisplayPoint { let point = display_point.to_point(map); - if point.row == map.max_buffer_row().0 { + if point.row == map.buffer_snapshot.max_row().0 { return map.max_point(); } let is_current_line_blank = map.buffer_snapshot.is_line_blank(MultiBufferRow(point.row)); - for row in point.row + 1..map.max_buffer_row().0 + 1 { + for row in point.row + 1..map.buffer_snapshot.max_row().0 + 1 { let blank = map.buffer_snapshot.is_line_blank(MultiBufferRow(row)); if blank != is_current_line_blank { let previous_row = row - 1; @@ -1407,7 +1625,7 @@ mod test { // Generic arguments cx.set_state("fn boop() {}", Mode::Normal); - cx.simulate_keystrokes("v i g"); + cx.simulate_keystrokes("v i a"); cx.assert_state("fn boop<«A: Debugˇ», B>() {}", Mode::Visual); // Function arguments @@ -1415,11 +1633,11 @@ mod test { "fn boop(ˇarg_a: (Tuple, Of, Types), arg_b: String) {}", Mode::Normal, ); - cx.simulate_keystrokes("d a g"); + cx.simulate_keystrokes("d a a"); cx.assert_state("fn boop(ˇarg_b: String) {}", Mode::Normal); cx.set_state("std::namespace::test(\"strinˇg\", a.b.c())", Mode::Normal); - cx.simulate_keystrokes("v a g"); + cx.simulate_keystrokes("v a a"); cx.assert_state("std::namespace::test(«\"string\", ˇ»a.b.c())", Mode::Visual); // Tuple, vec, and array arguments @@ -1427,37 +1645,125 @@ mod test { "fn boop(arg_a: (Tuple, Ofˇ, Types), arg_b: String) {}", Mode::Normal, ); - cx.simulate_keystrokes("c i g"); + cx.simulate_keystrokes("c i a"); cx.assert_state( "fn boop(arg_a: (Tuple, ˇ, Types), arg_b: String) {}", Mode::Insert, ); cx.set_state("let a = (test::call(), 'p', my_macro!{ˇ});", Mode::Normal); - cx.simulate_keystrokes("c a g"); + cx.simulate_keystrokes("c a a"); cx.assert_state("let a = (test::call(), 'p'ˇ);", Mode::Insert); cx.set_state("let a = [test::call(ˇ), 300];", Mode::Normal); - cx.simulate_keystrokes("c i g"); + cx.simulate_keystrokes("c i a"); cx.assert_state("let a = [ˇ, 300];", 
Mode::Insert); cx.set_state( "let a = vec![Vec::new(), vecˇ![test::call(), 300]];", Mode::Normal, ); - cx.simulate_keystrokes("c a g"); + cx.simulate_keystrokes("c a a"); cx.assert_state("let a = vec![Vec::new()ˇ];", Mode::Insert); // Cursor immediately before / after brackets cx.set_state("let a = [test::call(first_arg)ˇ]", Mode::Normal); - cx.simulate_keystrokes("v i g"); + cx.simulate_keystrokes("v i a"); cx.assert_state("let a = [«test::call(first_arg)ˇ»]", Mode::Visual); cx.set_state("let a = [test::callˇ(first_arg)]", Mode::Normal); - cx.simulate_keystrokes("v i g"); + cx.simulate_keystrokes("v i a"); cx.assert_state("let a = [«test::call(first_arg)ˇ»]", Mode::Visual); } + #[gpui::test] + async fn test_indent_object(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Base use case + cx.set_state( + indoc! {" + fn boop() { + // Comment + baz();ˇ + + loop { + bar(1); + bar(2); + } + + result + } + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v i i"); + cx.assert_state( + indoc! {" + fn boop() { + « // Comment + baz(); + + loop { + bar(1); + bar(2); + } + + resultˇ» + } + "}, + Mode::Visual, + ); + + // Around indent (include line above) + cx.set_state( + indoc! {" + const ABOVE: str = true; + fn boop() { + + hello(); + worˇld() + } + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a i"); + cx.assert_state( + indoc! {" + const ABOVE: str = true; + «fn boop() { + + hello(); + world()ˇ» + } + "}, + Mode::Visual, + ); + + // Around indent (include line above & below) + cx.set_state( + indoc! {" + const ABOVE: str = true; + fn boop() { + hellˇo(); + world() + + } + const BELOW: str = true; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("c a shift-i"); + cx.assert_state( + indoc! 
{" + const ABOVE: str = true; + ˇ + const BELOW: str = true; + "}, + Mode::Insert, + ); + } + #[gpui::test] async fn test_delete_surrounding_character_objects(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; @@ -1482,6 +1788,120 @@ mod test { } } + #[gpui::test] + async fn test_anyquotes_object(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + const TEST_CASES: &[(&str, &str, &str, Mode)] = &[ + // Single quotes + ( + "c i q", + "This is a 'qˇuote' example.", + "This is a 'ˇ' example.", + Mode::Insert, + ), + ( + "c a q", + "This is a 'qˇuote' example.", + "This is a ˇexample.", + Mode::Insert, + ), + ( + "d i q", + "This is a 'qˇuote' example.", + "This is a 'ˇ' example.", + Mode::Normal, + ), + ( + "d a q", + "This is a 'qˇuote' example.", + "This is a ˇexample.", + Mode::Normal, + ), + // Double quotes + ( + "c i q", + "This is a \"qˇuote\" example.", + "This is a \"ˇ\" example.", + Mode::Insert, + ), + ( + "c a q", + "This is a \"qˇuote\" example.", + "This is a ˇexample.", + Mode::Insert, + ), + ( + "d i q", + "This is a \"qˇuote\" example.", + "This is a \"ˇ\" example.", + Mode::Normal, + ), + ( + "d a q", + "This is a \"qˇuote\" example.", + "This is a ˇexample.", + Mode::Normal, + ), + // Back quotes + ( + "c i q", + "This is a `qˇuote` example.", + "This is a `ˇ` example.", + Mode::Insert, + ), + ( + "c a q", + "This is a `qˇuote` example.", + "This is a ˇexample.", + Mode::Insert, + ), + ( + "d i q", + "This is a `qˇuote` example.", + "This is a `ˇ` example.", + Mode::Normal, + ), + ( + "d a q", + "This is a `qˇuote` example.", + "This is a ˇexample.", + Mode::Normal, + ), + ]; + + for (keystrokes, initial_state, expected_state, expected_mode) in TEST_CASES { + cx.set_state(initial_state, Mode::Normal); + + cx.simulate_keystrokes(keystrokes); + + cx.assert_state(expected_state, *expected_mode); + } + + const INVALID_CASES: &[(&str, &str, Mode)] = &[ + ("c i q", "this is a 'qˇuote 
example.", Mode::Normal), // Missing closing simple quote + ("c a q", "this is a 'qˇuote example.", Mode::Normal), // Missing closing simple quote + ("d i q", "this is a 'qˇuote example.", Mode::Normal), // Missing closing simple quote + ("d a q", "this is a 'qˇuote example.", Mode::Normal), // Missing closing simple quote + ("c i q", "this is a \"qˇuote example.", Mode::Normal), // Missing closing double quote + ("c a q", "this is a \"qˇuote example.", Mode::Normal), // Missing closing double quote + ("d i q", "this is a \"qˇuote example.", Mode::Normal), // Missing closing double quote + ("d a q", "this is a \"qˇuote example.", Mode::Normal), // Missing closing back quote + ("c i q", "this is a `qˇuote example.", Mode::Normal), // Missing closing back quote + ("c a q", "this is a `qˇuote example.", Mode::Normal), // Missing closing back quote + ("d i q", "this is a `qˇuote example.", Mode::Normal), // Missing closing back quote + ("d a q", "this is a `qˇuote example.", Mode::Normal), // Missing closing back quote + ]; + + for (keystrokes, initial_state, mode) in INVALID_CASES { + cx.set_state(initial_state, Mode::Normal); + + cx.simulate_keystrokes(keystrokes); + + cx.assert_state(initial_state, *mode); + } + } + #[gpui::test] async fn test_tags(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new_html(cx).await; diff --git a/crates/vim/src/replace.rs b/crates/vim/src/replace.rs index 753eec0971..8b84849043 100644 --- a/crates/vim/src/replace.rs +++ b/crates/vim/src/replace.rs @@ -22,7 +22,7 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { if vim.mode != Mode::Replace { return; } - let count = vim.take_count(cx); + let count = Vim::take_count(cx); vim.undo_replace(count, cx) }); } diff --git a/crates/vim/src/rewrap.rs b/crates/vim/src/rewrap.rs index db54c4ed57..1ef4a3fc03 100644 --- a/crates/vim/src/rewrap.rs +++ b/crates/vim/src/rewrap.rs @@ -10,7 +10,7 @@ actions!(vim, [Rewrap]); pub(crate) fn register(editor: &mut Editor, cx: &mut 
ViewContext) { Vim::action(editor, cx, |vim, _: &Rewrap, cx| { vim.record_current_action(cx); - vim.take_count(cx); + Vim::take_count(cx); vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 510ed6557d..e93eeef404 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -26,6 +26,7 @@ pub enum Mode { Visual, VisualLine, VisualBlock, + HelixNormal, } impl Display for Mode { @@ -37,6 +38,7 @@ impl Display for Mode { Mode::Visual => write!(f, "VISUAL"), Mode::VisualLine => write!(f, "VISUAL LINE"), Mode::VisualBlock => write!(f, "VISUAL BLOCK"), + Mode::HelixNormal => write!(f, "HELIX NORMAL"), } } } @@ -46,6 +48,7 @@ impl Mode { match self { Mode::Normal | Mode::Insert | Mode::Replace => false, Mode::Visual | Mode::VisualLine | Mode::VisualBlock => true, + Mode::HelixNormal => false, } } } @@ -72,6 +75,7 @@ pub enum Operator { Jump { line: bool }, Indent, Outdent, + AutoIndent, Rewrap, Lowercase, Uppercase, @@ -150,6 +154,11 @@ pub struct VimGlobals { pub dot_recording: bool, pub dot_replaying: bool, + /// pre_count is the number before an operator is specified (3 in 3d2d) + pub pre_count: Option, + /// post_count is the number after an operator is specified (2 in 3d2d) + pub post_count: Option, + pub stop_recording_after_next_action: bool, pub ignore_current_insertion: bool, pub recorded_count: Option, @@ -460,6 +469,7 @@ impl Operator { Operator::Jump { line: true } => "'", Operator::Jump { line: false } => "`", Operator::Indent => ">", + Operator::AutoIndent => "eq", Operator::Rewrap => "gq", Operator::Outdent => "<", Operator::Uppercase => "gU", @@ -480,6 +490,7 @@ impl Operator { Operator::Literal { prefix: Some(prefix), } => format!("^V{prefix}"), + Operator::AutoIndent => "=".to_string(), _ => self.id().to_string(), } } @@ -505,6 +516,7 @@ impl Operator { | Operator::Rewrap | Operator::Indent | Operator::Outdent + | 
Operator::AutoIndent | Operator::Lowercase | Operator::Uppercase | Operator::Object { .. } diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index 88bcb6a2e1..719a147062 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -35,7 +35,7 @@ impl Vim { cx: &mut ViewContext, ) { self.stop_recording(cx); - let count = self.take_count(cx); + let count = Vim::take_count(cx); let mode = self.mode; self.update_editor(cx, |_, editor, cx| { let text_layout_details = editor.text_layout_details(cx); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 947353e2d3..25488c9146 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -367,6 +367,46 @@ async fn test_join_lines(cx: &mut gpui::TestAppContext) { two three fourˇ five six "}); + + cx.set_shared_state(indoc! {" + ˇone + two + three + four + five + six + "}) + .await; + cx.simulate_shared_keystrokes("g shift-j").await; + cx.shared_state().await.assert_eq(indoc! {" + oneˇtwo + three + four + five + six + "}); + cx.simulate_shared_keystrokes("3 g shift-j").await; + cx.shared_state().await.assert_eq(indoc! {" + onetwothreeˇfour + five + six + "}); + + cx.set_shared_state(indoc! {" + ˇone + two + three + four + five + six + "}) + .await; + cx.simulate_shared_keystrokes("j v 3 j g shift-j").await; + cx.shared_state().await.assert_eq(indoc! 
{" + one + twothreefourˇfive + six + "}); } #[cfg(target_os = "macos")] diff --git a/crates/vim/src/test/neovim_connection.rs b/crates/vim/src/test/neovim_connection.rs index a2ab1f3972..a0a2343bdf 100644 --- a/crates/vim/src/test/neovim_connection.rs +++ b/crates/vim/src/test/neovim_connection.rs @@ -442,6 +442,7 @@ impl NeovimConnection { } Mode::Insert | Mode::Normal | Mode::Replace => selections .push(Point::new(selection_row, selection_col)..Point::new(cursor_row, cursor_col)), + Mode::HelixNormal => unreachable!(), } let ranges = encode_ranges(&text, &selections); diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 77fc7db9d6..5e64d1c93e 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -6,6 +6,7 @@ mod test; mod change_list; mod command; mod digraph; +mod helix; mod indent; mod insert; mod mode_indicator; @@ -25,8 +26,8 @@ use editor::{ Anchor, Bias, Editor, EditorEvent, EditorMode, ToPoint, }; use gpui::{ - actions, impl_actions, Action, AppContext, Entity, EventEmitter, KeyContext, KeystrokeEvent, - Render, Subscription, View, ViewContext, WeakView, + actions, impl_actions, Action, AppContext, Axis, Entity, EventEmitter, KeyContext, + KeystrokeEvent, Render, Subscription, View, ViewContext, WeakView, }; use insert::{NormalBefore, TemporaryNormal}; use language::{CursorShape, Point, Selection, SelectionGoal, TransactionId}; @@ -40,15 +41,16 @@ use settings::{update_settings_file, Settings, SettingsSources, SettingsStore}; use state::{Mode, Operator, RecordedSelection, SearchState, VimGlobals}; use std::{mem, ops::Range, sync::Arc}; use surrounds::SurroundsType; -use ui::{IntoElement, VisualContext}; -use workspace::{self, Pane, Workspace}; +use theme::ThemeSettings; +use ui::{px, IntoElement, VisualContext}; +use vim_mode_setting::VimModeSetting; +use workspace::{self, Pane, ResizeIntent, Workspace}; use crate::state::ReplayableAction; -/// Whether or not to enable Vim mode. 
-/// -/// Default: false -pub struct VimModeSetting(pub bool); +/// Used to resize the current pane +#[derive(Clone, Deserialize, PartialEq)] +pub struct ResizePane(pub ResizeIntent); /// An Action to Switch between modes #[derive(Clone, Deserialize, PartialEq)] @@ -78,18 +80,23 @@ actions!( InnerObject, FindForward, FindBackward, - OpenDefaultKeymap + OpenDefaultKeymap, + MaximizePane, + ResetPaneSizes, ] ); // in the workspace namespace so it's not filtered out when vim is disabled. actions!(workspace, [ToggleVimMode]); -impl_actions!(vim, [SwitchMode, PushOperator, Number, SelectRegister]); +impl_actions!( + vim, + [ResizePane, SwitchMode, PushOperator, Number, SelectRegister] +); /// Initializes the `vim` crate. pub fn init(cx: &mut AppContext) { - VimModeSetting::register(cx); + vim_mode_setting::init(cx); VimSettings::register(cx); VimGlobals::register(cx); @@ -113,6 +120,51 @@ pub fn init(cx: &mut AppContext) { }); }); + workspace.register_action(|workspace, _: &ResetPaneSizes, cx| { + workspace.reset_pane_sizes(cx); + }); + + workspace.register_action(|workspace, _: &MaximizePane, cx| { + let pane = workspace.active_pane(); + let Some(size) = workspace.bounding_box_for_pane(&pane) else { + return; + }; + + let theme = ThemeSettings::get_global(cx); + let height = theme.buffer_font_size(cx) * theme.buffer_line_height.value(); + + let desired_size = if let Some(count) = Vim::take_count(cx) { + height * count + } else { + px(10000.) 
+ }; + workspace.resize_pane(Axis::Vertical, desired_size - size.size.height, cx) + }); + + workspace.register_action(|workspace, action: &ResizePane, cx| { + let count = Vim::take_count(cx).unwrap_or(1) as f32; + let theme = ThemeSettings::get_global(cx); + let Ok(font_id) = cx.text_system().font_id(&theme.buffer_font) else { + return; + }; + let Ok(width) = cx + .text_system() + .advance(font_id, theme.buffer_font_size(cx), 'm') + else { + return; + }; + let height = theme.buffer_font_size(cx) * theme.buffer_line_height.value(); + + let (axis, amount) = match action.0 { + ResizeIntent::Lengthen => (Axis::Vertical, height), + ResizeIntent::Shorten => (Axis::Vertical, height * -1.), + ResizeIntent::Widen => (Axis::Horizontal, width.width), + ResizeIntent::Narrow => (Axis::Horizontal, width.width * -1.), + }; + + workspace.resize_pane(axis, amount * count, cx); + }); + workspace.register_action(|workspace, _: &SearchSubmit, cx| { let vim = workspace .focused_pane(cx) @@ -135,7 +187,7 @@ pub(crate) struct VimAddon { impl editor::Addon for VimAddon { fn extend_key_context(&self, key_context: &mut KeyContext, cx: &AppContext) { - self.view.read(cx).extend_key_context(key_context) + self.view.read(cx).extend_key_context(key_context, cx) } fn to_any(&self) -> &dyn std::any::Any { @@ -150,11 +202,6 @@ pub(crate) struct Vim { pub temp_mode: bool, pub exit_temporary_mode: bool, - /// pre_count is the number before an operator is specified (3 in 3d2d) - pre_count: Option, - /// post_count is the number after an operator is specified (2 in 3d2d) - post_count: Option, - operator_stack: Vec, pub(crate) replacements: Vec<(Range, String)>, @@ -201,8 +248,6 @@ impl Vim { last_mode: Mode::Normal, temp_mode: false, exit_temporary_mode: false, - pre_count: None, - post_count: None, operator_stack: Vec::new(), replacements: Vec::new(), @@ -293,6 +338,7 @@ impl Vim { normal::register(editor, cx); insert::register(editor, cx); + helix::register(editor, cx); motion::register(editor, cx); 
command::register(editor, cx); replace::register(editor, cx); @@ -426,6 +472,7 @@ impl Vim { | Operator::Replace | Operator::Indent | Operator::Outdent + | Operator::AutoIndent | Operator::Lowercase | Operator::Uppercase | Operator::OppositeCase @@ -475,7 +522,7 @@ impl Vim { self.current_anchor.take(); } if mode != Mode::Insert && mode != Mode::Replace { - self.take_count(cx); + Vim::take_count(cx); } // Sync editor settings like clip mode @@ -555,36 +602,53 @@ impl Vim { }); } - fn take_count(&mut self, cx: &mut ViewContext) -> Option { + pub fn take_count(cx: &mut AppContext) -> Option { let global_state = cx.global_mut::(); if global_state.dot_replaying { return global_state.recorded_count; } - let count = if self.post_count.is_none() && self.pre_count.is_none() { + let count = if global_state.post_count.is_none() && global_state.pre_count.is_none() { return None; } else { - Some(self.post_count.take().unwrap_or(1) * self.pre_count.take().unwrap_or(1)) + Some( + global_state.post_count.take().unwrap_or(1) + * global_state.pre_count.take().unwrap_or(1), + ) }; if global_state.dot_recording { global_state.recorded_count = count; } - self.sync_vim_settings(cx); count } pub fn cursor_shape(&self) -> CursorShape { match self.mode { Mode::Normal => { - if self.operator_stack.is_empty() { - CursorShape::Block + if let Some(operator) = self.operator_stack.last() { + match operator { + // Navigation operators -> Block cursor + Operator::FindForward { .. } + | Operator::FindBackward { .. } + | Operator::Mark + | Operator::Jump { .. 
} + | Operator::Register + | Operator::RecordRegister + | Operator::ReplayRegister => CursorShape::Block, + + // All other operators -> Underline cursor + _ => CursorShape::Underline, + } } else { - CursorShape::Underline + // No operator active -> Block cursor + CursorShape::Block } } Mode::Replace => CursorShape::Underline, - Mode::Visual | Mode::VisualLine | Mode::VisualBlock => CursorShape::Block, + Mode::HelixNormal | Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { + CursorShape::Block + } Mode::Insert => CursorShape::Bar, } } @@ -598,9 +662,12 @@ impl Vim { true } } - Mode::Normal | Mode::Replace | Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { - false - } + Mode::Normal + | Mode::HelixNormal + | Mode::Replace + | Mode::Visual + | Mode::VisualLine + | Mode::VisualBlock => false, } } @@ -610,27 +677,31 @@ impl Vim { pub fn clip_at_line_ends(&self) -> bool { match self.mode { - Mode::Insert | Mode::Visual | Mode::VisualLine | Mode::VisualBlock | Mode::Replace => { - false - } + Mode::Insert + | Mode::Visual + | Mode::VisualLine + | Mode::VisualBlock + | Mode::Replace + | Mode::HelixNormal => false, Mode::Normal => true, } } - pub fn extend_key_context(&self, context: &mut KeyContext) { + pub fn extend_key_context(&self, context: &mut KeyContext, cx: &AppContext) { let mut mode = match self.mode { Mode::Normal => "normal", Mode::Visual | Mode::VisualLine | Mode::VisualBlock => "visual", Mode::Insert => "insert", Mode::Replace => "replace", + Mode::HelixNormal => "helix_normal", } .to_string(); let mut operator_id = "none"; let active_operator = self.active_operator(); - if active_operator.is_none() && self.pre_count.is_some() - || active_operator.is_some() && self.post_count.is_some() + if active_operator.is_none() && cx.global::().pre_count.is_some() + || active_operator.is_some() && cx.global::().post_count.is_some() { context.add("VimCount"); } @@ -841,18 +912,18 @@ impl Vim { fn push_count_digit(&mut self, number: usize, cx: &mut 
ViewContext) { if self.active_operator().is_some() { - let post_count = self.post_count.unwrap_or(0); + let post_count = Vim::globals(cx).post_count.unwrap_or(0); - self.post_count = Some( + Vim::globals(cx).post_count = Some( post_count .checked_mul(10) .and_then(|post_count| post_count.checked_add(number)) .unwrap_or(post_count), ) } else { - let pre_count = self.pre_count.unwrap_or(0); + let pre_count = Vim::globals(cx).pre_count.unwrap_or(0); - self.pre_count = Some( + Vim::globals(cx).pre_count = Some( pre_count .checked_mul(10) .and_then(|pre_count| pre_count.checked_add(number)) @@ -884,7 +955,7 @@ impl Vim { } fn clear_operator(&mut self, cx: &mut ViewContext) { - self.take_count(cx); + Vim::take_count(cx); self.selected_register.take(); self.operator_stack.clear(); self.sync_vim_settings(cx); @@ -951,7 +1022,7 @@ impl Vim { }) }); } - Mode::Insert | Mode::Replace => {} + Mode::Insert | Mode::Replace | Mode::HelixNormal => {} } } @@ -1104,6 +1175,15 @@ impl Vim { if self.mode == Mode::Replace { self.multi_replace(text, cx) } + + if self.mode == Mode::Normal { + self.update_editor(cx, |_, editor, cx| { + editor.accept_inline_completion( + &editor::actions::AcceptInlineCompletion {}, + cx, + ); + }); + } } } } @@ -1116,29 +1196,20 @@ impl Vim { editor.set_input_enabled(vim.editor_input_enabled()); editor.set_autoindent(vim.should_autoindent()); editor.selections.line_mode = matches!(vim.mode, Mode::VisualLine); - editor.set_inline_completions_enabled(matches!(vim.mode, Mode::Insert | Mode::Replace)); + + let enable_inline_completions = match vim.mode { + Mode::Insert | Mode::Replace => true, + Mode::Normal => editor + .inline_completion_provider() + .map_or(false, |provider| provider.show_completions_in_normal_mode()), + _ => false, + }; + editor.set_inline_completions_enabled(enable_inline_completions); }); cx.notify() } } -impl Settings for VimModeSetting { - const KEY: Option<&'static str> = Some("vim_mode"); - - type FileContent = Option; - - fn 
load(sources: SettingsSources, _: &mut AppContext) -> Result { - Ok(Self( - sources - .user - .or(sources.server) - .copied() - .flatten() - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?), - )) - } -} - /// Controls when to use system clipboard. #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] @@ -1158,6 +1229,7 @@ struct VimSettings { pub use_multiline_find: bool, pub use_smartcase_find: bool, pub custom_digraphs: HashMap>, + pub highlight_on_yank_duration: u64, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] @@ -1167,6 +1239,7 @@ struct VimSettingsContent { pub use_multiline_find: Option, pub use_smartcase_find: Option, pub custom_digraphs: Option>>, + pub highlight_on_yank_duration: Option, } impl Settings for VimSettings { diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 47aa618b5c..12d9337dfe 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use collections::HashMap; use editor::{ - display_map::{DisplaySnapshot, ToDisplayPoint}, + display_map::{DisplayRow, DisplaySnapshot, ToDisplayPoint}, movement, scroll::Autoscroll, Bias, DisplayPoint, Editor, ToOffset, @@ -36,6 +36,8 @@ actions!( SelectPrevious, SelectNextMatch, SelectPreviousMatch, + SelectSmallerSyntaxNode, + SelectLargerSyntaxNode, RestoreVisualSelection, VisualInsertEndOfLine, VisualInsertFirstNonWhiteSpace, @@ -63,7 +65,12 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.record_current_action(cx); vim.visual_delete(true, cx); }); - Vim::action(editor, cx, |vim, _: &VisualYank, cx| vim.visual_yank(cx)); + Vim::action(editor, cx, |vim, _: &VisualYank, cx| { + vim.visual_yank(false, cx) + }); + Vim::action(editor, cx, |vim, _: &VisualYankLine, cx| { + vim.visual_yank(true, cx) + }); Vim::action(editor, cx, Vim::select_next); Vim::action(editor, cx, Vim::select_previous); @@ -74,6 +81,24 @@ pub fn 
register(editor: &mut Editor, cx: &mut ViewContext) { vim.select_match(Direction::Prev, cx); }); + Vim::action(editor, cx, |vim, _: &SelectLargerSyntaxNode, cx| { + let count = Vim::take_count(cx).unwrap_or(1); + for _ in 0..count { + vim.update_editor(cx, |_, editor, cx| { + editor.select_larger_syntax_node(&Default::default(), cx); + }); + } + }); + + Vim::action(editor, cx, |vim, _: &SelectSmallerSyntaxNode, cx| { + let count = Vim::take_count(cx).unwrap_or(1); + for _ in 0..count { + vim.update_editor(cx, |_, editor, cx| { + editor.select_smaller_syntax_node(&Default::default(), cx); + }); + } + }); + Vim::action(editor, cx, |vim, _: &RestoreVisualSelection, cx| { let Some((stored_mode, reversed)) = vim.stored_visual_mode.take() else { return; @@ -308,7 +333,7 @@ impl Vim { if let Some(Operator::Object { around }) = self.active_operator() { self.pop_operator(cx); let current_mode = self.mode; - let target_mode = object.target_visual_mode(current_mode); + let target_mode = object.target_visual_mode(current_mode, around); if target_mode != current_mode { self.switch_mode(target_mode, true, cx); } @@ -443,8 +468,16 @@ impl Vim { *selection.end.column_mut() = map.line_len(selection.end.row()) } else if vim.mode != Mode::VisualLine { selection.start = DisplayPoint::new(selection.start.row(), 0); + selection.end = + map.next_line_boundary(selection.end.to_point(map)).1; if selection.end.row() == map.max_point().row() { - selection.end = map.max_point() + selection.end = map.max_point(); + if selection.start == selection.end { + let prev_row = + DisplayRow(selection.start.row().0.saturating_sub(1)); + selection.start = + DisplayPoint::new(prev_row, map.line_len(prev_row)); + } } else { *selection.end.row_mut() += 1; *selection.end.column_mut() = 0; @@ -478,10 +511,11 @@ impl Vim { self.switch_mode(Mode::Normal, true, cx); } - pub fn visual_yank(&mut self, cx: &mut ViewContext) { + pub fn visual_yank(&mut self, line_mode: bool, cx: &mut ViewContext) { 
self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { - let line_mode = editor.selections.line_mode; + let line_mode = line_mode || editor.selections.line_mode; + editor.selections.line_mode = line_mode; vim.yank_selections_content(editor, line_mode, cx); editor.change_selections(None, cx, |s| { s.move_with(|map, selection| { @@ -538,9 +572,8 @@ impl Vim { } pub fn select_next(&mut self, _: &SelectNext, cx: &mut ViewContext) { - let count = self - .take_count(cx) - .unwrap_or_else(|| if self.mode.is_visual() { 1 } else { 2 }); + let count = + Vim::take_count(cx).unwrap_or_else(|| if self.mode.is_visual() { 1 } else { 2 }); self.update_editor(cx, |_, editor, cx| { editor.set_clip_at_line_ends(false, cx); for _ in 0..count { @@ -556,9 +589,8 @@ impl Vim { } pub fn select_previous(&mut self, _: &SelectPrevious, cx: &mut ViewContext) { - let count = self - .take_count(cx) - .unwrap_or_else(|| if self.mode.is_visual() { 1 } else { 2 }); + let count = + Vim::take_count(cx).unwrap_or_else(|| if self.mode.is_visual() { 1 } else { 2 }); self.update_editor(cx, |_, editor, cx| { for _ in 0..count { if editor @@ -573,7 +605,7 @@ impl Vim { } pub fn select_match(&mut self, direction: Direction, cx: &mut ViewContext) { - let count = self.take_count(cx).unwrap_or(1); + let count = Vim::take_count(cx).unwrap_or(1); let Some(pane) = self.pane(cx) else { return; }; @@ -644,7 +676,7 @@ impl Vim { self.stop_recording(cx); self.visual_delete(false, cx) } - Some(Operator::Yank) => self.visual_yank(cx), + Some(Operator::Yank) => self.visual_yank(false, cx), _ => {} // Ignoring other operators } } @@ -1360,6 +1392,20 @@ mod test { }); } + #[gpui::test] + async fn test_shift_y(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! { + "The ˇquick brown\n" + }) + .await; + cx.simulate_shared_keystrokes("v i w shift-y").await; + cx.shared_clipboard().await.assert_eq(indoc! 
{ + "The quick brown\n" + }); + } + #[gpui::test] async fn test_gv(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_command_matching_lines.json b/crates/vim/test_data/test_command_matching_lines.json new file mode 100644 index 0000000000..450aae0de0 --- /dev/null +++ b/crates/vim/test_data/test_command_matching_lines.json @@ -0,0 +1,19 @@ +{"Put":{"state":"ˇa\nb\na\nb\na\n"}} +{"Key":":"} +{"Key":"g"} +{"Key":"/"} +{"Key":"a"} +{"Key":"/"} +{"Key":"d"} +{"Key":"enter"} +{"Get":{"state":"b\nb\nˇ","mode":"Normal"}} +{"Key":"u"} +{"Get":{"state":"ˇa\nb\na\nb\na\n","mode":"Normal"}} +{"Key":":"} +{"Key":"v"} +{"Key":"/"} +{"Key":"a"} +{"Key":"/"} +{"Key":"d"} +{"Key":"enter"} +{"Get":{"state":"a\na\nˇa","mode":"Normal"}} diff --git a/crates/vim/test_data/test_delete_sentence.json b/crates/vim/test_data/test_delete_sentence.json new file mode 100644 index 0000000000..ec8edfbbfd --- /dev/null +++ b/crates/vim/test_data/test_delete_sentence.json @@ -0,0 +1,22 @@ +{"Put":{"state":"Fiˇrst. Second. Third.\nFourth.\n"}} +{"Key":"d"} +{"Key":")"} +{"Get":{"state":"FiˇSecond. Third.\nFourth.\n","mode":"Normal"}} +{"Put":{"state":"First. Secˇond. Third.\nFourth.\n"}} +{"Key":"d"} +{"Key":")"} +{"Get":{"state":"First. SecˇThird.\nFourth.\n","mode":"Normal"}} +{"Put":{"state":"First. Second. Thirˇd.\nFourth.\n"}} +{"Key":"d"} +{"Key":")"} +{"Key":"d"} +{"Key":")"} +{"Get":{"state":"First. Second. 
Thˇi\n","mode":"Normal"}} +{"Put":{"state":"ˇFirst.\nFourth.\n"}} +{"Key":"d"} +{"Key":")"} +{"Get":{"state":"ˇFourth.\n","mode":"Normal"}} +{"Put":{"state":"First.\nˇSecond.\nFourth.\n"}} +{"Key":"d"} +{"Key":"("} +{"Get":{"state":"ˇSecond.\nFourth.\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_join_lines.json b/crates/vim/test_data/test_join_lines.json index b4bc5c30e1..55aa8b1dcb 100644 --- a/crates/vim/test_data/test_join_lines.json +++ b/crates/vim/test_data/test_join_lines.json @@ -11,3 +11,19 @@ {"Key":"j"} {"Key":"shift-j"} {"Get":{"state":"one\ntwo three fourˇ five\nsix\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\nfour\nfive\nsix\n"}} +{"Key":"g"} +{"Key":"shift-j"} +{"Get":{"state":"oneˇtwo\nthree\nfour\nfive\nsix\n","mode":"Normal"}} +{"Key":"3"} +{"Key":"g"} +{"Key":"shift-j"} +{"Get":{"state":"onetwothreeˇfour\nfive\nsix\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\nfour\nfive\nsix\n"}} +{"Key":"j"} +{"Key":"v"} +{"Key":"3"} +{"Key":"j"} +{"Key":"g"} +{"Key":"shift-j"} +{"Get":{"state":"one\ntwothreefourˇfive\nsix\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_shift_y.json b/crates/vim/test_data/test_shift_y.json new file mode 100644 index 0000000000..f68f1df18d --- /dev/null +++ b/crates/vim/test_data/test_shift_y.json @@ -0,0 +1,7 @@ +{"Put":{"state":"The ˇquick brown\n"}} +{"Key":"v"} +{"Key":"i"} +{"Key":"w"} +{"Key":"shift-y"} +{"Get":{"state":"ˇThe quick brown\n","mode":"Normal"}} +{"ReadRegister":{"name":"\"","value":"The quick brown\n"}} diff --git a/crates/vim/test_data/test_unmatched_backward.json b/crates/vim/test_data/test_unmatched_backward.json new file mode 100644 index 0000000000..bb3825dcd2 --- /dev/null +++ b/crates/vim/test_data/test_unmatched_backward.json @@ -0,0 +1,24 @@ +{"Put":{"state":"func (a string) {\n do(something(with.anˇd_arrays[0, 2]))\n}"}} +{"Key":"["} +{"Key":"{"} +{"Get":{"state":"func (a string) ˇ{\n do(something(with.and_arrays[0, 2]))\n}","mode":"Normal"}} 
+{"Put":{"state":"func (a string) {\n do(somethiˇng(with.and_arrays[0, 2]))\n}"}} +{"Key":"["} +{"Key":"("} +{"Get":{"state":"func (a string) {\n doˇ(something(with.and_arrays[0, 2]))\n}","mode":"Normal"}} +{"Put":{"state":"{{}{} ˇ }"}} +{"Key":"["} +{"Key":"{"} +{"Get":{"state":"ˇ{{}{} }","mode":"Normal"}} +{"Put":{"state":"(()() ˇ )"}} +{"Key":"["} +{"Key":"("} +{"Get":{"state":"ˇ(()() )","mode":"Normal"}} +{"Put":{"state":"{\n {()} ˇ\n}"}} +{"Key":"["} +{"Key":"{"} +{"Get":{"state":"ˇ{\n {()} \n}","mode":"Normal"}} +{"Put":{"state":"(\n {()} ˇ\n)"}} +{"Key":"["} +{"Key":"("} +{"Get":{"state":"ˇ(\n {()} \n)","mode":"Normal"}} diff --git a/crates/vim/test_data/test_unmatched_forward.json b/crates/vim/test_data/test_unmatched_forward.json new file mode 100644 index 0000000000..a6b4a38f29 --- /dev/null +++ b/crates/vim/test_data/test_unmatched_forward.json @@ -0,0 +1,28 @@ +{"Put":{"state":"func (a string) {\n do(something(with.anˇd_arrays[0, 2]))\n}"}} +{"Key":"]"} +{"Key":"}"} +{"Get":{"state":"func (a string) {\n do(something(with.and_arrays[0, 2]))\nˇ}","mode":"Normal"}} +{"Put":{"state":"func (a string) {\n do(somethiˇng(with.and_arrays[0, 2]))\n}"}} +{"Key":"]"} +{"Key":")"} +{"Get":{"state":"func (a string) {\n do(something(with.and_arrays[0, 2])ˇ)\n}","mode":"Normal"}} +{"Put":{"state":"func (a string) { a((b, cˇ))}"}} +{"Key":"]"} +{"Key":")"} +{"Get":{"state":"func (a string) { a((b, c)ˇ)}","mode":"Normal"}} +{"Put":{"state":"{ˇ {}{}}"}} +{"Key":"]"} +{"Key":"}"} +{"Get":{"state":"{ {}{}ˇ}","mode":"Normal"}} +{"Put":{"state":"(ˇ ()())"}} +{"Key":"]"} +{"Key":")"} +{"Get":{"state":"( ()()ˇ)","mode":"Normal"}} +{"Put":{"state":"{\n ˇ {()}\n}"}} +{"Key":"]"} +{"Key":"}"} +{"Get":{"state":"{\n {()}\nˇ}","mode":"Normal"}} +{"Put":{"state":"(\n ˇ {()}\n)"}} +{"Key":"]"} +{"Key":")"} +{"Get":{"state":"(\n {()}\nˇ)","mode":"Normal"}} diff --git a/crates/vim_mode_setting/Cargo.toml b/crates/vim_mode_setting/Cargo.toml new file mode 100644 index 
0000000000..0c009fdfd6 --- /dev/null +++ b/crates/vim_mode_setting/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "vim_mode_setting" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/vim_mode_setting.rs" + +[dependencies] +anyhow.workspace = true +gpui.workspace = true +settings.workspace = true diff --git a/crates/vim_mode_setting/LICENSE-GPL b/crates/vim_mode_setting/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/vim_mode_setting/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/vim_mode_setting/src/vim_mode_setting.rs b/crates/vim_mode_setting/src/vim_mode_setting.rs new file mode 100644 index 0000000000..072db138df --- /dev/null +++ b/crates/vim_mode_setting/src/vim_mode_setting.rs @@ -0,0 +1,36 @@ +//! Contains the [`VimModeSetting`] used to enable/disable Vim mode. +//! +//! This is in its own crate as we want other crates to be able to enable or +//! disable Vim mode without having to depend on the `vim` crate in its +//! entirety. + +use anyhow::Result; +use gpui::AppContext; +use settings::{Settings, SettingsSources}; + +/// Initializes the `vim_mode_setting` crate. +pub fn init(cx: &mut AppContext) { + VimModeSetting::register(cx); +} + +/// Whether or not to enable Vim mode. 
+/// +/// Default: false +pub struct VimModeSetting(pub bool); + +impl Settings for VimModeSetting { + const KEY: Option<&'static str> = Some("vim_mode"); + + type FileContent = Option; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + Ok(Self( + sources + .user + .or(sources.server) + .copied() + .flatten() + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?), + )) + } +} diff --git a/crates/welcome/Cargo.toml b/crates/welcome/Cargo.toml index 0db1af9252..3052f8b1df 100644 --- a/crates/welcome/Cargo.toml +++ b/crates/welcome/Cargo.toml @@ -17,21 +17,20 @@ test-support = [] [dependencies] anyhow.workspace = true client.workspace = true +copilot.workspace = true db.workspace = true -extensions_ui.workspace = true fuzzy.workspace = true gpui.workspace = true -inline_completion_button.workspace = true install_cli.workspace = true picker.workspace = true project.workspace = true schemars.workspace = true serde.workspace = true settings.workspace = true -theme_selector.workspace = true ui.workspace = true +telemetry.workspace = true util.workspace = true -vim.workspace = true +vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/welcome/src/base_keymap_picker.rs b/crates/welcome/src/base_keymap_picker.rs index 96a9df9c3c..fcffef91aa 100644 --- a/crates/welcome/src/base_keymap_picker.rs +++ b/crates/welcome/src/base_keymap_picker.rs @@ -1,5 +1,4 @@ use super::base_keymap_setting::BaseKeymap; -use client::telemetry::Telemetry; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ actions, AppContext, DismissEvent, EventEmitter, FocusableView, Render, Task, View, @@ -28,10 +27,9 @@ pub fn toggle( cx: &mut ViewContext, ) { let fs = workspace.app_state().fs.clone(); - let telemetry = workspace.client().telemetry().clone(); workspace.toggle_modal(cx, |cx| { BaseKeymapSelector::new( - BaseKeymapSelectorDelegate::new(cx.view().downgrade(), fs, telemetry, cx), + 
BaseKeymapSelectorDelegate::new(cx.view().downgrade(), fs, cx), cx, ) }); @@ -70,7 +68,6 @@ pub struct BaseKeymapSelectorDelegate { view: WeakView, matches: Vec, selected_index: usize, - telemetry: Arc, fs: Arc, } @@ -78,7 +75,6 @@ impl BaseKeymapSelectorDelegate { fn new( weak_view: WeakView, fs: Arc, - telemetry: Arc, cx: &mut ViewContext, ) -> Self { let base = BaseKeymap::get(None, cx); @@ -90,7 +86,6 @@ impl BaseKeymapSelectorDelegate { view: weak_view, matches: Vec::new(), selected_index, - telemetry, fs, } } @@ -127,11 +122,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { let background = cx.background_executor().clone(); let candidates = BaseKeymap::names() .enumerate() - .map(|(id, name)| StringMatchCandidate { - id, - char_bag: name.into(), - string: name.into(), - }) + .map(|(id, name)| StringMatchCandidate::new(id, name)) .collect::>(); cx.spawn(|this, mut cx| async move { @@ -173,8 +164,11 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { if let Some(selection) = self.matches.get(self.selected_index) { let base_keymap = BaseKeymap::from_names(&selection.string); - self.telemetry - .report_setting_event("keymap", base_keymap.to_string()); + telemetry::event!( + "Settings Changed", + setting = "keymap", + value = base_keymap.to_string() + ); update_settings_file::(self.fs.clone(), cx, move |setting, _| { *setting = Some(base_keymap) @@ -200,7 +194,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { &self, ix: usize, selected: bool, - _cx: &mut gpui::ViewContext>, + _cx: &mut ViewContext>, ) -> Option { let keymap_match = &self.matches[ix]; @@ -208,7 +202,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { ListItem::new(ix) .inset(true) .spacing(ListItemSpacing::Sparse) - .selected(selected) + .toggle_state(selected) .child(HighlightedLabel::new( keymap_match.string.clone(), keymap_match.positions.clone(), diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index c8d5bf6dfc..cf4886e1eb 100644 --- 
a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -5,14 +5,14 @@ mod multibuffer_hint; use client::{telemetry::Telemetry, TelemetrySettings}; use db::kvp::KEY_VALUE_STORE; use gpui::{ - actions, svg, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement, + actions, svg, Action, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement, ParentElement, Render, Styled, Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowContext, }; use settings::{Settings, SettingsStore}; use std::sync::Arc; -use ui::{prelude::*, CheckboxWithLabel}; -use vim::VimModeSetting; +use ui::{prelude::*, CheckboxWithLabel, ElevationIndex, Tooltip}; +use vim_mode_setting::VimModeSetting; use workspace::{ dock::DockPosition, item::{Item, ItemEvent}, @@ -69,10 +69,11 @@ pub struct WelcomePage { } impl Render for WelcomePage { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { h_flex() .size_full() .bg(cx.theme().colors().editor_background) + .key_context("Welcome") .track_focus(&self.focus_handle(cx)) .child( v_flex() @@ -132,12 +133,8 @@ impl Render for WelcomePage { "welcome page: change theme".to_string(), ); this.workspace - .update(cx, |workspace, cx| { - theme_selector::toggle( - workspace, - &Default::default(), - cx, - ) + .update(cx, |_workspace, cx| { + cx.dispatch_action(zed_actions::theme_selector::Toggle::default().boxed_clone()); }) .ok(); })), @@ -177,7 +174,7 @@ impl Render for WelcomePage { this.telemetry.report_app_event( "welcome page: sign in to copilot".to_string(), ); - inline_completion_button::initiate_sign_in(cx); + copilot::initiate_sign_in(cx); }), ), ) @@ -250,7 +247,7 @@ impl Render for WelcomePage { "welcome page: open extensions".to_string(), ); cx.dispatch_action(Box::new( - extensions_ui::Extensions, + zed_actions::Extensions, )); })), ) @@ -269,76 +266,99 @@ impl Render for WelcomePage { .child( 
v_group() .gap_2() - .child(CheckboxWithLabel::new( - "enable-vim", - Label::new("Enable Vim Mode"), - if VimModeSetting::get_global(cx).0 { - ui::Selection::Selected - } else { - ui::Selection::Unselected - }, - cx.listener(move |this, selection, cx| { - this.telemetry - .report_app_event("welcome page: toggle vim".to_string()); - this.update_settings::( - selection, - cx, - |setting, value| *setting = Some(value), - ); - }), - )) - .child(CheckboxWithLabel::new( - "enable-crash", - Label::new("Send Crash Reports"), - if TelemetrySettings::get_global(cx).diagnostics { - ui::Selection::Selected - } else { - ui::Selection::Unselected - }, - cx.listener(move |this, selection, cx| { - this.telemetry.report_app_event( - "welcome page: toggle diagnostic telemetry".to_string(), - ); - this.update_settings::(selection, cx, { - let telemetry = this.telemetry.clone(); - - move |settings, value| { - settings.diagnostics = Some(value); - - telemetry.report_setting_event( - "diagnostic telemetry", - value.to_string(), - ); - } - }); - }), - )) - .child(CheckboxWithLabel::new( - "enable-telemetry", - Label::new("Send Telemetry"), - if TelemetrySettings::get_global(cx).metrics { - ui::Selection::Selected - } else { - ui::Selection::Unselected - }, - cx.listener(move |this, selection, cx| { - this.telemetry.report_app_event( - "welcome page: toggle metric telemetry".to_string(), - ); - this.update_settings::(selection, cx, { - let telemetry = this.telemetry.clone(); - - move |settings, value| { - settings.metrics = Some(value); - - telemetry.report_setting_event( - "metric telemetry", - value.to_string(), - ); - } - }); - }), - )), + .child( + h_flex() + .justify_between() + .child( + CheckboxWithLabel::new( + "enable-vim", + Label::new("Enable Vim Mode"), + if VimModeSetting::get_global(cx).0 { + ui::ToggleState::Selected + } else { + ui::ToggleState::Unselected + }, + cx.listener(move |this, selection, cx| { + this.telemetry + .report_app_event("welcome page: toggle 
vim".to_string()); + this.update_settings::( + selection, + cx, + |setting, value| *setting = Some(value), + ); + }), + ) + .fill() + .elevation(ElevationIndex::ElevatedSurface), + ) + .child( + IconButton::new("vim-mode", IconName::Info) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(|cx| { + Tooltip::text( + "You can also toggle Vim Mode via the command palette or Editor Controls menu.", + cx, + ) + }), + ), + ) + .child( + CheckboxWithLabel::new( + "enable-crash", + Label::new("Send Crash Reports"), + if TelemetrySettings::get_global(cx).diagnostics { + ui::ToggleState::Selected + } else { + ui::ToggleState::Unselected + }, + cx.listener(move |this, selection, cx| { + this.telemetry.report_app_event( + "welcome page: toggle diagnostic telemetry".to_string(), + ); + this.update_settings::(selection, cx, { + move |settings, value| { + settings.diagnostics = Some(value); + telemetry::event!( + "Settings Changed", + setting = "diagnostic telemetry", + value + ); + } + }); + }), + ) + .fill() + .elevation(ElevationIndex::ElevatedSurface), + ) + .child( + CheckboxWithLabel::new( + "enable-telemetry", + Label::new("Send Telemetry"), + if TelemetrySettings::get_global(cx).metrics { + ui::ToggleState::Selected + } else { + ui::ToggleState::Unselected + }, + cx.listener(move |this, selection, cx| { + this.telemetry.report_app_event( + "welcome page: toggle metric telemetry".to_string(), + ); + this.update_settings::(selection, cx, { + move |settings, value| { + settings.metrics = Some(value); + telemetry::event!( + "Settings Changed", + setting = "metric telemetry", + value + ); + } + }); + }), + ) + .fill() + .elevation(ElevationIndex::ElevatedSurface), + ), ), ) } @@ -374,7 +394,7 @@ impl WelcomePage { fn update_settings( &mut self, - selection: &Selection, + selection: &ToggleState, cx: &mut ViewContext, callback: impl 'static + Send + Fn(&mut T::FileContent, bool), ) { @@ -383,8 +403,8 @@ impl WelcomePage { let selection = *selection; 
settings::update_settings_file::(fs, cx, move |settings, _| { let value = match selection { - Selection::Unselected => false, - Selection::Selected => true, + ToggleState::Unselected => false, + ToggleState::Selected => true, _ => return, }; diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 1fa4db2af8..3b17ed8dab 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -38,7 +38,6 @@ db.workspace = true derive_more.workspace = true fs.workspace = true futures.workspace = true -git.workspace = true gpui.workspace = true http_client.workspace = true itertools.workspace = true diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 30ab109879..35b7a379b0 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -3,10 +3,10 @@ use crate::{status_bar::StatusItemView, Workspace}; use crate::{DraggedDock, Event, Pane}; use client::proto; use gpui::{ - deferred, div, px, Action, AnchorCorner, AnyView, AppContext, Axis, Entity, EntityId, - EventEmitter, FocusHandle, FocusableView, IntoElement, KeyContext, MouseButton, MouseDownEvent, - MouseUpEvent, ParentElement, Render, SharedString, StyleRefinement, Styled, Subscription, View, - ViewContext, VisualContext, WeakView, WindowContext, + deferred, div, px, Action, AnyView, AppContext, Axis, Corner, Entity, EntityId, EventEmitter, + FocusHandle, FocusableView, IntoElement, KeyContext, MouseButton, MouseDownEvent, MouseUpEvent, + ParentElement, Render, SharedString, StyleRefinement, Styled, Subscription, View, ViewContext, + VisualContext, WeakView, WindowContext, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -15,7 +15,7 @@ use std::sync::Arc; use ui::{h_flex, ContextMenu, IconButton, Tooltip}; use ui::{prelude::*, right_click_menu}; -const RESIZE_HANDLE_SIZE: Pixels = Pixels(6.); +pub(crate) const RESIZE_HANDLE_SIZE: Pixels = Pixels(6.); pub enum PanelEvent { ZoomIn, @@ -53,6 +53,7 @@ pub trait Panel: 
FocusableView + EventEmitter { fn remote_id() -> Option { None } + fn activation_priority(&self) -> u32; } pub trait PanelHandle: Send + Sync { @@ -74,6 +75,7 @@ pub trait PanelHandle: Send + Sync { fn icon_label(&self, cx: &WindowContext) -> Option; fn focus_handle(&self, cx: &AppContext) -> FocusHandle; fn to_any(&self) -> AnyView; + fn activation_priority(&self, cx: &AppContext) -> u32; } impl PanelHandle for View @@ -151,6 +153,10 @@ where fn focus_handle(&self, cx: &AppContext) -> FocusHandle { self.read(cx).focus_handle(cx).clone() } + + fn activation_priority(&self, cx: &AppContext) -> u32 { + self.read(cx).activation_priority() + } } impl From<&dyn PanelHandle> for AnyView { @@ -164,8 +170,9 @@ impl From<&dyn PanelHandle> for AnyView { pub struct Dock { position: DockPosition, panel_entries: Vec, + workspace: WeakView, is_open: bool, - active_panel_index: usize, + active_panel_index: Option, focus_handle: FocusHandle, pub(crate) serialized_dock: Option, resizeable: bool, @@ -218,7 +225,7 @@ impl Dock { let workspace = cx.view().clone(); let dock = cx.new_view(|cx: &mut ViewContext| { let focus_subscription = cx.on_focus(&focus_handle, |dock, cx| { - if let Some(active_entry) = dock.panel_entries.get(dock.active_panel_index) { + if let Some(active_entry) = dock.active_panel_entry() { active_entry.panel.focus_handle(cx).focus(cx) } }); @@ -230,8 +237,9 @@ impl Dock { }); Self { position, + workspace: workspace.downgrade(), panel_entries: Default::default(), - active_panel_index: 0, + active_panel_index: None, is_open: false, focus_handle: focus_handle.clone(), _subscriptions: [focus_subscription, zoom_subscription], @@ -321,15 +329,19 @@ impl Dock { .position(|entry| entry.panel.remote_id() == Some(panel_id)) } - pub fn active_panel_index(&self) -> usize { + fn active_panel_entry(&self) -> Option<&PanelEntry> { self.active_panel_index + .and_then(|index| self.panel_entries.get(index)) } pub(crate) fn set_open(&mut self, open: bool, cx: &mut ViewContext) { if 
open != self.is_open { self.is_open = open; - if let Some(active_panel) = self.panel_entries.get(self.active_panel_index) { + if let Some(active_panel) = self.active_panel_entry() { active_panel.panel.set_active(open, cx); + if !open { + self.active_panel_index = None; + } } cx.notify(); @@ -347,6 +359,11 @@ impl Dock { } } + self.workspace + .update(cx, |workspace, cx| { + workspace.serialize_workspace(cx); + }) + .ok(); cx.notify(); } @@ -363,7 +380,7 @@ impl Dock { panel: View, workspace: WeakView, cx: &mut ViewContext, - ) { + ) -> usize { let subscriptions = [ cx.observe(&panel, |_, _, cx| cx.notify()), cx.observe_global::({ @@ -399,10 +416,10 @@ impl Dock { new_dock.update(cx, |new_dock, cx| { new_dock.remove_panel(&panel, cx); - new_dock.add_panel(panel.clone(), workspace.clone(), cx); + let index = new_dock.add_panel(panel.clone(), workspace.clone(), cx); if was_visible { new_dock.set_open(true, cx); - new_dock.activate_panel(new_dock.panels_len() - 1, cx); + new_dock.activate_panel(index, cx); } }); } @@ -456,17 +473,35 @@ impl Dock { }), ]; - self.panel_entries.push(PanelEntry { - panel: Arc::new(panel.clone()), - _subscriptions: subscriptions, - }); + let index = match self + .panel_entries + .binary_search_by_key(&panel.read(cx).activation_priority(), |entry| { + entry.panel.activation_priority(cx) + }) { + Ok(ix) => ix, + Err(ix) => ix, + }; + if let Some(active_index) = self.active_panel_index.as_mut() { + if *active_index >= index { + *active_index += 1; + } + } + self.panel_entries.insert( + index, + PanelEntry { + panel: Arc::new(panel.clone()), + _subscriptions: subscriptions, + }, + ); - if !self.restore_state(cx) && panel.read(cx).starts_open(cx) { - self.activate_panel(self.panel_entries.len() - 1, cx); + self.restore_state(cx); + if panel.read(cx).starts_open(cx) { + self.activate_panel(index, cx); self.set_open(true, cx); } - cx.notify() + cx.notify(); + index } pub fn restore_state(&mut self, cx: &mut ViewContext) -> bool { @@ -494,15 
+529,17 @@ impl Dock { .iter() .position(|entry| entry.panel.panel_id() == Entity::entity_id(panel)) { - match panel_ix.cmp(&self.active_panel_index) { - std::cmp::Ordering::Less => { - self.active_panel_index -= 1; + if let Some(active_panel_index) = self.active_panel_index.as_mut() { + match panel_ix.cmp(active_panel_index) { + std::cmp::Ordering::Less => { + *active_panel_index -= 1; + } + std::cmp::Ordering::Equal => { + self.active_panel_index = None; + self.set_open(false, cx); + } + std::cmp::Ordering::Greater => {} } - std::cmp::Ordering::Equal => { - self.active_panel_index = 0; - self.set_open(false, cx); - } - std::cmp::Ordering::Greater => {} } self.panel_entries.remove(panel_ix); cx.notify(); @@ -514,13 +551,13 @@ impl Dock { } pub fn activate_panel(&mut self, panel_ix: usize, cx: &mut ViewContext) { - if panel_ix != self.active_panel_index { - if let Some(active_panel) = self.panel_entries.get(self.active_panel_index) { + if Some(panel_ix) != self.active_panel_index { + if let Some(active_panel) = self.active_panel_entry() { active_panel.panel.set_active(false, cx); } - self.active_panel_index = panel_ix; - if let Some(active_panel) = self.panel_entries.get(self.active_panel_index) { + self.active_panel_index = Some(panel_ix); + if let Some(active_panel) = self.active_panel_entry() { active_panel.panel.set_active(true, cx); } @@ -534,12 +571,13 @@ impl Dock { } pub fn active_panel(&self) -> Option<&Arc> { - Some(&self.panel_entries.get(self.active_panel_index)?.panel) + let panel_entry = self.active_panel_entry()?; + Some(&panel_entry.panel) } fn visible_entry(&self) -> Option<&PanelEntry> { if self.is_open { - self.panel_entries.get(self.active_panel_index) + self.active_panel_entry() } else { None } @@ -563,17 +601,16 @@ impl Dock { pub fn active_panel_size(&self, cx: &WindowContext) -> Option { if self.is_open { - self.panel_entries - .get(self.active_panel_index) - .map(|entry| entry.panel.size(cx)) + self.active_panel_entry().map(|entry| 
entry.panel.size(cx)) } else { None } } pub fn resize_active_panel(&mut self, size: Option, cx: &mut ViewContext) { - if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) { + if let Some(entry) = self.active_panel_entry() { let size = size.map(|size| size.max(RESIZE_HANDLE_SIZE).round()); + entry.panel.set_size(size, cx); cx.notify(); } @@ -593,6 +630,15 @@ impl Dock { dispatch_context } + + pub fn clamp_panel_size(&mut self, max_size: Pixels, cx: &mut WindowContext) { + let max_size = px((max_size.0 - RESIZE_HANDLE_SIZE.0).abs()); + for panel in self.panel_entries.iter().map(|entry| &entry.panel) { + if panel.size(cx) > max_size { + panel.set_size(Some(max_size.max(RESIZE_HANDLE_SIZE)), cx); + } + } + } } impl Render for Dock { @@ -617,9 +663,14 @@ impl Render for Dock { ) .on_mouse_up( MouseButton::Left, - cx.listener(|v, e: &MouseUpEvent, cx| { + cx.listener(|dock, e: &MouseUpEvent, cx| { if e.click_count == 2 { - v.resize_active_panel(None, cx); + dock.resize_active_panel(None, cx); + dock.workspace + .update(cx, |workspace, cx| { + workspace.serialize_workspace(cx); + }) + .ok(); cx.stop_propagation(); } }), @@ -709,10 +760,8 @@ impl Render for PanelButtons { let dock_position = dock.position; let (menu_anchor, menu_attach) = match dock.position { - DockPosition::Left => (AnchorCorner::BottomLeft, AnchorCorner::TopLeft), - DockPosition::Bottom | DockPosition::Right => { - (AnchorCorner::BottomRight, AnchorCorner::TopRight) - } + DockPosition::Left => (Corner::BottomLeft, Corner::TopLeft), + DockPosition::Bottom | DockPosition::Right => (Corner::BottomRight, Corner::TopRight), }; let buttons = dock @@ -725,7 +774,7 @@ impl Render for PanelButtons { let name = entry.panel.persistent_name(); let panel = entry.panel.clone(); - let is_active_button = i == active_index && is_open; + let is_active_button = Some(i) == active_index && is_open; let (action, tooltip) = if is_active_button { let action = dock.toggle_action(); @@ -771,7 +820,7 @@ impl 
Render for PanelButtons { .trigger( IconButton::new(name, icon) .icon_size(IconSize::Small) - .selected(is_active_button) + .toggle_state(is_active_button) .on_click({ let action = action.boxed_clone(); move |_, cx| cx.dispatch_action(action.boxed_clone()) @@ -836,7 +885,7 @@ pub mod test { "TestPanel" } - fn position(&self, _: &gpui::WindowContext) -> super::DockPosition { + fn position(&self, _: &WindowContext) -> super::DockPosition { self.position } @@ -880,6 +929,10 @@ pub mod test { fn set_active(&mut self, active: bool, _cx: &mut ViewContext) { self.active = active; } + + fn activation_priority(&self) -> u32 { + 100 + } } impl FocusableView for TestPanel { diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 5f14b9ba62..eaf321b3f7 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -42,6 +42,8 @@ pub struct ItemSettings { pub close_position: ClosePosition, pub activate_on_close: ActivateOnClose, pub file_icons: bool, + pub show_diagnostics: ShowDiagnostics, + pub always_show_close_button: bool, } #[derive(Deserialize)] @@ -59,12 +61,22 @@ pub enum ClosePosition { Right, } +#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ShowDiagnostics { + #[default] + Off, + Errors, + All, +} + #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -#[serde(rename_all = "lowercase")] +#[serde(rename_all = "snake_case")] pub enum ActivateOnClose { #[default] History, Neighbour, + LeftNeighbour, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] @@ -85,6 +97,15 @@ pub struct ItemSettingsContent { /// /// Default: history pub activate_on_close: Option, + /// Which files containing diagnostic errors/warnings to mark in the tabs. + /// This setting can take the following three values: + /// + /// Default: off + show_diagnostics: Option, + /// Whether to always show the close button on tabs. 
+ /// + /// Default: false + always_show_close_button: Option, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] @@ -157,6 +178,11 @@ impl TabContentParams { } } +pub enum TabTooltipContent { + Text(SharedString), + Custom(Box AnyView>), +} + pub trait Item: FocusableView + EventEmitter { type Event; @@ -185,6 +211,25 @@ pub trait Item: FocusableView + EventEmitter { None } + /// Returns the tab tooltip text. + /// + /// Use this if you don't need to customize the tab tooltip content. + fn tab_tooltip_text(&self, _: &AppContext) -> Option { + None + } + + /// Returns the tab tooltip content. + /// + /// By default this returns a Tooltip text from + /// `tab_tooltip_text`. + fn tab_tooltip_content(&self, cx: &AppContext) -> Option { + self.tab_tooltip_text(cx).map(TabTooltipContent::Text) + } + + fn tab_description(&self, _: usize, _: &AppContext) -> Option { + None + } + fn to_item_events(_event: &Self::Event, _f: impl FnMut(ItemEvent)) {} fn deactivated(&mut self, _: &mut ViewContext) {} @@ -193,12 +238,6 @@ pub trait Item: FocusableView + EventEmitter { fn navigate(&mut self, _: Box, _: &mut ViewContext) -> bool { false } - fn tab_tooltip_text(&self, _: &AppContext) -> Option { - None - } - fn tab_description(&self, _: usize, _: &AppContext) -> Option { - None - } fn telemetry_event_text(&self) -> Option<&'static str> { None @@ -208,7 +247,7 @@ pub trait Item: FocusableView + EventEmitter { fn for_each_project_item( &self, _: &AppContext, - _: &mut dyn FnMut(EntityId, &dyn project::Item), + _: &mut dyn FnMut(EntityId, &dyn project::ProjectItem), ) { } fn is_singleton(&self, _cx: &AppContext) -> bool { @@ -278,7 +317,7 @@ pub trait Item: FocusableView + EventEmitter { None } - fn breadcrumb_location(&self) -> ToolbarItemLocation { + fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation { ToolbarItemLocation::Hidden } @@ -299,6 +338,10 @@ pub trait Item: FocusableView + EventEmitter { fn preserve_preview(&self, _cx: &AppContext) -> bool 
{ false } + + fn include_in_nav_history() -> bool { + true + } } pub trait SerializableItem: Item { @@ -315,7 +358,7 @@ pub trait SerializableItem: Item { _workspace: WeakView, _workspace_id: WorkspaceId, _item_id: ItemId, - _cx: &mut ViewContext, + _cx: &mut WindowContext, ) -> Task>>; fn serialize( @@ -373,10 +416,11 @@ pub trait ItemHandle: 'static + Send { handler: Box, ) -> gpui::Subscription; fn focus_handle(&self, cx: &WindowContext) -> FocusHandle; - fn tab_tooltip_text(&self, cx: &AppContext) -> Option; fn tab_description(&self, detail: usize, cx: &AppContext) -> Option; fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement; fn tab_icon(&self, cx: &WindowContext) -> Option; + fn tab_tooltip_text(&self, cx: &AppContext) -> Option; + fn tab_tooltip_content(&self, cx: &AppContext) -> Option; fn telemetry_event_text(&self, cx: &WindowContext) -> Option<&'static str>; fn dragged_tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement; fn project_path(&self, cx: &AppContext) -> Option; @@ -386,7 +430,7 @@ pub trait ItemHandle: 'static + Send { fn for_each_project_item( &self, _: &AppContext, - _: &mut dyn FnMut(EntityId, &dyn project::Item), + _: &mut dyn FnMut(EntityId, &dyn project::ProjectItem), ); fn is_singleton(&self, cx: &AppContext) -> bool; fn boxed_clone(&self) -> Box; @@ -443,6 +487,7 @@ pub trait ItemHandle: 'static + Send { fn downgrade_item(&self) -> Box; fn workspace_settings<'a>(&self, cx: &'a AppContext) -> &'a WorkspaceSettings; fn preserve_preview(&self, cx: &AppContext) -> bool; + fn include_in_nav_history(&self) -> bool; } pub trait WeakItemHandle: Send + Sync { @@ -477,10 +522,6 @@ impl ItemHandle for View { self.focus_handle(cx) } - fn tab_tooltip_text(&self, cx: &AppContext) -> Option { - self.read(cx).tab_tooltip_text(cx) - } - fn telemetry_event_text(&self, cx: &WindowContext) -> Option<&'static str> { self.read(cx).telemetry_event_text() } @@ -497,6 +538,14 @@ impl ItemHandle for View 
{ self.read(cx).tab_icon(cx) } + fn tab_tooltip_content(&self, cx: &AppContext) -> Option { + self.read(cx).tab_tooltip_content(cx) + } + + fn tab_tooltip_text(&self, cx: &AppContext) -> Option { + self.read(cx).tab_tooltip_text(cx) + } + fn dragged_tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { self.read(cx).tab_content( TabContentParams { @@ -563,7 +612,7 @@ impl ItemHandle for View { fn for_each_project_item( &self, cx: &AppContext, - f: &mut dyn FnMut(EntityId, &dyn project::Item), + f: &mut dyn FnMut(EntityId, &dyn project::ProjectItem), ) { self.read(cx).for_each_project_item(cx, f) } @@ -827,7 +876,7 @@ impl ItemHandle for View { } fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation { - self.read(cx).breadcrumb_location() + self.read(cx).breadcrumb_location(cx) } fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option> { @@ -856,6 +905,10 @@ impl ItemHandle for View { fn preserve_preview(&self, cx: &AppContext) -> bool { self.read(cx).preserve_preview(cx) } + + fn include_in_nav_history(&self) -> bool { + T::include_in_nav_history() + } } impl From> for AnyView { @@ -891,7 +944,7 @@ impl WeakItemHandle for WeakView { } pub trait ProjectItem: Item { - type Item: project::Item; + type Item: project::ProjectItem; fn for_project_item( project: Model, @@ -1032,11 +1085,11 @@ impl WeakFollowableItemHandle for WeakView { #[cfg(any(test, feature = "test-support"))] pub mod test { use super::{Item, ItemEvent, SerializableItem, TabContentParams}; - use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId}; + use crate::{ItemId, ItemNavHistory, Workspace, WorkspaceId}; use gpui::{ AnyElement, AppContext, Context as _, EntityId, EventEmitter, FocusableView, InteractiveElement, IntoElement, Model, Render, SharedString, Task, View, ViewContext, - VisualContext, WeakView, + VisualContext, WeakView, WindowContext, }; use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; use std::{any::Any, 
cell::Cell, path::Path}; @@ -1044,6 +1097,7 @@ pub mod test { pub struct TestProjectItem { pub entry_id: Option, pub project_path: Option, + pub is_dirty: bool, } pub struct TestItem { @@ -1064,7 +1118,7 @@ pub mod test { focus_handle: gpui::FocusHandle, } - impl project::Item for TestProjectItem { + impl project::ProjectItem for TestProjectItem { fn try_open( _project: &Model, _path: &ProjectPath, @@ -1072,7 +1126,6 @@ pub mod test { ) -> Option>>> { None } - fn entry_id(&self, _: &AppContext) -> Option { self.entry_id } @@ -1080,6 +1133,10 @@ pub mod test { fn project_path(&self, _: &AppContext) -> Option { self.project_path.clone() } + + fn is_dirty(&self) -> bool { + self.is_dirty + } } pub enum TestItemEvent { @@ -1096,6 +1153,7 @@ pub mod test { cx.new_model(|_| Self { entry_id, project_path, + is_dirty: false, }) } @@ -1103,6 +1161,7 @@ pub mod test { cx.new_model(|_| Self { project_path: None, entry_id: None, + is_dirty: false, }) } } @@ -1212,11 +1271,7 @@ pub mod test { None } - fn tab_content( - &self, - params: TabContentParams, - _cx: &ui::prelude::WindowContext, - ) -> AnyElement { + fn tab_content(&self, params: TabContentParams, _cx: &WindowContext) -> AnyElement { self.tab_detail.set(params.detail); gpui::div().into_any_element() } @@ -1224,7 +1279,7 @@ pub mod test { fn for_each_project_item( &self, cx: &AppContext, - f: &mut dyn FnMut(EntityId, &dyn project::Item), + f: &mut dyn FnMut(EntityId, &dyn project::ProjectItem), ) { self.project_items .iter() @@ -1339,7 +1394,7 @@ pub mod test { _workspace: WeakView, workspace_id: WorkspaceId, _item_id: ItemId, - cx: &mut ViewContext, + cx: &mut WindowContext, ) -> Task>> { let view = cx.new_view(|cx| Self::new_deserialized(workspace_id, cx)); Task::ready(Ok(view)) @@ -1348,7 +1403,7 @@ pub mod test { fn cleanup( _workspace_id: WorkspaceId, _alive_items: Vec, - _cx: &mut ui::WindowContext, + _cx: &mut WindowContext, ) -> Task> { Task::ready(Ok(())) } diff --git a/crates/workspace/src/notifications.rs 
b/crates/workspace/src/notifications.rs index eee3d16a4a..fd989554e9 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -5,7 +5,6 @@ use gpui::{ EventEmitter, Global, PromptLevel, Render, ScrollHandle, Task, View, ViewContext, VisualContext, WindowContext, }; -use language::DiagnosticSeverity; use std::{any::TypeId, ops::DerefMut, time::Duration}; use ui::{prelude::*, Tooltip}; @@ -266,89 +265,57 @@ impl Render for LanguageServerPrompt { return div().id("language_server_prompt_notification"); }; - h_flex() + let (icon, color) = match request.level { + PromptLevel::Info => (IconName::Info, Color::Accent), + PromptLevel::Warning => (IconName::Warning, Color::Warning), + PromptLevel::Critical => (IconName::XCircle, Color::Error), + }; + + div() .id("language_server_prompt_notification") + .group("language_server_prompt_notification") .occlude() - .elevation_3(cx) - .items_start() - .justify_between() - .p_2() - .gap_2() .w_full() .max_h(vh(0.8, cx)) + .elevation_3(cx) .overflow_y_scroll() .track_scroll(&self.scroll_handle) - .group("") .child( v_flex() - .w_full() + .p_3() .overflow_hidden() .child( h_flex() - .w_full() .justify_between() .child( h_flex() - .flex_grow() - .children( - match request.level { - PromptLevel::Info => None, - PromptLevel::Warning => { - Some(DiagnosticSeverity::WARNING) - } - PromptLevel::Critical => { - Some(DiagnosticSeverity::ERROR) - } - } - .map(|severity| { - svg() - .size(cx.text_style().font_size) - .flex_none() - .mr_1() - .mt(px(-2.0)) - .map(|icon| { - if severity == DiagnosticSeverity::ERROR { - icon.path(IconName::Warning.path()) - .text_color(Color::Error.color(cx)) - } else { - icon.path(IconName::Warning.path()) - .text_color(Color::Warning.color(cx)) - } - }) - }), - ) - .child( - Label::new(request.lsp_name.clone()) - .size(LabelSize::Default), - ), + .gap_2() + .child(Icon::new(icon).color(color)) + .child(Label::new(request.lsp_name.clone())), ) .child( - 
ui::IconButton::new("close", ui::IconName::Close) - .on_click(cx.listener(|_, _, cx| cx.emit(gpui::DismissEvent))), + h_flex() + .child( + IconButton::new("copy", IconName::Copy) + .on_click({ + let message = request.message.clone(); + move |_, cx| { + cx.write_to_clipboard( + ClipboardItem::new_string(message.clone()), + ) + } + }) + .tooltip(|cx| Tooltip::text("Copy Description", cx)), + ) + .child(IconButton::new("close", IconName::Close).on_click( + cx.listener(|_, _, cx| cx.emit(gpui::DismissEvent)), + )), ), ) - .child( - v_flex() - .child( - h_flex().absolute().right_0().rounded_md().child( - ui::IconButton::new("copy", ui::IconName::Copy) - .on_click({ - let message = request.message.clone(); - move |_, cx| { - cx.write_to_clipboard(ClipboardItem::new_string( - message.clone(), - )) - } - }) - .tooltip(|cx| Tooltip::text("Copy", cx)) - .visible_on_hover(""), - ), - ) - .child(Label::new(request.message.to_string()).size(LabelSize::Small)), - ) + .child(Label::new(request.message.to_string()).size(LabelSize::Small)) .children(request.actions.iter().enumerate().map(|(ix, action)| { let this_handle = cx.view().clone(); - ui::Button::new(ix, action.title.clone()) + Button::new(ix, action.title.clone()) .size(ButtonSize::Large) .on_click(move |_, cx| { let this_handle = this_handle.clone(); @@ -444,12 +411,10 @@ impl EventEmitter for ErrorMessagePrompt {} pub mod simple_message_notification { use gpui::{ - div, DismissEvent, EventEmitter, InteractiveElement, ParentElement, Render, SharedString, - StatefulInteractiveElement, Styled, ViewContext, + div, DismissEvent, EventEmitter, ParentElement, Render, SharedString, Styled, ViewContext, }; use std::sync::Arc; use ui::prelude::*; - use ui::{h_flex, v_flex, Button, Icon, IconName, Label, StyledExt}; pub struct MessageNotification { message: SharedString, @@ -515,36 +480,43 @@ pub mod simple_message_notification { impl Render for MessageNotification { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { 
v_flex() + .p_3() + .gap_2() .elevation_3(cx) - .p_4() .child( h_flex() + .gap_4() .justify_between() .child(div().max_w_80().child(Label::new(self.message.clone()))) .child( - div() - .id("cancel") - .child(Icon::new(IconName::Close)) - .cursor_pointer() + IconButton::new("close", IconName::Close) .on_click(cx.listener(|this, _, cx| this.dismiss(cx))), ), ) .child( h_flex() - .gap_3() + .gap_2() .children(self.click_message.iter().map(|message| { - Button::new(message.clone(), message.clone()).on_click(cx.listener( - |this, _, cx| { + Button::new(message.clone(), message.clone()) + .label_size(LabelSize::Small) + .icon(IconName::Check) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .icon_color(Color::Success) + .on_click(cx.listener(|this, _, cx| { if let Some(on_click) = this.on_click.as_ref() { (on_click)(cx) }; this.dismiss(cx) - }, - )) + })) })) .children(self.secondary_click_message.iter().map(|message| { Button::new(message.clone(), message.clone()) - .style(ButtonStyle::Filled) + .label_size(LabelSize::Small) + .icon(IconName::Close) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .icon_color(Color::Error) .on_click(cx.listener(|this, _, cx| { if let Some(on_click) = this.secondary_on_click.as_ref() { (on_click)(cx) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 22d06ec21a..05ab9a8f90 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1,7 +1,7 @@ use crate::{ item::{ ActivateOnClose, ClosePosition, Item, ItemHandle, ItemSettings, PreviewTabsSettings, - TabContentParams, WeakItemHandle, + ShowDiagnostics, TabContentParams, TabTooltipContent, WeakItemHandle, }, move_item, notifications::NotifyResultExt, @@ -13,16 +13,16 @@ use crate::{ use anyhow::Result; use collections::{BTreeSet, HashMap, HashSet, VecDeque}; use futures::{stream::FuturesUnordered, StreamExt}; -use git::repository::GitFileStatus; use gpui::{ - actions, anchored, deferred, 
impl_actions, prelude::*, Action, AnchorCorner, AnyElement, - AppContext, AsyncWindowContext, ClickEvent, ClipboardItem, Div, DragMoveEvent, EntityId, + actions, anchored, deferred, impl_actions, prelude::*, Action, AnyElement, AppContext, + AsyncWindowContext, ClickEvent, ClipboardItem, Corner, Div, DragMoveEvent, EntityId, EventEmitter, ExternalPaths, FocusHandle, FocusOutEvent, FocusableView, KeyContext, Model, MouseButton, MouseDownEvent, NavigationDirection, Pixels, Point, PromptLevel, Render, - ScrollHandle, Subscription, Task, View, ViewContext, VisualContext, WeakFocusHandle, WeakView, - WindowContext, + ScrollHandle, Subscription, Task, View, ViewContext, VisualContext, WeakFocusHandle, WeakModel, + WeakView, WindowContext, }; use itertools::Itertools; +use language::DiagnosticSeverity; use parking_lot::Mutex; use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; use serde::Deserialize; @@ -39,16 +39,16 @@ use std::{ }, }; use theme::ThemeSettings; - use ui::{ - prelude::*, right_click_menu, ButtonSize, Color, IconButton, IconButtonShape, IconName, - IconSize, Indicator, Label, PopoverMenu, PopoverMenuHandle, Tab, TabBar, TabPosition, Tooltip, + prelude::*, right_click_menu, ButtonSize, Color, DecoratedIcon, IconButton, IconButtonShape, + IconDecoration, IconDecorationKind, IconName, IconSize, Indicator, Label, PopoverMenu, + PopoverMenuHandle, Tab, TabBar, TabPosition, Tooltip, }; use ui::{v_flex, ContextMenu}; use util::{debug_panic, maybe, truncate_and_remove_front, ResultExt}; /// A selected entry in e.g. project panel. 
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SelectedEntry { pub worktree_id: WorktreeId, pub entry_id: ProjectEntryId, @@ -206,6 +206,7 @@ pub enum Event { }, ActivateItem { local: bool, + focus_changed: bool, }, Remove { focus_on_pane: Option>, @@ -236,7 +237,7 @@ impl fmt::Debug for Event { .debug_struct("AddItem") .field("item", &item.item_id()) .finish(), - Event::ActivateItem { local } => f + Event::ActivateItem { local, .. } => f .debug_struct("ActivateItem") .field("local", local) .finish(), @@ -286,12 +287,12 @@ pub struct Pane { nav_history: NavHistory, toolbar: View, pub(crate) workspace: WeakView, - project: Model, + project: WeakModel, drag_split_direction: Option, can_drop_predicate: Option bool>>, custom_drop_handle: Option) -> ControlFlow<(), ()>>>, - can_split: bool, + can_split_predicate: Option) -> bool>>, should_display_tab_bar: Rc) -> bool>, render_tab_bar_buttons: Rc) -> (Option, Option)>, @@ -303,8 +304,10 @@ pub struct Pane { double_click_dispatch_action: Box, save_modals_spawned: HashSet, pub new_item_context_menu_handle: PopoverMenuHandle, - split_item_context_menu_handle: PopoverMenuHandle, + pub split_item_context_menu_handle: PopoverMenuHandle, pinned_tab_count: usize, + diagnostics: HashMap, + zoom_out_on_close: bool, } pub struct ActivationHistoryEntry { @@ -381,6 +384,7 @@ impl Pane { cx.on_focus_in(&focus_handle, Pane::focus_in), cx.on_focus_out(&focus_handle, Pane::focus_out), cx.observe_global::(Self::settings_changed), + cx.subscribe(&project, Self::project_events), ]; let handle = cx.view().downgrade(); @@ -408,10 +412,10 @@ impl Pane { tab_bar_scroll_handle: ScrollHandle::new(), drag_split_direction: None, workspace, - project, + project: project.downgrade(), can_drop_predicate, custom_drop_handle: None, - can_split: true, + can_split_predicate: None, should_display_tab_bar: Rc::new(|cx| TabBarSettings::get_global(cx).show), 
render_tab_bar_buttons: Rc::new(move |pane, cx| { if !pane.has_focus(cx) && !pane.context_menu_focused(cx) { @@ -429,7 +433,7 @@ impl Pane { .icon_size(IconSize::Small) .tooltip(|cx| Tooltip::text("New...", cx)), ) - .anchor(AnchorCorner::TopRight) + .anchor(Corner::TopRight) .with_handle(pane.new_item_context_menu_handle.clone()) .menu(move |cx| { Some(ContextMenu::build(cx, |menu, _| { @@ -462,7 +466,7 @@ impl Pane { .icon_size(IconSize::Small) .tooltip(|cx| Tooltip::text("Split Pane", cx)), ) - .anchor(AnchorCorner::TopRight) + .anchor(Corner::TopRight) .with_handle(pane.split_item_context_menu_handle.clone()) .menu(move |cx| { ContextMenu::build(cx, |menu, _| { @@ -478,7 +482,7 @@ impl Pane { let zoomed = pane.is_zoomed(); IconButton::new("toggle_zoom", IconName::Maximize) .icon_size(IconSize::Small) - .selected(zoomed) + .toggle_state(zoomed) .selected_icon(IconName::Minimize) .on_click(cx.listener(|pane, _, cx| { pane.toggle_zoom(&crate::ToggleZoom, cx); @@ -504,6 +508,8 @@ impl Pane { split_item_context_menu_handle: Default::default(), new_item_context_menu_handle: Default::default(), pinned_tab_count: 0, + diagnostics: Default::default(), + zoom_out_on_close: true, } } @@ -598,6 +604,50 @@ impl Pane { cx.notify(); } + fn project_events( + this: &mut Pane, + _project: Model, + event: &project::Event, + cx: &mut ViewContext, + ) { + match event { + project::Event::DiskBasedDiagnosticsFinished { .. } + | project::Event::DiagnosticsUpdated { .. 
} => { + if ItemSettings::get_global(cx).show_diagnostics != ShowDiagnostics::Off { + this.update_diagnostics(cx); + cx.notify(); + } + } + _ => {} + } + } + + fn update_diagnostics(&mut self, cx: &mut ViewContext) { + let Some(project) = self.project.upgrade() else { + return; + }; + let show_diagnostics = ItemSettings::get_global(cx).show_diagnostics; + self.diagnostics = if show_diagnostics != ShowDiagnostics::Off { + project + .read(cx) + .diagnostic_summaries(false, cx) + .filter_map(|(project_path, _, diagnostic_summary)| { + if diagnostic_summary.error_count > 0 { + Some((project_path, DiagnosticSeverity::ERROR)) + } else if diagnostic_summary.warning_count > 0 + && show_diagnostics != ShowDiagnostics::Errors + { + Some((project_path, DiagnosticSeverity::WARNING)) + } else { + None + } + }) + .collect() + } else { + HashMap::default() + } + } + fn settings_changed(&mut self, cx: &mut ViewContext) { if let Some(display_nav_history_buttons) = self.display_nav_history_buttons.as_mut() { *display_nav_history_buttons = TabBarSettings::get_global(cx).show_nav_history_buttons; @@ -605,6 +655,7 @@ impl Pane { if !PreviewTabsSettings::get_global(cx).enabled { self.preview_item_id = None; } + self.update_diagnostics(cx); cx.notify(); } @@ -623,9 +674,13 @@ impl Pane { self.should_display_tab_bar = Rc::new(should_display_tab_bar); } - pub fn set_can_split(&mut self, can_split: bool, cx: &mut ViewContext) { - self.can_split = can_split; - cx.notify(); + pub fn set_can_split( + &mut self, + can_split_predicate: Option< + Arc) -> bool + 'static>, + >, + ) { + self.can_split_predicate = can_split_predicate; } pub fn set_can_navigate(&mut self, can_navigate: bool, cx: &mut ViewContext) { @@ -824,9 +879,10 @@ impl Pane { pub fn close_current_preview_item(&mut self, cx: &mut ViewContext) -> Option { let item_idx = self.preview_item_idx()?; + let id = self.preview_item_id()?; let prev_active_item_index = self.active_item_index; - self.remove_item(item_idx, false, false, cx); + 
self.remove_item(id, false, false, cx); self.active_item_index = prev_active_item_index; if item_idx < self.items.len() { @@ -844,9 +900,14 @@ impl Pane { destination_index: Option, cx: &mut ViewContext, ) { + self.close_items_over_max_tabs(cx); + if item.is_singleton(cx) { if let Some(&entry_id) = item.project_entry_ids(cx).first() { - let project = self.project.read(cx); + let Some(project) = self.project.upgrade() else { + return; + }; + let project = project.read(cx); if let Some(project_path) = project.path_for_entry(entry_id, cx) { let abs_path = project.absolute_path(&project_path, cx); self.nav_history @@ -1032,9 +1093,6 @@ impl Pane { prev_item.deactivated(cx); } } - cx.emit(Event::ActivateItem { - local: activate_pane, - }); if let Some(newly_active_item) = self.items.get(index) { self.activation_history @@ -1054,6 +1112,11 @@ impl Pane { self.focus_active_item(cx); } + cx.emit(Event::ActivateItem { + local: activate_pane, + focus_changed: focus_item, + }); + if !self.is_tab_pinned(index) { self.tab_bar_scroll_handle .scroll_to_item(index - self.pinned_tab_count); @@ -1180,12 +1243,13 @@ impl Pane { } let active_item_id = self.items[self.active_item_index].item_id(); let non_closeable_items = self.get_non_closeable_item_ids(action.close_pinned); - Some(self.close_items_to_the_left_by_id(active_item_id, non_closeable_items, cx)) + Some(self.close_items_to_the_left_by_id(active_item_id, action, non_closeable_items, cx)) } pub fn close_items_to_the_left_by_id( &mut self, item_id: EntityId, + action: &CloseItemsToTheLeft, non_closeable_items: Vec, cx: &mut ViewContext, ) -> Task> { @@ -1195,7 +1259,9 @@ impl Pane { .map(|item| item.item_id()) .collect(); self.close_items(cx, SaveIntent::Close, move |item_id| { - item_ids.contains(&item_id) && !non_closeable_items.contains(&item_id) + item_ids.contains(&item_id) + && !action.close_pinned + && !non_closeable_items.contains(&item_id) }) } @@ -1209,12 +1275,13 @@ impl Pane { } let active_item_id = 
self.items[self.active_item_index].item_id(); let non_closeable_items = self.get_non_closeable_item_ids(action.close_pinned); - Some(self.close_items_to_the_right_by_id(active_item_id, non_closeable_items, cx)) + Some(self.close_items_to_the_right_by_id(active_item_id, action, non_closeable_items, cx)) } pub fn close_items_to_the_right_by_id( &mut self, item_id: EntityId, + action: &CloseItemsToTheRight, non_closeable_items: Vec, cx: &mut ViewContext, ) -> Task> { @@ -1225,7 +1292,9 @@ impl Pane { .map(|item| item.item_id()) .collect(); self.close_items(cx, SaveIntent::Close, move |item_id| { - item_ids.contains(&item_id) && !non_closeable_items.contains(&item_id) + item_ids.contains(&item_id) + && !action.close_pinned + && !non_closeable_items.contains(&item_id) }) } @@ -1246,6 +1315,43 @@ impl Pane { )) } + pub fn close_items_over_max_tabs(&mut self, cx: &mut ViewContext) { + let Some(max_tabs) = WorkspaceSettings::get_global(cx).max_tabs.map(|i| i.get()) else { + return; + }; + + // Reduce over the activation history to get every dirty items up to max_tabs + // count. + let mut index_list = Vec::new(); + let mut items_len = self.items_len(); + let mut indexes: HashMap = HashMap::default(); + for (index, item) in self.items.iter().enumerate() { + indexes.insert(item.item_id(), index); + } + for entry in self.activation_history.iter() { + if items_len < max_tabs { + break; + } + let Some(&index) = indexes.get(&entry.entity_id) else { + continue; + }; + if let Some(true) = self.items.get(index).map(|item| item.is_dirty(cx)) { + continue; + } + + index_list.push(index); + items_len -= 1; + } + // The sort and reverse is necessary since we remove items + // using their index position, hence removing from the end + // of the list first to avoid changing indexes. 
+ index_list.sort_unstable(); + index_list + .iter() + .rev() + .for_each(|&index| self._remove_item(index, false, false, None, cx)); + } + pub(super) fn file_names_for_prompt( items: &mut dyn Iterator>, all_dirty_items: usize, @@ -1291,10 +1397,12 @@ impl Pane { ) -> Task> { // Find the items to close. let mut items_to_close = Vec::new(); + let mut item_ids_to_close = HashSet::default(); let mut dirty_items = Vec::new(); for item in &self.items { if should_close(item.item_id()) { items_to_close.push(item.boxed_clone()); + item_ids_to_close.insert(item.item_id()); if item.is_dirty(cx) { dirty_items.push(item.boxed_clone()); } @@ -1335,16 +1443,23 @@ impl Pane { } } let mut saved_project_items_ids = HashSet::default(); - for item in items_to_close.clone() { - // Find the item's current index and its set of project item models. Avoid + for item_to_close in items_to_close { + // Find the item's current index and its set of dirty project item models. Avoid // storing these in advance, in case they have changed since this task // was started. - let (item_ix, mut project_item_ids) = pane.update(&mut cx, |pane, cx| { - (pane.index_for_item(&*item), item.project_item_model_ids(cx)) - })?; - let item_ix = if let Some(ix) = item_ix { - ix - } else { + let mut dirty_project_item_ids = Vec::new(); + let Some(item_ix) = pane.update(&mut cx, |pane, cx| { + item_to_close.for_each_project_item( + cx, + &mut |project_item_id, project_item| { + if project_item.is_dirty() { + dirty_project_item_ids.push(project_item_id); + } + }, + ); + pane.index_for_item(&*item_to_close) + })? + else { continue; }; @@ -1352,27 +1467,34 @@ impl Pane { // in the workspace, AND that the user has not already been prompted to save. // If there are any such project entries, prompt the user to save this item. 
let project = workspace.update(&mut cx, |workspace, cx| { - for item in workspace.items(cx) { - if !items_to_close - .iter() - .any(|item_to_close| item_to_close.item_id() == item.item_id()) - { - let other_project_item_ids = item.project_item_model_ids(cx); - project_item_ids.retain(|id| !other_project_item_ids.contains(id)); + for open_item in workspace.items(cx) { + let open_item_id = open_item.item_id(); + if !item_ids_to_close.contains(&open_item_id) { + let other_project_item_ids = open_item.project_item_model_ids(cx); + dirty_project_item_ids + .retain(|id| !other_project_item_ids.contains(id)); } } workspace.project().clone() })?; - let should_save = project_item_ids + let should_save = dirty_project_item_ids .iter() - .any(|id| saved_project_items_ids.insert(*id)); + .any(|id| saved_project_items_ids.insert(*id)) + // Always propose to save singleton files without any project paths: those cannot be saved via multibuffer, as require a file path selection modal. + || cx + .update(|cx| { + item_to_close.can_save(cx) && item_to_close.is_dirty(cx) + && item_to_close.is_singleton(cx) + && item_to_close.project_path(cx).is_none() + }) + .unwrap_or(false); if should_save && !Self::save_item( project.clone(), &pane, item_ix, - &*item, + &*item_to_close, save_intent, &mut cx, ) @@ -1383,13 +1505,7 @@ impl Pane { // Remove the item from the pane. 
pane.update(&mut cx, |pane, cx| { - if let Some(item_ix) = pane - .items - .iter() - .position(|i| i.item_id() == item.item_id()) - { - pane.remove_item(item_ix, false, true, cx); - } + pane.remove_item(item_to_close.item_id(), false, true, cx); }) .ok(); } @@ -1401,11 +1517,14 @@ impl Pane { pub fn remove_item( &mut self, - item_index: usize, + item_id: EntityId, activate_pane: bool, close_pane_if_empty: bool, cx: &mut ViewContext, ) { + let Some(item_index) = self.index_for_item_id(item_id) else { + return; + }; self._remove_item(item_index, activate_pane, close_pane_if_empty, None, cx) } @@ -1441,6 +1560,7 @@ impl Pane { self.pinned_tab_count -= 1; } if item_index == self.active_item_index { + let left_neighbour_index = || item_index.min(self.items.len()).saturating_sub(1); let index_to_activate = match activate_on_close { ActivateOnClose::History => self .activation_history @@ -1452,7 +1572,7 @@ impl Pane { }) // We didn't have a valid activation history entry, so fallback // to activating the item to the left - .unwrap_or_else(|| item_index.min(self.items.len()).saturating_sub(1)), + .unwrap_or_else(left_neighbour_index), ActivateOnClose::Neighbour => { self.activation_history.pop(); if item_index + 1 < self.items.len() { @@ -1461,6 +1581,10 @@ impl Pane { item_index.saturating_sub(1) } } + ActivateOnClose::LeftNeighbour => { + self.activation_history.pop(); + left_neighbour_index() + } }; let should_activate = activate_pane || self.has_focus(cx); @@ -1523,7 +1647,7 @@ impl Pane { .remove(&item.item_id()); } - if self.items.is_empty() && close_pane_if_empty && self.zoomed { + if self.zoom_out_on_close && self.items.is_empty() && close_pane_if_empty && self.zoomed { cx.emit(Event::ZoomOut); } @@ -1595,7 +1719,9 @@ impl Pane { .await? 
} Ok(1) => { - pane.update(cx, |pane, cx| pane.remove_item(item_ix, false, false, cx))?; + pane.update(cx, |pane, cx| { + pane.remove_item(item.item_id(), false, false, cx) + })?; } _ => return Ok(false), } @@ -1689,9 +1815,7 @@ impl Pane { if let Some(abs_path) = abs_path.await.ok().flatten() { pane.update(cx, |pane, cx| { if let Some(item) = pane.item_for_path(abs_path.clone(), cx) { - if let Some(idx) = pane.index_for_item(&*item) { - pane.remove_item(idx, false, false, cx); - } + pane.remove_item(item.item_id(), false, false, cx); } item.save_as(project, abs_path, cx) @@ -1757,15 +1881,15 @@ impl Pane { entry_id: ProjectEntryId, cx: &mut ViewContext, ) -> Option<()> { - let (item_index_to_delete, item_id) = self.items().enumerate().find_map(|(i, item)| { + let item_id = self.items().find_map(|item| { if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [entry_id] { - Some((i, item.item_id())) + Some(item.item_id()) } else { None } })?; - self.remove_item(item_index_to_delete, false, true, cx); + self.remove_item(item_id, false, true, cx); self.nav_history.remove_item(item_id); Some(()) @@ -1821,24 +1945,7 @@ impl Pane { } } - pub fn git_aware_icon_color( - git_status: Option, - ignored: bool, - selected: bool, - ) -> Color { - if ignored { - Color::Ignored - } else { - match git_status { - Some(GitFileStatus::Added) => Color::Created, - Some(GitFileStatus::Modified) => Color::Modified, - Some(GitFileStatus::Conflict) => Color::Conflict, - None => Self::icon_color(selected), - } - } - } - - fn toggle_pin_tab(&mut self, _: &TogglePinTab, cx: &mut ViewContext<'_, Self>) { + fn toggle_pin_tab(&mut self, _: &TogglePinTab, cx: &mut ViewContext) { if self.items.is_empty() { return; } @@ -1850,13 +1957,17 @@ impl Pane { } } - fn pin_tab_at(&mut self, ix: usize, cx: &mut ViewContext<'_, Self>) { + fn pin_tab_at(&mut self, ix: usize, cx: &mut ViewContext) { maybe!({ let pane = cx.view().clone(); let destination_index = self.pinned_tab_count.min(ix); 
self.pinned_tab_count += 1; let id = self.item_for_index(ix)?.item_id(); + if self.is_active_preview_item(id) { + self.set_preview_item_id(None, cx); + } + self.workspace .update(cx, |_, cx| { cx.defer(move |_, cx| move_item(&pane, &pane, id, destination_index, cx)); @@ -1867,10 +1978,10 @@ impl Pane { }); } - fn unpin_tab_at(&mut self, ix: usize, cx: &mut ViewContext<'_, Self>) { + fn unpin_tab_at(&mut self, ix: usize, cx: &mut ViewContext) { maybe!({ let pane = cx.view().clone(); - self.pinned_tab_count = self.pinned_tab_count.checked_sub(1).unwrap(); + self.pinned_tab_count = self.pinned_tab_count.checked_sub(1)?; let destination_index = self.pinned_tab_count; let id = self.item_for_index(ix)?.item_id(); @@ -1899,10 +2010,8 @@ impl Pane { item: &dyn ItemHandle, detail: usize, focus_handle: &FocusHandle, - cx: &mut ViewContext<'_, Pane>, + cx: &mut ViewContext, ) -> impl IntoElement { - let project_path = item.project_path(cx); - let is_active = ix == self.active_item_index; let is_preview = self .preview_item_id @@ -1918,20 +2027,56 @@ impl Pane { cx, ); - let icon_color = if ItemSettings::get_global(cx).git_status { - project_path - .as_ref() - .and_then(|path| self.project.read(cx).entry_for_path(path, cx)) - .map(|entry| { - Self::git_aware_icon_color(entry.git_status, entry.is_ignored, is_active) - }) - .unwrap_or_else(|| Self::icon_color(is_active)) + let item_diagnostic = item + .project_path(cx) + .map_or(None, |project_path| self.diagnostics.get(&project_path)); + + let decorated_icon = item_diagnostic.map_or(None, |diagnostic| { + let icon = match item.tab_icon(cx) { + Some(icon) => icon, + None => return None, + }; + + let knockout_item_color = if is_active { + cx.theme().colors().tab_active_background + } else { + cx.theme().colors().tab_bar_background + }; + + let (icon_decoration, icon_color) = if matches!(diagnostic, &DiagnosticSeverity::ERROR) + { + (IconDecorationKind::X, Color::Error) + } else { + (IconDecorationKind::Triangle, Color::Warning) + 
}; + + Some(DecoratedIcon::new( + icon.size(IconSize::Small).color(Color::Muted), + Some( + IconDecoration::new(icon_decoration, knockout_item_color, cx) + .color(icon_color.color(cx)) + .position(Point { + x: px(-2.), + y: px(-2.), + }), + ), + )) + }); + + let icon = if decorated_icon.is_none() { + match item_diagnostic { + Some(&DiagnosticSeverity::ERROR) => None, + Some(&DiagnosticSeverity::WARNING) => None, + _ => item.tab_icon(cx).map(|icon| icon.color(Color::Muted)), + } + .map(|icon| icon.size(IconSize::Small)) } else { - Self::icon_color(is_active) + None }; - let icon = item.tab_icon(cx); - let close_side = &ItemSettings::get_global(cx).close_position; + let settings = ItemSettings::get_global(cx); + let close_side = &settings.close_position; + let always_show_close_button = settings.always_show_close_button; let indicator = render_item_indicator(item.boxed_clone(), cx); let item_id = item.item_id(); let is_first_item = ix == 0; @@ -1951,7 +2096,7 @@ impl Pane { ClosePosition::Left => ui::TabCloseSide::Start, ClosePosition::Right => ui::TabCloseSide::End, }) - .selected(is_active) + .toggle_state(is_active) .on_click( cx.listener(move |pane: &mut Self, _, cx| pane.activate_item(ix, true, true, cx)), ) @@ -2004,8 +2149,11 @@ impl Pane { this.drag_split_direction = None; this.handle_external_paths_drop(paths, cx) })) - .when_some(item.tab_tooltip_text(cx), |tab, text| { - tab.tooltip(move |cx| Tooltip::text(text.clone(), cx)) + .when_some(item.tab_tooltip_content(cx), |tab, content| match content { + TabTooltipContent::Text(text) => { + tab.tooltip(move |cx| Tooltip::text(text.clone(), cx)) + } + TabTooltipContent::Custom(element_fn) => tab.tooltip(move |cx| element_fn(cx)), }) .start_slot::(indicator) .map(|this| { @@ -2026,7 +2174,9 @@ impl Pane { end_slot_action = &CloseActiveItem { save_intent: None }; end_slot_tooltip_text = "Close Tab"; IconButton::new("close tab", IconName::Close) - .visible_on_hover("") + .when(!always_show_close_button, |button| { 
+ button.visible_on_hover("") + }) .shape(IconButtonShape::Square) .icon_color(Color::Muted) .size(ButtonSize::None) @@ -2056,7 +2206,17 @@ impl Pane { .child( h_flex() .gap_1() - .children(icon.map(|icon| icon.size(IconSize::Small).color(icon_color))) + .items_center() + .children( + std::iter::once(if let Some(decorated_icon) = decorated_icon { + Some(div().child(decorated_icon.into_any_element())) + } else if let Some(icon) = icon { + Some(div().child(icon.into_any_element())) + } else { + None + }) + .flatten(), + ) .child(label), ); @@ -2071,8 +2231,10 @@ impl Pane { let is_pinned = self.is_tab_pinned(ix); let pane = cx.view().downgrade(); + let menu_context = item.focus_handle(cx); right_click_menu(ix).trigger(tab).menu(move |cx| { let pane = pane.clone(); + let menu_context = menu_context.clone(); ContextMenu::build(cx, move |mut menu, cx| { if let Some(pane) = pane.upgrade() { menu = menu @@ -2104,6 +2266,9 @@ impl Pane { cx.handler_for(&pane, move |pane, cx| { pane.close_items_to_the_left_by_id( item_id, + &CloseItemsToTheLeft { + close_pinned: false, + }, pane.get_non_closeable_item_ids(false), cx, ) @@ -2118,6 +2283,9 @@ impl Pane { cx.handler_for(&pane, move |pane, cx| { pane.close_items_to_the_right_by_id( item_id, + &CloseItemsToTheRight { + close_pinned: false, + }, pane.get_non_closeable_item_ids(false), cx, ) @@ -2225,11 +2393,13 @@ impl Pane { entry_id: Some(entry_id), })), cx.handler_for(&pane, move |pane, cx| { - pane.project.update(cx, |_, cx| { - cx.emit(project::Event::RevealInProjectPanel( - ProjectEntryId::from_proto(entry_id), - )) - }); + pane.project + .update(cx, |_, cx| { + cx.emit(project::Event::RevealInProjectPanel( + ProjectEntryId::from_proto(entry_id), + )) + }) + .ok(); }), ) .when_some(parent_abs_path, |menu, parent_abs_path| { @@ -2251,12 +2421,12 @@ impl Pane { } } - menu + menu.context(menu_context) }) }) } - fn render_tab_bar(&mut self, cx: &mut ViewContext<'_, Pane>) -> impl IntoElement { + fn render_tab_bar(&mut self, cx: 
&mut ViewContext) -> impl IntoElement { let focus_handle = self.focus_handle.clone(); let navigate_backward = IconButton::new("navigate_backward", IconName::ArrowLeft) .icon_size(IconSize::Small) @@ -2366,12 +2536,7 @@ impl Pane { pub fn render_menu_overlay(menu: &View) -> Div { div().absolute().bottom_0().right_0().size_0().child( - deferred( - anchored() - .anchor(AnchorCorner::TopRight) - .child(menu.clone()), - ) - .with_priority(1), + deferred(anchored().anchor(Corner::TopRight).child(menu.clone())).with_priority(1), ) } @@ -2384,8 +2549,18 @@ impl Pane { self.zoomed } - fn handle_drag_move(&mut self, event: &DragMoveEvent, cx: &mut ViewContext) { - if !self.can_split { + fn handle_drag_move( + &mut self, + event: &DragMoveEvent, + cx: &mut ViewContext, + ) { + let can_split_predicate = self.can_split_predicate.take(); + let can_split = match &can_split_predicate { + Some(can_split_predicate) => can_split_predicate(self, event.dragged_item(), cx), + None => false, + }; + self.can_split_predicate = can_split_predicate; + if !can_split { return; } @@ -2427,12 +2602,7 @@ impl Pane { } } - fn handle_tab_drop( - &mut self, - dragged_tab: &DraggedTab, - ix: usize, - cx: &mut ViewContext<'_, Self>, - ) { + fn handle_tab_drop(&mut self, dragged_tab: &DraggedTab, ix: usize, cx: &mut ViewContext) { if let Some(custom_drop_handle) = self.custom_drop_handle.clone() { if let ControlFlow::Break(()) = custom_drop_handle(self, dragged_tab, cx) { return; @@ -2455,6 +2625,8 @@ impl Pane { to_pane = workspace.split_pane(to_pane, split_direction, cx); } let old_ix = from_pane.read(cx).index_for_item_id(item_id); + let old_len = to_pane.read(cx).items.len(); + move_item(&from_pane, &to_pane, item_id, ix, cx); if to_pane == from_pane { if let Some(old_index) = old_ix { to_pane.update(cx, |this, _| { @@ -2472,7 +2644,10 @@ impl Pane { } } else { to_pane.update(cx, |this, _| { - if this.has_pinned_tabs() && ix < this.pinned_tab_count { + if this.items.len() > old_len // Did we not 
deduplicate on drag? + && this.has_pinned_tabs() + && ix < this.pinned_tab_count + { this.pinned_tab_count += 1; } }); @@ -2484,7 +2659,6 @@ impl Pane { } }) } - move_item(&from_pane, &to_pane, item_id, ix, cx); }); }) .log_err(); @@ -2494,7 +2668,7 @@ impl Pane { &mut self, dragged_selection: &DraggedSelection, dragged_onto: Option, - cx: &mut ViewContext<'_, Self>, + cx: &mut ViewContext, ) { if let Some(custom_drop_handle) = self.custom_drop_handle.clone() { if let ControlFlow::Break(()) = custom_drop_handle(self, dragged_selection, cx) { @@ -2512,7 +2686,7 @@ impl Pane { &mut self, project_entry_id: &ProjectEntryId, target: Option, - cx: &mut ViewContext<'_, Self>, + cx: &mut ViewContext, ) { if let Some(custom_drop_handle) = self.custom_drop_handle.clone() { if let ControlFlow::Break(()) = custom_drop_handle(self, project_entry_id, cx) { @@ -2577,11 +2751,7 @@ impl Pane { .log_err(); } - fn handle_external_paths_drop( - &mut self, - paths: &ExternalPaths, - cx: &mut ViewContext<'_, Self>, - ) { + fn handle_external_paths_drop(&mut self, paths: &ExternalPaths, cx: &mut ViewContext) { if let Some(custom_drop_handle) = self.custom_drop_handle.clone() { if let ControlFlow::Break(()) = custom_drop_handle(self, paths, cx) { return; @@ -2675,6 +2845,14 @@ impl Pane { }) .collect() } + + pub fn drag_split_direction(&self) -> Option { + self.drag_split_direction + } + + pub fn set_zoom_out_on_close(&mut self, zoom_out_on_close: bool) { + self.zoom_out_on_close = zoom_out_on_close; + } } impl FocusableView for Pane { @@ -2693,7 +2871,10 @@ impl Render for Pane { let should_display_tab_bar = self.should_display_tab_bar.clone(); let display_tab_bar = should_display_tab_bar(cx); - let is_local = self.project.read(cx).is_local(); + let Some(project) = self.project.upgrade() else { + return div().track_focus(&self.focus_handle(cx)); + }; + let is_local = project.read(cx).is_local(); v_flex() .key_context(key_context) @@ -2803,9 +2984,11 @@ impl Render for Pane { 
.map(ProjectEntryId::from_proto) .or_else(|| pane.active_item()?.project_entry_ids(cx).first().copied()); if let Some(entry_id) = entry_id { - pane.project.update(cx, |_, cx| { - cx.emit(project::Event::RevealInProjectPanel(entry_id)) - }); + pane.project + .update(cx, |_, cx| { + cx.emit(project::Event::RevealInProjectPanel(entry_id)) + }) + .ok(); } }), ) @@ -2813,7 +2996,7 @@ impl Render for Pane { pane.child(self.render_tab_bar(cx)) }) .child({ - let has_worktrees = self.project.read(cx).worktrees(cx).next().is_some(); + let has_worktrees = project.read(cx).worktrees(cx).next().is_some(); // main content div() .flex_1() @@ -2918,8 +3101,14 @@ impl Render for Pane { impl ItemNavHistory { pub fn push(&mut self, data: Option, cx: &mut WindowContext) { - self.history - .push(data, self.item.clone(), self.is_preview, cx); + if self + .item + .upgrade() + .is_some_and(|item| item.include_in_nav_history()) + { + self.history + .push(data, self.item.clone(), self.is_preview, cx); + } } pub fn pop_backward(&mut self, cx: &mut WindowContext) -> Option { @@ -3150,7 +3339,7 @@ impl Render for DraggedTab { cx, ); Tab::new("") - .selected(self.is_active) + .toggle_state(self.is_active) .child(label) .render(cx) .font(ui_font) @@ -3159,6 +3348,8 @@ impl Render for DraggedTab { #[cfg(test)] mod tests { + use std::num::NonZero; + use super::*; use crate::item::test::{TestItem, TestProjectItem}; use gpui::{TestAppContext, VisualTestContext}; @@ -3182,6 +3373,54 @@ mod tests { }); } + #[gpui::test] + async fn test_add_item_capped_to_max_tabs(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + for i in 0..7 { + add_labeled_item(&pane, format!("{}", i).as_str(), false, cx); + } + set_max_tabs(cx, Some(5)); + 
add_labeled_item(&pane, "7", false, cx); + // Remove items to respect the max tab cap. + assert_item_labels(&pane, ["3", "4", "5", "6", "7*"], cx); + pane.update(cx, |pane, cx| { + pane.activate_item(0, false, false, cx); + }); + add_labeled_item(&pane, "X", false, cx); + // Respect activation order. + assert_item_labels(&pane, ["3", "X*", "5", "6", "7"], cx); + + for i in 0..7 { + add_labeled_item(&pane, format!("D{}", i).as_str(), true, cx); + } + // Keeps dirty items, even over max tab cap. + assert_item_labels( + &pane, + ["D0^", "D1^", "D2^", "D3^", "D4^", "D5^", "D6*^"], + cx, + ); + + set_max_tabs(cx, None); + for i in 0..7 { + add_labeled_item(&pane, format!("N{}", i).as_str(), false, cx); + } + // No cap when max tabs is None. + assert_item_labels( + &pane, + [ + "D0^", "D1^", "D2^", "D3^", "D4^", "D5^", "D6^", "N0", "N1", "N2", "N3", "N4", + "N5", "N6*", + ], + cx, + ); + } + #[gpui::test] async fn test_add_item_with_new_item(cx: &mut TestAppContext) { init_test(cx); @@ -3548,6 +3787,69 @@ mod tests { assert_item_labels(&pane, ["A*"], cx); } + #[gpui::test] + async fn test_remove_item_ordering_left_neighbour(cx: &mut TestAppContext) { + init_test(cx); + cx.update_global::(|s, cx| { + s.update_user_settings::(cx, |s| { + s.activate_on_close = Some(ActivateOnClose::LeftNeighbour); + }); + }); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + add_labeled_item(&pane, "C", false, cx); + add_labeled_item(&pane, "D", false, cx); + assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); + + pane.update(cx, |pane, cx| pane.activate_item(1, false, false, cx)); + add_labeled_item(&pane, "1", false, cx); + assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx); + + 
pane.update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + .unwrap() + .await + .unwrap(); + assert_item_labels(&pane, ["A", "B*", "C", "D"], cx); + + pane.update(cx, |pane, cx| pane.activate_item(3, false, false, cx)); + assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); + + pane.update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + .unwrap() + .await + .unwrap(); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + + pane.update(cx, |pane, cx| pane.activate_item(0, false, false, cx)); + assert_item_labels(&pane, ["A*", "B", "C"], cx); + + pane.update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + .unwrap() + .await + .unwrap(); + assert_item_labels(&pane, ["B*", "C"], cx); + + pane.update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + .unwrap() + .await + .unwrap(); + assert_item_labels(&pane, ["C*"], cx); + } + #[gpui::test] async fn test_close_inactive_items(cx: &mut TestAppContext) { init_test(cx); @@ -3701,11 +4003,41 @@ mod tests { assert_item_labels(&pane, [], cx); + add_labeled_item(&pane, "A", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new(1, "A.txt", cx)) + }); + add_labeled_item(&pane, "B", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new(2, "B.txt", cx)) + }); + add_labeled_item(&pane, "C", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new(3, "C.txt", cx)) + }); + assert_item_labels(&pane, ["A^", "B^", "C*^"], cx); + + let save = pane + .update(cx, |pane, cx| { + pane.close_all_items( + &CloseAllItems { + save_intent: None, + close_pinned: false, + }, + cx, + ) + }) + .unwrap(); + + cx.executor().run_until_parked(); + cx.simulate_prompt_answer(2); + save.await.unwrap(); + assert_item_labels(&pane, [], cx); + add_labeled_item(&pane, "A", true, cx); 
add_labeled_item(&pane, "B", true, cx); add_labeled_item(&pane, "C", true, cx); assert_item_labels(&pane, ["A^", "B^", "C*^"], cx); - let save = pane .update(cx, |pane, cx| { pane.close_all_items( @@ -3765,6 +4097,14 @@ mod tests { }); } + fn set_max_tabs(cx: &mut TestAppContext, value: Option) { + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings::(cx, |settings| { + settings.max_tabs = value.map(|v| NonZero::new(v).unwrap()) + }); + }); + } + fn add_labeled_item( pane: &View, label: &str, @@ -3809,14 +4149,14 @@ mod tests { } // Assert the item label, with the active item label suffixed with a '*' + #[track_caller] fn assert_item_labels( pane: &View, expected_states: [&str; COUNT], cx: &mut VisualTestContext, ) { - pane.update(cx, |pane, cx| { - let actual_states = pane - .items + let actual_states = pane.update(cx, |pane, cx| { + pane.items .iter() .enumerate() .map(|(ix, item)| { @@ -3835,12 +4175,11 @@ mod tests { } state }) - .collect::>(); - - assert_eq!( - actual_states, expected_states, - "pane items do not match expectation" - ); - }) + .collect::>() + }); + assert_eq!( + actual_states, expected_states, + "pane items do not match expectation" + ); } } diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 390fa6d174..168f6539e0 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -8,8 +8,8 @@ use call::{ActiveCall, ParticipantLocation}; use client::proto::PeerId; use collections::HashMap; use gpui::{ - point, size, AnyView, AnyWeakView, Axis, Bounds, IntoElement, Model, MouseButton, Pixels, - Point, StyleRefinement, View, ViewContext, + point, size, Along, AnyView, AnyWeakView, Axis, Bounds, IntoElement, Model, MouseButton, + Pixels, Point, StyleRefinement, View, ViewContext, }; use parking_lot::Mutex; use project::Project; @@ -27,11 +27,11 @@ const VERTICAL_MIN_SIZE: f32 = 100.; /// Single-pane group is a regular pane. 
#[derive(Clone)] pub struct PaneGroup { - pub(crate) root: Member, + pub root: Member, } impl PaneGroup { - pub(crate) fn with_root(root: Member) -> Self { + pub fn with_root(root: Member) -> Self { Self { root } } @@ -90,6 +90,30 @@ impl PaneGroup { } } + pub fn resize( + &mut self, + pane: &View, + direction: Axis, + amount: Pixels, + bounds: &Bounds, + ) { + match &mut self.root { + Member::Pane(_) => {} + Member::Axis(axis) => { + let _ = axis.resize(pane, direction, amount, bounds); + } + }; + } + + pub fn reset_pane_sizes(&mut self) { + match &mut self.root { + Member::Pane(_) => {} + Member::Axis(axis) => { + let _ = axis.reset_pane_sizes(); + } + }; + } + pub fn swap(&mut self, from: &View, to: &View) { match &mut self.root { Member::Pane(_) => {} @@ -98,7 +122,7 @@ impl PaneGroup { } #[allow(clippy::too_many_arguments)] - pub(crate) fn render( + pub fn render( &self, project: &Model, follower_states: &HashMap, @@ -120,19 +144,51 @@ impl PaneGroup { ) } - pub(crate) fn panes(&self) -> Vec<&View> { + pub fn panes(&self) -> Vec<&View> { let mut panes = Vec::new(); self.root.collect_panes(&mut panes); panes } - pub(crate) fn first_pane(&self) -> View { + pub fn first_pane(&self) -> View { self.root.first_pane() } + + pub fn find_pane_in_direction( + &mut self, + active_pane: &View, + direction: SplitDirection, + cx: &WindowContext, + ) -> Option<&View> { + let bounding_box = self.bounding_box_for_pane(active_pane)?; + let cursor = active_pane.read(cx).pixel_position_of_cursor(cx); + let center = match cursor { + Some(cursor) if bounding_box.contains(&cursor) => cursor, + _ => bounding_box.center(), + }; + + let distance_to_next = crate::HANDLE_HITBOX_SIZE; + + let target = match direction { + SplitDirection::Left => { + Point::new(bounding_box.left() - distance_to_next.into(), center.y) + } + SplitDirection::Right => { + Point::new(bounding_box.right() + distance_to_next.into(), center.y) + } + SplitDirection::Up => { + Point::new(center.x, bounding_box.top() 
- distance_to_next.into()) + } + SplitDirection::Down => { + Point::new(center.x, bounding_box.bottom() + distance_to_next.into()) + } + }; + self.pane_at_pixel_position(target) + } } -#[derive(Clone)] -pub(crate) enum Member { +#[derive(Debug, Clone)] +pub enum Member { Axis(PaneAxis), Pane(View), } @@ -335,8 +391,8 @@ impl Member { } } -#[derive(Clone)] -pub(crate) struct PaneAxis { +#[derive(Debug, Clone)] +pub struct PaneAxis { pub axis: Axis, pub members: Vec, pub flexes: Arc>>, @@ -445,6 +501,125 @@ impl PaneAxis { } } + fn reset_pane_sizes(&self) { + *self.flexes.lock() = vec![1.; self.members.len()]; + for member in self.members.iter() { + if let Member::Axis(axis) = member { + axis.reset_pane_sizes(); + } + } + } + + fn resize( + &mut self, + pane: &View, + axis: Axis, + amount: Pixels, + bounds: &Bounds, + ) -> Option { + let container_size = self + .bounding_boxes + .lock() + .iter() + .filter_map(|e| *e) + .reduce(|acc, e| acc.union(&e)) + .unwrap_or(*bounds) + .size; + + let found_pane = self + .members + .iter() + .any(|member| matches!(member, Member::Pane(p) if p == pane)); + + if found_pane && self.axis != axis { + return Some(false); // pane found but this is not the correct axis direction + } + let mut found_axis_index: Option = None; + if !found_pane { + for (i, pa) in self.members.iter_mut().enumerate() { + if let Member::Axis(pa) = pa { + if let Some(done) = pa.resize(pane, axis, amount, bounds) { + if done { + return Some(true); // pane found and operations already done + } else if self.axis != axis { + return Some(false); // pane found but this is not the correct axis direction + } else { + found_axis_index = Some(i); // pane found and this is correct direction + } + } + } + } + found_axis_index?; // no pane found + } + + let min_size = match axis { + Axis::Horizontal => px(HORIZONTAL_MIN_SIZE), + Axis::Vertical => px(VERTICAL_MIN_SIZE), + }; + let mut flexes = self.flexes.lock(); + + let ix = if found_pane { + 
self.members.iter().position(|m| { + if let Member::Pane(p) = m { + p == pane + } else { + false + } + }) + } else { + found_axis_index + }; + + if ix.is_none() { + return Some(true); + } + + let ix = ix.unwrap_or(0); + + let size = move |ix, flexes: &[f32]| { + container_size.along(axis) * (flexes[ix] / flexes.len() as f32) + }; + + // Don't allow resizing to less than the minimum size, if elements are already too small + if min_size - px(1.) > size(ix, flexes.as_slice()) { + return Some(true); + } + + let flex_changes = |pixel_dx, target_ix, next: isize, flexes: &[f32]| { + let flex_change = flexes.len() as f32 * pixel_dx / container_size.along(axis); + let current_target_flex = flexes[target_ix] + flex_change; + let next_target_flex = flexes[(target_ix as isize + next) as usize] - flex_change; + (current_target_flex, next_target_flex) + }; + + let apply_changes = + |current_ix: usize, proposed_current_pixel_change: Pixels, flexes: &mut [f32]| { + let next_target_size = Pixels::max( + size(current_ix + 1, flexes) - proposed_current_pixel_change, + min_size, + ); + let current_target_size = Pixels::max( + size(current_ix, flexes) + size(current_ix + 1, flexes) - next_target_size, + min_size, + ); + + let current_pixel_change = current_target_size - size(current_ix, flexes); + + let (current_target_flex, next_target_flex) = + flex_changes(current_pixel_change, current_ix, 1, flexes); + + flexes[current_ix] = current_target_flex; + flexes[current_ix + 1] = next_target_flex; + }; + + if ix + 1 == flexes.len() { + apply_changes(ix - 1, -1.0 * amount, flexes.as_mut_slice()); + } else { + apply_changes(ix, amount, flexes.as_mut_slice()); + } + Some(true) + } + fn swap(&mut self, from: &View, to: &View) { for member in self.members.iter_mut() { match member { @@ -583,9 +758,9 @@ impl SplitDirection { pub fn edge(&self, rect: Bounds) -> Pixels { match self { Self::Up => rect.origin.y, - Self::Down => rect.lower_left().y, - Self::Left => rect.lower_left().x, - Self::Right 
=> rect.lower_right().x, + Self::Down => rect.bottom_left().y, + Self::Left => rect.bottom_left().x, + Self::Right => rect.bottom_right().x, } } @@ -596,7 +771,7 @@ impl SplitDirection { size: size(bounds.size.width, length), }, Self::Down => Bounds { - origin: point(bounds.lower_left().x, bounds.lower_left().y - length), + origin: point(bounds.bottom_left().x, bounds.bottom_left().y - length), size: size(bounds.size.width, length), }, Self::Left => Bounds { @@ -604,7 +779,7 @@ impl SplitDirection { size: size(length, bounds.size.height), }, Self::Right => Bounds { - origin: point(bounds.lower_right().x - length, bounds.lower_left().y), + origin: point(bounds.bottom_right().x - length, bounds.bottom_left().y), size: size(length, bounds.size.height), }, } @@ -625,8 +800,15 @@ impl SplitDirection { } } -mod element { +#[derive(Clone, Copy, Debug, Deserialize, PartialEq)] +pub enum ResizeIntent { + Lengthen, + Shorten, + Widen, + Narrow, +} +mod element { use std::mem; use std::{cell::RefCell, iter, rc::Rc, sync::Arc}; @@ -836,7 +1018,7 @@ mod element { fn request_layout( &mut self, _global_id: Option<&GlobalElementId>, - cx: &mut ui::prelude::WindowContext, + cx: &mut WindowContext, ) -> (gpui::LayoutId, Self::RequestLayoutState) { let style = Style { flex_grow: 1., @@ -937,7 +1119,7 @@ mod element { bounds: gpui::Bounds, _: &mut Self::RequestLayoutState, layout: &mut Self::PrepaintState, - cx: &mut ui::prelude::WindowContext, + cx: &mut WindowContext, ) { for child in &mut layout.children { child.element.paint(cx); diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 925d56a921..8bd5c87286 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -1,6 +1,6 @@ pub mod model; -use std::path::Path; +use std::{path::Path, str::FromStr}; use anyhow::{anyhow, bail, Context, Result}; use client::DevServerProjectId; @@ -179,7 +179,7 @@ define_connection! 
{ // group_id: usize, // Primary key for pane_groups // workspace_id: usize, // References workspaces table // parent_group_id: Option, // None indicates that this is the root node - // position: Optiopn, // None indicates that this is the root node + // position: Option, // None indicates that this is the root node // axis: Option, // 'Vertical', 'Horizontal' // flexes: Option>, // A JSON array of floats // ) @@ -380,6 +380,9 @@ define_connection! { PRIMARY KEY (workspace_id, worktree_id, language_name) ); ), + sql!( + ALTER TABLE toolchains ADD COLUMN raw_json TEXT DEFAULT "{}"; + ), ]; } @@ -1080,18 +1083,19 @@ impl WorkspaceDb { self.write(move |this| { let mut select = this .select_bound(sql!( - SELECT name, path FROM toolchains WHERE workspace_id = ? AND language_name = ? AND worktree_id = ? + SELECT name, path, raw_json FROM toolchains WHERE workspace_id = ? AND language_name = ? AND worktree_id = ? )) .context("Preparing insertion")?; - let toolchain: Vec<(String, String)> = + let toolchain: Vec<(String, String, String)> = select((workspace_id, language_name.0.to_owned(), worktree_id.to_usize()))?; - Ok(toolchain.into_iter().next().map(|(name, path)| Toolchain { + Ok(toolchain.into_iter().next().and_then(|(name, path, raw_json)| Some(Toolchain { name: name.into(), path: path.into(), language_name, - })) + as_json: serde_json::Value::from_str(&raw_json).ok()? + }))) }) .await } @@ -1103,18 +1107,19 @@ impl WorkspaceDb { self.write(move |this| { let mut select = this .select_bound(sql!( - SELECT name, path, worktree_id, language_name FROM toolchains WHERE workspace_id = ? + SELECT name, path, worktree_id, language_name, raw_json FROM toolchains WHERE workspace_id = ? 
)) .context("Preparing insertion")?; - let toolchain: Vec<(String, String, u64, String)> = + let toolchain: Vec<(String, String, u64, String, String)> = select(workspace_id)?; - Ok(toolchain.into_iter().map(|(name, path, worktree_id, language_name)| (Toolchain { + Ok(toolchain.into_iter().filter_map(|(name, path, worktree_id, language_name, raw_json)| Some((Toolchain { name: name.into(), path: path.into(), language_name: LanguageName::new(&language_name), - }, WorktreeId::from_proto(worktree_id))).collect()) + as_json: serde_json::Value::from_str(&raw_json).ok()? + }, WorktreeId::from_proto(worktree_id)))).collect()) }) .await } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index a2510b8bec..ac0c144aa3 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -9,6 +9,7 @@ use db::sqlez::{ statement::Statement, }; use gpui::{AsyncWindowContext, Model, View, WeakView}; +use itertools::Itertools as _; use project::Project; use remote::ssh_session::SshProjectId; use serde::{Deserialize, Serialize}; @@ -228,6 +229,28 @@ impl SerializedWorkspaceLocation { Self::Local(LocalPaths::new(sorted_paths), LocalPathsOrder::new(order)) } + + /// Get sorted paths + pub fn sorted_paths(&self) -> Arc> { + match self { + SerializedWorkspaceLocation::Local(paths, order) => { + if order.order().len() == 0 { + paths.paths().clone() + } else { + Arc::new( + order + .order() + .iter() + .zip(paths.paths().iter()) + .sorted_by_key(|(i, _)| **i) + .map(|(_, p)| p.clone()) + .collect(), + ) + } + } + SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), + } + } } #[derive(Debug, PartialEq, Clone)] @@ -473,7 +496,7 @@ impl SerializedPane { })?; } pane.update(cx, |pane, _| { - pane.set_pinned_count(self.pinned_count); + pane.set_pinned_count(self.pinned_count.min(items.len())); })?; anyhow::Ok(items) @@ -564,4 +587,62 @@ mod tests { ) ); } + + #[test] + fn 
test_sorted_paths() { + let paths = vec!["b", "a", "c"]; + let serialized = SerializedWorkspaceLocation::from_local_paths(paths); + assert_eq!( + serialized.sorted_paths(), + Arc::new(vec![ + PathBuf::from("b"), + PathBuf::from("a"), + PathBuf::from("c"), + ]) + ); + + let paths = Arc::new(vec![ + PathBuf::from("a"), + PathBuf::from("b"), + PathBuf::from("c"), + ]); + let order = vec![2, 0, 1]; + let serialized = + SerializedWorkspaceLocation::Local(LocalPaths(paths.clone()), LocalPathsOrder(order)); + assert_eq!( + serialized.sorted_paths(), + Arc::new(vec![ + PathBuf::from("b"), + PathBuf::from("c"), + PathBuf::from("a"), + ]) + ); + + let paths = Arc::new(vec![ + PathBuf::from("a"), + PathBuf::from("b"), + PathBuf::from("c"), + ]); + let order = vec![]; + let serialized = + SerializedWorkspaceLocation::Local(LocalPaths(paths.clone()), LocalPathsOrder(order)); + assert_eq!(serialized.sorted_paths(), paths); + + let urls = ["/a", "/b", "/c"]; + let serialized = SerializedWorkspaceLocation::Ssh(SerializedSshProject { + id: SshProjectId(0), + host: "host".to_string(), + port: Some(22), + paths: urls.iter().map(|s| s.to_string()).collect(), + user: Some("user".to_string()), + }); + assert_eq!( + serialized.sorted_paths(), + Arc::new( + urls.iter() + .map(|p| PathBuf::from(format!("user@host:22{}", p))) + .collect() + ) + ); + } } diff --git a/crates/workspace/src/shared_screen.rs b/crates/workspace/src/shared_screen.rs index 59df859488..1d17cfa145 100644 --- a/crates/workspace/src/shared_screen.rs +++ b/crates/workspace/src/shared_screen.rs @@ -1,126 +1,11 @@ -use crate::{ - item::{Item, ItemEvent}, - ItemNavHistory, WorkspaceId, -}; -use anyhow::Result; -use call::participant::{Frame, RemoteVideoTrack}; -use client::{proto::PeerId, User}; -use futures::StreamExt; -use gpui::{ - div, surface, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement, - ParentElement, Render, SharedString, Styled, Task, View, ViewContext, VisualContext, - WindowContext, 
-}; -use std::sync::{Arc, Weak}; -use ui::{prelude::*, Icon, IconName}; +#[cfg(target_os = "macos")] +mod macos; -pub enum Event { - Close, -} +#[cfg(target_os = "macos")] +pub use macos::*; -pub struct SharedScreen { - track: Weak, - frame: Option, - pub peer_id: PeerId, - user: Arc, - nav_history: Option, - _maintain_frame: Task>, - focus: FocusHandle, -} +#[cfg(not(target_os = "macos"))] +mod cross_platform; -impl SharedScreen { - pub fn new( - track: &Arc, - peer_id: PeerId, - user: Arc, - cx: &mut ViewContext, - ) -> Self { - cx.focus_handle(); - let mut frames = track.frames(); - Self { - track: Arc::downgrade(track), - frame: None, - peer_id, - user, - nav_history: Default::default(), - _maintain_frame: cx.spawn(|this, mut cx| async move { - while let Some(frame) = frames.next().await { - this.update(&mut cx, |this, cx| { - this.frame = Some(frame); - cx.notify(); - })?; - } - this.update(&mut cx, |_, cx| cx.emit(Event::Close))?; - Ok(()) - }), - focus: cx.focus_handle(), - } - } -} - -impl EventEmitter for SharedScreen {} - -impl FocusableView for SharedScreen { - fn focus_handle(&self, _: &AppContext) -> FocusHandle { - self.focus.clone() - } -} -impl Render for SharedScreen { - fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - div() - .bg(cx.theme().colors().editor_background) - .track_focus(&self.focus) - .key_context("SharedScreen") - .size_full() - .children( - self.frame - .as_ref() - .map(|frame| surface(frame.image()).size_full()), - ) - } -} - -impl Item for SharedScreen { - type Event = Event; - - fn tab_tooltip_text(&self, _: &AppContext) -> Option { - Some(format!("{}'s screen", self.user.github_login).into()) - } - - fn deactivated(&mut self, cx: &mut ViewContext) { - if let Some(nav_history) = self.nav_history.as_mut() { - nav_history.push::<()>(None, cx); - } - } - - fn tab_icon(&self, _cx: &WindowContext) -> Option { - Some(Icon::new(IconName::Screen)) - } - - fn tab_content_text(&self, _cx: &WindowContext) -> Option { - 
Some(format!("{}'s screen", self.user.github_login).into()) - } - - fn telemetry_event_text(&self) -> Option<&'static str> { - None - } - - fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext) { - self.nav_history = Some(history); - } - - fn clone_on_split( - &self, - _workspace_id: Option, - cx: &mut ViewContext, - ) -> Option> { - let track = self.track.upgrade()?; - Some(cx.new_view(|cx| Self::new(&track, self.peer_id, self.user.clone(), cx))) - } - - fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) { - match event { - Event::Close => f(ItemEvent::CloseItem), - } - } -} +#[cfg(not(target_os = "macos"))] +pub use cross_platform::*; diff --git a/crates/workspace/src/shared_screen/cross_platform.rs b/crates/workspace/src/shared_screen/cross_platform.rs new file mode 100644 index 0000000000..285946cce0 --- /dev/null +++ b/crates/workspace/src/shared_screen/cross_platform.rs @@ -0,0 +1,114 @@ +use crate::{ + item::{Item, ItemEvent}, + ItemNavHistory, WorkspaceId, +}; +use call::{RemoteVideoTrack, RemoteVideoTrackView}; +use client::{proto::PeerId, User}; +use gpui::{ + div, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement, ParentElement, + Render, SharedString, Styled, View, ViewContext, VisualContext, WindowContext, +}; +use std::sync::Arc; +use ui::{prelude::*, Icon, IconName}; + +pub enum Event { + Close, +} + +pub struct SharedScreen { + pub peer_id: PeerId, + user: Arc, + nav_history: Option, + view: View, + focus: FocusHandle, +} + +impl SharedScreen { + pub fn new( + track: RemoteVideoTrack, + peer_id: PeerId, + user: Arc, + cx: &mut ViewContext, + ) -> Self { + let view = cx.new_view(|cx| RemoteVideoTrackView::new(track.clone(), cx)); + cx.subscribe(&view, |_, _, ev, cx| match ev { + call::RemoteVideoTrackViewEvent::Close => cx.emit(Event::Close), + }) + .detach(); + Self { + view, + peer_id, + user, + nav_history: Default::default(), + focus: cx.focus_handle(), + } + } +} + +impl EventEmitter 
for SharedScreen {} + +impl FocusableView for SharedScreen { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus.clone() + } +} +impl Render for SharedScreen { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .bg(cx.theme().colors().editor_background) + .track_focus(&self.focus) + .key_context("SharedScreen") + .size_full() + .child(self.view.clone()) + } +} + +impl Item for SharedScreen { + type Event = Event; + + fn tab_tooltip_text(&self, _: &AppContext) -> Option { + Some(format!("{}'s screen", self.user.github_login).into()) + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + if let Some(nav_history) = self.nav_history.as_mut() { + nav_history.push::<()>(None, cx); + } + } + + fn tab_icon(&self, _cx: &WindowContext) -> Option { + Some(Icon::new(IconName::Screen)) + } + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some(format!("{}'s screen", self.user.github_login).into()) + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + None + } + + fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext) { + self.nav_history = Some(history); + } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> { + Some(cx.new_view(|cx| Self { + view: self.view.update(cx, |view, cx| view.clone(cx)), + peer_id: self.peer_id, + user: self.user.clone(), + nav_history: Default::default(), + focus: cx.focus_handle(), + })) + } + + fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) { + match event { + Event::Close => f(ItemEvent::CloseItem), + } + } +} diff --git a/crates/workspace/src/shared_screen/macos.rs b/crates/workspace/src/shared_screen/macos.rs new file mode 100644 index 0000000000..ad0b4c4275 --- /dev/null +++ b/crates/workspace/src/shared_screen/macos.rs @@ -0,0 +1,126 @@ +use crate::{ + item::{Item, ItemEvent}, + ItemNavHistory, WorkspaceId, +}; +use anyhow::Result; +use call::participant::{Frame, 
RemoteVideoTrack}; +use client::{proto::PeerId, User}; +use futures::StreamExt; +use gpui::{ + div, surface, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement, + ParentElement, Render, SharedString, Styled, Task, View, ViewContext, VisualContext, + WindowContext, +}; +use std::sync::{Arc, Weak}; +use ui::{prelude::*, Icon, IconName}; + +pub enum Event { + Close, +} + +pub struct SharedScreen { + track: Weak, + frame: Option, + pub peer_id: PeerId, + user: Arc, + nav_history: Option, + _maintain_frame: Task>, + focus: FocusHandle, +} + +impl SharedScreen { + pub fn new( + track: Arc, + peer_id: PeerId, + user: Arc, + cx: &mut ViewContext, + ) -> Self { + cx.focus_handle(); + let mut frames = track.frames(); + Self { + track: Arc::downgrade(&track), + frame: None, + peer_id, + user, + nav_history: Default::default(), + _maintain_frame: cx.spawn(|this, mut cx| async move { + while let Some(frame) = frames.next().await { + this.update(&mut cx, |this, cx| { + this.frame = Some(frame); + cx.notify(); + })?; + } + this.update(&mut cx, |_, cx| cx.emit(Event::Close))?; + Ok(()) + }), + focus: cx.focus_handle(), + } + } +} + +impl EventEmitter for SharedScreen {} + +impl FocusableView for SharedScreen { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus.clone() + } +} +impl Render for SharedScreen { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .bg(cx.theme().colors().editor_background) + .track_focus(&self.focus) + .key_context("SharedScreen") + .size_full() + .children( + self.frame + .as_ref() + .map(|frame| surface(frame.image()).size_full()), + ) + } +} + +impl Item for SharedScreen { + type Event = Event; + + fn tab_tooltip_text(&self, _: &AppContext) -> Option { + Some(format!("{}'s screen", self.user.github_login).into()) + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + if let Some(nav_history) = self.nav_history.as_mut() { + nav_history.push::<()>(None, cx); + } + } + + fn 
tab_icon(&self, _cx: &WindowContext) -> Option { + Some(Icon::new(IconName::Screen)) + } + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some(format!("{}'s screen", self.user.github_login).into()) + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + None + } + + fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext) { + self.nav_history = Some(history); + } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> { + let track = self.track.upgrade()?; + Some(cx.new_view(|cx| Self::new(track, self.peer_id, self.user.clone(), cx))) + } + + fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) { + match event { + Event::Close => f(ItemEvent::CloseItem), + } + } +} diff --git a/crates/workspace/src/status_bar.rs b/crates/workspace/src/status_bar.rs index 00a0078032..585b2700b4 100644 --- a/crates/workspace/src/status_bar.rs +++ b/crates/workspace/src/status_bar.rs @@ -64,14 +64,14 @@ impl Render for StatusBar { impl StatusBar { fn render_left_tools(&self, cx: &mut ViewContext) -> impl IntoElement { h_flex() - .gap(DynamicSpacing::Base08.rems(cx)) + .gap(DynamicSpacing::Base04.rems(cx)) .overflow_x_hidden() .children(self.left_items.iter().map(|item| item.to_any())) } fn render_right_tools(&self, cx: &mut ViewContext) -> impl IntoElement { h_flex() - .gap(DynamicSpacing::Base08.rems(cx)) + .gap(DynamicSpacing::Base04.rems(cx)) .children(self.right_items.iter().rev().map(|item| item.to_any())) } } diff --git a/crates/workspace/src/tasks.rs b/crates/workspace/src/tasks.rs index c2b6e51acd..c01e2ae52b 100644 --- a/crates/workspace/src/tasks.rs +++ b/crates/workspace/src/tasks.rs @@ -6,12 +6,12 @@ use ui::ViewContext; use crate::Workspace; pub fn schedule_task( - workspace: &Workspace, + workspace: &mut Workspace, task_source_kind: TaskSourceKind, task_to_resolve: &TaskTemplate, task_cx: &TaskContext, omit_history: bool, - cx: &mut ViewContext<'_, Workspace>, + cx: 
&mut ViewContext, ) { match workspace.project.read(cx).ssh_connection_state(cx) { None | Some(ConnectionState::Connected) => {} @@ -40,11 +40,11 @@ pub fn schedule_task( } pub fn schedule_resolved_task( - workspace: &Workspace, + workspace: &mut Workspace, task_source_kind: TaskSourceKind, mut resolved_task: ResolvedTask, omit_history: bool, - cx: &mut ViewContext<'_, Workspace>, + cx: &mut ViewContext, ) { if let Some(spawn_in_terminal) = resolved_task.resolved.take() { if !omit_history { @@ -59,6 +59,9 @@ pub fn schedule_resolved_task( } }); } - cx.emit(crate::Event::SpawnTask(Box::new(spawn_in_terminal))); + + cx.emit(crate::Event::SpawnTask { + action: Box::new(spawn_in_terminal), + }); } } diff --git a/crates/workspace/src/theme_preview.rs b/crates/workspace/src/theme_preview.rs index 337e603d84..3dc4b46150 100644 --- a/crates/workspace/src/theme_preview.rs +++ b/crates/workspace/src/theme_preview.rs @@ -6,7 +6,7 @@ use ui::{ element_cell, prelude::*, string_cell, utils::calculate_contrast_ratio, AudioStatus, Availability, Avatar, AvatarAudioStatusIndicator, AvatarAvailabilityIndicator, ButtonLike, Checkbox, CheckboxWithLabel, ContentGroup, DecoratedIcon, ElevationIndex, Facepile, - IconDecoration, Indicator, Table, TintColor, Tooltip, + IconDecoration, Indicator, Switch, SwitchWithLabel, Table, TintColor, Tooltip, }; use crate::{Item, Workspace}; @@ -108,147 +108,6 @@ impl ThemePreview { cx.theme().colors().editor_background } - fn render_avatars(&self, cx: &ViewContext) -> impl IntoElement { - v_flex() - .gap_1() - .child( - Headline::new("Avatars") - .size(HeadlineSize::Small) - .color(Color::Muted), - ) - .child( - h_flex() - .items_start() - .gap_4() - .child(Avatar::new(AVATAR_URL).size(px(24.))) - .child(Avatar::new(AVATAR_URL).size(px(24.)).grayscale(true)) - .child( - Avatar::new(AVATAR_URL) - .size(px(24.)) - .indicator(AvatarAudioStatusIndicator::new(AudioStatus::Muted)), - ) - .child( - Avatar::new(AVATAR_URL) - .size(px(24.)) - 
.indicator(AvatarAudioStatusIndicator::new(AudioStatus::Deafened)), - ) - .child( - Avatar::new(AVATAR_URL) - .size(px(24.)) - .indicator(AvatarAvailabilityIndicator::new(Availability::Free)), - ) - .child( - Avatar::new(AVATAR_URL) - .size(px(24.)) - .indicator(AvatarAvailabilityIndicator::new(Availability::Free)), - ) - .child( - Facepile::empty() - .child( - Avatar::new(AVATAR_URL) - .border_color(Self::preview_bg(cx)) - .size(px(22.)) - .into_any_element(), - ) - .child( - Avatar::new(AVATAR_URL) - .border_color(Self::preview_bg(cx)) - .size(px(22.)) - .into_any_element(), - ) - .child( - Avatar::new(AVATAR_URL) - .border_color(Self::preview_bg(cx)) - .size(px(22.)) - .into_any_element(), - ) - .child( - Avatar::new(AVATAR_URL) - .border_color(Self::preview_bg(cx)) - .size(px(22.)) - .into_any_element(), - ), - ), - ) - } - - fn render_buttons(&self, layer: ElevationIndex, cx: &ViewContext) -> impl IntoElement { - v_flex() - .gap_1() - .child( - Headline::new("Buttons") - .size(HeadlineSize::Small) - .color(Color::Muted), - ) - .child( - h_flex() - .items_start() - .gap_px() - .child( - IconButton::new("icon_button_transparent", IconName::Check) - .style(ButtonStyle::Transparent), - ) - .child( - IconButton::new("icon_button_subtle", IconName::Check) - .style(ButtonStyle::Subtle), - ) - .child( - IconButton::new("icon_button_filled", IconName::Check) - .style(ButtonStyle::Filled), - ) - .child( - IconButton::new("icon_button_selected_accent", IconName::Check) - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .selected(true), - ) - .child(IconButton::new("icon_button_selected", IconName::Check).selected(true)) - .child( - IconButton::new("icon_button_positive", IconName::Check) - .style(ButtonStyle::Tinted(TintColor::Positive)), - ) - .child( - IconButton::new("icon_button_warning", IconName::Check) - .style(ButtonStyle::Tinted(TintColor::Warning)), - ) - .child( - IconButton::new("icon_button_negative", IconName::Check) - 
.style(ButtonStyle::Tinted(TintColor::Negative)), - ), - ) - .child( - h_flex() - .gap_px() - .child( - Button::new("button_transparent", "Transparent") - .style(ButtonStyle::Transparent), - ) - .child(Button::new("button_subtle", "Subtle").style(ButtonStyle::Subtle)) - .child(Button::new("button_filled", "Filled").style(ButtonStyle::Filled)) - .child( - Button::new("button_selected", "Selected") - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .selected(true), - ) - .child( - Button::new("button_selected_tinted", "Selected (Tinted)") - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .selected(true), - ) - .child( - Button::new("button_positive", "Tint::Positive") - .style(ButtonStyle::Tinted(TintColor::Positive)), - ) - .child( - Button::new("button_warning", "Tint::Warning") - .style(ButtonStyle::Tinted(TintColor::Warning)), - ) - .child( - Button::new("button_negative", "Tint::Negative") - .style(ButtonStyle::Tinted(TintColor::Negative)), - ), - ) - } - fn render_text(&self, layer: ElevationIndex, cx: &ViewContext) -> impl IntoElement { let bg = layer.bg(cx); @@ -502,7 +361,7 @@ impl ThemePreview { ) } - fn render_components_page(&self, cx: &ViewContext) -> impl IntoElement { + fn render_components_page(&self, cx: &mut WindowContext) -> impl IntoElement { let layer = ElevationIndex::Surface; v_flex() @@ -510,18 +369,18 @@ impl ThemePreview { .overflow_scroll() .size_full() .gap_2() - .child(ContentGroup::render_component_previews(cx)) - .child(IconDecoration::render_component_previews(cx)) - .child(DecoratedIcon::render_component_previews(cx)) + .child(Button::render_component_previews(cx)) .child(Checkbox::render_component_previews(cx)) .child(CheckboxWithLabel::render_component_previews(cx)) + .child(ContentGroup::render_component_previews(cx)) + .child(DecoratedIcon::render_component_previews(cx)) .child(Facepile::render_component_previews(cx)) - .child(Button::render_component_previews(cx)) - 
.child(Indicator::render_component_previews(cx)) .child(Icon::render_component_previews(cx)) + .child(IconDecoration::render_component_previews(cx)) + .child(Indicator::render_component_previews(cx)) + .child(Switch::render_component_previews(cx)) + .child(SwitchWithLabel::render_component_previews(cx)) .child(Table::render_component_previews(cx)) - .child(self.render_avatars(cx)) - .child(self.render_buttons(layer, cx)) } fn render_page_nav(&self, cx: &ViewContext) -> impl IntoElement { @@ -537,7 +396,7 @@ impl ThemePreview { this.current_page = p; cx.notify(); })) - .selected(p == self.current_page) + .toggle_state(p == self.current_page) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) })) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 833a8b15a0..4a11662705 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -21,7 +21,7 @@ use client::{ }; use collections::{hash_map, HashMap, HashSet}; use derive_more::{Deref, DerefMut}; -use dock::{Dock, DockPosition, Panel, PanelButtons, PanelHandle}; +use dock::{Dock, DockPosition, Panel, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE}; use futures::{ channel::{ mpsc::{self, UnboundedReceiver, UnboundedSender}, @@ -34,10 +34,10 @@ use gpui::{ action_as, actions, canvas, impl_action_as, impl_actions, point, relative, size, transparent_black, Action, AnyView, AnyWeakView, AppContext, AsyncAppContext, AsyncWindowContext, Bounds, CursorStyle, Decorations, DragMoveEvent, Entity as _, EntityId, - EventEmitter, Flatten, FocusHandle, FocusableView, Global, Hsla, KeyContext, Keystroke, - ManagedView, Model, ModelContext, MouseButton, PathPromptOptions, Point, PromptLevel, Render, - ResizeEdge, Size, Stateful, Subscription, Task, Tiling, View, WeakView, WindowBounds, - WindowHandle, WindowId, WindowOptions, + EventEmitter, FocusHandle, FocusableView, Global, Hsla, KeyContext, Keystroke, ManagedView, + Model, ModelContext, MouseButton, 
PathPromptOptions, Point, PromptLevel, Render, ResizeEdge, + Size, Stateful, Subscription, Task, Tiling, View, WeakView, WindowBounds, WindowHandle, + WindowId, WindowOptions, }; pub use item::{ FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, PreviewTabsSettings, @@ -92,12 +92,8 @@ use task::SpawnInTerminal; use theme::{ActiveTheme, SystemAppearance, ThemeSettings}; pub use toolbar::{Toolbar, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView}; pub use ui; -use ui::{ - div, h_flex, px, BorrowAppContext, Context as _, Div, FluentBuilder, InteractiveElement as _, - IntoElement, ParentElement as _, Pixels, SharedString, Styled as _, ViewContext, - VisualContext as _, WindowContext, -}; -use util::{ResultExt, TryFutureExt}; +use ui::prelude::*; +use util::{paths::SanitizedPath, serde::default_true, ResultExt, TryFutureExt}; use uuid::Uuid; pub use workspace_settings::{ AutosaveSetting, RestoreOnStartupBehavior, TabBarSettings, WorkspaceSettings, @@ -149,6 +145,7 @@ actions!( NewTerminal, NewWindow, Open, + OpenFiles, OpenInTerminal, ReloadActiveItem, SaveAs, @@ -177,6 +174,20 @@ pub struct ActivatePaneInDirection(pub SplitDirection); #[derive(Clone, Deserialize, PartialEq)] pub struct SwapPaneInDirection(pub SplitDirection); +#[derive(Clone, Deserialize, PartialEq)] +pub struct MoveItemToPane { + pub destination: usize, + #[serde(default = "default_true")] + pub focus: bool, +} + +#[derive(Clone, Deserialize, PartialEq)] +pub struct MoveItemToPaneInDirection { + pub direction: SplitDirection, + #[serde(default = "default_true")] + pub focus: bool, +} + #[derive(Clone, PartialEq, Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct SaveAll { @@ -226,6 +237,8 @@ impl_actions!( ActivatePaneInDirection, CloseAllItemsAndPanes, CloseInactiveTabsAndPanes, + MoveItemToPane, + MoveItemToPaneInDirection, OpenTerminal, Reload, Save, @@ -320,6 +333,42 @@ pub fn init_settings(cx: &mut AppContext) { TabBarSettings::register(cx); } +fn 
prompt_and_open_paths( + app_state: Arc, + options: PathPromptOptions, + cx: &mut AppContext, +) { + let paths = cx.prompt_for_paths(options); + cx.spawn(|cx| async move { + match paths.await.anyhow().and_then(|res| res) { + Ok(Some(paths)) => { + cx.update(|cx| { + open_paths(&paths, app_state, OpenOptions::default(), cx).detach_and_log_err(cx) + }) + .ok(); + } + Ok(None) => {} + Err(err) => { + util::log_err(&err); + cx.update(|cx| { + if let Some(workspace_window) = cx + .active_window() + .and_then(|window| window.downcast::()) + { + workspace_window + .update(cx, |workspace, cx| { + workspace.show_portal_error(err.to_string(), cx); + }) + .ok(); + } + }) + .ok(); + } + } + }) + .detach(); +} + pub fn init(app_state: Arc, cx: &mut AppContext) { init_settings(cx); notifications::init(cx); @@ -331,41 +380,33 @@ pub fn init(app_state: Arc, cx: &mut AppContext) { cx.on_action({ let app_state = Arc::downgrade(&app_state); move |_: &Open, cx: &mut AppContext| { - let paths = cx.prompt_for_paths(PathPromptOptions { - files: true, - directories: true, - multiple: true, - }); - if let Some(app_state) = app_state.upgrade() { - cx.spawn(move |cx| async move { - match Flatten::flatten(paths.await.map_err(|e| e.into())) { - Ok(Some(paths)) => { - cx.update(|cx| { - open_paths(&paths, app_state, OpenOptions::default(), cx) - .detach_and_log_err(cx) - }) - .ok(); - } - Ok(None) => {} - Err(err) => { - cx.update(|cx| { - if let Some(workspace_window) = cx - .active_window() - .and_then(|window| window.downcast::()) - { - workspace_window - .update(cx, |workspace, cx| { - workspace.show_portal_error(err.to_string(), cx); - }) - .ok(); - } - }) - .ok(); - } - }; - }) - .detach(); + prompt_and_open_paths( + app_state, + PathPromptOptions { + files: true, + directories: true, + multiple: true, + }, + cx, + ); + } + } + }); + cx.on_action({ + let app_state = Arc::downgrade(&app_state); + move |_: &OpenFiles, cx: &mut AppContext| { + let directories = 
cx.can_select_mixed_files_and_dirs(); + if let Some(app_state) = app_state.upgrade() { + prompt_and_open_paths( + app_state, + PathPromptOptions { + files: true, + directories, + multiple: true, + }, + cx, + ); } } }); @@ -391,12 +432,12 @@ impl Global for ProjectItemOpeners {} pub fn register_project_item(cx: &mut AppContext) { let builders = cx.default_global::(); builders.push(|project, project_path, cx| { - let project_item = ::try_open(project, project_path, cx)?; + let project_item = ::try_open(project, project_path, cx)?; let project = project.clone(); Some(cx.spawn(|cx| async move { let project_item = project_item.await?; let project_entry_id: Option = - project_item.read_with(&cx, project::Item::entry_id)?; + project_item.read_with(&cx, project::ProjectItem::entry_id)?; let build_workspace_item = Box::new(|cx: &mut ViewContext| { Box::new(cx.new_view(|cx| I::for_project_item(project, project_item, cx))) as Box @@ -597,7 +638,6 @@ impl AppState { use node_runtime::NodeRuntime; use session::Session; use settings::SettingsStore; - use ui::Context as _; if !cx.has_global::() { let settings_store = SettingsStore::test(cx); @@ -606,7 +646,7 @@ impl AppState { let fs = fs::FakeFs::new(cx.background_executor().clone()); let languages = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); - let clock = Arc::new(clock::FakeSystemClock::default()); + let clock = Arc::new(clock::FakeSystemClock::new()); let http_client = http_client::FakeHttpClient::with_404_response(); let client = Client::new(clock, http_client.clone(), cx); let session = cx.new_model(|cx| AppSession::new(Session::test(), cx)); @@ -691,7 +731,9 @@ pub enum Event { }, ContactRequestedJoin(u64), WorkspaceCreated(WeakView), - SpawnTask(Box), + SpawnTask { + action: Box, + }, OpenBundledFile { text: Cow<'static, str>, title: &'static str, @@ -730,6 +772,7 @@ pub struct Workspace { weak_self: WeakView, workspace_actions: Vec) -> Div>>, zoomed: Option, + previous_dock_drag_coordinates: 
Option>, zoomed_position: Option, center: PaneGroup, left_dock: View, @@ -777,7 +820,7 @@ pub struct ViewId { pub id: u64, } -struct FollowerState { +pub struct FollowerState { center_pane: View, dock_pane: Option>, active_view_id: Option, @@ -810,7 +853,7 @@ impl Workspace { this.collaborator_left(*peer_id, cx); } - project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded => { + project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded(_) => { this.update_window_title(cx); this.serialize_workspace(cx); } @@ -832,7 +875,7 @@ impl Workspace { cx.remove_window(); } - project::Event::DeletedEntry(entry_id) => { + project::Event::DeletedEntry(_, entry_id) => { for pane in this.panes.iter() { pane.update(cx, |pane, cx| { pane.handle_deleted_project_item(*entry_id, cx) @@ -887,14 +930,16 @@ impl Workspace { let pane_history_timestamp = Arc::new(AtomicUsize::new(0)); let center_pane = cx.new_view(|cx| { - Pane::new( + let mut center_pane = Pane::new( weak_handle.clone(), project.clone(), pane_history_timestamp.clone(), None, NewFile.boxed_clone(), cx, - ) + ); + center_pane.set_can_split(Some(Arc::new(|_, _, _| true))); + center_pane }); cx.subscribe(¢er_pane, Self::handle_pane_event).detach(); @@ -980,7 +1025,7 @@ impl Workspace { this.update(&mut cx, |this, cx| { if let Some(display) = cx.display() { if let Ok(display_uuid) = display.uuid() { - let window_bounds = cx.window_bounds(); + let window_bounds = cx.inner_window_bounds(); if let Some(database_id) = workspace_id { cx.background_executor() .spawn(DB.set_window_open_status( @@ -1005,18 +1050,6 @@ impl Workspace { ThemeSettings::reload_current_theme(cx); }), - cx.observe(&left_dock, |this, _, cx| { - this.serialize_workspace(cx); - cx.notify(); - }), - cx.observe(&bottom_dock, |this, _, cx| { - this.serialize_workspace(cx); - cx.notify(); - }), - cx.observe(&right_dock, |this, _, cx| { - this.serialize_workspace(cx); - cx.notify(); - }), cx.on_release(|this, window, cx| { 
this.app_state.workspace_store.update(cx, |store, _| { let window = window.downcast::().unwrap(); @@ -1032,6 +1065,7 @@ impl Workspace { weak_self: weak_handle.clone(), zoomed: None, zoomed_position: None, + previous_dock_drag_coordinates: None, center: PaneGroup::new(center_pane.clone()), panes: vec![center_pane.clone()], panes_by_item: Default::default(), @@ -1096,31 +1130,30 @@ impl Workspace { ); cx.spawn(|mut cx| async move { - let serialized_workspace: Option = - persistence::DB.workspace_for_roots(abs_paths.as_slice()); + let mut paths_to_open = Vec::with_capacity(abs_paths.len()); + for path in abs_paths.into_iter() { + if let Some(canonical) = app_state.fs.canonicalize(&path).await.ok() { + paths_to_open.push(canonical) + } else { + paths_to_open.push(path) + } + } - let mut paths_to_open = abs_paths; + let serialized_workspace: Option = + persistence::DB.workspace_for_roots(paths_to_open.as_slice()); let workspace_location = serialized_workspace .as_ref() .map(|ws| &ws.location) .and_then(|loc| match loc { - SerializedWorkspaceLocation::Local(paths, order) => { - Some((paths.paths(), order.order())) + SerializedWorkspaceLocation::Local(_, order) => { + Some((loc.sorted_paths(), order.order())) } _ => None, }); if let Some((paths, order)) = workspace_location { - // todo: should probably move this logic to a method on the SerializedWorkspaceLocation - // it's only valid for Local and would be more clear there and be able to be tested - // and reused elsewhere - paths_to_open = order - .iter() - .zip(paths.iter()) - .sorted_by_key(|(i, _)| *i) - .map(|(_, path)| path.clone()) - .collect(); + paths_to_open = paths.iter().cloned().collect(); if order.iter().enumerate().any(|(i, &j)| i != j) { project_handle @@ -1645,7 +1678,7 @@ impl Workspace { F: 'static + FnOnce(&mut Workspace, &mut ViewContext) -> T, { if self.project.read(cx).is_local() { - Task::Ready(Some(Ok(callback(self, cx)))) + Task::ready(Ok(callback(self, cx))) } else { let env = 
self.project.read(cx).cli_environment(cx); let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, cx); @@ -1684,11 +1717,11 @@ impl Workspace { cx.defer(|cx| { cx.windows().iter().find(|window| { window - .update(cx, |_, window| { - if window.is_window_active() { + .update(cx, |_, cx| { + if cx.is_window_active() { //This can only get called when the window's project connection has been lost //so we don't need to prompt the user for anything and instead just close the window - window.remove_window(); + cx.remove_window(); true } else { false @@ -2015,7 +2048,7 @@ impl Workspace { }; let this = this.clone(); - let abs_path = abs_path.clone(); + let abs_path: Arc = SanitizedPath::from(abs_path.clone()).into(); let fs = fs.clone(); let pane = pane.clone(); let task = cx.spawn(move |mut cx| async move { @@ -2024,7 +2057,7 @@ impl Workspace { this.update(&mut cx, |workspace, cx| { let worktree = worktree.read(cx); let worktree_abs_path = worktree.abs_path(); - let entry_id = if abs_path == worktree_abs_path.as_ref() { + let entry_id = if abs_path.as_ref() == worktree_abs_path.as_ref() { worktree.root_entry() } else { abs_path @@ -2275,6 +2308,10 @@ impl Workspace { let was_visible = dock.is_open() && !other_is_zoomed; dock.set_open(!was_visible, cx); + if dock.active_panel().is_none() && dock.panels_len() > 0 { + dock.activate_panel(0, cx); + } + if let Some(active_panel) = dock.active_panel() { if was_visible { if active_panel.focus_handle(cx).contains_focused(cx) { @@ -2457,14 +2494,16 @@ impl Workspace { fn add_pane(&mut self, cx: &mut ViewContext) -> View { let pane = cx.new_view(|cx| { - Pane::new( + let mut pane = Pane::new( self.weak_handle(), self.project.clone(), self.pane_history_timestamp.clone(), None, NewFile.boxed_clone(), cx, - ) + ); + pane.set_can_split(Some(Arc::new(|_, _, _| true))); + pane }); cx.subscribe(&pane, Self::handle_pane_event).detach(); self.panes.push(pane.clone()); @@ -2710,7 +2749,7 @@ impl Workspace { where T: 
ProjectItem, { - use project::Item as _; + use project::ProjectItem as _; let project_item = project_item.read(cx); let entry_id = project_item.entry_id(cx); let project_path = project_item.project_path(cx); @@ -2821,6 +2860,13 @@ impl Workspace { } } + fn move_item_to_pane_at_index(&mut self, action: &MoveItemToPane, cx: &mut ViewContext) { + let Some(&target_pane) = self.center.panes().get(action.destination) else { + return; + }; + move_active_item(&self.active_pane, target_pane, action.focus, true, cx); + } + pub fn activate_next_pane(&mut self, cx: &mut WindowContext) { let panes = self.center.panes(); if let Some(ix) = panes.iter().position(|pane| **pane == self.active_pane) { @@ -2929,45 +2975,42 @@ impl Workspace { match target { Some(ActivateInDirectionTarget::Pane(pane)) => cx.focus_view(&pane), Some(ActivateInDirectionTarget::Dock(dock)) => { - if let Some(panel) = dock.read(cx).active_panel() { - panel.focus_handle(cx).focus(cx); - } else { - log::error!("Could not find a focus target when in switching focus in {direction} direction for a {:?} dock", dock.read(cx).position()); - } + // Defer this to avoid a panic when the dock's active panel is already on the stack. 
+ cx.defer(move |cx| { + let dock = dock.read(cx); + if let Some(panel) = dock.active_panel() { + panel.focus_handle(cx).focus(cx); + } else { + log::error!("Could not find a focus target when in switching focus in {direction} direction for a {:?} dock", dock.position()); + } + }) } None => {} } } + pub fn move_item_to_pane_in_direction( + &mut self, + action: &MoveItemToPaneInDirection, + cx: &mut WindowContext, + ) { + if let Some(destination) = self.find_pane_in_direction(action.direction, cx) { + move_active_item(&self.active_pane, &destination, action.focus, true, cx); + } + } + + pub fn bounding_box_for_pane(&self, pane: &View) -> Option> { + self.center.bounding_box_for_pane(pane) + } + pub fn find_pane_in_direction( &mut self, direction: SplitDirection, cx: &WindowContext, ) -> Option> { - let bounding_box = self.center.bounding_box_for_pane(&self.active_pane)?; - let cursor = self.active_pane.read(cx).pixel_position_of_cursor(cx); - let center = match cursor { - Some(cursor) if bounding_box.contains(&cursor) => cursor, - _ => bounding_box.center(), - }; - - let distance_to_next = pane_group::HANDLE_HITBOX_SIZE; - - let target = match direction { - SplitDirection::Left => { - Point::new(bounding_box.left() - distance_to_next.into(), center.y) - } - SplitDirection::Right => { - Point::new(bounding_box.right() + distance_to_next.into(), center.y) - } - SplitDirection::Up => { - Point::new(center.x, bounding_box.top() - distance_to_next.into()) - } - SplitDirection::Down => { - Point::new(center.x, bounding_box.bottom() + distance_to_next.into()) - } - }; - self.center.pane_at_pixel_position(target).cloned() + self.center + .find_pane_in_direction(&self.active_pane, direction, cx) + .cloned() } pub fn swap_pane_in_direction( @@ -2976,11 +3019,22 @@ impl Workspace { cx: &mut ViewContext, ) { if let Some(to) = self.find_pane_in_direction(direction, cx) { - self.center.swap(&self.active_pane.clone(), &to); + self.center.swap(&self.active_pane, &to); cx.notify(); 
} } + pub fn resize_pane(&mut self, axis: gpui::Axis, amount: Pixels, cx: &mut ViewContext) { + self.center + .resize(&self.active_pane, axis, amount, &self.bounds); + cx.notify(); + } + + pub fn reset_pane_sizes(&mut self, cx: &mut ViewContext) { + self.center.reset_pane_sizes(); + cx.notify(); + } + fn handle_pane_focused(&mut self, pane: View, cx: &mut ViewContext) { // This is explicitly hoisted out of the following check for pane identity as // terminal panel panes are not registered as a center panes. @@ -3027,6 +3081,7 @@ impl Workspace { event: &pane::Event, cx: &mut ViewContext, ) { + let mut serialize_workspace = true; match event { pane::Event::AddItem { item } => { item.added_to_pane(self, pane, cx); @@ -3037,12 +3092,19 @@ impl Workspace { pane::Event::Split(direction) => { self.split_and_clone(pane, *direction, cx); } - pane::Event::JoinIntoNext => self.join_pane_into_next(pane, cx), - pane::Event::JoinAll => self.join_all_panes(cx), - pane::Event::Remove { focus_on_pane } => { - self.remove_pane(pane, focus_on_pane.clone(), cx) + pane::Event::JoinIntoNext => { + self.join_pane_into_next(pane, cx); } - pane::Event::ActivateItem { local } => { + pane::Event::JoinAll => { + self.join_all_panes(cx); + } + pane::Event::Remove { focus_on_pane } => { + self.remove_pane(pane, focus_on_pane.clone(), cx); + } + pane::Event::ActivateItem { + local, + focus_changed, + } => { cx.on_next_frame(|_, cx| { cx.invalidate_character_coordinates(); }); @@ -3057,17 +3119,22 @@ impl Workspace { self.active_item_path_changed(cx); self.update_active_view_for_followers(cx); } + serialize_workspace = *focus_changed || &pane != self.active_pane(); + } + pane::Event::UserSavedItem { item, save_intent } => { + cx.emit(Event::UserSavedItem { + pane: pane.downgrade(), + item: item.boxed_clone(), + save_intent: *save_intent, + }); + serialize_workspace = false; } - pane::Event::UserSavedItem { item, save_intent } => cx.emit(Event::UserSavedItem { - pane: pane.downgrade(), - item: 
item.boxed_clone(), - save_intent: *save_intent, - }), pane::Event::ChangeItemTitle => { if pane == self.active_pane { self.active_item_path_changed(cx); } self.update_window_edited(cx); + serialize_workspace = false; } pane::Event::RemoveItem { .. } => {} pane::Event::RemovedItem { item_id } => { @@ -3106,7 +3173,9 @@ impl Workspace { } } - self.serialize_workspace(cx); + if serialize_workspace { + self.serialize_workspace(cx); + } } pub fn unfollow_in_pane( @@ -3718,7 +3787,7 @@ impl Workspace { let mut new_item = task.await?; pane.update(cx, |pane, cx| { - let mut item_ix_to_remove = None; + let mut item_to_remove = None; for (ix, item) in pane.items().enumerate() { if let Some(item) = item.to_followable_item_handle(cx) { match new_item.dedup(item.as_ref(), cx) { @@ -3728,7 +3797,7 @@ impl Workspace { break; } Some(item::Dedup::ReplaceExisting) => { - item_ix_to_remove = Some(ix); + item_to_remove = Some((ix, item.item_id())); break; } None => {} @@ -3736,8 +3805,8 @@ impl Workspace { } } - if let Some(ix) = item_ix_to_remove { - pane.remove_item(ix, false, false, cx); + if let Some((ix, id)) = item_to_remove { + pane.remove_item(id, false, false, cx); pane.add_item(new_item.boxed_clone(), false, false, Some(ix), cx); } })?; @@ -3939,6 +4008,17 @@ impl Workspace { None } + #[cfg(target_os = "windows")] + fn shared_screen_for_peer( + &self, + _peer_id: PeerId, + _pane: &View, + _cx: &mut WindowContext, + ) -> Option> { + None + } + + #[cfg(not(target_os = "windows"))] fn shared_screen_for_peer( &self, peer_id: PeerId, @@ -3957,7 +4037,7 @@ impl Workspace { } } - Some(cx.new_view(|cx| SharedScreen::new(&track, peer_id, user.clone(), cx))) + Some(cx.new_view(|cx| SharedScreen::new(track, peer_id, user.clone(), cx))) } pub fn on_window_activation_changed(&mut self, cx: &mut ViewContext) { @@ -4139,30 +4219,30 @@ impl Workspace { let left_dock = this.left_dock.read(cx); let left_visible = left_dock.is_open(); let left_active_panel = left_dock - .visible_panel() + 
.active_panel() .map(|panel| panel.persistent_name().to_string()); let left_dock_zoom = left_dock - .visible_panel() + .active_panel() .map(|panel| panel.is_zoomed(cx)) .unwrap_or(false); let right_dock = this.right_dock.read(cx); let right_visible = right_dock.is_open(); let right_active_panel = right_dock - .visible_panel() + .active_panel() .map(|panel| panel.persistent_name().to_string()); let right_dock_zoom = right_dock - .visible_panel() + .active_panel() .map(|panel| panel.is_zoomed(cx)) .unwrap_or(false); let bottom_dock = this.bottom_dock.read(cx); let bottom_visible = bottom_dock.is_open(); let bottom_active_panel = bottom_dock - .visible_panel() + .active_panel() .map(|panel| panel.persistent_name().to_string()); let bottom_dock_zoom = bottom_dock - .visible_panel() + .active_panel() .map(|panel| panel.is_zoomed(cx)) .unwrap_or(false); @@ -4395,6 +4475,7 @@ impl Workspace { .on_action(cx.listener(Self::follow_next_collaborator)) .on_action(cx.listener(Self::close_window)) .on_action(cx.listener(Self::activate_pane_at_index)) + .on_action(cx.listener(Self::move_item_to_pane_at_index)) .on_action(cx.listener(|workspace, _: &Unfollow, cx| { let pane = workspace.active_pane().clone(); workspace.unfollow_in_pane(&pane, cx); @@ -4425,6 +4506,11 @@ impl Workspace { workspace.activate_pane_in_direction(action.0, cx) }), ) + .on_action( + cx.listener(|workspace, action: &MoveItemToPaneInDirection, cx| { + workspace.move_item_to_pane_in_direction(action, cx) + }), + ) .on_action(cx.listener(|workspace, action: &SwapPaneInDirection, cx| { workspace.swap_pane_in_direction(action.0, cx) })) @@ -4507,7 +4593,7 @@ impl Workspace { div } - pub fn has_active_modal(&self, cx: &WindowContext<'_>) -> bool { + pub fn has_active_modal(&self, cx: &WindowContext) -> bool { self.modal_layer.read(cx).has_active_modal() } @@ -4569,6 +4655,10 @@ impl Workspace { let window = cx.window_handle().downcast::()?; cx.read_window(&window, |workspace, _| workspace).ok() } + + pub fn 
zoomed_item(&self) -> Option<&AnyWeakView> { + self.zoomed.as_ref() + } } fn leader_border_for_pane( @@ -4817,7 +4907,27 @@ impl Render for Workspace { let this = cx.view().clone(); canvas( move |bounds, cx| { - this.update(cx, |this, _cx| this.bounds = bounds) + this.update(cx, |this, cx| { + let bounds_changed = this.bounds != bounds; + this.bounds = bounds; + + if bounds_changed { + this.left_dock.update(cx, |dock, cx| { + dock.clamp_panel_size(bounds.size.width, cx) + }); + + this.right_dock.update(cx, |dock, cx| { + dock.clamp_panel_size(bounds.size.width, cx) + }); + + this.bottom_dock.update(cx, |dock, cx| { + dock.clamp_panel_size( + bounds.size.height, + cx, + ) + }); + } + }) }, |_, _, _| {}, ) @@ -4826,47 +4936,39 @@ impl Render for Workspace { }) .when(self.zoomed.is_none(), |this| { this.on_drag_move(cx.listener( - |workspace, e: &DragMoveEvent, cx| { - match e.drag(cx).0 { - DockPosition::Left => { - let size = e.event.position.x - - workspace.bounds.left(); - workspace.left_dock.update( - cx, - |left_dock, cx| { - left_dock.resize_active_panel( - Some(size), - cx, - ); - }, - ); - } - DockPosition::Right => { - let size = workspace.bounds.right() - - e.event.position.x; - workspace.right_dock.update( - cx, - |right_dock, cx| { - right_dock.resize_active_panel( - Some(size), - cx, - ); - }, - ); - } - DockPosition::Bottom => { - let size = workspace.bounds.bottom() - - e.event.position.y; - workspace.bottom_dock.update( - cx, - |bottom_dock, cx| { - bottom_dock.resize_active_panel( - Some(size), - cx, - ); - }, - ); - } + move |workspace, e: &DragMoveEvent, cx| { + if workspace.previous_dock_drag_coordinates + != Some(e.event.position) + { + workspace.previous_dock_drag_coordinates = + Some(e.event.position); + match e.drag(cx).0 { + DockPosition::Left => { + resize_left_dock( + e.event.position.x + - workspace.bounds.left(), + workspace, + cx, + ); + } + DockPosition::Right => { + resize_right_dock( + workspace.bounds.right() + - e.event.position.x, 
+ workspace, + cx, + ); + } + DockPosition::Bottom => { + resize_bottom_dock( + workspace.bounds.bottom() + - e.event.position.y, + workspace, + cx, + ); + } + }; + workspace.serialize_workspace(cx); } }, )) @@ -4952,6 +5054,32 @@ impl Render for Workspace { } } +fn resize_bottom_dock( + new_size: Pixels, + workspace: &mut Workspace, + cx: &mut ViewContext, +) { + let size = new_size.min(workspace.bounds.bottom() - RESIZE_HANDLE_SIZE); + workspace.bottom_dock.update(cx, |bottom_dock, cx| { + bottom_dock.resize_active_panel(Some(size), cx); + }); +} + +fn resize_right_dock(new_size: Pixels, workspace: &mut Workspace, cx: &mut ViewContext) { + let size = new_size.max(workspace.bounds.left() - RESIZE_HANDLE_SIZE); + workspace.right_dock.update(cx, |right_dock, cx| { + right_dock.resize_active_panel(Some(size), cx); + }); +} + +fn resize_left_dock(new_size: Pixels, workspace: &mut Workspace, cx: &mut ViewContext) { + let size = new_size.min(workspace.bounds.right() - RESIZE_HANDLE_SIZE); + + workspace.left_dock.update(cx, |left_dock, cx| { + left_dock.resize_active_panel(Some(size), cx); + }); +} + impl WorkspaceStore { pub fn new(client: Arc, cx: &mut ModelContext) -> Self { Self { @@ -5468,15 +5596,22 @@ pub fn open_paths( } if let Some(existing) = existing { - Ok(( - existing, - existing - .update(&mut cx, |workspace, cx| { - cx.activate_window(); - workspace.open_paths(abs_paths, open_visible, None, cx) - })? - .await, - )) + let open_task = existing + .update(&mut cx, |workspace, cx| { + cx.activate_window(); + workspace.open_paths(abs_paths, open_visible, None, cx) + })? 
+ .await; + + _ = existing.update(&mut cx, |workspace, cx| { + for item in open_task.iter().flatten() { + if let Err(e) = item { + workspace.show_error(&e, cx); + } + } + }); + + Ok((existing, open_task)) } else { cx.update(move |cx| { Workspace::new_local( @@ -6052,7 +6187,7 @@ fn resize_edge( } } -fn join_pane_into_active(active_pane: &View, pane: &View, cx: &mut WindowContext<'_>) { +fn join_pane_into_active(active_pane: &View, pane: &View, cx: &mut WindowContext) { if pane == active_pane { return; } else if pane.read(cx).items_len() == 0 { @@ -6066,7 +6201,7 @@ fn join_pane_into_active(active_pane: &View, pane: &View, cx: &mut W } } -fn move_all_items(from_pane: &View, to_pane: &View, cx: &mut WindowContext<'_>) { +fn move_all_items(from_pane: &View, to_pane: &View, cx: &mut WindowContext) { let destination_is_different = from_pane != to_pane; let mut moved_items = 0; for (item_ix, item_handle) in from_pane @@ -6098,7 +6233,7 @@ pub fn move_item( destination: &View, item_id_to_move: EntityId, destination_index: usize, - cx: &mut WindowContext<'_>, + cx: &mut WindowContext, ) { let Some((item_ix, item_handle)) = source .read(cx) @@ -6125,6 +6260,34 @@ pub fn move_item( }); } +pub fn move_active_item( + source: &View, + destination: &View, + focus_destination: bool, + close_if_empty: bool, + cx: &mut WindowContext, +) { + if source == destination { + return; + } + let Some(active_item) = source.read(cx).active_item() else { + return; + }; + source.update(cx, |source_pane, cx| { + let item_id = active_item.item_id(); + source_pane.remove_item(item_id, false, close_if_empty, cx); + destination.update(cx, |target_pane, cx| { + target_pane.add_item( + active_item, + focus_destination, + focus_destination, + Some(target_pane.items_len()), + cx, + ); + }); + }); +} + #[cfg(test)] mod tests { use std::{cell::RefCell, rc::Rc}; @@ -6374,24 +6537,26 @@ mod tests { let item1 = cx.new_view(|cx| { TestItem::new(cx) .with_dirty(true) - 
.with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + .with_project_items(&[dirty_project_item(1, "1.txt", cx)]) }); let item2 = cx.new_view(|cx| { TestItem::new(cx) .with_dirty(true) .with_conflict(true) - .with_project_items(&[TestProjectItem::new(2, "2.txt", cx)]) + .with_project_items(&[dirty_project_item(2, "2.txt", cx)]) }); let item3 = cx.new_view(|cx| { TestItem::new(cx) .with_dirty(true) .with_conflict(true) - .with_project_items(&[TestProjectItem::new(3, "3.txt", cx)]) + .with_project_items(&[dirty_project_item(3, "3.txt", cx)]) }); let item4 = cx.new_view(|cx| { - TestItem::new(cx) - .with_dirty(true) - .with_project_items(&[TestProjectItem::new_untitled(cx)]) + TestItem::new(cx).with_dirty(true).with_project_items(&[{ + let project_item = TestProjectItem::new_untitled(cx); + project_item.update(cx, |project_item, _| project_item.is_dirty = true); + project_item + }]) }); let pane = workspace.update(cx, |workspace, cx| { workspace.add_item_to_active_pane(Box::new(item1.clone()), None, true, cx); @@ -6483,7 +6648,7 @@ mod tests { cx.new_view(|cx| { TestItem::new(cx) .with_dirty(true) - .with_project_items(&[TestProjectItem::new( + .with_project_items(&[dirty_project_item( project_entry_id, &format!("{project_entry_id}.txt"), cx, @@ -6665,6 +6830,9 @@ mod tests { }) }); item.is_dirty = true; + for project_item in &mut item.project_items { + project_item.update(cx, |project_item, _| project_item.is_dirty = true); + } }); pane.update(cx, |pane, cx| { @@ -7145,14 +7313,10 @@ mod tests { let (panel_1, panel_2) = workspace.update(cx, |workspace, cx| { let panel_1 = cx.new_view(|cx| TestPanel::new(DockPosition::Left, cx)); workspace.add_panel(panel_1.clone(), cx); - workspace - .left_dock() - .update(cx, |left_dock, cx| left_dock.set_open(true, cx)); + workspace.toggle_dock(DockPosition::Left, cx); let panel_2 = cx.new_view(|cx| TestPanel::new(DockPosition::Right, cx)); workspace.add_panel(panel_2.clone(), cx); - workspace - .right_dock() - .update(cx, 
|right_dock, cx| right_dock.set_open(true, cx)); + workspace.toggle_dock(DockPosition::Right, cx); let left_dock = workspace.left_dock(); assert_eq!( @@ -7363,8 +7527,436 @@ mod tests { }); } + #[gpui::test] + async fn test_no_save_prompt_when_multi_buffer_dirty_items_closed(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx)); + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + let dirty_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("1.txt") + .with_project_items(&[dirty_project_item(1, "1.txt", cx)]) + }); + let dirty_regular_buffer_2 = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("2.txt") + .with_project_items(&[dirty_project_item(2, "2.txt", cx)]) + }); + let dirty_multi_buffer_with_both = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_singleton(false) + .with_label("Fake Project Search") + .with_project_items(&[ + dirty_regular_buffer.read(cx).project_items[0].clone(), + dirty_regular_buffer_2.read(cx).project_items[0].clone(), + ]) + }); + let multi_buffer_with_both_files_id = dirty_multi_buffer_with_both.item_id(); + workspace.update(cx, |workspace, cx| { + workspace.add_item( + pane.clone(), + Box::new(dirty_regular_buffer.clone()), + None, + false, + false, + cx, + ); + workspace.add_item( + pane.clone(), + Box::new(dirty_regular_buffer_2.clone()), + None, + false, + false, + cx, + ); + workspace.add_item( + pane.clone(), + Box::new(dirty_multi_buffer_with_both.clone()), + None, + false, + false, + cx, + ); + }); + + pane.update(cx, |pane, cx| { + pane.activate_item(2, true, true, cx); + assert_eq!( + pane.active_item().unwrap().item_id(), + multi_buffer_with_both_files_id, + "Should select the multi buffer in the pane" + ); + }); + let 
close_all_but_multi_buffer_task = pane + .update(cx, |pane, cx| { + pane.close_inactive_items( + &CloseInactiveItems { + save_intent: Some(SaveIntent::Save), + close_pinned: true, + }, + cx, + ) + }) + .expect("should have inactive files to close"); + cx.background_executor.run_until_parked(); + assert!( + !cx.has_pending_prompt(), + "Multi buffer still has the unsaved buffer inside, so no save prompt should be shown" + ); + close_all_but_multi_buffer_task + .await + .expect("Closing all buffers but the multi buffer failed"); + pane.update(cx, |pane, cx| { + assert_eq!(dirty_regular_buffer.read(cx).save_count, 0); + assert_eq!(dirty_multi_buffer_with_both.read(cx).save_count, 0); + assert_eq!(dirty_regular_buffer_2.read(cx).save_count, 0); + assert_eq!(pane.items_len(), 1); + assert_eq!( + pane.active_item().unwrap().item_id(), + multi_buffer_with_both_files_id, + "Should have only the multi buffer left in the pane" + ); + assert!( + dirty_multi_buffer_with_both.read(cx).is_dirty, + "The multi buffer containing the unsaved buffer should still be dirty" + ); + }); + + let close_multi_buffer_task = pane + .update(cx, |pane, cx| { + pane.close_active_item( + &CloseActiveItem { + save_intent: Some(SaveIntent::Close), + }, + cx, + ) + }) + .expect("should have the multi buffer to close"); + cx.background_executor.run_until_parked(); + assert!( + cx.has_pending_prompt(), + "Dirty multi buffer should prompt a save dialog" + ); + cx.simulate_prompt_answer(0); + cx.background_executor.run_until_parked(); + close_multi_buffer_task + .await + .expect("Closing the multi buffer failed"); + pane.update(cx, |pane, cx| { + assert_eq!( + dirty_multi_buffer_with_both.read(cx).save_count, + 1, + "Multi buffer item should get be saved" + ); + // Test impl does not save inner items, so we do not assert them + assert_eq!( + pane.items_len(), + 0, + "No more items should be left in the pane" + ); + assert!(pane.active_item().is_none()); + }); + } + + #[gpui::test] + async fn 
test_no_save_prompt_when_dirty_singleton_buffer_closed_with_a_multi_buffer_containing_it_present_in_the_pane( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx)); + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + let dirty_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("1.txt") + .with_project_items(&[dirty_project_item(1, "1.txt", cx)]) + }); + let dirty_regular_buffer_2 = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("2.txt") + .with_project_items(&[dirty_project_item(2, "2.txt", cx)]) + }); + let clear_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_label("3.txt") + .with_project_items(&[TestProjectItem::new(3, "3.txt", cx)]) + }); + + let dirty_multi_buffer_with_both = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_singleton(false) + .with_label("Fake Project Search") + .with_project_items(&[ + dirty_regular_buffer.read(cx).project_items[0].clone(), + dirty_regular_buffer_2.read(cx).project_items[0].clone(), + clear_regular_buffer.read(cx).project_items[0].clone(), + ]) + }); + workspace.update(cx, |workspace, cx| { + workspace.add_item( + pane.clone(), + Box::new(dirty_regular_buffer.clone()), + None, + false, + false, + cx, + ); + workspace.add_item( + pane.clone(), + Box::new(dirty_multi_buffer_with_both.clone()), + None, + false, + false, + cx, + ); + }); + + pane.update(cx, |pane, cx| { + pane.activate_item(0, true, true, cx); + assert_eq!( + pane.active_item().unwrap().item_id(), + dirty_regular_buffer.item_id(), + "Should select the dirty singleton buffer in the pane" + ); + }); + let close_singleton_buffer_task = pane + .update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + 
.expect("should have active singleton buffer to close"); + cx.background_executor.run_until_parked(); + assert!( + !cx.has_pending_prompt(), + "Multi buffer is still in the pane and has the unsaved buffer inside, so no save prompt should be shown" + ); + + close_singleton_buffer_task + .await + .expect("Should not fail closing the singleton buffer"); + pane.update(cx, |pane, cx| { + assert_eq!(dirty_regular_buffer.read(cx).save_count, 0); + assert_eq!( + dirty_multi_buffer_with_both.read(cx).save_count, + 0, + "Multi buffer itself should not be saved" + ); + assert_eq!(dirty_regular_buffer_2.read(cx).save_count, 0); + assert_eq!( + pane.items_len(), + 1, + "A dirty multi buffer should be present in the pane" + ); + assert_eq!( + pane.active_item().unwrap().item_id(), + dirty_multi_buffer_with_both.item_id(), + "Should activate the only remaining item in the pane" + ); + }); + } + + #[gpui::test] + async fn test_save_prompt_when_dirty_multi_buffer_closed_with_some_of_its_dirty_items_not_present_in_the_pane( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx)); + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + let dirty_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("1.txt") + .with_project_items(&[dirty_project_item(1, "1.txt", cx)]) + }); + let dirty_regular_buffer_2 = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("2.txt") + .with_project_items(&[dirty_project_item(2, "2.txt", cx)]) + }); + let clear_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_label("3.txt") + .with_project_items(&[TestProjectItem::new(3, "3.txt", cx)]) + }); + + let dirty_multi_buffer_with_both = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_singleton(false) + 
.with_label("Fake Project Search") + .with_project_items(&[ + dirty_regular_buffer.read(cx).project_items[0].clone(), + dirty_regular_buffer_2.read(cx).project_items[0].clone(), + clear_regular_buffer.read(cx).project_items[0].clone(), + ]) + }); + let multi_buffer_with_both_files_id = dirty_multi_buffer_with_both.item_id(); + workspace.update(cx, |workspace, cx| { + workspace.add_item( + pane.clone(), + Box::new(dirty_regular_buffer.clone()), + None, + false, + false, + cx, + ); + workspace.add_item( + pane.clone(), + Box::new(dirty_multi_buffer_with_both.clone()), + None, + false, + false, + cx, + ); + }); + + pane.update(cx, |pane, cx| { + pane.activate_item(1, true, true, cx); + assert_eq!( + pane.active_item().unwrap().item_id(), + multi_buffer_with_both_files_id, + "Should select the multi buffer in the pane" + ); + }); + let _close_multi_buffer_task = pane + .update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + .expect("should have active multi buffer to close"); + cx.background_executor.run_until_parked(); + assert!( + cx.has_pending_prompt(), + "With one dirty item from the multi buffer not being in the pane, a save prompt should be shown" + ); + } + + #[gpui::test] + async fn test_no_save_prompt_when_dirty_multi_buffer_closed_with_all_of_its_dirty_items_present_in_the_pane( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx)); + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + let dirty_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("1.txt") + .with_project_items(&[dirty_project_item(1, "1.txt", cx)]) + }); + let dirty_regular_buffer_2 = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_label("2.txt") + 
.with_project_items(&[dirty_project_item(2, "2.txt", cx)]) + }); + let clear_regular_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_label("3.txt") + .with_project_items(&[TestProjectItem::new(3, "3.txt", cx)]) + }); + + let dirty_multi_buffer = cx.new_view(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_singleton(false) + .with_label("Fake Project Search") + .with_project_items(&[ + dirty_regular_buffer.read(cx).project_items[0].clone(), + dirty_regular_buffer_2.read(cx).project_items[0].clone(), + clear_regular_buffer.read(cx).project_items[0].clone(), + ]) + }); + workspace.update(cx, |workspace, cx| { + workspace.add_item( + pane.clone(), + Box::new(dirty_regular_buffer.clone()), + None, + false, + false, + cx, + ); + workspace.add_item( + pane.clone(), + Box::new(dirty_regular_buffer_2.clone()), + None, + false, + false, + cx, + ); + workspace.add_item( + pane.clone(), + Box::new(dirty_multi_buffer.clone()), + None, + false, + false, + cx, + ); + }); + + pane.update(cx, |pane, cx| { + pane.activate_item(2, true, true, cx); + assert_eq!( + pane.active_item().unwrap().item_id(), + dirty_multi_buffer.item_id(), + "Should select the multi buffer in the pane" + ); + }); + let close_multi_buffer_task = pane + .update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) + }) + .expect("should have active multi buffer to close"); + cx.background_executor.run_until_parked(); + assert!( + !cx.has_pending_prompt(), + "All dirty items from the multi buffer are in the pane still, no save prompts should be shown" + ); + close_multi_buffer_task + .await + .expect("Closing multi buffer failed"); + pane.update(cx, |pane, cx| { + assert_eq!(dirty_regular_buffer.read(cx).save_count, 0); + assert_eq!(dirty_multi_buffer.read(cx).save_count, 0); + assert_eq!(dirty_regular_buffer_2.read(cx).save_count, 0); + assert_eq!( + pane.items() + .map(|item| item.item_id()) + .sorted() + .collect::>(), + vec![ + dirty_regular_buffer.item_id(), + 
dirty_regular_buffer_2.item_id(), + ], + "Should have no multi buffer left in the pane" + ); + assert!(dirty_regular_buffer.read(cx).is_dirty); + assert!(dirty_regular_buffer_2.read(cx).is_dirty); + }); + } + mod register_project_item_tests { - use ui::Context as _; + use gpui::Context as _; use super::*; @@ -7375,7 +7967,7 @@ mod tests { // Model struct TestPngItem {} - impl project::Item for TestPngItem { + impl project::ProjectItem for TestPngItem { fn try_open( _project: &Model, path: &ProjectPath, @@ -7395,6 +7987,10 @@ mod tests { fn project_path(&self, _: &AppContext) -> Option { None } + + fn is_dirty(&self) -> bool { + false + } } impl Item for TestPngItemView { @@ -7437,7 +8033,7 @@ mod tests { // Model struct TestIpynbItem {} - impl project::Item for TestIpynbItem { + impl project::ProjectItem for TestIpynbItem { fn try_open( _project: &Model, path: &ProjectPath, @@ -7457,6 +8053,10 @@ mod tests { fn project_path(&self, _: &AppContext) -> Option { None } + + fn is_dirty(&self) -> bool { + false + } } impl Item for TestIpynbItemView { @@ -7654,4 +8254,12 @@ mod tests { Project::init_settings(cx); }); } + + fn dirty_project_item(id: u64, path: &str, cx: &mut AppContext) -> Model { + let item = TestProjectItem::new(id, path, cx); + item.update(cx, |item, _| { + item.is_dirty = true; + }); + item + } } diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 0d872425c1..cd1aab7f24 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -1,3 +1,5 @@ +use std::num::NonZeroUsize; + use anyhow::Result; use collections::HashMap; use gpui::AppContext; @@ -19,6 +21,8 @@ pub struct WorkspaceSettings { pub when_closing_with_no_tabs: CloseWindowWhenNoItems, pub use_system_path_prompts: bool, pub command_aliases: HashMap, + pub show_user_picture: bool, + pub max_tabs: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema)] @@ -83,15 +87,15 @@ pub 
enum RestoreOnStartupBehavior { pub struct WorkspaceSettingsContent { /// Active pane styling settings. pub active_pane_modifiers: Option, - // Direction to split horizontally. - // - // Default: "up" + /// Direction to split horizontally. + /// + /// Default: "up" pub pane_split_direction_horizontal: Option, - // Direction to split vertically. - // - // Default: "left" + /// Direction to split vertically. + /// + /// Default: "left" pub pane_split_direction_vertical: Option, - // Centered layout related settings. + /// Centered layout related settings. pub centered_layout: Option, /// Whether or not to prompt the user to confirm before closing the application. /// @@ -128,6 +132,15 @@ pub struct WorkspaceSettingsContent { /// /// Default: true pub command_aliases: Option>, + /// Whether to show user avatar in the title bar. + /// + /// Default: true + pub show_user_picture: Option, + // Maximum open tabs in a pane. Will not close an unsaved + // tab. Set to `None` for unlimited tabs. + // + // Default: none + pub max_tabs: Option, } #[derive(Deserialize)] diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index da3676f15c..adbbf66d23 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -37,7 +37,7 @@ log.workspace = true parking_lot.workspace = true paths.workspace = true postage.workspace = true -rpc.workspace = true +rpc = { workspace = true, features = ["gpui"] } schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 28b23d2fa7..73e93fac13 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -7,7 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context as _, Result}; use clock::ReplicaId; use collections::{HashMap, HashSet, VecDeque}; -use fs::{copy_recursive, Fs, PathEvent, RemoveOptions, Watcher}; +use fs::{copy_recursive, Fs, MTime, PathEvent, 
RemoveOptions, Watcher}; use futures::{ channel::{ mpsc::{self, UnboundedSender}, @@ -21,7 +21,6 @@ use fuzzy::CharBag; use git::GitHostingProviderRegistry; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, - status::GitStatus, COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, }; use gpui::{ @@ -29,6 +28,8 @@ use gpui::{ Task, }; use ignore::IgnoreStack; +use language::DiskState; + use parking_lot::Mutex; use paths::local_settings_folder_relative_path; use postage::{ @@ -52,19 +53,24 @@ use std::{ ffi::OsStr, fmt, future::Future, - mem, - ops::{AddAssign, Deref, DerefMut, Sub}, + mem::{self}, + ops::{Deref, DerefMut}, path::{Path, PathBuf}, pin::Pin, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, Arc, }, - time::{Duration, Instant, SystemTime}, + time::{Duration, Instant}, +}; +use sum_tree::{ + Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit, }; -use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; use text::{LineEnding, Rope}; -use util::{paths::home_dir, ResultExt}; +use util::{ + paths::{home_dir, PathMatcher, SanitizedPath}, + ResultExt, +}; pub use worktree_settings::WorktreeSettings; #[cfg(feature = "test-support")] @@ -100,7 +106,6 @@ pub enum CreatedEntry { pub struct LoadedFile { pub file: Arc, pub text: String, - pub diff_base: Option, } pub struct LoadedBinaryFile { @@ -133,6 +138,7 @@ pub struct RemoteWorktree { background_snapshot: Arc)>>, project_id: u64, client: AnyProtoClient, + file_scan_inclusions: PathMatcher, updates_tx: Option>, update_observer: Option>, snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>, @@ -144,12 +150,13 @@ pub struct RemoteWorktree { #[derive(Clone)] pub struct Snapshot { id: WorktreeId, - abs_path: Arc, + abs_path: SanitizedPath, root_name: String, root_char_bag: CharBag, entries_by_path: SumTree, entries_by_id: SumTree, - repository_entries: TreeMap, + always_included_entries: Vec>, + repositories: SumTree, /// A number that increases every time 
the worktree begins scanning /// a set of paths from the filesystem. This scanning could be caused @@ -166,8 +173,133 @@ pub struct Snapshot { #[derive(Clone, Debug, PartialEq, Eq)] pub struct RepositoryEntry { - pub(crate) work_directory: WorkDirectoryEntry, + /// The git status entries for this repository. + /// Note that the paths on this repository are relative to the git work directory. + /// If the .git folder is external to Zed, these paths will be relative to that folder, + /// and this data structure might reference files external to this worktree. + /// + /// For example: + /// + /// my_root_folder/ <-- repository root + /// .git + /// my_sub_folder_1/ + /// project_root/ <-- Project root, Zed opened here + /// changed_file_1 <-- File with changes, in worktree + /// my_sub_folder_2/ + /// changed_file_2 <-- File with changes, out of worktree + /// ... + /// + /// With this setup, this field would contain 2 entries, like so: + /// - my_sub_folder_1/project_root/changed_file_1 + /// - my_sub_folder_2/changed_file_2 + pub(crate) statuses_by_path: SumTree, + pub(crate) work_directory_id: ProjectEntryId, + pub(crate) work_directory: WorkDirectory, pub(crate) branch: Option>, +} + +impl Deref for RepositoryEntry { + type Target = WorkDirectory; + + fn deref(&self) -> &Self::Target { + &self.work_directory + } +} + +impl AsRef for RepositoryEntry { + fn as_ref(&self) -> &Path { + &self.path + } +} + +impl RepositoryEntry { + pub fn branch(&self) -> Option> { + self.branch.clone() + } + + pub fn work_directory_id(&self) -> ProjectEntryId { + self.work_directory_id + } + + pub fn status(&self) -> impl Iterator + '_ { + self.statuses_by_path.iter().cloned() + } + + pub fn initial_update(&self) -> proto::RepositoryEntry { + proto::RepositoryEntry { + work_directory_id: self.work_directory_id.to_proto(), + branch: self.branch.as_ref().map(|branch| branch.to_string()), + updated_statuses: self + .statuses_by_path + .iter() + .map(|entry| proto::StatusEntry { + 
repo_path: entry.repo_path.to_string_lossy().to_string(), + status: git_status_to_proto(entry.status), + }) + .collect(), + removed_statuses: Default::default(), + } + } + + pub fn build_update(&self, old: &Self) -> proto::RepositoryEntry { + let mut updated_statuses: Vec = Vec::new(); + let mut removed_statuses: Vec = Vec::new(); + + let mut new_statuses = self.statuses_by_path.iter().peekable(); + let mut old_statuses = old.statuses_by_path.iter().peekable(); + + let mut current_new_entry = new_statuses.next(); + let mut current_old_entry = old_statuses.next(); + loop { + match (current_new_entry, current_old_entry) { + (Some(new_entry), Some(old_entry)) => { + match new_entry.repo_path.cmp(&old_entry.repo_path) { + Ordering::Less => { + updated_statuses.push(new_entry.to_proto()); + current_new_entry = new_statuses.next(); + } + Ordering::Equal => { + if new_entry.status != old_entry.status { + updated_statuses.push(new_entry.to_proto()); + } + current_old_entry = old_statuses.next(); + current_new_entry = new_statuses.next(); + } + Ordering::Greater => { + removed_statuses.push(old_entry.repo_path.to_proto()); + current_old_entry = old_statuses.next(); + } + } + } + (None, Some(old_entry)) => { + removed_statuses.push(old_entry.repo_path.to_proto()); + current_old_entry = old_statuses.next(); + } + (Some(new_entry), None) => { + updated_statuses.push(new_entry.to_proto()); + current_new_entry = new_statuses.next(); + } + (None, None) => break, + } + } + + proto::RepositoryEntry { + work_directory_id: self.work_directory_id.to_proto(), + branch: self.branch.as_ref().map(|branch| branch.to_string()), + updated_statuses, + removed_statuses, + } + } +} + +/// This path corresponds to the 'content path' of a repository in relation +/// to Zed's project root. +/// In the majority of the cases, this is the folder that contains the .git folder. 
+/// But if a sub-folder of a git repository is opened, this corresponds to the +/// project root and the .git folder is located in a parent directory. +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct WorkDirectory { + path: Arc, /// If location_in_repo is set, it means the .git folder is external /// and in a parent folder of the project root. @@ -190,23 +322,19 @@ pub struct RepositoryEntry { pub(crate) location_in_repo: Option>, } -impl RepositoryEntry { - pub fn branch(&self) -> Option> { - self.branch.clone() +impl WorkDirectory { + pub fn path_key(&self) -> PathKey { + PathKey(self.path.clone()) } - pub fn work_directory_id(&self) -> ProjectEntryId { - *self.work_directory - } - - pub fn work_directory(&self, snapshot: &Snapshot) -> Option { - snapshot - .entry_for_id(self.work_directory_id()) - .map(|entry| RepositoryWorkDirectory(entry.path.clone())) - } - - pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry { - self.into() + /// Returns true if the given path is a child of the work directory. + /// + /// Note that the path may not be a member of this repository, if there + /// is a repository in a directory between these two paths + /// external .git folder in a parent folder of the project root. + pub fn directory_contains(&self, path: impl AsRef) -> bool { + let path = path.as_ref(); + path.starts_with(&self.path) } /// relativize returns the given project path relative to the root folder of the @@ -214,53 +342,50 @@ impl RepositoryEntry { /// If the root of the repository (and its .git folder) are located in a parent folder /// of the project root folder, then the returned RepoPath is relative to the root /// of the repository and not a valid path inside the project. 
- pub fn relativize(&self, worktree: &Snapshot, path: &Path) -> Result { - let relativize_path = |path: &Path| { - let entry = worktree - .entry_for_id(self.work_directory.0) - .ok_or_else(|| anyhow!("entry not found"))?; - + pub fn relativize(&self, path: &Path) -> Result { + if let Some(location_in_repo) = &self.location_in_repo { + Ok(location_in_repo.join(path).into()) + } else { let relativized_path = path - .strip_prefix(&entry.path) - .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, entry.path))?; + .strip_prefix(&self.path) + .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?; Ok(relativized_path.into()) - }; + } + } - if let Some(location_in_repo) = &self.location_in_repo { - relativize_path(&location_in_repo.join(path)) + /// This is the opposite operation to `relativize` above + pub fn unrelativize(&self, path: &RepoPath) -> Option> { + if let Some(location) = &self.location_in_repo { + // If we fail to strip the prefix, that means this status entry is + // external to this worktree, and we definitely won't have an entry_id + path.strip_prefix(location).ok().map(Into::into) } else { - relativize_path(path) + Some(self.path.join(path).into()) } } } -impl From<&RepositoryEntry> for proto::RepositoryEntry { - fn from(value: &RepositoryEntry) -> Self { - proto::RepositoryEntry { - work_directory_id: value.work_directory.to_proto(), - branch: value.branch.as_ref().map(|str| str.to_string()), - } - } -} - -/// This path corresponds to the 'content path' of a repository in relation -/// to Zed's project root. -/// In the majority of the cases, this is the folder that contains the .git folder. -/// But if a sub-folder of a git repository is opened, this corresponds to the -/// project root and the .git folder is located in a parent directory. 
-#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] -pub struct RepositoryWorkDirectory(pub(crate) Arc); - -impl Default for RepositoryWorkDirectory { +impl Default for WorkDirectory { fn default() -> Self { - RepositoryWorkDirectory(Arc::from(Path::new(""))) + Self { + path: Arc::from(Path::new("")), + location_in_repo: None, + } } } -impl AsRef for RepositoryWorkDirectory { +impl Deref for WorkDirectory { + type Target = Path; + + fn deref(&self) -> &Self::Target { + self.as_ref() + } +} + +impl AsRef for WorkDirectory { fn as_ref(&self) -> &Path { - self.0.as_ref() + self.path.as_ref() } } @@ -312,7 +437,9 @@ struct BackgroundScannerState { #[derive(Debug, Clone)] pub struct LocalRepositoryEntry { + pub(crate) work_directory: WorkDirectory, pub(crate) git_dir_scan_id: usize, + pub(crate) status_scan_id: usize, pub(crate) repo_ptr: Arc, /// Absolute path to the actual .git folder. /// Note: if .git is a file, this points to the folder indicated by the .git file @@ -321,12 +448,39 @@ pub struct LocalRepositoryEntry { pub(crate) dot_git_worktree_abs_path: Option>, } +impl sum_tree::Item for LocalRepositoryEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.work_directory.path.clone(), + item_summary: Unit, + } + } +} + +impl KeyedItem for LocalRepositoryEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.work_directory.path.clone()) + } +} + impl LocalRepositoryEntry { pub fn repo(&self) -> &Arc { &self.repo_ptr } } +impl Deref for LocalRepositoryEntry { + type Target = WorkDirectory; + + fn deref(&self) -> &Self::Target { + &self.work_directory + } +} + impl Deref for LocalSnapshot { type Target = Snapshot; @@ -350,7 +504,7 @@ enum ScanState { scanning: bool, }, RootUpdated { - new_path: Option>, + new_path: Option, }, } @@ -404,24 +558,11 @@ impl Worktree { abs_path .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()), - abs_path, + 
abs_path.clone(), ), root_file_handle, }; - if let Some(metadata) = metadata { - snapshot.insert_entry( - Entry::new( - Arc::from(Path::new("")), - &metadata, - &next_entry_id, - snapshot.root_char_bag, - None, - ), - fs.as_ref(), - ); - } - let worktree_id = snapshot.id(); let settings_location = Some(SettingsLocation { worktree_id, @@ -432,7 +573,7 @@ impl Worktree { cx.observe_global::(move |this, cx| { if let Self::Local(this) = this { let settings = WorktreeSettings::get(settings_location, cx).clone(); - if settings != this.settings { + if this.settings != settings { this.settings = settings; this.restart_background_scanners(cx); } @@ -440,10 +581,26 @@ impl Worktree { }) .detach(); + let share_private_files = false; + if let Some(metadata) = metadata { + let mut entry = Entry::new( + Arc::from(Path::new("")), + &metadata, + &next_entry_id, + snapshot.root_char_bag, + None, + ); + if !metadata.is_dir { + entry.is_private = !share_private_files + && settings.is_path_private(abs_path.file_name().unwrap().as_ref()); + } + snapshot.insert_entry(entry, fs.as_ref()); + } + let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); let mut worktree = LocalWorktree { - share_private_files: false, + share_private_files, next_entry_id, snapshot, is_scanning: watch::channel_with(true), @@ -479,11 +636,19 @@ impl Worktree { let (background_updates_tx, mut background_updates_rx) = mpsc::unbounded(); let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel(); + let worktree_id = snapshot.id(); + let settings_location = Some(SettingsLocation { + worktree_id, + path: Path::new(EMPTY_PATH), + }); + + let settings = WorktreeSettings::get(settings_location, cx).clone(); let worktree = RemoteWorktree { client, project_id, replica_id, snapshot, + file_scan_inclusions: settings.file_scan_inclusions.clone(), background_snapshot: background_snapshot.clone(), updates_tx: 
Some(background_updates_tx), update_observer: None, @@ -499,7 +664,10 @@ impl Worktree { while let Some(update) = background_updates_rx.next().await { { let mut lock = background_snapshot.lock(); - if let Err(error) = lock.0.apply_remote_update(update.clone()) { + if let Err(error) = lock + .0 + .apply_remote_update(update.clone(), &settings.file_scan_inclusions) + { log::error!("error applying worktree update: {}", error); } lock.1.push(update); @@ -637,8 +805,8 @@ impl Worktree { pub fn abs_path(&self) -> Arc { match self { - Worktree::Local(worktree) => worktree.abs_path.clone(), - Worktree::Remote(worktree) => worktree.abs_path.clone(), + Worktree::Local(worktree) => worktree.abs_path.clone().into(), + Worktree::Remote(worktree) => worktree.abs_path.clone().into(), } } @@ -690,6 +858,30 @@ impl Worktree { } } + pub fn load_staged_file(&self, path: &Path, cx: &AppContext) -> Task>> { + match self { + Worktree::Local(this) => { + let path = Arc::from(path); + let snapshot = this.snapshot(); + cx.background_executor().spawn(async move { + if let Some(repo) = snapshot.repository_for_path(&path) { + if let Some(repo_path) = repo.relativize(&path).log_err() { + if let Some(git_repo) = + snapshot.git_repositories.get(&repo.work_directory_id) + { + return Ok(git_repo.repo_ptr.load_index_text(&repo_path)); + } + } + } + Ok(None) + }) + } + Worktree::Remote(_) => { + Task::ready(Err(anyhow!("remote worktrees can't yet load staged files"))) + } + } + } + pub fn load_binary_file( &self, path: &Path, @@ -1009,6 +1201,7 @@ impl LocalWorktree { } pub fn contains_abs_path(&self, path: &Path) -> bool { + let path = SanitizedPath::from(path); path.starts_with(&self.abs_path) } @@ -1021,7 +1214,17 @@ impl LocalWorktree { let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); self.scan_requests_tx = scan_requests_tx; self.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx; + self.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx); + 
let always_included_entries = mem::take(&mut self.snapshot.always_included_entries); + log::debug!( + "refreshing entries for the following always included paths: {:?}", + always_included_entries + ); + + // Cleans up old always included entries to ensure they get updated properly. Otherwise, + // nested always included entries may not get updated and will result in out-of-date info. + self.refresh_entries_for_paths(always_included_entries); } fn start_background_scanner( @@ -1039,13 +1242,13 @@ impl LocalWorktree { let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_executor().spawn({ let abs_path = &snapshot.abs_path; - let abs_path = if cfg!(target_os = "windows") { - abs_path - .canonicalize() - .unwrap_or_else(|_| abs_path.to_path_buf()) - } else { - abs_path.to_path_buf() - }; + #[cfg(target_os = "windows")] + let abs_path = abs_path + .as_path() + .canonicalize() + .unwrap_or_else(|_| abs_path.as_path().to_path_buf()); + #[cfg(not(target_os = "windows"))] + let abs_path = abs_path.as_path().to_path_buf(); let background = cx.background_executor().clone(); async move { let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await; @@ -1108,6 +1311,7 @@ impl LocalWorktree { this.snapshot.git_repositories = Default::default(); this.snapshot.ignores_by_parent_abs_path = Default::default(); let root_name = new_path + .as_path() .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); this.snapshot.update_abs_path(new_path, root_name); @@ -1160,6 +1364,7 @@ impl LocalWorktree { let mut changes = Vec::new(); let mut old_repos = old_snapshot.git_repositories.iter().peekable(); let mut new_repos = new_snapshot.git_repositories.iter().peekable(); + loop { match (new_repos.peek().map(clone), old_repos.peek().map(clone)) { (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => { @@ -1176,11 +1381,13 @@ impl LocalWorktree { new_repos.next(); } Ordering::Equal => { - if 
new_repo.git_dir_scan_id != old_repo.git_dir_scan_id { + if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id + || new_repo.status_scan_id != old_repo.status_scan_id + { if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1196,8 +1403,8 @@ impl LocalWorktree { Ordering::Greater => { if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1224,8 +1431,8 @@ impl LocalWorktree { (None, Some((entry_id, _))) => { if let Some(entry) = old_snapshot.entry_for_id(entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1270,12 +1477,12 @@ impl LocalWorktree { } pub fn local_git_repo(&self, path: &Path) -> Option> { - self.repo_for_path(path) - .map(|(_, entry)| entry.repo_ptr.clone()) + self.local_repo_for_path(path) + .map(|local_repo| local_repo.repo_ptr.clone()) } pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> { - self.git_repositories.get(&repo.work_directory.0) + self.git_repositories.get(&repo.work_directory_id) } fn load_binary_file( @@ -1313,9 +1520,10 @@ impl LocalWorktree { entry_id: None, worktree, path, - mtime: Some(metadata.mtime), + disk_state: DiskState::Present { + mtime: metadata.mtime, + }, is_local: true, - is_deleted: false, is_private, }) } @@ -1332,28 +1540,9 @@ impl LocalWorktree { let entry = self.refresh_entry(path.clone(), None, cx); let is_private = self.is_path_private(path.as_ref()); - cx.spawn(|this, mut cx| 
async move { + cx.spawn(|this, _cx| async move { let abs_path = abs_path?; let text = fs.load(&abs_path).await?; - let mut index_task = None; - let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?; - if let Some(repo) = snapshot.repository_for_path(&path) { - if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() { - if let Some(git_repo) = snapshot.git_repositories.get(&*repo.work_directory) { - let git_repo = git_repo.repo_ptr.clone(); - index_task = Some( - cx.background_executor() - .spawn(async move { git_repo.load_index_text(&repo_path) }), - ); - } - } - } - - let diff_base = if let Some(index_task) = index_task { - index_task.await - } else { - None - }; let worktree = this .upgrade() @@ -1374,19 +1563,16 @@ impl LocalWorktree { entry_id: None, worktree, path, - mtime: Some(metadata.mtime), + disk_state: DiskState::Present { + mtime: metadata.mtime, + }, is_local: true, - is_deleted: false, is_private, }) } }; - Ok(LoadedFile { - file, - text, - diff_base, - }) + Ok(LoadedFile { file, text }) }) } @@ -1512,10 +1698,11 @@ impl LocalWorktree { Ok(Arc::new(File { worktree, path, - mtime: Some(metadata.mtime), + disk_state: DiskState::Present { + mtime: metadata.mtime, + }, entry_id: None, is_local: true, - is_deleted: false, is_private, })) } @@ -1967,7 +2154,7 @@ impl RemoteWorktree { this.update(&mut cx, |worktree, _| { let worktree = worktree.as_remote_mut().unwrap(); let snapshot = &mut worktree.background_snapshot.lock().0; - let entry = snapshot.insert_entry(entry); + let entry = snapshot.insert_entry(entry, &worktree.file_scan_inclusions); worktree.snapshot = snapshot.clone(); entry })? 
@@ -2045,12 +2232,13 @@ impl Snapshot { pub fn new(id: u64, root_name: String, abs_path: Arc) -> Self { Snapshot { id: WorktreeId::from_usize(id as usize), - abs_path, + abs_path: abs_path.into(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), root_name, + always_included_entries: Default::default(), entries_by_path: Default::default(), entries_by_id: Default::default(), - repository_entries: Default::default(), + repositories: Default::default(), scan_id: 1, completed_scan_id: 0, } @@ -2060,8 +2248,20 @@ impl Snapshot { self.id } + // TODO: + // Consider the following: + // + // ```rust + // let abs_path: Arc = snapshot.abs_path(); // e.g. "C:\Users\user\Desktop\project" + // let some_non_trimmed_path = Path::new("\\\\?\\C:\\Users\\user\\Desktop\\project\\main.rs"); + // // The caller perform some actions here: + // some_non_trimmed_path.strip_prefix(abs_path); // This fails + // some_non_trimmed_path.starts_with(abs_path); // This fails too + // ``` + // + // This is definitely a bug, but it's not clear if we should handle it here or not. 
pub fn abs_path(&self) -> &Arc { - &self.abs_path + self.abs_path.as_path() } fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree { @@ -2073,9 +2273,9 @@ impl Snapshot { updated_entries.sort_unstable_by_key(|e| e.id); let mut updated_repositories = self - .repository_entries - .values() - .map(proto::RepositoryEntry::from) + .repositories + .iter() + .map(|repository| repository.initial_update()) .collect::>(); updated_repositories.sort_unstable_by_key(|e| e.work_directory_id); @@ -2101,9 +2301,9 @@ impl Snapshot { return Err(anyhow!("invalid path")); } if path.file_name().is_some() { - Ok(self.abs_path.join(path)) + Ok(self.abs_path.as_path().join(path)) } else { - Ok(self.abs_path.to_path_buf()) + Ok(self.abs_path.as_path().to_path_buf()) } } @@ -2111,8 +2311,12 @@ impl Snapshot { self.entries_by_id.get(&entry_id, &()).is_some() } - fn insert_entry(&mut self, entry: proto::Entry) -> Result { - let entry = Entry::try_from((&self.root_char_bag, entry))?; + fn insert_entry( + &mut self, + entry: proto::Entry, + always_included_paths: &PathMatcher, + ) -> Result { + let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?; let old_entry = self.entries_by_id.insert_or_replace( PathEntry { id: entry.id, @@ -2134,7 +2338,7 @@ impl Snapshot { self.entries_by_path = { let mut cursor = self.entries_by_path.cursor::(&()); let mut new_entries_by_path = - cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &()); + cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &()); while let Some(entry) = cursor.item() { if entry.path.starts_with(&removed_entry.path) { self.entries_by_id.remove(&entry.id, &()); @@ -2150,15 +2354,17 @@ impl Snapshot { Some(removed_entry.path) } - #[cfg(any(test, feature = "test-support"))] - pub fn status_for_file(&self, path: impl Into) -> Option { - let path = path.into(); - self.entries_by_path - .get(&PathKey(Arc::from(path)), &()) - .and_then(|entry| 
entry.git_status) + pub fn status_for_file(&self, path: impl AsRef) -> Option { + let path = path.as_ref(); + self.repository_for_path(path).and_then(|repo| { + let repo_path = repo.relativize(path).unwrap(); + repo.statuses_by_path + .get(&PathKey(repo_path.0), &()) + .map(|entry| entry.status) + }) } - fn update_abs_path(&mut self, abs_path: Arc, root_name: String) { + fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) { self.abs_path = abs_path; if root_name != self.root_name { self.root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect(); @@ -2166,14 +2372,18 @@ impl Snapshot { } } - pub(crate) fn apply_remote_update(&mut self, mut update: proto::UpdateWorktree) -> Result<()> { + pub(crate) fn apply_remote_update( + &mut self, + mut update: proto::UpdateWorktree, + always_included_paths: &PathMatcher, + ) -> Result<()> { log::trace!( "applying remote worktree update. {} entries updated, {} removed", update.updated_entries.len(), update.removed_entries.len() ); self.update_abs_path( - Arc::from(PathBuf::from(update.abs_path).as_path()), + SanitizedPath::from(PathBuf::from(update.abs_path)), update.root_name, ); @@ -2189,7 +2399,7 @@ impl Snapshot { } for entry in update.updated_entries { - let entry = Entry::try_from((&self.root_char_bag, entry))?; + let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?; if let Some(PathEntry { path, .. 
}) = self.entries_by_id.get(&entry.id, &()) { entries_by_path_edits.push(Edit::Remove(PathKey(path.clone()))); } @@ -2211,38 +2421,66 @@ impl Snapshot { self.entries_by_id.edit(entries_by_id_edits, &()); update.removed_repositories.sort_unstable(); - self.repository_entries.retain(|_, entry| { + self.repositories.retain(&(), |entry: &RepositoryEntry| { update .removed_repositories - .binary_search(&entry.work_directory.to_proto()) + .binary_search(&entry.work_directory_id.to_proto()) .is_err() }); for repository in update.updated_repositories { - let work_directory_entry: WorkDirectoryEntry = - ProjectEntryId::from_proto(repository.work_directory_id).into(); + let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id); + if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) { + if self + .repositories + .contains(&PathKey(work_dir_entry.path.clone()), &()) + { + let edits = repository + .removed_statuses + .into_iter() + .map(|path| Edit::Remove(PathKey(Path::new(&path).into()))) + .chain(repository.updated_statuses.into_iter().filter_map( + |updated_status| { + Some(Edit::Insert(updated_status.try_into().log_err()?)) + }, + )) + .collect::>(); - if let Some(entry) = self.entry_for_id(*work_directory_entry) { - let work_directory = RepositoryWorkDirectory(entry.path.clone()); - if self.repository_entries.get(&work_directory).is_some() { - self.repository_entries.update(&work_directory, |repo| { - repo.branch = repository.branch.map(Into::into); - }); + self.repositories + .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| { + repo.branch = repository.branch.map(Into::into); + repo.statuses_by_path.edit(edits, &()); + }); } else { - self.repository_entries.insert( - work_directory, + let statuses = SumTree::from_iter( + repository + .updated_statuses + .into_iter() + .filter_map(|updated_status| updated_status.try_into().log_err()), + &(), + ); + + self.repositories.insert_or_replace( RepositoryEntry { - work_directory: 
work_directory_entry, + work_directory_id, + work_directory: WorkDirectory { + path: work_dir_entry.path.clone(), + // When syncing repository entries from a peer, we don't need + // the location_in_repo field, since git operations don't happen locally + // anyway. + location_in_repo: None, + }, branch: repository.branch.map(Into::into), - // When syncing repository entries from a peer, we don't need - // the location_in_repo field, since git operations don't happen locally - // anyway. - location_in_repo: None, + statuses_by_path: statuses, }, - ) + &(), + ); } } else { - log::error!("no work directory entry for repository {:?}", repository) + log::error!( + "no work directory entry for repository {:?}", + repository.work_directory_id + ) } } @@ -2299,6 +2537,7 @@ impl Snapshot { &(), ); Traversal { + snapshot: self, cursor, include_files, include_dirs, @@ -2313,13 +2552,7 @@ impl Snapshot { include_ignored: bool, path: &Path, ) -> Traversal { - Traversal::new( - &self.entries_by_path, - include_files, - include_dirs, - include_ignored, - path, - ) + Traversal::new(self, include_files, include_dirs, include_ignored, path) } pub fn files(&self, include_ignored: bool, start: usize) -> Traversal { @@ -2334,34 +2567,39 @@ impl Snapshot { self.traverse_from_offset(true, true, include_ignored, start) } - pub fn repositories(&self) -> impl Iterator, &RepositoryEntry)> { - self.repository_entries - .iter() - .map(|(path, entry)| (&path.0, entry)) + #[cfg(any(feature = "test-support", test))] + pub fn git_status(&self, work_dir: &Path) -> Option> { + self.repositories + .get(&PathKey(work_dir.into()), &()) + .map(|repo| repo.status().collect()) + } + + pub fn repositories(&self) -> impl Iterator { + self.repositories.iter() + } + + /// Get the repository whose work directory corresponds to the given path. 
+ pub(crate) fn repository(&self, work_directory: PathKey) -> Option { + self.repositories.get(&work_directory, &()).cloned() } /// Get the repository whose work directory contains the given path. - pub fn repository_for_work_directory(&self, path: &Path) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(path.into())) - .cloned() - } + pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> { + let mut cursor = self.repositories.cursor::(&()); + let mut repository = None; - /// Get the repository whose work directory contains the given path. - pub fn repository_for_path(&self, path: &Path) -> Option { - self.repository_and_work_directory_for_path(path) - .map(|e| e.1) - } + // Git repositories may contain other git repositories. As a side effect of + // lexicographic sorting by path, deeper repositories will be after higher repositories + // So, let's loop through every matching repository until we can't find any more to find + // the deepest repository that could contain this path. 
+ while cursor.seek_forward(&PathTarget::Contains(path), Bias::Left, &()) + && cursor.item().is_some() + { + repository = cursor.item(); + cursor.next(&()); + } - pub fn repository_and_work_directory_for_path( - &self, - path: &Path, - ) -> Option<(RepositoryWorkDirectory, RepositoryEntry)> { - self.repository_entries - .iter() - .filter(|(workdir_path, _)| path.starts_with(workdir_path)) - .last() - .map(|(path, repo)| (path.clone(), repo.clone())) + repository } /// Given an ordered iterator of entries, returns an iterator of those entries, @@ -2370,86 +2608,28 @@ impl Snapshot { &'a self, entries: impl 'a + Iterator, ) -> impl 'a + Iterator)> { - let mut containing_repos = Vec::<(&Arc, &RepositoryEntry)>::new(); + let mut containing_repos = Vec::<&RepositoryEntry>::new(); let mut repositories = self.repositories().peekable(); entries.map(move |entry| { - while let Some((repo_path, _)) = containing_repos.last() { - if entry.path.starts_with(repo_path) { + while let Some(repository) = containing_repos.last() { + if repository.directory_contains(&entry.path) { break; } else { containing_repos.pop(); } } - while let Some((repo_path, _)) = repositories.peek() { - if entry.path.starts_with(repo_path) { + while let Some(repository) = repositories.peek() { + if repository.directory_contains(&entry.path) { containing_repos.push(repositories.next().unwrap()); } else { break; } } - let repo = containing_repos.last().map(|(_, repo)| *repo); + let repo = containing_repos.last().copied(); (entry, repo) }) } - /// Updates the `git_status` of the given entries such that files' - /// statuses bubble up to their ancestor directories. 
- pub fn propagate_git_statuses(&self, result: &mut [Entry]) { - let mut cursor = self - .entries_by_path - .cursor::<(TraversalProgress, GitStatuses)>(&()); - let mut entry_stack = Vec::<(usize, GitStatuses)>::new(); - - let mut result_ix = 0; - loop { - let next_entry = result.get(result_ix); - let containing_entry = entry_stack.last().map(|(ix, _)| &result[*ix]); - - let entry_to_finish = match (containing_entry, next_entry) { - (Some(_), None) => entry_stack.pop(), - (Some(containing_entry), Some(next_path)) => { - if next_path.path.starts_with(&containing_entry.path) { - None - } else { - entry_stack.pop() - } - } - (None, Some(_)) => None, - (None, None) => break, - }; - - if let Some((entry_ix, prev_statuses)) = entry_to_finish { - cursor.seek_forward( - &TraversalTarget::PathSuccessor(&result[entry_ix].path), - Bias::Left, - &(), - ); - - let statuses = cursor.start().1 - prev_statuses; - - result[entry_ix].git_status = if statuses.conflict > 0 { - Some(GitFileStatus::Conflict) - } else if statuses.modified > 0 { - Some(GitFileStatus::Modified) - } else if statuses.added > 0 { - Some(GitFileStatus::Added) - } else { - None - }; - } else { - if result[result_ix].is_dir() { - cursor.seek_forward( - &TraversalTarget::Path(&result[result_ix].path), - Bias::Left, - &(), - ); - entry_stack.push((result_ix, cursor.start().1)); - } - result_ix += 1; - } - } - } - pub fn paths(&self) -> impl Iterator> { let empty_path = Path::new(""); self.entries_by_path @@ -2460,8 +2640,9 @@ impl Snapshot { pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> { let mut cursor = self.entries_by_path.cursor(&()); - cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &()); + cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &()); let traversal = Traversal { + snapshot: self, cursor, include_files: true, include_dirs: true, @@ -2477,24 +2658,30 @@ impl Snapshot { self.entry_for_path("") } + pub fn root_dir(&self) -> Option> { + 
self.root_entry() + .filter(|entry| entry.is_dir()) + .map(|_| self.abs_path().clone()) + } + pub fn root_name(&self) -> &str { &self.root_name } pub fn root_git_entry(&self) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(Path::new("").into())) + self.repositories + .get(&PathKey(Path::new("").into()), &()) .map(|entry| entry.to_owned()) } pub fn git_entry(&self, work_directory_path: Arc) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(work_directory_path)) + self.repositories + .get(&PathKey(work_directory_path), &()) .map(|entry| entry.to_owned()) } pub fn git_entries(&self) -> impl Iterator { - self.repository_entries.values() + self.repositories.iter() } pub fn scan_id(&self) -> usize { @@ -2525,10 +2712,10 @@ impl Snapshot { } impl LocalSnapshot { - pub fn repo_for_path(&self, path: &Path) -> Option<(RepositoryEntry, &LocalRepositoryEntry)> { - let (_, repo_entry) = self.repository_and_work_directory_for_path(path)?; - let work_directory_id = repo_entry.work_directory_id(); - Some((repo_entry, self.git_repositories.get(&work_directory_id)?)) + pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> { + let repository_entry = self.repository_for_path(path)?; + let work_directory_id = repository_entry.work_directory_id(); + self.git_repositories.get(&work_directory_id) } fn build_update( @@ -2552,18 +2739,16 @@ impl LocalSnapshot { } for (work_dir_path, change) in repo_changes.iter() { - let new_repo = self - .repository_entries - .get(&RepositoryWorkDirectory(work_dir_path.clone())); + let new_repo = self.repositories.get(&PathKey(work_dir_path.clone()), &()); match (&change.old_repository, new_repo) { (Some(old_repo), Some(new_repo)) => { updated_repositories.push(new_repo.build_update(old_repo)); } (None, Some(new_repo)) => { - updated_repositories.push(proto::RepositoryEntry::from(new_repo)); + updated_repositories.push(new_repo.initial_update()); } (Some(old_repo), None) => { - 
removed_repositories.push(old_repo.work_directory.0.to_proto()); + removed_repositories.push(old_repo.work_directory_id.to_proto()); } _ => {} } @@ -2593,7 +2778,7 @@ impl LocalSnapshot { fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry { if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) { - let abs_path = self.abs_path.join(&entry.path); + let abs_path = self.abs_path.as_path().join(&entry.path); match smol::block_on(build_gitignore(&abs_path, fs)) { Ok(ignore) => { self.ignores_by_parent_abs_path @@ -2709,7 +2894,7 @@ impl LocalSnapshot { for entry in self.entries_by_path.cursor::<()>(&()) { if entry.is_file() { assert_eq!(files.next().unwrap().inode, entry.inode); - if !entry.is_ignored && !entry.is_external { + if (!entry.is_ignored && !entry.is_external) || entry.is_always_included { assert_eq!(visible_files.next().unwrap().inode, entry.inode); } } @@ -2747,8 +2932,9 @@ impl LocalSnapshot { if git_state { for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() { - let ignore_parent_path = - ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap(); + let ignore_parent_path = ignore_parent_abs_path + .strip_prefix(self.abs_path.as_path()) + .unwrap(); assert!(self.entry_for_path(ignore_parent_path).is_some()); assert!(self .entry_for_path(ignore_parent_path.join(*GITIGNORE)) @@ -2765,15 +2951,15 @@ impl LocalSnapshot { .map(|repo| repo.1.dot_git_dir_abs_path.clone()) .collect::>(); let work_dir_paths = self - .repository_entries + .repositories .iter() - .map(|repo| repo.0.clone().0) + .map(|repo| repo.work_directory.path.clone()) .collect::>(); assert_eq!(dotgit_paths.len(), work_dir_paths.len()); - assert_eq!(self.repository_entries.iter().count(), work_dir_paths.len()); + assert_eq!(self.repositories.iter().count(), work_dir_paths.len()); assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len()); - for (_, entry) in self.repository_entries.iter() { - 
self.git_repositories.get(&entry.work_directory).unwrap(); + for entry in self.repositories.iter() { + self.git_repositories.get(&entry.work_directory_id).unwrap(); } } @@ -2792,7 +2978,7 @@ impl LocalSnapshot { impl BackgroundScannerState { fn should_scan_directory(&self, entry: &Entry) -> bool { - (!entry.is_external && !entry.is_ignored) + (!entry.is_external && (!entry.is_ignored || entry.is_always_included)) || entry.path.file_name() == Some(*DOT_GIT) || entry.path.file_name() == Some(local_settings_folder_relative_path().as_os_str()) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning @@ -2810,23 +2996,7 @@ impl BackgroundScannerState { let path = entry.path.clone(); let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); - let mut containing_repository = None; - if !ignore_stack.is_abs_path_ignored(&abs_path, true) { - if let Some((repo_entry, repo)) = self.snapshot.repo_for_path(&path) { - if let Some(workdir_path) = repo_entry.work_directory(&self.snapshot) { - if let Ok(repo_path) = repo_entry.relativize(&self.snapshot, &path) { - containing_repository = Some(ScanJobContainingRepository { - work_directory: workdir_path, - statuses: repo - .repo_ptr - .status(&[repo_path.0]) - .log_err() - .unwrap_or_default(), - }); - } - } - } - } + if !ancestor_inodes.contains(&entry.inode) { ancestor_inodes.insert(entry.inode); scan_job_tx @@ -2837,7 +3007,6 @@ impl BackgroundScannerState { scan_queue: scan_job_tx.clone(), ancestor_inodes, is_external: entry.is_external, - containing_repository, }) .unwrap(); } @@ -2902,7 +3071,7 @@ impl BackgroundScannerState { } if let Some(ignore) = ignore { - let abs_parent_path = self.snapshot.abs_path.join(parent_path).into(); + let abs_parent_path = self.snapshot.abs_path.as_path().join(parent_path).into(); self.snapshot .ignores_by_parent_abs_path .insert(abs_parent_path, (ignore, false)); @@ -2944,8 
+3113,8 @@ impl BackgroundScannerState { .snapshot .entries_by_path .cursor::(&()); - new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); - removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &()); + new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &()); + removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &()); new_entries.append(cursor.suffix(&()), &()); } self.snapshot.entries_by_path = new_entries; @@ -2965,7 +3134,11 @@ impl BackgroundScannerState { } if entry.path.file_name() == Some(&GITIGNORE) { - let abs_parent_path = self.snapshot.abs_path.join(entry.path.parent().unwrap()); + let abs_parent_path = self + .snapshot + .abs_path + .as_path() + .join(entry.path.parent().unwrap()); if let Some((_, needs_update)) = self .snapshot .ignores_by_parent_abs_path @@ -2987,9 +3160,9 @@ impl BackgroundScannerState { self.snapshot .git_repositories .retain(|id, _| removed_ids.binary_search(id).is_err()); - self.snapshot - .repository_entries - .retain(|repo_path, _| !repo_path.0.starts_with(path)); + self.snapshot.repositories.retain(&(), |repository| { + !repository.work_directory.starts_with(path) + }); #[cfg(test)] self.snapshot.check_invariants(false); @@ -3000,7 +3173,7 @@ impl BackgroundScannerState { dot_git_path: Arc, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option<(RepositoryWorkDirectory, Arc)> { + ) -> Option { let work_dir_path: Arc = match dot_git_path.parent() { Some(parent_dir) => { // Guard against repositories inside the repository metadata @@ -3036,7 +3209,7 @@ impl BackgroundScannerState { location_in_repo: Option>, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option<(RepositoryWorkDirectory, Arc)> { + ) -> Option { let work_dir_id = self .snapshot .entry_for_path(work_dir_path.clone()) @@ -3046,18 +3219,14 @@ impl BackgroundScannerState { return None; } - let dot_git_abs_path = self.snapshot.abs_path.join(&dot_git_path); + let dot_git_abs_path = 
self.snapshot.abs_path.as_path().join(&dot_git_path); let t0 = Instant::now(); let repository = fs.open_repo(&dot_git_abs_path)?; - let actual_repo_path = repository.path(); - let actual_dot_git_dir_abs_path: Arc = Arc::from( - actual_repo_path - .ancestors() - .find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))?, - ); + let actual_repo_path = repository.dot_git_dir(); + let actual_dot_git_dir_abs_path = smol::block_on(find_git_dir(&actual_repo_path, fs))?; watcher.add(&actual_repo_path).log_err()?; let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path.as_ref() == dot_git_abs_path @@ -3072,7 +3241,10 @@ impl BackgroundScannerState { }; log::trace!("constructed libgit2 repo in {:?}", t0.elapsed()); - let work_directory = RepositoryWorkDirectory(work_dir_path.clone()); + let work_directory = WorkDirectory { + path: work_dir_path.clone(), + location_in_repo, + }; if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() { git_hosting_providers::register_additional_providers( @@ -3081,28 +3253,58 @@ impl BackgroundScannerState { ); } - self.snapshot.repository_entries.insert( - work_directory.clone(), + self.snapshot.repositories.insert_or_replace( RepositoryEntry { - work_directory: work_dir_id.into(), + work_directory_id: work_dir_id, + work_directory: work_directory.clone(), branch: repository.branch_name().map(Into::into), - location_in_repo, - }, - ); - self.snapshot.git_repositories.insert( - work_dir_id, - LocalRepositoryEntry { - git_dir_scan_id: 0, - repo_ptr: repository.clone(), - dot_git_dir_abs_path: actual_dot_git_dir_abs_path, - dot_git_worktree_abs_path, + statuses_by_path: Default::default(), }, + &(), ); - Some((work_directory, repository)) + let local_repository = LocalRepositoryEntry { + work_directory: work_directory.clone(), + git_dir_scan_id: 0, + status_scan_id: 0, + repo_ptr: repository.clone(), + dot_git_dir_abs_path: actual_dot_git_dir_abs_path, + dot_git_worktree_abs_path, + }; + + self.snapshot + 
.git_repositories + .insert(work_dir_id, local_repository.clone()); + + Some(local_repository) } } +async fn is_git_dir(path: &Path, fs: &dyn Fs) -> bool { + if path.file_name() == Some(&*DOT_GIT) { + return true; + } + + // If we're in a bare repository, we are not inside a `.git` folder. In a + // bare repository, the root folder contains what would normally be in the + // `.git` folder. + let head_metadata = fs.metadata(&path.join("HEAD")).await; + if !matches!(head_metadata, Ok(Some(_))) { + return false; + } + let config_metadata = fs.metadata(&path.join("config")).await; + matches!(config_metadata, Ok(Some(_))) +} + +async fn find_git_dir(path: &Path, fs: &dyn Fs) -> Option> { + for ancestor in path.ancestors() { + if is_git_dir(ancestor, fs).await { + return Some(Arc::from(ancestor)); + } + } + None +} + async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { let contents = fs.load(abs_path).await?; let parent = abs_path.parent().unwrap_or_else(|| Path::new("/")); @@ -3178,10 +3380,9 @@ impl fmt::Debug for Snapshot { pub struct File { pub worktree: Model, pub path: Arc, - pub mtime: Option, + pub disk_state: DiskState, pub entry_id: Option, pub is_local: bool, - pub is_deleted: bool, pub is_private: bool, } @@ -3194,8 +3395,8 @@ impl language::File for File { } } - fn mtime(&self) -> Option { - self.mtime + fn disk_state(&self) -> DiskState { + self.disk_state } fn path(&self) -> &Arc { @@ -3238,10 +3439,6 @@ impl language::File for File { self.worktree.read(cx).id() } - fn is_deleted(&self) -> bool { - self.is_deleted - } - fn as_any(&self) -> &dyn Any { self } @@ -3251,8 +3448,8 @@ impl language::File for File { worktree_id: self.worktree.read(cx).id().to_proto(), entry_id: self.entry_id.map(|id| id.to_proto()), path: self.path.to_string_lossy().into(), - mtime: self.mtime.map(|time| time.into()), - is_deleted: self.is_deleted, + mtime: self.disk_state.mtime().map(|time| time.into()), + is_deleted: self.disk_state == DiskState::Deleted, } } @@ 
-3265,9 +3462,9 @@ impl language::LocalFile for File { fn abs_path(&self, cx: &AppContext) -> PathBuf { let worktree_path = &self.worktree.read(cx).as_local().unwrap().abs_path; if self.path.as_ref() == Path::new("") { - worktree_path.to_path_buf() + worktree_path.as_path().to_path_buf() } else { - worktree_path.join(&self.path) + worktree_path.as_path().join(&self.path) } } @@ -3293,10 +3490,13 @@ impl File { Arc::new(Self { worktree, path: entry.path.clone(), - mtime: entry.mtime, + disk_state: if let Some(mtime) = entry.mtime { + DiskState::Present { mtime } + } else { + DiskState::New + }, entry_id: Some(entry.id), is_local: true, - is_deleted: false, is_private: entry.is_private, }) } @@ -3316,13 +3516,22 @@ impl File { return Err(anyhow!("worktree id does not match file")); } + let disk_state = if proto.is_deleted { + DiskState::Deleted + } else { + if let Some(mtime) = proto.mtime.map(&Into::into) { + DiskState::Present { mtime } + } else { + DiskState::New + } + }; + Ok(Self { worktree, path: Path::new(&proto.path).into(), - mtime: proto.mtime.map(|time| time.into()), + disk_state, entry_id: proto.entry_id.map(ProjectEntryId::from_proto), is_local: false, - is_deleted: proto.is_deleted, is_private: false, }) } @@ -3336,21 +3545,20 @@ impl File { } pub fn project_entry_id(&self, _: &AppContext) -> Option { - if self.is_deleted { - None - } else { - self.entry_id + match self.disk_state { + DiskState::Deleted => None, + _ => self.entry_id, } } } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Entry { pub id: ProjectEntryId, pub kind: EntryKind, pub path: Arc, pub inode: u64, - pub mtime: Option, + pub mtime: Option, pub canonical_path: Option>, /// Whether this entry is ignored by Git. @@ -3359,6 +3567,12 @@ pub struct Entry { /// exclude them from searches. pub is_ignored: bool, + /// Whether this entry is always included in searches. 
+ /// + /// This is used for entries that are always included in searches, even + /// if they are ignored by git. Overridden by file_scan_exclusions. + pub is_always_included: bool, + /// Whether this entry's canonical path is outside of the worktree. /// This means the entry is only accessible from the worktree root via a /// symlink. @@ -3367,7 +3581,7 @@ pub struct Entry { /// directory is expanded. External entries are treated like gitignored /// entries in that they are not included in searches. pub is_external: bool, - pub git_status: Option, + /// Whether this entry is considered to be a `.env` file. pub is_private: bool, /// The entry's size on disk, in bytes. @@ -3376,7 +3590,7 @@ pub struct Entry { pub is_fifo: bool, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum EntryKind { UnloadedDir, PendingDir, @@ -3400,6 +3614,7 @@ pub enum PathChange { Loaded, } +#[derive(Debug)] pub struct GitRepositoryChange { /// The previous state of the repository, if it already existed. 
pub old_repository: Option, @@ -3408,6 +3623,230 @@ pub struct GitRepositoryChange { pub type UpdatedEntriesSet = Arc<[(Arc, ProjectEntryId, PathChange)]>; pub type UpdatedGitRepositoriesSet = Arc<[(Arc, GitRepositoryChange)]>; +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct StatusEntry { + pub repo_path: RepoPath, + pub status: GitFileStatus, +} + +impl StatusEntry { + fn to_proto(&self) -> proto::StatusEntry { + proto::StatusEntry { + repo_path: self.repo_path.to_proto(), + status: git_status_to_proto(self.status), + } + } +} + +impl TryFrom for StatusEntry { + type Error = anyhow::Error; + fn try_from(value: proto::StatusEntry) -> Result { + Ok(Self { + repo_path: RepoPath(Path::new(&value.repo_path).into()), + status: git_status_from_proto(Some(value.status)) + .ok_or_else(|| anyhow!("Unable to parse status value {}", value.status))?, + }) + } +} + +#[derive(Clone, Debug)] +struct PathProgress<'a> { + max_path: &'a Path, +} + +#[derive(Clone, Debug)] +pub struct PathSummary { + max_path: Arc, + item_summary: S, +} + +impl Summary for PathSummary { + type Context = S::Context; + + fn zero(cx: &Self::Context) -> Self { + Self { + max_path: Path::new("").into(), + item_summary: S::zero(cx), + } + } + + fn add_summary(&mut self, rhs: &Self, cx: &Self::Context) { + self.max_path = rhs.max_path.clone(); + self.item_summary.add_summary(&rhs.item_summary, cx); + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathProgress<'a> { + fn zero(_: & as Summary>::Context) -> Self { + Self { + max_path: Path::new(""), + } + } + + fn add_summary( + &mut self, + summary: &'a PathSummary, + _: & as Summary>::Context, + ) { + self.max_path = summary.max_path.as_ref() + } +} + +impl sum_tree::Item for RepositoryEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.work_directory.path.clone(), + item_summary: Unit, + } + } +} + +impl sum_tree::KeyedItem for RepositoryEntry { + type Key 
= PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.work_directory.path.clone()) + } +} + +impl sum_tree::Summary for GitStatuses { + type Context = (); + + fn zero(_: &Self::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, rhs: &Self, _: &Self::Context) { + *self += *rhs; + } +} + +impl sum_tree::Item for StatusEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.repo_path.0.clone(), + item_summary: match self.status { + GitFileStatus::Added => GitStatuses { + added: 1, + ..Default::default() + }, + GitFileStatus::Modified => GitStatuses { + modified: 1, + ..Default::default() + }, + GitFileStatus::Conflict => GitStatuses { + conflict: 1, + ..Default::default() + }, + GitFileStatus::Deleted => Default::default(), + GitFileStatus::Untracked => GitStatuses { + untracked: 1, + ..Default::default() + }, + }, + } + } +} + +impl sum_tree::KeyedItem for StatusEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.repo_path.0.clone()) + } +} + +#[derive(Clone, Debug, Default, Copy, PartialEq, Eq)] +pub struct GitStatuses { + added: usize, + modified: usize, + conflict: usize, + untracked: usize, +} + +impl GitStatuses { + pub fn to_status(&self) -> Option { + if self.conflict > 0 { + Some(GitFileStatus::Conflict) + } else if self.modified > 0 { + Some(GitFileStatus::Modified) + } else if self.added > 0 || self.untracked > 0 { + Some(GitFileStatus::Added) + } else { + None + } + } +} + +impl std::ops::Add for GitStatuses { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + GitStatuses { + added: self.added + rhs.added, + modified: self.modified + rhs.modified, + conflict: self.conflict + rhs.conflict, + untracked: self.untracked + rhs.untracked, + } + } +} + +impl std::ops::AddAssign for GitStatuses { + fn add_assign(&mut self, rhs: Self) { + self.added += rhs.added; + self.modified += rhs.modified; + self.conflict += rhs.conflict; + 
self.untracked += rhs.untracked; + } +} + +impl std::ops::Sub for GitStatuses { + type Output = GitStatuses; + + fn sub(self, rhs: Self) -> Self::Output { + GitStatuses { + added: self.added - rhs.added, + modified: self.modified - rhs.modified, + conflict: self.conflict - rhs.conflict, + untracked: self.untracked - rhs.untracked, + } + } +} + +impl<'a> sum_tree::Dimension<'a, PathSummary> for GitStatuses { + fn zero(_cx: &()) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &()) { + *self += summary.item_summary + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathKey { + fn zero(_: &S::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &S::Context) { + self.0 = summary.max_path.clone(); + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for TraversalProgress<'a> { + fn zero(_cx: &S::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &S::Context) { + self.max_path = summary.max_path.as_ref(); + } +} + impl Entry { fn new( path: Arc, @@ -3430,9 +3869,9 @@ impl Entry { size: metadata.len, canonical_path, is_ignored: false, + is_always_included: false, is_external: false, is_private: false, - git_status: None, char_bag, is_fifo: metadata.is_fifo, } @@ -3449,10 +3888,6 @@ impl Entry { pub fn is_file(&self) -> bool { self.kind.is_file() } - - pub fn git_status(&self) -> Option { - self.git_status - } } impl EntryKind { @@ -3476,7 +3911,8 @@ impl sum_tree::Item for Entry { type Summary = EntrySummary; fn summary(&self, _cx: &()) -> Self::Summary { - let non_ignored_count = if self.is_ignored || self.is_external { + let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included + { 0 } else { 1 @@ -3491,22 +3927,12 @@ impl sum_tree::Item for Entry { non_ignored_file_count = 0; } - let mut statuses = GitStatuses::default(); - if let Some(status) = 
self.git_status { - match status { - GitFileStatus::Added => statuses.added = 1, - GitFileStatus::Modified => statuses.modified = 1, - GitFileStatus::Conflict => statuses.conflict = 1, - } - } - EntrySummary { max_path: self.path.clone(), count: 1, non_ignored_count, file_count, non_ignored_file_count, - statuses, } } } @@ -3526,7 +3952,6 @@ pub struct EntrySummary { non_ignored_count: usize, file_count: usize, non_ignored_file_count: usize, - statuses: GitStatuses, } impl Default for EntrySummary { @@ -3537,7 +3962,6 @@ impl Default for EntrySummary { non_ignored_count: 0, file_count: 0, non_ignored_file_count: 0, - statuses: Default::default(), } } } @@ -3555,7 +3979,6 @@ impl sum_tree::Summary for EntrySummary { self.non_ignored_count += rhs.non_ignored_count; self.file_count += rhs.file_count; self.non_ignored_file_count += rhs.non_ignored_file_count; - self.statuses += rhs.statuses; } } @@ -3659,7 +4082,7 @@ impl BackgroundScanner { // the git repository in an ancestor directory. Find any gitignore files // in ancestor directories. 
let root_abs_path = self.state.lock().snapshot.abs_path.clone(); - for (index, ancestor) in root_abs_path.ancestors().enumerate() { + for (index, ancestor) in root_abs_path.as_path().ancestors().enumerate() { if index != 0 { if let Ok(ignore) = build_gitignore(&ancestor.join(*GITIGNORE), self.fs.as_ref()).await @@ -3691,7 +4114,13 @@ impl BackgroundScanner { self.state.lock().insert_git_repository_for_path( Path::new("").into(), ancestor_dot_git.into(), - Some(root_abs_path.strip_prefix(ancestor).unwrap().into()), + Some( + root_abs_path + .as_path() + .strip_prefix(ancestor) + .unwrap() + .into(), + ), self.fs.as_ref(), self.watcher.as_ref(), ); @@ -3710,12 +4139,12 @@ impl BackgroundScanner { if let Some(mut root_entry) = state.snapshot.root_entry().cloned() { let ignore_stack = state .snapshot - .ignore_stack_for_abs_path(&root_abs_path, true); - if ignore_stack.is_abs_path_ignored(&root_abs_path, true) { + .ignore_stack_for_abs_path(root_abs_path.as_path(), true); + if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()); } - state.enqueue_scan_dir(root_abs_path, &root_entry, &scan_job_tx); + state.enqueue_scan_dir(root_abs_path.into(), &root_entry, &scan_job_tx); } }; @@ -3765,7 +4194,7 @@ impl BackgroundScanner { { let mut state = self.state.lock(); state.path_prefixes_to_scan.insert(path_prefix.clone()); - state.snapshot.abs_path.join(&path_prefix) + state.snapshot.abs_path.as_path().join(&path_prefix) }; if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() { @@ -3792,7 +4221,7 @@ impl BackgroundScanner { self.forcibly_load_paths(&request.relative_paths).await; let root_path = self.state.lock().snapshot.abs_path.clone(); - let root_canonical_path = match self.fs.canonicalize(&root_path).await { + let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await { Ok(path) => path, Err(err) => { 
log::error!("failed to canonicalize root path: {}", err); @@ -3821,7 +4250,7 @@ impl BackgroundScanner { } self.reload_entries_for_paths( - root_path, + root_path.into(), root_canonical_path, &request.relative_paths, abs_paths, @@ -3834,7 +4263,7 @@ impl BackgroundScanner { async fn process_events(&self, mut abs_paths: Vec) { let root_path = self.state.lock().snapshot.abs_path.clone(); - let root_canonical_path = match self.fs.canonicalize(&root_path).await { + let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await { Ok(path) => path, Err(err) => { let new_path = self @@ -3844,21 +4273,20 @@ impl BackgroundScanner { .root_file_handle .clone() .and_then(|handle| handle.current_path(&self.fs).log_err()) - .filter(|new_path| **new_path != *root_path); + .map(SanitizedPath::from) + .filter(|new_path| *new_path != root_path); if let Some(new_path) = new_path.as_ref() { log::info!( "root renamed from {} to {}", - root_path.display(), - new_path.display() + root_path.as_path().display(), + new_path.as_path().display() ) } else { log::warn!("root path could not be canonicalized: {}", err); } self.status_updates_tx - .unbounded_send(ScanState::RootUpdated { - new_path: new_path.map(|p| p.into()), - }) + .unbounded_send(ScanState::RootUpdated { new_path }) .ok(); return; } @@ -3890,7 +4318,7 @@ impl BackgroundScanner { } else if fsmonitor_parse_state == Some(FsMonitorParseState::Cookies) && file_name == Some(*FSMONITOR_DAEMON) { fsmonitor_parse_state = Some(FsMonitorParseState::FsMonitor); false - } else if fsmonitor_parse_state != Some(FsMonitorParseState::FsMonitor) && file_name == Some(*DOT_GIT) { + } else if fsmonitor_parse_state != Some(FsMonitorParseState::FsMonitor) && smol::block_on(is_git_dir(ancestor, self.fs.as_ref())) { true } else { fsmonitor_parse_state.take(); @@ -3953,7 +4381,7 @@ impl BackgroundScanner { let (scan_job_tx, scan_job_rx) = channel::unbounded(); log::debug!("received fs events {:?}", relative_paths); 
self.reload_entries_for_paths( - root_path, + root_path.into(), root_canonical_path, &relative_paths, abs_paths, @@ -3991,7 +4419,7 @@ impl BackgroundScanner { for ancestor in path.ancestors() { if let Some(entry) = state.snapshot.entry_for_path(ancestor) { if entry.kind == EntryKind::UnloadedDir { - let abs_path = root_path.join(ancestor); + let abs_path = root_path.as_path().join(ancestor); state.enqueue_scan_dir(abs_path.into(), entry, &scan_job_tx); state.paths_to_scan.insert(path.clone()); break; @@ -4118,7 +4546,6 @@ impl BackgroundScanner { let next_entry_id = self.next_entry_id.clone(); let mut ignore_stack = job.ignore_stack.clone(); - let mut containing_repository = job.containing_repository.clone(); let mut new_ignore = None; let mut root_canonical_path = None; let mut new_entries: Vec = Vec::new(); @@ -4155,16 +4582,9 @@ impl BackgroundScanner { self.watcher.as_ref(), ); - if let Some((work_directory, repository)) = repo { - let t0 = Instant::now(); - let statuses = repository - .status(&[PathBuf::from("")]) - .log_err() - .unwrap_or_default(); - log::trace!("computed git status in {:?}", t0.elapsed()); - containing_repository = Some(ScanJobContainingRepository { - work_directory, - statuses, + if let Some(local_repo) = repo { + self.update_git_statuses(UpdateGitStatusesJob { + local_repository: local_repo, }); } } else if child_name == *GITIGNORE { @@ -4244,6 +4664,7 @@ impl BackgroundScanner { if child_entry.is_dir() { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true); + child_entry.is_always_included = self.settings.is_path_always_included(&child_path); // Avoid recursing until crash in the case of a recursive symlink if job.ancestor_inodes.contains(&child_entry.inode) { @@ -4263,19 +4684,11 @@ impl BackgroundScanner { }, ancestor_inodes, scan_queue: job.scan_queue.clone(), - containing_repository: containing_repository.clone(), })); } } else { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, 
false); - if !child_entry.is_ignored { - if let Some(repo) = &containing_repository { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repo.work_directory) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = repo.statuses.get(&repo_path); - } - } - } + child_entry.is_always_included = self.settings.is_path_always_included(&child_path); } { @@ -4304,6 +4717,12 @@ impl BackgroundScanner { new_jobs.remove(job_ix); } } + if entry.is_always_included { + state + .snapshot + .always_included_entries + .push(entry.path.clone()); + } } state.populate_dir(&job.path, new_entries, new_ignore); @@ -4318,6 +4737,7 @@ impl BackgroundScanner { Ok(()) } + /// All list arguments should be sorted before calling this function async fn reload_entries_for_paths( &self, root_abs_path: Arc, @@ -4326,6 +4746,7 @@ impl BackgroundScanner { abs_paths: Vec, scan_queue_tx: Option>, ) { + // grab metadata for all requested paths let metadata = futures::future::join_all( abs_paths .iter() @@ -4373,28 +4794,76 @@ impl BackgroundScanner { // Group all relative paths by their git repository. 
let mut paths_by_git_repo = HashMap::default(); for relative_path in relative_paths.iter() { - if let Some((repo_entry, repo)) = state.snapshot.repo_for_path(relative_path) { - if let Ok(repo_path) = repo_entry.relativize(&state.snapshot, relative_path) { + let repository_data = state + .snapshot + .local_repo_for_path(relative_path) + .zip(state.snapshot.repository_for_path(relative_path)); + if let Some((local_repo, entry)) = repository_data { + if let Ok(repo_path) = local_repo.relativize(relative_path) { paths_by_git_repo - .entry(repo.dot_git_dir_abs_path.clone()) + .entry(local_repo.work_directory.clone()) .or_insert_with(|| RepoPaths { - repo: repo.repo_ptr.clone(), - repo_paths: Vec::new(), - relative_paths: Vec::new(), + entry: entry.clone(), + repo: local_repo.repo_ptr.clone(), + repo_paths: Default::default(), }) - .add_paths(relative_path, repo_path); + .add_path(repo_path); } } } - // Now call `git status` once per repository and collect each file's git status. - let mut git_statuses_by_relative_path = - paths_by_git_repo - .into_values() - .fold(HashMap::default(), |mut map, repo_paths| { - map.extend(repo_paths.into_git_file_statuses()); - map - }); + for (work_directory, mut paths) in paths_by_git_repo { + if let Ok(status) = paths.repo.status(&paths.repo_paths) { + let mut changed_path_statuses = Vec::new(); + let statuses = paths.entry.statuses_by_path.clone(); + let mut cursor = statuses.cursor::(&()); + + for (repo_path, status) in &*status.entries { + paths.remove_repo_path(repo_path); + if cursor.seek_forward(&PathTarget::Path(&repo_path), Bias::Left, &()) { + if cursor.item().unwrap().status == *status { + continue; + } + } + + changed_path_statuses.push(Edit::Insert(StatusEntry { + repo_path: repo_path.clone(), + status: *status, + })); + } + + let mut cursor = statuses.cursor::(&()); + for path in paths.repo_paths { + if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) { + 
changed_path_statuses.push(Edit::Remove(PathKey(path.0))); + } + } + + if !changed_path_statuses.is_empty() { + let work_directory_id = state.snapshot.repositories.update( + &work_directory.path_key(), + &(), + move |repository_entry| { + repository_entry + .statuses_by_path + .edit(changed_path_statuses, &()); + + repository_entry.work_directory_id + }, + ); + + if let Some(work_directory_id) = work_directory_id { + let scan_id = state.snapshot.scan_id; + state.snapshot.git_repositories.update( + &work_directory_id, + |local_repository_entry| { + local_repository_entry.status_scan_id = scan_id; + }, + ); + } + } + } + } for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) { let abs_path: Arc = root_abs_path.join(path).into(); @@ -4420,6 +4889,7 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = is_external; fs_entry.is_private = self.is_path_private(path); + fs_entry.is_always_included = self.settings.is_path_always_included(path); if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) { if state.should_scan_directory(&fs_entry) @@ -4432,10 +4902,6 @@ impl BackgroundScanner { } } - if !is_dir && !fs_entry.is_ignored && !fs_entry.is_external { - fs_entry.git_status = git_statuses_by_relative_path.remove(path); - } - state.insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()); } Ok(None) => { @@ -4455,18 +4921,19 @@ impl BackgroundScanner { ); } - fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> { + fn remove_repo_path(&self, path: &Arc, snapshot: &mut LocalSnapshot) -> Option<()> { if !path .components() .any(|component| component.as_os_str() == *DOT_GIT) { - if let Some(repository) = snapshot.repository_for_work_directory(path) { - let entry = repository.work_directory.0; - snapshot.git_repositories.remove(&entry); + if let Some(repository) = snapshot.repository(PathKey(path.clone())) { + snapshot + 
.git_repositories + .remove(&repository.work_directory_id); snapshot .snapshot - .repository_entries - .remove(&RepositoryWorkDirectory(path.into())); + .repositories + .remove(&PathKey(repository.work_directory.path.clone()), &()); return Some(()); } } @@ -4486,7 +4953,7 @@ impl BackgroundScanner { snapshot .ignores_by_parent_abs_path .retain(|parent_abs_path, (_, needs_update)| { - if let Ok(parent_path) = parent_abs_path.strip_prefix(&abs_path) { + if let Ok(parent_path) = parent_abs_path.strip_prefix(abs_path.as_path()) { if *needs_update { *needs_update = false; if snapshot.snapshot.entry_for_path(parent_path).is_some() { @@ -4565,8 +5032,11 @@ impl BackgroundScanner { let mut entries_by_id_edits = Vec::new(); let mut entries_by_path_edits = Vec::new(); - let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap(); - let repo = snapshot.repo_for_path(path); + let path = job + .abs_path + .strip_prefix(snapshot.abs_path.as_path()) + .unwrap(); + for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); @@ -4602,18 +5072,6 @@ impl BackgroundScanner { let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone(); path_entry.scan_id = snapshot.scan_id; path_entry.is_ignored = entry.is_ignored; - if !entry.is_dir() && !entry.is_ignored && !entry.is_external { - if let Some((ref repo_entry, local_repo)) = repo { - if let Ok(repo_path) = repo_entry.relativize(snapshot, &entry.path) { - let status = local_repo - .repo_ptr - .status(&[repo_path.0.clone()]) - .ok() - .and_then(|status| status.get(&repo_path)); - entry.git_status = status; - } - } - } entries_by_id_edits.push(Edit::Insert(path_entry)); entries_by_path_edits.push(Edit::Insert(entry)); } @@ -4658,7 +5116,7 @@ impl BackgroundScanner { } }); - let (work_directory, repository) = match existing_repository_entry { + let local_repository = match existing_repository_entry { None => { match 
state.insert_git_repository( dot_git_dir.into(), @@ -4669,45 +5127,36 @@ impl BackgroundScanner { None => continue, } } - Some((entry_id, repository)) => { - if repository.git_dir_scan_id == scan_id { + Some((entry_id, local_repository)) => { + if local_repository.git_dir_scan_id == scan_id { continue; } let Some(work_dir) = state .snapshot .entry_for_id(entry_id) - .map(|entry| RepositoryWorkDirectory(entry.path.clone())) + .map(|entry| entry.path.clone()) else { continue; }; - let repo = &repository.repo_ptr; - let branch = repo.branch_name(); - repo.reload_index(); + let branch = local_repository.repo_ptr.branch_name(); + local_repository.repo_ptr.reload_index(); - state - .snapshot - .git_repositories - .update(&entry_id, |entry| entry.git_dir_scan_id = scan_id); - state - .snapshot - .snapshot - .repository_entries - .update(&work_dir, |entry| entry.branch = branch.map(Into::into)); - (work_dir, repository.repo_ptr.clone()) + state.snapshot.git_repositories.update(&entry_id, |entry| { + entry.git_dir_scan_id = scan_id; + entry.status_scan_id = scan_id; + }); + state.snapshot.snapshot.repositories.update( + &PathKey(work_dir.clone()), + &(), + |entry| entry.branch = branch.map(Into::into), + ); + + local_repository } }; - repo_updates.push(UpdateGitStatusesJob { - location_in_repo: state - .snapshot - .repository_entries - .get(&work_directory) - .and_then(|repo| repo.location_in_repo.clone()) - .clone(), - work_directory, - repository, - }); + repo_updates.push(UpdateGitStatusesJob { local_repository }); } // Remove any git repositories whose .git entry no longer exists. 
@@ -4733,9 +5182,9 @@ impl BackgroundScanner { snapshot .git_repositories .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); - snapshot - .repository_entries - .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); + snapshot.repositories.retain(&(), |entry| { + ids_to_preserve.contains(&entry.work_directory_id) + }); } let (mut updates_done_tx, mut updates_done_rx) = barrier::channel(); @@ -4769,59 +5218,72 @@ impl BackgroundScanner { /// Update the git statuses for a given batch of entries. fn update_git_statuses(&self, job: UpdateGitStatusesJob) { - log::trace!("updating git statuses for repo {:?}", job.work_directory.0); + log::trace!( + "updating git statuses for repo {:?}", + job.local_repository.work_directory.path + ); let t0 = Instant::now(); - let Some(statuses) = job.repository.status(&[PathBuf::from("")]).log_err() else { + + let Some(statuses) = job + .local_repository + .repo() + .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) + .log_err() + else { return; }; log::trace!( "computed git statuses for repo {:?} in {:?}", - job.work_directory.0, + job.local_repository.work_directory.path, t0.elapsed() ); let t0 = Instant::now(); - let mut changes = Vec::new(); + let mut changed_paths = Vec::new(); let snapshot = self.state.lock().snapshot.snapshot.clone(); - for file in snapshot.traverse_from_path(true, false, false, job.work_directory.0.as_ref()) { - let Ok(repo_path) = file.path.strip_prefix(&job.work_directory.0) else { - break; - }; - let git_status = if let Some(location) = &job.location_in_repo { - statuses.get(&location.join(repo_path)) - } else { - statuses.get(repo_path) - }; - if file.git_status != git_status { - let mut entry = file.clone(); - entry.git_status = git_status; - changes.push((entry.path, git_status)); + + let Some(mut repository) = + snapshot.repository(job.local_repository.work_directory.path_key()) + else { + log::error!("Got an UpdateGitStatusesJob for a repository that isn't in the 
snapshot"); + debug_assert!(false); + return; + }; + + let mut new_entries_by_path = SumTree::new(&()); + for (repo_path, status) in statuses.entries.iter() { + let project_path = repository.work_directory.unrelativize(repo_path); + + new_entries_by_path.insert_or_replace( + StatusEntry { + repo_path: repo_path.clone(), + status: *status, + }, + &(), + ); + + if let Some(path) = project_path { + changed_paths.push(path); } } + repository.statuses_by_path = new_entries_by_path; let mut state = self.state.lock(); - let edits = changes - .iter() - .filter_map(|(path, git_status)| { - let entry = state.snapshot.entry_for_path(path)?.clone(); - Some(Edit::Insert(Entry { - git_status: *git_status, - ..entry.clone() - })) - }) - .collect(); + state + .snapshot + .repositories + .insert_or_replace(repository, &()); - // Apply the git status changes. util::extend_sorted( &mut state.changed_paths, - changes.iter().map(|p| p.0.clone()), + changed_paths, usize::MAX, Ord::cmp, ); - state.snapshot.entries_by_path.edit(edits, &()); + log::trace!( "applied git status updates for repo {:?} in {:?}", - job.work_directory.0, + job.local_repository.work_directory.path, t0.elapsed(), ); } @@ -4991,28 +5453,29 @@ fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { result } +#[derive(Debug)] struct RepoPaths { repo: Arc, - relative_paths: Vec>, - repo_paths: Vec, + entry: RepositoryEntry, + // sorted + repo_paths: Vec, } impl RepoPaths { - fn add_paths(&mut self, relative_path: &Arc, repo_path: RepoPath) { - self.relative_paths.push(relative_path.clone()); - self.repo_paths.push(repo_path.0); + fn add_path(&mut self, repo_path: RepoPath) { + match self.repo_paths.binary_search(&repo_path) { + Ok(_) => {} + Err(ix) => self.repo_paths.insert(ix, repo_path), + } } - fn into_git_file_statuses(self) -> HashMap, GitFileStatus> { - let mut statuses = HashMap::default(); - if let Ok(status) = self.repo.status(&self.repo_paths) { - for (repo_path, relative_path) in 
self.repo_paths.into_iter().zip(self.relative_paths) { - if let Some(path_status) = status.get(&repo_path) { - statuses.insert(relative_path, path_status); - } + fn remove_repo_path(&mut self, repo_path: &RepoPath) { + match self.repo_paths.binary_search(&repo_path) { + Ok(ix) => { + self.repo_paths.remove(ix); } + Err(_) => {} } - statuses } } @@ -5023,13 +5486,6 @@ struct ScanJob { scan_queue: Sender, ancestor_inodes: TreeSet, is_external: bool, - containing_repository: Option, -} - -#[derive(Clone)] -struct ScanJobContainingRepository { - work_directory: RepositoryWorkDirectory, - statuses: GitStatus, } struct UpdateIgnoreStatusJob { @@ -5040,9 +5496,7 @@ struct UpdateIgnoreStatusJob { } struct UpdateGitStatusesJob { - work_directory: RepositoryWorkDirectory, - location_in_repo: Option>, - repository: Arc, + local_repository: LocalRepositoryEntry, } pub trait WorktreeModelHandle { @@ -5215,44 +5669,166 @@ impl<'a> Default for TraversalProgress<'a> { } } -#[derive(Clone, Debug, Default, Copy)] -struct GitStatuses { - added: usize, - modified: usize, - conflict: usize, +#[derive(Debug, Clone, Copy)] +pub struct GitEntryRef<'a> { + pub entry: &'a Entry, + pub git_status: Option, } -impl AddAssign for GitStatuses { - fn add_assign(&mut self, rhs: Self) { - self.added += rhs.added; - self.modified += rhs.modified; - self.conflict += rhs.conflict; - } -} - -impl Sub for GitStatuses { - type Output = GitStatuses; - - fn sub(self, rhs: Self) -> Self::Output { - GitStatuses { - added: self.added - rhs.added, - modified: self.modified - rhs.modified, - conflict: self.conflict - rhs.conflict, +impl<'a> GitEntryRef<'a> { + pub fn to_owned(&self) -> GitEntry { + GitEntry { + entry: self.entry.clone(), + git_status: self.git_status, } } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses { - fn zero(_cx: &()) -> Self { - Default::default() - } +impl<'a> Deref for GitEntryRef<'a> { + type Target = Entry; - fn add_summary(&mut self, summary: &'a EntrySummary, _: 
&()) { - *self += summary.statuses + fn deref(&self) -> &Self::Target { + &self.entry } } +impl<'a> AsRef for GitEntryRef<'a> { + fn as_ref(&self) -> &Entry { + self.entry + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct GitEntry { + pub entry: Entry, + pub git_status: Option, +} + +impl GitEntry { + pub fn to_ref(&self) -> GitEntryRef { + GitEntryRef { + entry: &self.entry, + git_status: self.git_status, + } + } +} + +impl Deref for GitEntry { + type Target = Entry; + + fn deref(&self) -> &Self::Target { + &self.entry + } +} + +impl AsRef for GitEntry { + fn as_ref(&self) -> &Entry { + &self.entry + } +} + +/// Walks the worktree entries and their associated git statuses. +pub struct GitTraversal<'a> { + traversal: Traversal<'a>, + current_entry_status: Option, + repo_location: Option<( + &'a RepositoryEntry, + Cursor<'a, StatusEntry, PathProgress<'a>>, + )>, +} + +impl<'a> GitTraversal<'a> { + fn synchronize_statuses(&mut self, reset: bool) { + self.current_entry_status = None; + + let Some(entry) = self.traversal.cursor.item() else { + return; + }; + + let Some(repo) = self.traversal.snapshot.repository_for_path(&entry.path) else { + self.repo_location = None; + return; + }; + + // Update our state if we changed repositories. 
+ if reset || self.repo_location.as_ref().map(|(prev_repo, _)| prev_repo) != Some(&repo) { + self.repo_location = Some((repo, repo.statuses_by_path.cursor::(&()))); + } + + let Some((repo, statuses)) = &mut self.repo_location else { + return; + }; + + let repo_path = repo.relativize(&entry.path).unwrap(); + + if entry.is_dir() { + let mut statuses = statuses.clone(); + statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()); + let summary: GitStatuses = + statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &()); + + self.current_entry_status = summary.to_status(); + } else if entry.is_file() { + // For a file entry, park the cursor on the corresponding status + if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) { + self.current_entry_status = Some(statuses.item().unwrap().status); + } + } + } + + pub fn advance(&mut self) -> bool { + self.advance_by(1) + } + + pub fn advance_by(&mut self, count: usize) -> bool { + let found = self.traversal.advance_by(count); + self.synchronize_statuses(false); + found + } + + pub fn advance_to_sibling(&mut self) -> bool { + let found = self.traversal.advance_to_sibling(); + self.synchronize_statuses(false); + found + } + + pub fn back_to_parent(&mut self) -> bool { + let found = self.traversal.back_to_parent(); + self.synchronize_statuses(true); + found + } + + pub fn start_offset(&self) -> usize { + self.traversal.start_offset() + } + + pub fn end_offset(&self) -> usize { + self.traversal.end_offset() + } + + pub fn entry(&self) -> Option> { + Some(GitEntryRef { + entry: self.traversal.cursor.item()?, + git_status: self.current_entry_status, + }) + } +} + +impl<'a> Iterator for GitTraversal<'a> { + type Item = GitEntryRef<'a>; + fn next(&mut self) -> Option { + if let Some(item) = self.entry() { + self.advance(); + Some(item) + } else { + None + } + } +} + +#[derive(Debug)] pub struct Traversal<'a> { + snapshot: &'a Snapshot, cursor: sum_tree::Cursor<'a, 
Entry, TraversalProgress<'a>>, include_ignored: bool, include_files: bool, @@ -5261,15 +5837,16 @@ pub struct Traversal<'a> { impl<'a> Traversal<'a> { fn new( - entries: &'a SumTree, + snapshot: &'a Snapshot, include_files: bool, include_dirs: bool, include_ignored: bool, start_path: &Path, ) -> Self { - let mut cursor = entries.cursor(&()); - cursor.seek(&TraversalTarget::Path(start_path), Bias::Left, &()); + let mut cursor = snapshot.entries_by_path.cursor(&()); + cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &()); let mut traversal = Self { + snapshot, cursor, include_files, include_dirs, @@ -5280,6 +5857,17 @@ impl<'a> Traversal<'a> { } traversal } + + pub fn with_git_statuses(self) -> GitTraversal<'a> { + let mut this = GitTraversal { + traversal: self, + current_entry_status: None, + repo_location: None, + }; + this.synchronize_statuses(true); + this + } + pub fn advance(&mut self) -> bool { self.advance_by(1) } @@ -5299,15 +5887,12 @@ impl<'a> Traversal<'a> { pub fn advance_to_sibling(&mut self) -> bool { while let Some(entry) = self.cursor.item() { - self.cursor.seek_forward( - &TraversalTarget::PathSuccessor(&entry.path), - Bias::Left, - &(), - ); + self.cursor + .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &()); if let Some(entry) = self.cursor.item() { if (self.include_files || !entry.is_file()) && (self.include_dirs || !entry.is_dir()) - && (self.include_ignored || !entry.is_ignored) + && (self.include_ignored || !entry.is_ignored || entry.is_always_included) { return true; } @@ -5321,7 +5906,7 @@ impl<'a> Traversal<'a> { return false; }; self.cursor - .seek(&TraversalTarget::Path(parent_path), Bias::Left, &()) + .seek(&TraversalTarget::path(parent_path), Bias::Left, &()) } pub fn entry(&self) -> Option<&'a Entry> { @@ -5354,10 +5939,58 @@ impl<'a> Iterator for Traversal<'a> { } } +#[derive(Debug, Clone, Copy)] +enum PathTarget<'a> { + Path(&'a Path), + Successor(&'a Path), + Contains(&'a Path), +} + +impl<'a> 
PathTarget<'a> { + fn cmp_path(&self, other: &Path) -> Ordering { + match self { + PathTarget::Path(path) => path.cmp(&other), + PathTarget::Successor(path) => { + if other.starts_with(path) { + Ordering::Greater + } else { + Ordering::Equal + } + } + PathTarget::Contains(path) => { + if path.starts_with(other) { + Ordering::Equal + } else { + Ordering::Greater + } + } + } + } +} + +impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary, PathProgress<'a>> for PathTarget<'b> { + fn cmp(&self, cursor_location: &PathProgress<'a>, _: &S::Context) -> Ordering { + self.cmp_path(&cursor_location.max_path) + } +} + +impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for PathTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &S::Context) -> Ordering { + self.cmp_path(&cursor_location.max_path) + } +} + +impl<'a, 'b> SeekTarget<'a, PathSummary, (TraversalProgress<'a>, GitStatuses)> + for PathTarget<'b> +{ + fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering { + self.cmp_path(&cursor_location.0.max_path) + } +} + #[derive(Debug)] enum TraversalTarget<'a> { - Path(&'a Path), - PathSuccessor(&'a Path), + Path(PathTarget<'a>), Count { count: usize, include_files: bool, @@ -5366,17 +5999,18 @@ enum TraversalTarget<'a> { }, } -impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> { - fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { +impl<'a> TraversalTarget<'a> { + fn path(path: &'a Path) -> Self { + Self::Path(PathTarget::Path(path)) + } + + fn successor(path: &'a Path) -> Self { + Self::Path(PathTarget::Successor(path)) + } + + fn cmp_progress(&self, progress: &TraversalProgress) -> Ordering { match self { - TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path), - TraversalTarget::PathSuccessor(path) => { - if cursor_location.max_path.starts_with(path) { - Ordering::Greater - } else { - Ordering::Equal - } - } + 
TraversalTarget::Path(path) => path.cmp_path(&progress.max_path), TraversalTarget::Count { count, include_files, @@ -5384,17 +6018,21 @@ impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTa include_ignored, } => Ord::cmp( count, - &cursor_location.count(*include_files, *include_dirs, *include_ignored), + &progress.count(*include_files, *include_dirs, *include_ignored), ), } } } -impl<'a, 'b> SeekTarget<'a, EntrySummary, (TraversalProgress<'a>, GitStatuses)> - for TraversalTarget<'b> -{ - fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering { - self.cmp(&cursor_location.0, &()) +impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { + self.cmp_progress(cursor_location) + } +} + +impl<'a, 'b> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for TraversalTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { + self.cmp_progress(cursor_location) } } @@ -5403,6 +6041,20 @@ pub struct ChildEntriesIter<'a> { traversal: Traversal<'a>, } +impl<'a> ChildEntriesIter<'a> { + pub fn with_git_statuses(self) -> ChildEntriesGitIter<'a> { + ChildEntriesGitIter { + parent_path: self.parent_path, + traversal: self.traversal.with_git_statuses(), + } + } +} + +pub struct ChildEntriesGitIter<'a> { + parent_path: &'a Path, + traversal: GitTraversal<'a>, +} + impl<'a> Iterator for ChildEntriesIter<'a> { type Item = &'a Entry; @@ -5417,6 +6069,20 @@ impl<'a> Iterator for ChildEntriesIter<'a> { } } +impl<'a> Iterator for ChildEntriesGitIter<'a> { + type Item = GitEntryRef<'a>; + + fn next(&mut self) -> Option { + if let Some(item) = self.traversal.entry() { + if item.path.starts_with(self.parent_path) { + self.traversal.advance_to_sibling(); + return Some(item); + } + } + None + } +} + impl<'a> From<&'a Entry> for proto::Entry { fn from(entry: &'a Entry) -> Self { Self { @@ 
-5427,7 +6093,6 @@ impl<'a> From<&'a Entry> for proto::Entry { mtime: entry.mtime.map(|time| time.into()), is_ignored: entry.is_ignored, is_external: entry.is_external, - git_status: entry.git_status.map(git_status_to_proto), is_fifo: entry.is_fifo, size: Some(entry.size), canonical_path: entry @@ -5438,10 +6103,12 @@ impl<'a> From<&'a Entry> for proto::Entry { } } -impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry { +impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry { type Error = anyhow::Error; - fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result { + fn try_from( + (root_char_bag, always_included, entry): (&'a CharBag, &PathMatcher, proto::Entry), + ) -> Result { let kind = if entry.is_dir { EntryKind::Dir } else { @@ -5452,7 +6119,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry { Ok(Entry { id: ProjectEntryId::from_proto(entry.id), kind, - path, + path: path.clone(), inode: entry.inode, mtime: entry.mtime.map(|time| time.into()), size: entry.size.unwrap_or(0), @@ -5460,8 +6127,8 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry { .canonical_path .map(|path_string| Box::from(Path::new(&path_string))), is_ignored: entry.is_ignored, + is_always_included: always_included.is_match(path.as_ref()), is_external: entry.is_external, - git_status: git_status_from_proto(entry.git_status), is_private: false, char_bag, is_fifo: entry.is_fifo, @@ -5475,6 +6142,7 @@ fn git_status_from_proto(git_status: Option) -> Option { proto::GitStatus::Added => GitFileStatus::Added, proto::GitStatus::Modified => GitFileStatus::Modified, proto::GitStatus::Conflict => GitFileStatus::Conflict, + proto::GitStatus::Deleted => GitFileStatus::Deleted, }) }) } @@ -5484,6 +6152,8 @@ fn git_status_to_proto(status: GitFileStatus) -> i32 { GitFileStatus::Added => proto::GitStatus::Added as i32, GitFileStatus::Modified => proto::GitStatus::Modified as i32, GitFileStatus::Conflict => proto::GitStatus::Conflict as i32, + 
GitFileStatus::Deleted => proto::GitStatus::Deleted as i32, + GitFileStatus::Untracked => proto::GitStatus::Added as i32, // TODO } } diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 32851d963a..9535264c92 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -9,6 +9,7 @@ use util::paths::PathMatcher; #[derive(Clone, PartialEq, Eq)] pub struct WorktreeSettings { + pub file_scan_inclusions: PathMatcher, pub file_scan_exclusions: PathMatcher, pub private_files: PathMatcher, } @@ -21,18 +22,25 @@ impl WorktreeSettings { pub fn is_path_excluded(&self, path: &Path) -> bool { path.ancestors() - .any(|ancestor| self.file_scan_exclusions.is_match(ancestor)) + .any(|ancestor| self.file_scan_exclusions.is_match(&ancestor)) + } + + pub fn is_path_always_included(&self, path: &Path) -> bool { + path.ancestors() + .any(|ancestor| self.file_scan_inclusions.is_match(&ancestor)) } } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct WorktreeSettingsContent { - /// Completely ignore files matching globs from `file_scan_exclusions` + /// Completely ignore files matching globs from `file_scan_exclusions`. Overrides + /// `file_scan_inclusions`. /// /// Default: [ /// "**/.git", /// "**/.svn", /// "**/.hg", + /// "**/.jj", /// "**/CVS", /// "**/.DS_Store", /// "**/Thumbs.db", @@ -42,6 +50,15 @@ pub struct WorktreeSettingsContent { #[serde(default)] pub file_scan_exclusions: Option>, + /// Always include files that match these globs when scanning for files, even if they're + /// ignored by git. This setting is overridden by `file_scan_exclusions`. + /// Default: [ + /// ".env*", + /// "docker-compose.*.yml", + /// ] + #[serde(default)] + pub file_scan_inclusions: Option>, + /// Treat the files matching these globs as `.env` files. 
/// Default: [ "**/.env*" ] pub private_files: Option>, @@ -59,11 +76,27 @@ impl Settings for WorktreeSettings { let result: WorktreeSettingsContent = sources.json_merge()?; let mut file_scan_exclusions = result.file_scan_exclusions.unwrap_or_default(); let mut private_files = result.private_files.unwrap_or_default(); + let mut parsed_file_scan_inclusions: Vec = result + .file_scan_inclusions + .unwrap_or_default() + .iter() + .flat_map(|glob| { + Path::new(glob) + .ancestors() + .map(|a| a.to_string_lossy().into()) + }) + .filter(|p| p != "") + .collect(); file_scan_exclusions.sort(); private_files.sort(); + parsed_file_scan_inclusions.sort(); Ok(Self { file_scan_exclusions: path_matchers(&file_scan_exclusions, "file_scan_exclusions")?, private_files: path_matchers(&private_files, "private_files")?, + file_scan_inclusions: path_matchers( + &parsed_file_scan_inclusions, + "file_scan_inclusions", + )?, }) } } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 75f86fa606..eebb5f9360 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -12,7 +12,13 @@ use pretty_assertions::assert_eq; use rand::prelude::*; use serde_json::json; use settings::{Settings, SettingsStore}; -use std::{env, fmt::Write, mem, path::Path, sync::Arc}; +use std::{ + env, + fmt::Write, + mem, + path::{Path, PathBuf}, + sync::Arc, +}; use util::{test::temp_tree, ResultExt}; #[gpui::test] @@ -532,14 +538,20 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) { assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1); }); + let path = PathBuf::from("/root/one/node_modules/c/lib"); + // No work happens when files and directories change within an unloaded directory. 
let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count(); - fs.create_dir("/root/one/node_modules/c/lib".as_ref()) - .await - .unwrap(); + // When we open a directory, we check each ancestor whether it's a git + // repository. That means we have an fs.metadata call per ancestor that we + // need to subtract here. + let ancestors = path.ancestors().count(); + + fs.create_dir(path.as_ref()).await.unwrap(); cx.executor().run_until_parked(); + assert_eq!( - fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count, + fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors, 0 ); } @@ -842,8 +854,8 @@ async fn test_write_file(cx: &mut TestAppContext) { .await .unwrap(); - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - fs::linux_watcher::global(|_| {}).unwrap(); + #[cfg(not(target_os = "macos"))] + fs::fs_watcher::global(|_| {}).unwrap(); cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; @@ -878,6 +890,211 @@ async fn test_write_file(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_file_scan_inclusions(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".gitignore": "**/target\n/node_modules\ntop_level.txt\n", + "target": { + "index": "blah2" + }, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + "bar": { + "bar.rs": "// bar", + }, + "lib.rs": "mod foo;\nmod bar;\n", + }, + "top_level.txt": "top level file", + ".DS_Store": "", + })); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(vec![]); + project_settings.file_scan_inclusions = Some(vec![ + "node_modules/**/package.json".to_string(), + "**/.DS_Store".to_string(), + ]); + }); + }); + }); + + let tree = 
Worktree::local( + dir.path(), + true, + Arc::new(RealFs::default()), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + tree.read_with(cx, |tree, _| { + // Assert that file_scan_inclusions overrides file_scan_exclusions. + check_worktree_entries( + tree, + &[], + &["target", "node_modules"], + &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + &[ + "node_modules/prettier/package.json", + ".DS_Store", + "node_modules/.DS_Store", + "src/.DS_Store", + ], + ) + }); +} + +#[gpui::test] +async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".gitignore": "**/target\n/node_modules\n", + "target": { + "index": "blah2" + }, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + }, + ".DS_Store": "", + })); + + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(vec!["**/.DS_Store".to_string()]); + project_settings.file_scan_inclusions = Some(vec!["**/.DS_Store".to_string()]); + }); + }); + }); + + let tree = Worktree::local( + dir.path(), + true, + Arc::new(RealFs::default()), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + tree.read_with(cx, |tree, _| { + // Assert that file_scan_inclusions overrides file_scan_exclusions. 
+ check_worktree_entries( + tree, + &[".DS_Store, src/.DS_Store"], + &["target", "node_modules"], + &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"], + &[], + ) + }); +} + +#[gpui::test] +async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".gitignore": "**/target\n/node_modules/\n", + "target": { + "index": "blah2" + }, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + }, + ".DS_Store": "", + })); + + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(vec![]); + project_settings.file_scan_inclusions = Some(vec!["node_modules/**".to_string()]); + }); + }); + }); + let tree = Worktree::local( + dir.path(), + true, + Arc::new(RealFs::default()), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + assert!(tree + .entry_for_path("node_modules") + .is_some_and(|f| f.is_always_included)); + assert!(tree + .entry_for_path("node_modules/prettier/package.json") + .is_some_and(|f| f.is_always_included)); + }); + + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(vec![]); + project_settings.file_scan_inclusions = Some(vec![]); + }); + }); + }); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + assert!(tree + .entry_for_path("node_modules") + .is_some_and(|f| !f.is_always_included)); + assert!(tree + 
.entry_for_path("node_modules/prettier/package.json") + .is_some_and(|f| !f.is_always_included)); + }); +} + #[gpui::test] async fn test_file_scan_exclusions(cx: &mut TestAppContext) { init_test(cx); @@ -939,6 +1156,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { ], &["target", "node_modules"], &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + &[], ) }); @@ -970,6 +1188,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { "src/.DS_Store", ".DS_Store", ], + &[], ) }); } @@ -1051,6 +1270,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { "src/bar/bar.rs", ".gitignore", ], + &[], ) }); @@ -1111,6 +1331,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { "src/new_file", ".gitignore", ], + &[], ) }); } @@ -1140,14 +1361,14 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) { .await; tree.flush_fs_events(cx).await; tree.read_with(cx, |tree, _| { - check_worktree_entries(tree, &[], &["HEAD", "foo"], &[]) + check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[]) }); std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents") .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}")); tree.flush_fs_events(cx).await; tree.read_with(cx, |tree, _| { - check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[]) + check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[]) }); } @@ -1180,8 +1401,12 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { let snapshot = Arc::new(Mutex::new(tree.snapshot())); tree.observe_updates(0, cx, { let snapshot = snapshot.clone(); + let settings = tree.settings().clone(); move |update| { - snapshot.lock().apply_remote_update(update).unwrap(); + snapshot + .lock() + .apply_remote_update(update, &settings.file_scan_inclusions) + .unwrap(); async { true } } }); @@ -1272,7 +1497,8 @@ async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) { 
cx.executor().run_until_parked(); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); - check_propagated_statuses( + + check_git_statuses( &snapshot, &[ (Path::new(""), Some(GitFileStatus::Modified)), @@ -1474,12 +1700,14 @@ async fn test_random_worktree_operations_during_initial_scan( snapshot }); + let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings()); + for (i, snapshot) in snapshots.into_iter().enumerate().rev() { let mut updated_snapshot = snapshot.clone(); for update in updates.lock().iter() { if update.scan_id >= updated_snapshot.scan_id() as u64 { updated_snapshot - .apply_remote_update(update.clone()) + .apply_remote_update(update.clone(), &settings.file_scan_inclusions) .unwrap(); } } @@ -1610,10 +1838,14 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) ); } + let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings()); + for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() { for update in updates.lock().iter() { if update.scan_id >= prev_snapshot.scan_id() as u64 { - prev_snapshot.apply_remote_update(update.clone()).unwrap(); + prev_snapshot + .apply_remote_update(update.clone(), &settings.file_scan_inclusions) + .unwrap(); } } @@ -1947,15 +2179,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { cx.read(|cx| { let tree = tree.read(cx); - let (work_dir, _) = tree.repositories().next().unwrap(); - assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); + let repo = tree.repositories().next().unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("projects/project1")); assert_eq!( tree.status_for_file(Path::new("projects/project1/a")), Some(GitFileStatus::Modified) ); assert_eq!( tree.status_for_file(Path::new("projects/project1/b")), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -1968,15 +2200,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { cx.read(|cx| { let tree = 
tree.read(cx); - let (work_dir, _) = tree.repositories().next().unwrap(); - assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); + let repo = tree.repositories().next().unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("projects/project2")); assert_eq!( tree.status_for_file(Path::new("projects/project2/a")), Some(GitFileStatus::Modified) ); assert_eq!( tree.status_for_file(Path::new("projects/project2/b")), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); } @@ -2022,23 +2254,13 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); - let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); - assert_eq!( - entry - .work_directory(tree) - .map(|directory| directory.as_ref().to_owned()), - Some(Path::new("dir1").to_owned()) - ); + let repo = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("dir1")); - let entry = tree + let repo = tree .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) .unwrap(); - assert_eq!( - entry - .work_directory(tree) - .map(|directory| directory.as_ref().to_owned()), - Some(Path::new("dir1/deps/dep1").to_owned()) - ); + assert_eq!(repo.path.as_ref(), Path::new("dir1/deps/dep1")); let entries = tree.files(false, 0); @@ -2047,10 +2269,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { .map(|(entry, repo)| { ( entry.path.as_ref(), - repo.and_then(|repo| { - repo.work_directory(tree) - .map(|work_directory| work_directory.0.to_path_buf()) - }), + repo.map(|repo| repo.path.to_path_buf()), ) }) .collect::>(); @@ -2103,7 +2322,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_git_status(cx: &mut TestAppContext) { +async fn test_file_status(cx: &mut TestAppContext) { init_test(cx); cx.executor().allow_parking(); const IGNORE_RULE: &str = "**/target"; @@ -2162,17 +2381,17 @@ async fn 
test_git_status(cx: &mut TestAppContext) { tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); assert_eq!(snapshot.repositories().count(), 1); - let (dir, repo_entry) = snapshot.repositories().next().unwrap(); - assert_eq!(dir.as_ref(), Path::new("project")); + let repo_entry = snapshot.repositories().next().unwrap(); + assert_eq!(repo_entry.path.as_ref(), Path::new("project")); assert!(repo_entry.location_in_repo.is_none()); assert_eq!( snapshot.status_for_file(project_path.join(B_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!( snapshot.status_for_file(project_path.join(F_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2202,7 +2421,7 @@ async fn test_git_status(cx: &mut TestAppContext) { let snapshot = tree.snapshot(); assert_eq!( snapshot.status_for_file(project_path.join(F_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); @@ -2224,7 +2443,7 @@ async fn test_git_status(cx: &mut TestAppContext) { assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); assert_eq!( snapshot.status_for_file(project_path.join(B_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!( snapshot.status_for_file(project_path.join(E_TXT)), @@ -2263,7 +2482,7 @@ async fn test_git_status(cx: &mut TestAppContext) { let snapshot = tree.snapshot(); assert_eq!( snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2287,11 +2506,125 @@ async fn test_git_status(cx: &mut TestAppContext) { .join(Path::new(renamed_dir_name)) .join(RENAMED_FILE) ), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); } +#[gpui::test] +async fn test_git_repository_status(cx: &mut TestAppContext) { + init_test(cx); + 
cx.executor().allow_parking(); + + let root = temp_tree(json!({ + "project": { + "a.txt": "a", // Modified + "b.txt": "bb", // Added + "c.txt": "ccc", // Unchanged + "d.txt": "dddd", // Deleted + }, + + })); + + // Set up git repository before creating the worktree. + let work_dir = root.path().join("project"); + let repo = git_init(work_dir.as_path()); + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_add("d.txt", &repo); + git_commit("Initial commit", &repo); + std::fs::remove_file(work_dir.join("d.txt")).unwrap(); + std::fs::write(work_dir.join("a.txt"), "aa").unwrap(); + + let tree = Worktree::local( + root.path(), + true, + Arc::new(RealFs::default()), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + // Check that the right git state is observed on startup + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repo = snapshot.repositories().next().unwrap(); + let entries = repo.status().collect::>(); + + assert_eq!(entries.len(), 3); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].status, GitFileStatus::Modified); + assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt")); + assert_eq!(entries[1].status, GitFileStatus::Untracked); + assert_eq!(entries[2].repo_path.as_ref(), Path::new("d.txt")); + assert_eq!(entries[2].status, GitFileStatus::Deleted); + }); + + std::fs::write(work_dir.join("c.txt"), "some changes").unwrap(); + eprintln!("File c.txt has been modified"); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repository = snapshot.repositories().next().unwrap(); + let entries = repository.status().collect::>(); + + 
std::assert_eq!(entries.len(), 4, "entries: {entries:?}"); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].status, GitFileStatus::Modified); + assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt")); + assert_eq!(entries[1].status, GitFileStatus::Untracked); + // Status updated + assert_eq!(entries[2].repo_path.as_ref(), Path::new("c.txt")); + assert_eq!(entries[2].status, GitFileStatus::Modified); + assert_eq!(entries[3].repo_path.as_ref(), Path::new("d.txt")); + assert_eq!(entries[3].status, GitFileStatus::Deleted); + }); + + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_remove_index(Path::new("d.txt"), &repo); + git_commit("Another commit", &repo); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + std::fs::remove_file(work_dir.join("a.txt")).unwrap(); + std::fs::remove_file(work_dir.join("b.txt")).unwrap(); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repo = snapshot.repositories().next().unwrap(); + let entries = repo.status().collect::>(); + + // Deleting an untracked entry, b.txt, should leave no status + // a.txt was tracked, and so should have a status + assert_eq!( + entries.len(), + 1, + "Entries length was incorrect\n{:#?}", + &entries + ); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].status, GitFileStatus::Deleted); + }); +} + #[gpui::test] async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { init_test(cx); @@ -2344,22 +2677,22 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); assert_eq!(snapshot.repositories().count(), 1); - let (dir, repo_entry) = 
snapshot.repositories().next().unwrap(); + let repo = snapshot.repositories().next().unwrap(); // Path is blank because the working directory of // the git repository is located at the root of the project - assert_eq!(dir.as_ref(), Path::new("")); + assert_eq!(repo.path.as_ref(), Path::new("")); // This is the missing path between the root of the project (sub-folder-2) and its // location relative to the root of the repository. assert_eq!( - repo_entry.location_in_repo, + repo.location_in_repo, Some(Arc::from(Path::new("sub-folder-1/sub-folder-2"))) ); assert_eq!(snapshot.status_for_file("c.txt"), None); assert_eq!( snapshot.status_for_file("d/e.txt"), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2381,6 +2714,93 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_traverse_with_git_status(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar", + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z.txt": "sneaky..." 
+ }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[ + (Path::new("x2.txt"), GitFileStatus::Modified), + (Path::new("z.txt"), GitFileStatus::Added), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/y/.git"), + &[(Path::new("y1.txt"), GitFileStatus::Conflict)], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Added)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + let mut traversal = snapshot + .traverse_from_path(true, false, true, Path::new("x")) + .with_git_statuses(); + + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Modified)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Conflict)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/z.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Added)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), 
Path::new("z/z2.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Added)); +} + #[gpui::test] async fn test_propagate_git_statuses(cx: &mut TestAppContext) { init_test(cx); @@ -2407,7 +2827,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { "h1.txt": "", "h2.txt": "" }, - }), ) .await; @@ -2437,7 +2856,16 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { cx.executor().run_until_parked(); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); - check_propagated_statuses( + check_git_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), + (Path::new("g"), Some(GitFileStatus::Conflict)), + (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + check_git_statuses( &snapshot, &[ (Path::new(""), Some(GitFileStatus::Conflict)), @@ -2454,7 +2882,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ], ); - check_propagated_statuses( + check_git_statuses( &snapshot, &[ (Path::new("a/b"), Some(GitFileStatus::Added)), @@ -2469,7 +2897,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ], ); - check_propagated_statuses( + check_git_statuses( &snapshot, &[ (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), @@ -2481,23 +2909,284 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ); } -#[track_caller] -fn check_propagated_statuses( - snapshot: &Snapshot, - expected_statuses: &[(&Path, Option)], -) { - let mut entries = expected_statuses - .iter() - .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone()) - .collect::>(); - snapshot.propagate_git_statuses(&mut entries); - assert_eq!( - entries - .iter() - .map(|e| (e.path.as_ref(), e.git_status)) - .collect::>(), - expected_statuses +#[gpui::test] +async fn test_propagate_statuses_for_repos_under_project(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", 
+ "x2.txt": "bar" + }, + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[(Path::new("x1.txt"), GitFileStatus::Added)], ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/y/.git"), + &[ + (Path::new("y1.txt"), GitFileStatus::Conflict), + (Path::new("y2.txt"), GitFileStatus::Modified), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Modified)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("y"), Some(GitFileStatus::Conflict)), + (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("z"), Some(GitFileStatus::Modified)), + (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + (Path::new("x/x2.txt"), None), + (Path::new("y"), Some(GitFileStatus::Conflict)), + (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("y/y2.txt"), 
Some(GitFileStatus::Modified)), + (Path::new("z"), Some(GitFileStatus::Modified)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)), + ], + ); +} + +#[gpui::test] +async fn test_propagate_statuses_for_nested_repos(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar", + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z.txt": "sneaky..." + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[ + (Path::new("x2.txt"), GitFileStatus::Modified), + (Path::new("z.txt"), GitFileStatus::Added), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/y/.git"), + &[(Path::new("y1.txt"), GitFileStatus::Conflict)], + ); + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Added)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + // Sanity check the propagation for x/y and z + check_git_statuses( + &snapshot, + &[ + (Path::new("x/y"), Some(GitFileStatus::Conflict)), // the y git repository has conflict file in it, and so should have a conflict status + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y2.txt"), None), + ], + ); + check_git_statuses( + &snapshot, + &[ + (Path::new("z"), Some(GitFileStatus::Added)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Added)), + ], + ); + + // Test one of the fundamental 
cases of propagation blocking, the transition from one git repository to another + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + // Sanity check everything around it + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Modified)), + (Path::new("x/x1.txt"), None), + (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y2.txt"), None), + (Path::new("x/z.txt"), Some(GitFileStatus::Added)), + ], + ); + + // Test the other fundamental case, transitioning from git repository to non-git repository + check_git_statuses( + &snapshot, + &[ + (Path::new(""), None), + (Path::new("x"), Some(GitFileStatus::Modified)), + (Path::new("x/x1.txt"), None), + ], + ); + + // And all together now + check_git_statuses( + &snapshot, + &[ + (Path::new(""), None), + (Path::new("x"), Some(GitFileStatus::Modified)), + (Path::new("x/x1.txt"), None), + (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y2.txt"), None), + (Path::new("x/z.txt"), Some(GitFileStatus::Added)), + (Path::new("z"), Some(GitFileStatus::Added)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Added)), + ], + ); +} + +#[gpui::test] +async fn test_private_single_file_worktree(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"})) + .await; + let tree = Worktree::local( + Path::new("/.env"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| 
tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.read_with(cx, |tree, _| { + let entry = tree.entry_for_path("").unwrap(); + assert!(entry.is_private); + }); +} + +#[track_caller] +fn check_git_statuses(snapshot: &Snapshot, expected_statuses: &[(&Path, Option)]) { + let mut traversal = snapshot + .traverse_from_path(true, true, false, "".as_ref()) + .with_git_statuses(); + let found_statuses = expected_statuses + .iter() + .map(|&(path, _)| { + let git_entry = traversal + .find(|git_entry| &*git_entry.path == path) + .expect("Traversal has no entry for {path:?}"); + (path, git_entry.git_status) + }) + .collect::>(); + assert_eq!(found_statuses, expected_statuses); } #[track_caller] @@ -2509,14 +3198,14 @@ fn git_init(path: &Path) -> git2::Repository { fn git_add>(path: P, repo: &git2::Repository) { let path = path.as_ref(); let mut index = repo.index().expect("Failed to get index"); - index.add_path(path).expect("Failed to add a.txt"); + index.add_path(path).expect("Failed to add file"); index.write().expect("Failed to write index"); } #[track_caller] fn git_remove_index(path: &Path, repo: &git2::Repository) { let mut index = repo.index().expect("Failed to get index"); - index.remove_path(path).expect("Failed to add a.txt"); + index.remove_path(path).expect("Failed to add file"); index.write().expect("Failed to write index"); } @@ -2588,6 +3277,7 @@ fn check_worktree_entries( expected_excluded_paths: &[&str], expected_ignored_paths: &[&str], expected_tracked_paths: &[&str], + expected_included_paths: &[&str], ) { for path in expected_excluded_paths { let entry = tree.entry_for_path(path); @@ -2610,10 +3300,19 @@ fn check_worktree_entries( .entry_for_path(path) .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'")); assert!( - !entry.is_ignored, + !entry.is_ignored || entry.is_always_included, "expected path '{path}' to be tracked, but got entry: {entry:?}", ); } + for path in expected_included_paths { + let entry = tree + 
.entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'")); + assert!( + entry.is_always_included, + "expected path '{path}' to always be included, but got entry: {entry:?}", + ); + } } fn init_test(cx: &mut gpui::TestAppContext) { @@ -2636,7 +3335,8 @@ fn assert_entry_git_state( ) { let entry = tree.entry_for_path(path).expect("entry {path} not found"); assert_eq!( - entry.git_status, git_status, + tree.status_for_file(Path::new(path)), + git_status, "expected {path} to have git status: {git_status:?}" ); assert_eq!( diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index e5d4cb7623..8052bdc335 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.163.0" +version = "0.170.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] @@ -15,14 +15,16 @@ name = "zed" path = "src/main.rs" [dependencies] -assistant_slash_command.workspace = true activity_indicator.workspace = true anyhow.workspace = true assets.workspace = true assistant.workspace = true +assistant2.workspace = true +assistant_tools.workspace = true async-watch.workspace = true audio.workspace = true auto_update.workspace = true +auto_update_ui.workspace = true backtrace = "0.3" breadcrumbs.workspace = true call.workspace = true @@ -35,12 +37,12 @@ collab_ui.workspace = true collections.workspace = true command_palette.workspace = true command_palette_hooks.workspace = true -context_servers.workspace = true copilot.workspace = true db.workspace = true diagnostics.workspace = true editor.workspace = true env_logger.workspace = true +extension.workspace = true extension_host.workspace = true extensions_ui.workspace = true feature_flags.workspace = true @@ -50,22 +52,25 @@ file_icons.workspace = true fs.workspace = true futures.workspace = true git.workspace = true +git_ui.workspace = true git_hosting_providers.workspace 
= true go_to_line.workspace = true gpui = { workspace = true, features = ["wayland", "x11", "font-kit"] } http_client.workspace = true image_viewer.workspace = true -indexed_docs.workspace = true inline_completion_button.workspace = true install_cli.workspace = true journal.workspace = true language.workspace = true +language_extension.workspace = true language_model.workspace = true +language_models.workspace = true language_selector.workspace = true language_tools.workspace = true languages = { workspace = true, features = ["load-grammars"] } libc.workspace = true log.workspace = true +markdown.workspace = true markdown_preview.workspace = true menu.workspace = true mimalloc = { version = "0.1", optional = true } @@ -76,16 +81,17 @@ outline.workspace = true outline_panel.workspace = true parking_lot.workspace = true paths.workspace = true +picker.workspace = true profiling.workspace = true project.workspace = true project_panel.workspace = true project_symbols.workspace = true proto.workspace = true -quick_action_bar.workspace = true recent_projects.workspace = true release_channel.workspace = true remote.workspace = true repl.workspace = true +reqwest_client.workspace = true rope.workspace = true search.workspace = true serde.workspace = true @@ -103,22 +109,26 @@ sysinfo.workspace = true tab_switcher.workspace = true task.workspace = true tasks_ui.workspace = true +telemetry.workspace = true telemetry_events.workspace = true terminal_view.workspace = true theme.workspace = true +theme_extension.workspace = true theme_selector.workspace = true time.workspace = true toolchain_selector.workspace = true ui.workspace = true -reqwest_client.workspace = true url.workspace = true urlencoding = "2.1.2" util.workspace = true uuid.workspace = true +vcs_menu.workspace = true vim.workspace = true +vim_mode_setting.workspace = true welcome.workspace = true workspace.workspace = true zed_actions.workspace = true +zeta.workspace = true [target.'cfg(target_os = 
"windows")'.dependencies] windows.workspace = true @@ -133,8 +143,10 @@ ashpd.workspace = true call = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +image_viewer = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } +terminal_view = { workspace = true, features = ["test-support"] } tree-sitter-md.workspace = true tree-sitter-rust.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/build.rs b/crates/zed/build.rs index 3013773f91..b97bda1681 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -25,6 +25,10 @@ fn main() { // Populate git sha environment variable if git is available println!("cargo:rerun-if-changed=../../.git/logs/HEAD"); + println!( + "cargo:rustc-env=TARGET={}", + std::env::var("TARGET").unwrap() + ); if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() { if output.status.success() { let git_sha = String::from_utf8_lossy(&output.stdout); diff --git a/crates/zed/resources/flatpak/manifest-template.json b/crates/zed/resources/flatpak/manifest-template.json index 7905058f44..1560027e9f 100644 --- a/crates/zed/resources/flatpak/manifest-template.json +++ b/crates/zed/resources/flatpak/manifest-template.json @@ -32,7 +32,7 @@ "BRANDING_LIGHT": "$BRANDING_LIGHT", "BRANDING_DARK": "$BRANDING_DARK", "APP_CLI": "zed", - "APP_ARGS": "--foreground", + "APP_ARGS": "--foreground %U", "DO_STARTUP_NOTIFY": "false" } }, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index a5fc52e933..cc4e98f38c 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -1,5 +1,3 @@ -// Allow binary to be called Zed for a nice application menu when running executable directly -#![allow(non_snake_case)] // Disable command line from opening on 
release mode #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] @@ -7,16 +5,16 @@ mod reliability; mod zed; use anyhow::{anyhow, Context as _, Result}; -use assistant_slash_command::SlashCommandRegistry; use chrono::Offset; use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; use client::{parse_zed_link, Client, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; -use context_servers::ContextServerFactoryRegistry; +use collections::HashMap; use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; use editor::Editor; use env_logger::Builder; +use extension::ExtensionHostProxy; use fs::{Fs, RealFs}; use futures::{future, StreamExt}; use git::GitHostingProviderRegistry; @@ -25,7 +23,6 @@ use gpui::{ VisualContext, }; use http_client::{read_proxy_from_env, Uri}; -use indexed_docs::IndexedDocsRegistry; use language::LanguageRegistry; use log::LevelFilter; use reqwest_client::ReqwestClient; @@ -41,19 +38,17 @@ use settings::{ handle_settings_file_changes, watch_config_file, InvalidSettingsError, Settings, SettingsStore, }; use simplelog::ConfigBuilder; -use smol::process::Command; -use snippet_provider::SnippetRegistry; use std::{ env, fs::OpenOptions, - io::{IsTerminal, Write}, + io::{self, IsTerminal, Write}, path::{Path, PathBuf}, process, sync::Arc, }; use theme::{ActiveTheme, SystemAppearance, ThemeRegistry, ThemeSettings}; use time::UtcOffset; -use util::{maybe, parse_env_output, ResultExt, TryFutureExt}; +use util::{maybe, ResultExt, TryFutureExt}; use uuid::Uuid; use welcome::{show_welcome_view, BaseKeymap, FIRST_OPEN}; use workspace::{ @@ -68,26 +63,66 @@ use zed::{ use crate::zed::inline_completion_registry; +#[cfg(unix)] +use util::{load_login_shell_environment, load_shell_from_passwd}; + #[cfg(feature = "mimalloc")] #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; -fn fail_to_launch(e: anyhow::Error) { - eprintln!("Zed failed to launch: {e:?}"); - App::new().run(move |cx| { - if let 
Ok(window) = cx.open_window(gpui::WindowOptions::default(), |cx| cx.new_view(|_| gpui::Empty)) { - window.update(cx, |_, cx| { - let response = cx.prompt(gpui::PromptLevel::Critical, "Zed failed to launch", Some(&format!("{e}\n\nFor help resolving this, please open an issue on https://github.com/zed-industries/zed")), &["Exit"]); +fn files_not_created_on_launch(errors: HashMap>) { + let message = "Zed failed to launch"; + let error_details = errors + .into_iter() + .flat_map(|(kind, paths)| { + #[allow(unused_mut)] // for non-unix platforms + let mut error_kind_details = match paths.len() { + 0 => return None, + 1 => format!( + "{kind} when creating directory {:?}", + paths.first().expect("match arm checks for a single entry") + ), + _many => format!("{kind} when creating directories {paths:?}"), + }; - cx.spawn(|_, mut cx| async move { - response.await?; - cx.update(|cx| { - cx.quit() + #[cfg(unix)] + { + match kind { + io::ErrorKind::PermissionDenied => { + error_kind_details.push_str("\n\nConsider using chown and chmod tools for altering the directories permissions if your user has corresponding rights.\ + \nFor example, `sudo chown $(whoami):staff ~/.config` and `chmod +uwrx ~/.config`"); + } + _ => {} + } + } + + Some(error_kind_details) + }) + .collect::>().join("\n\n"); + + eprintln!("{message}: {error_details}"); + App::new().run(move |cx| { + if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |cx| { + cx.new_view(|_| gpui::Empty) + }) { + window + .update(cx, |_, cx| { + let response = cx.prompt( + gpui::PromptLevel::Critical, + message, + Some(&error_details), + &["Exit"], + ); + + cx.spawn(|_, mut cx| async move { + response.await?; + cx.update(|cx| cx.quit()) }) - }).detach_and_log_err(cx); - }).log_err(); + .detach_and_log_err(cx); + }) + .log_err(); } else { - fail_to_open_window(e, cx) + fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx) } }) } @@ -142,8 +177,9 @@ fn main() { menu::init(); zed_actions::init(); - if 
let Err(e) = init_paths() { - fail_to_launch(e); + let file_errors = init_paths(); + if !file_errors.is_empty() { + files_not_created_on_launch(file_errors); return; } @@ -219,14 +255,12 @@ fn main() { paths::keymap_file().clone(), ); + #[cfg(unix)] if !stdout_is_a_pty() { app.background_executor() .spawn(async { - #[cfg(unix)] - { - load_shell_from_passwd().await.log_err(); - } - load_login_shell_environment().await.log_err(); + load_shell_from_passwd().log_err(); + load_login_shell_environment().log_err(); }) .detach() }; @@ -286,6 +320,9 @@ fn main() { OpenListener::set_global(cx, open_listener.clone()); + extension::init(cx); + let extension_host_proxy = ExtensionHostProxy::global(cx); + let client = Client::production(cx); cx.set_http_client(client.http_client().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); @@ -319,6 +356,7 @@ fn main() { let node_runtime = NodeRuntime::new(client.http_client(), rx); language::init(cx); + language_extension::init(extension_host_proxy.clone(), languages.clone()); languages::init(languages.clone(), node_runtime.clone(), cx); let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx)); @@ -328,7 +366,6 @@ fn main() { zed::init(cx); project::Project::init(&client, cx); client::init(&client, cx); - language::init(cx); let telemetry = client.telemetry(); telemetry.start( system_id.as_ref().map(|id| id.to_string()), @@ -367,6 +404,7 @@ fn main() { AppState::set_global(Arc::downgrade(&app_state), cx); auto_update::init(client.http_client(), cx); + auto_update_ui::init(cx); reliability::init( client.http_client(), system_id.as_ref().map(|id| id.to_string()), @@ -377,6 +415,11 @@ fn main() { SystemAppearance::init(cx); theme::init(theme::LoadThemes::All(Box::new(Assets)), cx); + theme_extension::init( + extension_host_proxy.clone(), + ThemeRegistry::global(cx), + cx.background_executor().clone(), + ); 
command_palette::init(cx); let copilot_language_server_id = app_state.languages.next_language_server_id(); copilot::init( @@ -387,36 +430,31 @@ fn main() { cx, ); supermaven::init(app_state.client.clone(), cx); - language_model::init( + language_model::init(cx); + language_models::init( app_state.user_store.clone(), app_state.client.clone(), app_state.fs.clone(), cx, ); snippet_provider::init(cx); - inline_completion_registry::init(app_state.client.telemetry().clone(), cx); + inline_completion_registry::init(app_state.client.clone(), cx); let prompt_builder = assistant::init( app_state.fs.clone(), app_state.client.clone(), stdout_is_a_pty(), cx, ); - repl::init( + assistant2::init( app_state.fs.clone(), - app_state.client.telemetry().clone(), - cx, - ); - let api = extensions_ui::ConcreteExtensionRegistrationHooks::new( - ThemeRegistry::global(cx), - SlashCommandRegistry::global(cx), - IndexedDocsRegistry::global(cx), - SnippetRegistry::global(cx), - app_state.languages.clone(), - ContextServerFactoryRegistry::global(cx), + app_state.client.clone(), + stdout_is_a_pty(), cx, ); + assistant_tools::init(cx); + repl::init(app_state.fs.clone(), cx); extension_host::init( - api, + extension_host_proxy, app_state.fs.clone(), app_state.client.clone(), app_state.node_runtime.clone(), @@ -444,6 +482,7 @@ fn main() { outline::init(cx); project_symbols::init(cx); project_panel::init(Assets, cx); + git_ui::git_panel::init(cx); outline_panel::init(Assets, cx); tasks_ui::init(cx); snippets_ui::init(cx); @@ -459,11 +498,14 @@ fn main() { call::init(app_state.client.clone(), app_state.user_store.clone(), cx); notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx); collab_ui::init(&app_state, cx); + git_ui::init(cx); + vcs_menu::init(cx); feedback::init(cx); markdown_preview::init(cx); welcome::init(cx); settings_ui::init(cx); extensions_ui::init(cx); + zeta::init(cx); cx.observe_global::({ let languages = app_state.languages.clone(); @@ -490,9 +532,16 @@ fn 
main() { } }) .detach(); - let telemetry = app_state.client.telemetry(); - telemetry.report_setting_event("theme", cx.theme().name.to_string()); - telemetry.report_setting_event("keymap", BaseKeymap::get_global(cx).to_string()); + telemetry::event!( + "Settings Changed", + setting = "theme", + value = cx.theme().name.to_string() + ); + telemetry::event!( + "Settings Changed", + setting = "keymap", + value = BaseKeymap::get_global(cx).to_string() + ); telemetry.flush_events(); let fs = app_state.fs.clone(); @@ -895,8 +944,8 @@ pub(crate) async fn restorable_workspace_locations( } } -fn init_paths() -> anyhow::Result<()> { - for path in [ +fn init_paths() -> HashMap> { + [ paths::config_dir(), paths::extensions_dir(), paths::languages_dir(), @@ -904,12 +953,13 @@ fn init_paths() -> anyhow::Result<()> { paths::logs_dir(), paths::temp_dir(), ] - .iter() - { - std::fs::create_dir_all(path) - .map_err(|e| anyhow!("Could not create directory {:?}: {}", path, e))?; - } - Ok(()) + .into_iter() + .fold(HashMap::default(), |mut errors, path| { + if let Err(e) = std::fs::create_dir_all(path) { + errors.entry(e.kind()).or_insert_with(Vec::new).push(path); + } + errors + }) } fn init_logger() { @@ -988,109 +1038,8 @@ fn init_stdout_logger() { .init(); } -#[cfg(unix)] -async fn load_shell_from_passwd() -> Result<()> { - let buflen = match unsafe { libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) } { - n if n < 0 => 1024, - n => n as usize, - }; - let mut buffer = Vec::with_capacity(buflen); - - let mut pwd: std::mem::MaybeUninit = std::mem::MaybeUninit::uninit(); - let mut result: *mut libc::passwd = std::ptr::null_mut(); - - let uid = unsafe { libc::getuid() }; - let status = unsafe { - libc::getpwuid_r( - uid, - pwd.as_mut_ptr(), - buffer.as_mut_ptr() as *mut libc::c_char, - buflen, - &mut result, - ) - }; - let entry = unsafe { pwd.assume_init() }; - - anyhow::ensure!( - status == 0, - "call to getpwuid_r failed. 
uid: {}, status: {}", - uid, - status - ); - anyhow::ensure!(!result.is_null(), "passwd entry for uid {} not found", uid); - anyhow::ensure!( - entry.pw_uid == uid, - "passwd entry has different uid ({}) than getuid ({}) returned", - entry.pw_uid, - uid, - ); - - let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() }; - if env::var("SHELL").map_or(true, |shell_env| shell_env != shell) { - log::info!( - "updating SHELL environment variable to value from passwd entry: {:?}", - shell, - ); - env::set_var("SHELL", shell); - } - - Ok(()) -} - -async fn load_login_shell_environment() -> Result<()> { - let marker = "ZED_LOGIN_SHELL_START"; - let shell = env::var("SHELL").context( - "SHELL environment variable is not assigned so we can't source login environment variables", - )?; - - // If possible, we want to `cd` in the user's `$HOME` to trigger programs - // such as direnv, asdf, mise, ... to adjust the PATH. These tools often hook - // into shell's `cd` command (and hooks) to manipulate env. - // We do this so that we get the env a user would have when spawning a shell - // in home directory. - let shell_cmd_prefix = std::env::var_os("HOME") - .and_then(|home| home.into_string().ok()) - .map(|home| format!("cd '{home}';")); - - // The `exit 0` is the result of hours of debugging, trying to find out - // why running this command here, without `exit 0`, would mess - // up signal process for our process so that `ctrl-c` doesn't work - // anymore. - // We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would - // do that, but it does, and `exit 0` helps. 
- let shell_cmd = format!( - "{}printf '%s' {marker}; /usr/bin/env; exit 0;", - shell_cmd_prefix.as_deref().unwrap_or("") - ); - - let output = Command::new(&shell) - .args(["-l", "-i", "-c", &shell_cmd]) - .output() - .await - .context("failed to spawn login shell to source login environment variables")?; - if !output.status.success() { - Err(anyhow!("login shell exited with error"))?; - } - - let stdout = String::from_utf8_lossy(&output.stdout); - - if let Some(env_output_start) = stdout.find(marker) { - let env_output = &stdout[env_output_start + marker.len()..]; - - parse_env_output(env_output, |key, value| env::set_var(key, value)); - - log::info!( - "set environment variables from shell:{}, path:{}", - shell, - env::var("PATH").unwrap_or_default(), - ); - } - - Ok(()) -} - fn stdout_is_a_pty() -> bool { - std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && std::io::stdout().is_terminal() + std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && io::stdout().is_terminal() } #[derive(Parser, Debug)] @@ -1125,10 +1074,7 @@ impl ToString for IdType { fn parse_url_arg(arg: &str, cx: &AppContext) -> Result { match std::fs::canonicalize(Path::new(&arg)) { - Ok(path) => Ok(format!( - "file://{}", - path.to_string_lossy().trim_start_matches(r#"\\?\"#) - )), + Ok(path) => Ok(format!("file://{}", path.display())), Err(error) => { if arg.starts_with("file://") || arg.starts_with("zed-cli://") diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 681cc9834f..bd5dc96161 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -1,31 +1,26 @@ +use crate::stdout_is_a_pty; use anyhow::{Context, Result}; use backtrace::{self, Backtrace}; use chrono::Utc; use client::{telemetry, TelemetrySettings}; use db::kvp::KEY_VALUE_STORE; use gpui::{AppContext, SemanticVersion}; -use http_client::{HttpRequestExt, Method}; - -use http_client::{self, HttpClient, HttpClientWithUrl}; +use http_client::{self, HttpClient, 
HttpClientWithUrl, HttpRequestExt, Method}; use paths::{crashes_dir, crashes_retired_dir}; use project::Project; -use release_channel::ReleaseChannel; -use release_channel::RELEASE_CHANNEL; +use release_channel::{ReleaseChannel, RELEASE_CHANNEL}; use settings::Settings; use smol::stream::StreamExt; use std::{ env, - ffi::OsStr, + ffi::{c_void, OsStr}, sync::{atomic::Ordering, Arc}, }; use std::{io::Write, panic, sync::atomic::AtomicU32, thread}; -use telemetry_events::LocationData; -use telemetry_events::Panic; -use telemetry_events::PanicRequest; +use telemetry_events::{LocationData, Panic, PanicRequest}; use url::Url; use util::ResultExt; -use crate::stdout_is_a_pty; static PANIC_COUNT: AtomicU32 = AtomicU32::new(0); pub fn init_panic_hook( @@ -69,25 +64,35 @@ pub fn init_panic_hook( ); std::process::exit(-1); } + let main_module_base_address = get_main_module_base_address(); let backtrace = Backtrace::new(); - let mut backtrace = backtrace + let mut symbols = backtrace .frames() .iter() .flat_map(|frame| { - frame - .symbols() - .iter() - .filter_map(|frame| Some(format!("{:#}", frame.name()?))) + let base = frame + .module_base_address() + .unwrap_or(main_module_base_address); + frame.symbols().iter().map(move |symbol| { + format!( + "{}+{}", + symbol + .name() + .as_ref() + .map_or("".to_owned(), <_>::to_string), + (frame.ip() as isize).saturating_sub(base as isize) + ) + }) }) .collect::>(); // Strip out leading stack frames for rust panic-handling. 
- if let Some(ix) = backtrace + if let Some(ix) = symbols .iter() - .position(|name| name == "rust_begin_unwind") + .position(|name| name == "rust_begin_unwind" || name == "_rust_begin_unwind") { - backtrace.drain(0..=ix); + symbols.drain(0..=ix); } let panic_data = telemetry_events::Panic { @@ -98,12 +103,13 @@ pub fn init_panic_hook( line: location.line(), }), app_version: app_version.to_string(), - release_channel: RELEASE_CHANNEL.display_name().into(), + release_channel: RELEASE_CHANNEL.dev_name().into(), + target: env!("TARGET").to_owned().into(), os_name: telemetry::os_name(), os_version: Some(telemetry::os_version()), architecture: env::consts::ARCH.into(), panicked_on: Utc::now().timestamp_millis(), - backtrace, + backtrace: symbols, system_id: system_id.clone(), installation_id: installation_id.clone(), session_id: session_id.clone(), @@ -133,6 +139,25 @@ pub fn init_panic_hook( })); } +#[cfg(not(target_os = "windows"))] +fn get_main_module_base_address() -> *mut c_void { + let mut dl_info = libc::Dl_info { + dli_fname: std::ptr::null(), + dli_fbase: std::ptr::null_mut(), + dli_sname: std::ptr::null(), + dli_saddr: std::ptr::null_mut(), + }; + unsafe { + libc::dladdr(get_main_module_base_address as _, &mut dl_info); + } + dl_info.dli_fbase +} + +#[cfg(target_os = "windows")] +fn get_main_module_base_address() -> *mut c_void { + std::ptr::null_mut() +} + pub fn init( http_client: Arc, system_id: Option, diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index e2dc36a21f..4e8dd1bcba 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -5,10 +5,13 @@ pub(crate) mod linux_prompts; #[cfg(target_os = "macos")] pub(crate) mod mac_only_instance; mod open_listener; +mod quick_action_bar; #[cfg(target_os = "windows")] pub(crate) mod windows_only_instance; +use anyhow::Context as _; pub use app_menus::*; +use assets::Assets; use assistant::PromptBuilder; use breadcrumbs::Breadcrumbs; use client::{zed_urls, ZED_URL_SCHEME}; @@ -17,18 +20,17 @@ use 
command_palette_hooks::CommandPaletteFilter; use editor::ProposedChangesEditorToolbar; use editor::{scroll::Autoscroll, Editor, MultiBuffer}; use feature_flags::FeatureFlagAppExt; +use futures::FutureExt; +use futures::{channel::mpsc, select_biased, StreamExt}; use gpui::{ actions, point, px, AppContext, AsyncAppContext, Context, FocusableView, MenuItem, PathPromptOptions, PromptLevel, ReadGlobal, Task, TitlebarOptions, View, ViewContext, VisualContext, WindowKind, WindowOptions, }; pub use open_listener::*; - -use anyhow::Context as _; -use assets::Assets; -use futures::{channel::mpsc, select_biased, StreamExt}; use outline_panel::OutlinePanel; -use project::{DirectoryLister, Item}; +use paths::{local_settings_file_relative_path, local_tasks_file_relative_path}; +use project::{DirectoryLister, ProjectItem}; use project_panel::ProjectPanel; use quick_action_bar::QuickActionBar; use recent_projects::open_ssh_project; @@ -42,16 +44,14 @@ use settings::{ use std::any::TypeId; use std::path::PathBuf; use std::{borrow::Cow, ops::Deref, path::Path, sync::Arc}; -use theme::ActiveTheme; -use workspace::notifications::NotificationId; -use workspace::CloseIntent; - -use paths::{local_settings_file_relative_path, local_tasks_file_relative_path}; use terminal_view::terminal_panel::{self, TerminalPanel}; +use theme::ActiveTheme; use util::{asset_str, ResultExt}; use uuid::Uuid; -use vim::VimModeSetting; +use vim_mode_setting::VimModeSetting; use welcome::{BaseKeymap, MultibufferHint}; +use workspace::notifications::NotificationId; +use workspace::CloseIntent; use workspace::{ create_and_open_local_file, notifications::simple_message_notification::MessageNotification, open_new, AppState, NewFile, NewWindow, OpenLog, Toast, Workspace, WorkspaceSettings, @@ -153,52 +153,20 @@ pub fn initialize_workspace( }) .detach(); - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - if let Err(e) = fs::linux_watcher::global(|_| {}) { - let message = format!(db::indoc!{r#" - inotify_init 
returned {} - - This may be due to system-wide limits on inotify instances. For troubleshooting see: https://zed.dev/docs/linux - "#}, e); - let prompt = cx.prompt(PromptLevel::Critical, "Could not start inotify", Some(&message), - &["Troubleshoot and Quit"]); - cx.spawn(|_, mut cx| async move { - if prompt.await == Ok(0) { - cx.update(|cx| { - cx.open_url("https://zed.dev/docs/linux#could-not-start-inotify"); - cx.quit(); - }).ok(); - } - }).detach() - } + #[cfg(not(target_os = "macos"))] + initialize_file_watcher(cx); if let Some(specs) = cx.gpu_specs() { log::info!("Using GPU: {:?}", specs); - if specs.is_software_emulated && std::env::var("ZED_ALLOW_EMULATED_GPU").is_err() { - let message = format!(db::indoc!{r#" - Zed uses Vulkan for rendering and requires a compatible GPU. - - Currently you are using a software emulated GPU ({}) which - will result in awful performance. - - For troubleshooting see: https://zed.dev/docs/linux - Set ZED_ALLOW_EMULATED_GPU=1 env var to permanently override. 
- "#}, specs.device_name); - let prompt = cx.prompt(PromptLevel::Critical, "Unsupported GPU", Some(&message), - &["Skip", "Troubleshoot and Quit"]); - cx.spawn(|_, mut cx| async move { - if prompt.await == Ok(1) { - cx.update(|cx| { - cx.open_url("https://zed.dev/docs/linux#zed-fails-to-open-windows"); - cx.quit(); - }).ok(); - } - }).detach() - } + show_software_emulation_warning_if_needed(specs, cx); } let inline_completion_button = cx.new_view(|cx| { - inline_completion_button::InlineCompletionButton::new(app_state.fs.clone(), cx) + inline_completion_button::InlineCompletionButton::new( + workspace.weak_handle(), + app_state.fs.clone(), + cx, + ) }); let diagnostic_summary = @@ -217,16 +185,15 @@ pub fn initialize_workspace( status_bar.add_left_item(activity_indicator, cx); status_bar.add_right_item(inline_completion_button, cx); status_bar.add_right_item(active_buffer_language, cx); - status_bar.add_right_item(active_toolchain_language, cx); + status_bar.add_right_item(active_toolchain_language, cx); status_bar.add_right_item(vim_mode_indicator, cx); status_bar.add_right_item(cursor_position, cx); }); - auto_update::notify_of_any_new_update(cx); + auto_update_ui::notify_of_any_new_update(cx); let handle = cx.view().downgrade(); cx.on_window_should_close(move |cx| { - handle .update(cx, |workspace, cx| { // We'll handle closing asynchronously @@ -236,351 +203,8 @@ pub fn initialize_workspace( .unwrap_or(true) }); - let prompt_builder = prompt_builder.clone(); - cx.spawn(|workspace_handle, mut cx| async move { - let assistant_panel = - assistant::AssistantPanel::load(workspace_handle.clone(), prompt_builder, cx.clone()); - - let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); - let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone()); - let terminal_panel = TerminalPanel::load(workspace_handle.clone(), cx.clone()); - let channels_panel = - collab_ui::collab_panel::CollabPanel::load(workspace_handle.clone(), 
cx.clone()); - let chat_panel = - collab_ui::chat_panel::ChatPanel::load(workspace_handle.clone(), cx.clone()); - let notification_panel = collab_ui::notification_panel::NotificationPanel::load( - workspace_handle.clone(), - cx.clone(), - ); - - let ( - project_panel, - outline_panel, - terminal_panel, - assistant_panel, - channels_panel, - chat_panel, - notification_panel, - ) = futures::try_join!( - project_panel, - outline_panel, - terminal_panel, - assistant_panel, - channels_panel, - chat_panel, - notification_panel, - )?; - - workspace_handle.update(&mut cx, |workspace, cx| { - workspace.add_panel(assistant_panel, cx); - workspace.add_panel(project_panel, cx); - workspace.add_panel(outline_panel, cx); - workspace.add_panel(terminal_panel, cx); - workspace.add_panel(channels_panel, cx); - workspace.add_panel(chat_panel, cx); - workspace.add_panel(notification_panel, cx); - }) - }) - .detach(); - - workspace - .register_action(about) - .register_action(|_, _: &Minimize, cx| { - cx.minimize_window(); - }) - .register_action(|_, _: &Zoom, cx| { - cx.zoom_window(); - }) - .register_action(|_, _: &ToggleFullScreen, cx| { - cx.toggle_fullscreen(); - }) - .register_action(|_, action: &OpenZedUrl, cx| { - OpenListener::global(cx).open_urls(vec![action.url.clone()]) - }) - .register_action(|_, action: &OpenBrowser, cx| cx.open_url(&action.url)) - .register_action(move |_, _: &zed_actions::IncreaseBufferFontSize, cx| { - theme::adjust_buffer_font_size(cx, |size| *size += px(1.0)) - }) - .register_action(|workspace, _: &workspace::Open, cx| { - workspace.client() - .telemetry() - .report_app_event("open project".to_string()); - let paths = workspace.prompt_for_open_path( - PathPromptOptions { - files: true, - directories: true, - multiple: true, - }, - DirectoryLister::Project(workspace.project().clone()), - cx, - ); - - cx.spawn(|this, mut cx| async move { - let Some(paths) = paths.await.log_err().flatten() else { - return; - }; - - if let Some(task) = this - 
.update(&mut cx, |this, cx| { - if this.project().read(cx).is_local() { - this.open_workspace_for_paths(false, paths, cx) - } else { - open_new_ssh_project_from_project(this, paths, cx) - } - }) - .log_err() - { - task.await.log_err(); - } - }) - .detach() - }) - .register_action(move |_, _: &zed_actions::DecreaseBufferFontSize, cx| { - theme::adjust_buffer_font_size(cx, |size| *size -= px(1.0)) - }) - .register_action(move |_, _: &zed_actions::ResetBufferFontSize, cx| { - theme::reset_buffer_font_size(cx) - }) - .register_action(move |_, _: &zed_actions::IncreaseUiFontSize, cx| { - theme::adjust_ui_font_size(cx, |size| *size += px(1.0)) - }) - .register_action(move |_, _: &zed_actions::DecreaseUiFontSize, cx| { - theme::adjust_ui_font_size(cx, |size| *size -= px(1.0)) - }) - .register_action(move |_, _: &zed_actions::ResetUiFontSize, cx| { - theme::reset_ui_font_size(cx) - }) - .register_action(move |_, _: &zed_actions::IncreaseBufferFontSize, cx| { - theme::adjust_buffer_font_size(cx, |size| *size += px(1.0)) - }) - .register_action(move |_, _: &zed_actions::DecreaseBufferFontSize, cx| { - theme::adjust_buffer_font_size(cx, |size| *size -= px(1.0)) - }) - .register_action(move |_, _: &zed_actions::ResetBufferFontSize, cx| { - theme::reset_buffer_font_size(cx) - }) - .register_action(|_, _: &install_cli::Install, cx| { - cx.spawn(|workspace, mut cx| async move { - if cfg!(any(target_os = "linux", target_os = "freebsd")) { - let prompt = cx.prompt( - PromptLevel::Warning, - "CLI should already be installed", - Some("If you installed Zed from our official release add ~/.local/bin to your PATH.\n\nIf you installed Zed from a different source like your package manager, then you may need to create an alias/symlink manually.\n\nDepending on your package manager, the CLI might be named zeditor, zedit, zed-editor or something else."), - &["Ok"], - ); - cx.background_executor().spawn(prompt).detach(); - return Ok(()); - } - let path = install_cli::install_cli(cx.deref()) - 
.await - .context("error creating CLI symlink")?; - - workspace.update(&mut cx, |workspace, cx| { - struct InstalledZedCli; - - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - format!( - "Installed `zed` to {}. You can launch {} from your terminal.", - path.to_string_lossy(), - ReleaseChannel::global(cx).display_name() - ), - ), - cx, - ) - })?; - register_zed_scheme(&cx).await.log_err(); - Ok(()) - }) - .detach_and_prompt_err("Error installing zed cli", cx, |_, _| None); - }) - .register_action(|_, _: &install_cli::RegisterZedScheme, cx| { - cx.spawn(|workspace, mut cx| async move { - register_zed_scheme(&cx).await?; - workspace.update(&mut cx, |workspace, cx| { - struct RegisterZedScheme; - - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - format!( - "zed:// links will now open in {}.", - ReleaseChannel::global(cx).display_name() - ), - ), - cx, - ) - })?; - Ok(()) - }) - .detach_and_prompt_err( - "Error registering zed:// scheme", - cx, - |_, _| None, - ); - }) - .register_action(|workspace, _: &OpenLog, cx| { - open_log_file(workspace, cx); - }) - .register_action(|workspace, _: &zed_actions::OpenLicenses, cx| { - open_bundled_file( - workspace, - asset_str::("licenses.md"), - "Open Source License Attribution", - "Markdown", - cx, - ); - }) - .register_action( - move |workspace: &mut Workspace, - _: &zed_actions::OpenTelemetryLog, - cx: &mut ViewContext| { - open_telemetry_log_file(workspace, cx); - }, - ) - .register_action( - move |_: &mut Workspace, - _: &zed_actions::OpenKeymap, - cx: &mut ViewContext| { - open_settings_file(paths::keymap_file(), || settings::initial_keymap_content().as_ref().into(), cx); - }, - ) - .register_action( - move |_: &mut Workspace, _: &OpenSettings, cx: &mut ViewContext| { - open_settings_file( - paths::settings_file(), - || settings::initial_user_settings_content().as_ref().into(), - cx, - ); - }, - ) - .register_action( - |_: &mut Workspace, _: &OpenAccountSettings, cx: &mut ViewContext| 
{ - cx.open_url(&zed_urls::account_url(cx)); - }, - ) - .register_action( - move |_: &mut Workspace, _: &OpenTasks, cx: &mut ViewContext| { - open_settings_file( - paths::tasks_file(), - || settings::initial_tasks_content().as_ref().into(), - cx, - ); - }, - ) - .register_action(open_project_settings_file) - .register_action(open_project_tasks_file) - .register_action( - move |workspace: &mut Workspace, - _: &zed_actions::OpenDefaultKeymap, - cx: &mut ViewContext| { - open_bundled_file( - workspace, - settings::default_keymap(), - "Default Key Bindings", - "JSON", - cx, - ); - }, - ) - .register_action( - move |workspace: &mut Workspace, - _: &OpenDefaultSettings, - cx: &mut ViewContext| { - open_bundled_file( - workspace, - settings::default_settings(), - "Default Settings", - "JSON", - cx, - ); - }, - ) - .register_action( - |workspace: &mut Workspace, - _: &project_panel::ToggleFocus, - cx: &mut ViewContext| { - workspace.toggle_panel_focus::(cx); - }, - ) - .register_action( - |workspace: &mut Workspace, - _: &outline_panel::ToggleFocus, - cx: &mut ViewContext| { - workspace.toggle_panel_focus::(cx); - }, - ) - .register_action( - |workspace: &mut Workspace, - _: &collab_ui::collab_panel::ToggleFocus, - cx: &mut ViewContext| { - workspace.toggle_panel_focus::(cx); - }, - ) - .register_action( - |workspace: &mut Workspace, - _: &collab_ui::chat_panel::ToggleFocus, - cx: &mut ViewContext| { - workspace.toggle_panel_focus::(cx); - }, - ) - .register_action( - |workspace: &mut Workspace, - _: &collab_ui::notification_panel::ToggleFocus, - cx: &mut ViewContext| { - workspace - .toggle_panel_focus::(cx); - }, - ) - .register_action( - |workspace: &mut Workspace, - _: &terminal_panel::ToggleFocus, - cx: &mut ViewContext| { - workspace.toggle_panel_focus::(cx); - }, - ) - .register_action({ - let app_state = Arc::downgrade(&app_state); - move |_, _: &NewWindow, cx| { - if let Some(app_state) = app_state.upgrade() { - open_new(Default::default(), app_state, cx, 
|workspace, cx| { - Editor::new_file(workspace, &Default::default(), cx) - }) - .detach(); - } - } - }) - .register_action({ - let app_state = Arc::downgrade(&app_state); - move |_, _: &NewFile, cx| { - if let Some(app_state) = app_state.upgrade() { - open_new(Default::default(), app_state, cx, |workspace, cx| { - Editor::new_file(workspace, &Default::default(), cx) - }) - .detach(); - } - } - }); - if workspace.project().read(cx).is_via_ssh() { - workspace.register_action({ - move |workspace, _: &OpenServerSettings, cx| { - let open_server_settings = workspace.project().update(cx, |project, cx| { - project.open_server_settings(cx) - }); - - cx.spawn(|workspace, mut cx| async move { - let buffer = open_server_settings.await?; - - workspace.update(&mut cx, |workspace, cx| { - workspace.open_path(buffer.read(cx).project_path(cx).expect("Settings file must have a location"), None, true, cx) - })?.await?; - - anyhow::Ok(()) - }).detach_and_log_err(cx); - } - }); - } + initialize_panels(prompt_builder.clone(), cx); + register_actions(app_state.clone(), workspace, cx); workspace.focus_handle(cx).focus(cx); }) @@ -610,6 +234,488 @@ fn feature_gate_zed_pro_actions(cx: &mut AppContext) { .detach(); } +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +fn initialize_file_watcher(cx: &mut ViewContext) { + if let Err(e) = fs::fs_watcher::global(|_| {}) { + let message = format!( + db::indoc! {r#" + inotify_init returned {} + + This may be due to system-wide limits on inotify instances. 
For troubleshooting see: https://zed.dev/docs/linux + "#}, + e + ); + let prompt = cx.prompt( + PromptLevel::Critical, + "Could not start inotify", + Some(&message), + &["Troubleshoot and Quit"], + ); + cx.spawn(|_, mut cx| async move { + if prompt.await == Ok(0) { + cx.update(|cx| { + cx.open_url("https://zed.dev/docs/linux#could-not-start-inotify"); + cx.quit(); + }) + .ok(); + } + }) + .detach() + } +} + +#[cfg(target_os = "windows")] +fn initialize_file_watcher(cx: &mut ViewContext) { + if let Err(e) = fs::fs_watcher::global(|_| {}) { + let message = format!( + db::indoc! {r#" + ReadDirectoryChangesW initialization failed: {} + + This may occur on network filesystems and WSL paths. For troubleshooting see: https://zed.dev/docs/windows + "#}, + e + ); + let prompt = cx.prompt( + PromptLevel::Critical, + "Could not start ReadDirectoryChangesW", + Some(&message), + &["Troubleshoot and Quit"], + ); + cx.spawn(|_, mut cx| async move { + if prompt.await == Ok(0) { + cx.update(|cx| { + cx.open_url("https://zed.dev/docs/windows"); + cx.quit() + }) + .ok(); + } + }) + .detach() + } +} + +fn show_software_emulation_warning_if_needed( + specs: gpui::GpuSpecs, + cx: &mut ViewContext, +) { + if specs.is_software_emulated && std::env::var("ZED_ALLOW_EMULATED_GPU").is_err() { + let message = format!( + db::indoc! {r#" + Zed uses Vulkan for rendering and requires a compatible GPU. + + Currently you are using a software emulated GPU ({}) which + will result in awful performance. + + For troubleshooting see: https://zed.dev/docs/linux + Set ZED_ALLOW_EMULATED_GPU=1 env var to permanently override. 
+ "#}, + specs.device_name + ); + let prompt = cx.prompt( + PromptLevel::Critical, + "Unsupported GPU", + Some(&message), + &["Skip", "Troubleshoot and Quit"], + ); + cx.spawn(|_, mut cx| async move { + if prompt.await == Ok(1) { + cx.update(|cx| { + cx.open_url("https://zed.dev/docs/linux#zed-fails-to-open-windows"); + cx.quit(); + }) + .ok(); + } + }) + .detach() + } +} + +fn initialize_panels(prompt_builder: Arc, cx: &mut ViewContext) { + let assistant2_feature_flag = cx.wait_for_flag::(); + let git_ui_feature_flag = cx.wait_for_flag::(); + + let prompt_builder = prompt_builder.clone(); + + cx.spawn(|workspace_handle, mut cx| async move { + let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); + let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone()); + let terminal_panel = TerminalPanel::load(workspace_handle.clone(), cx.clone()); + let channels_panel = + collab_ui::collab_panel::CollabPanel::load(workspace_handle.clone(), cx.clone()); + let chat_panel = + collab_ui::chat_panel::ChatPanel::load(workspace_handle.clone(), cx.clone()); + let notification_panel = collab_ui::notification_panel::NotificationPanel::load( + workspace_handle.clone(), + cx.clone(), + ); + let assistant_panel = + assistant::AssistantPanel::load(workspace_handle.clone(), prompt_builder, cx.clone()); + + let ( + project_panel, + outline_panel, + terminal_panel, + channels_panel, + chat_panel, + notification_panel, + assistant_panel, + ) = futures::try_join!( + project_panel, + outline_panel, + terminal_panel, + channels_panel, + chat_panel, + notification_panel, + assistant_panel, + )?; + + workspace_handle.update(&mut cx, |workspace, cx| { + workspace.add_panel(project_panel, cx); + workspace.add_panel(outline_panel, cx); + workspace.add_panel(terminal_panel, cx); + workspace.add_panel(channels_panel, cx); + workspace.add_panel(chat_panel, cx); + workspace.add_panel(notification_panel, cx); + workspace.add_panel(assistant_panel, cx) + })?; + + 
let git_ui_enabled = { + let mut git_ui_feature_flag = git_ui_feature_flag.fuse(); + let mut timeout = + FutureExt::fuse(smol::Timer::after(std::time::Duration::from_secs(5))); + + select_biased! { + is_git_ui_enabled = git_ui_feature_flag => is_git_ui_enabled, + _ = timeout => false, + } + }; + let git_panel = if git_ui_enabled { + Some(git_ui::git_panel::GitPanel::load(workspace_handle.clone(), cx.clone()).await?) + } else { + None + }; + workspace_handle.update(&mut cx, |workspace, cx| { + if let Some(git_panel) = git_panel { + workspace.add_panel(git_panel, cx); + } + })?; + + let is_assistant2_enabled = if cfg!(test) { + false + } else { + let mut assistant2_feature_flag = assistant2_feature_flag.fuse(); + let mut timeout = + FutureExt::fuse(smol::Timer::after(std::time::Duration::from_secs(5))); + + select_biased! { + is_assistant2_enabled = assistant2_feature_flag => is_assistant2_enabled, + _ = timeout => false, + } + }; + let assistant2_panel = if is_assistant2_enabled { + Some(assistant2::AssistantPanel::load(workspace_handle.clone(), cx.clone()).await?) 
+ } else { + None + }; + workspace_handle.update(&mut cx, |workspace, cx| { + if let Some(assistant2_panel) = assistant2_panel { + workspace.add_panel(assistant2_panel, cx); + } + + if is_assistant2_enabled { + workspace.register_action(assistant2::InlineAssistant::inline_assist); + } else { + workspace.register_action(assistant::AssistantPanel::inline_assist); + } + })?; + + anyhow::Ok(()) + }) + .detach(); +} + +fn register_actions( + app_state: Arc, + workspace: &mut Workspace, + cx: &mut ViewContext, +) { + workspace + .register_action(about) + .register_action(|_, _: &Minimize, cx| { + cx.minimize_window(); + }) + .register_action(|_, _: &Zoom, cx| { + cx.zoom_window(); + }) + .register_action(|_, _: &ToggleFullScreen, cx| { + cx.toggle_fullscreen(); + }) + .register_action(|_, action: &OpenZedUrl, cx| { + OpenListener::global(cx).open_urls(vec![action.url.clone()]) + }) + .register_action(|_, action: &OpenBrowser, cx| cx.open_url(&action.url)) + .register_action(move |_, _: &zed_actions::IncreaseBufferFontSize, cx| { + theme::adjust_buffer_font_size(cx, |size| *size += px(1.0)) + }) + .register_action(|workspace, _: &workspace::Open, cx| { + workspace + .client() + .telemetry() + .report_app_event("open project".to_string()); + let paths = workspace.prompt_for_open_path( + PathPromptOptions { + files: true, + directories: true, + multiple: true, + }, + DirectoryLister::Project(workspace.project().clone()), + cx, + ); + + cx.spawn(|this, mut cx| async move { + let Some(paths) = paths.await.log_err().flatten() else { + return; + }; + + if let Some(task) = this + .update(&mut cx, |this, cx| { + if this.project().read(cx).is_local() { + this.open_workspace_for_paths(false, paths, cx) + } else { + open_new_ssh_project_from_project(this, paths, cx) + } + }) + .log_err() + { + task.await.log_err(); + } + }) + .detach() + }) + .register_action(move |_, _: &zed_actions::DecreaseBufferFontSize, cx| { + theme::adjust_buffer_font_size(cx, |size| *size -= px(1.0)) + }) + 
.register_action(move |_, _: &zed_actions::ResetBufferFontSize, cx| { + theme::reset_buffer_font_size(cx) + }) + .register_action(move |_, _: &zed_actions::IncreaseUiFontSize, cx| { + theme::adjust_ui_font_size(cx, |size| *size += px(1.0)) + }) + .register_action(move |_, _: &zed_actions::DecreaseUiFontSize, cx| { + theme::adjust_ui_font_size(cx, |size| *size -= px(1.0)) + }) + .register_action(move |_, _: &zed_actions::ResetUiFontSize, cx| { + theme::reset_ui_font_size(cx) + }) + .register_action(move |_, _: &zed_actions::IncreaseBufferFontSize, cx| { + theme::adjust_buffer_font_size(cx, |size| *size += px(1.0)) + }) + .register_action(move |_, _: &zed_actions::DecreaseBufferFontSize, cx| { + theme::adjust_buffer_font_size(cx, |size| *size -= px(1.0)) + }) + .register_action(move |_, _: &zed_actions::ResetBufferFontSize, cx| { + theme::reset_buffer_font_size(cx) + }) + .register_action(install_cli) + .register_action(|_, _: &install_cli::RegisterZedScheme, cx| { + cx.spawn(|workspace, mut cx| async move { + register_zed_scheme(&cx).await?; + workspace.update(&mut cx, |workspace, cx| { + struct RegisterZedScheme; + + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + format!( + "zed:// links will now open in {}.", + ReleaseChannel::global(cx).display_name() + ), + ), + cx, + ) + })?; + Ok(()) + }) + .detach_and_prompt_err("Error registering zed:// scheme", cx, |_, _| None); + }) + .register_action(|workspace, _: &OpenLog, cx| { + open_log_file(workspace, cx); + }) + .register_action(|workspace, _: &zed_actions::OpenLicenses, cx| { + open_bundled_file( + workspace, + asset_str::("licenses.md"), + "Open Source License Attribution", + "Markdown", + cx, + ); + }) + .register_action( + move |workspace: &mut Workspace, + _: &zed_actions::OpenTelemetryLog, + cx: &mut ViewContext| { + open_telemetry_log_file(workspace, cx); + }, + ) + .register_action( + move |_: &mut Workspace, + _: &zed_actions::OpenKeymap, + cx: &mut ViewContext| { + open_settings_file( 
+ paths::keymap_file(), + || settings::initial_keymap_content().as_ref().into(), + cx, + ); + }, + ) + .register_action( + move |_: &mut Workspace, _: &OpenSettings, cx: &mut ViewContext| { + open_settings_file( + paths::settings_file(), + || settings::initial_user_settings_content().as_ref().into(), + cx, + ); + }, + ) + .register_action( + |_: &mut Workspace, _: &OpenAccountSettings, cx: &mut ViewContext| { + cx.open_url(&zed_urls::account_url(cx)); + }, + ) + .register_action( + move |_: &mut Workspace, _: &OpenTasks, cx: &mut ViewContext| { + open_settings_file( + paths::tasks_file(), + || settings::initial_tasks_content().as_ref().into(), + cx, + ); + }, + ) + .register_action(open_project_settings_file) + .register_action(open_project_tasks_file) + .register_action( + move |workspace: &mut Workspace, + _: &zed_actions::OpenDefaultKeymap, + cx: &mut ViewContext| { + open_bundled_file( + workspace, + settings::default_keymap(), + "Default Key Bindings", + "JSON", + cx, + ); + }, + ) + .register_action( + move |workspace: &mut Workspace, + _: &OpenDefaultSettings, + cx: &mut ViewContext| { + open_bundled_file( + workspace, + settings::default_settings(), + "Default Settings", + "JSON", + cx, + ); + }, + ) + .register_action( + |workspace: &mut Workspace, + _: &project_panel::ToggleFocus, + cx: &mut ViewContext| { + workspace.toggle_panel_focus::(cx); + }, + ) + .register_action( + |workspace: &mut Workspace, + _: &outline_panel::ToggleFocus, + cx: &mut ViewContext| { + workspace.toggle_panel_focus::(cx); + }, + ) + .register_action( + |workspace: &mut Workspace, + _: &collab_ui::collab_panel::ToggleFocus, + cx: &mut ViewContext| { + workspace.toggle_panel_focus::(cx); + }, + ) + .register_action( + |workspace: &mut Workspace, + _: &collab_ui::chat_panel::ToggleFocus, + cx: &mut ViewContext| { + workspace.toggle_panel_focus::(cx); + }, + ) + .register_action( + |workspace: &mut Workspace, + _: &collab_ui::notification_panel::ToggleFocus, + cx: &mut ViewContext| { 
+ workspace + .toggle_panel_focus::(cx); + }, + ) + .register_action( + |workspace: &mut Workspace, + _: &terminal_panel::ToggleFocus, + cx: &mut ViewContext| { + workspace.toggle_panel_focus::(cx); + }, + ) + .register_action({ + let app_state = Arc::downgrade(&app_state); + move |_, _: &NewWindow, cx| { + if let Some(app_state) = app_state.upgrade() { + open_new(Default::default(), app_state, cx, |workspace, cx| { + Editor::new_file(workspace, &Default::default(), cx) + }) + .detach(); + } + } + }) + .register_action({ + let app_state = Arc::downgrade(&app_state); + move |_, _: &NewFile, cx| { + if let Some(app_state) = app_state.upgrade() { + open_new(Default::default(), app_state, cx, |workspace, cx| { + Editor::new_file(workspace, &Default::default(), cx) + }) + .detach(); + } + } + }); + if workspace.project().read(cx).is_via_ssh() { + workspace.register_action({ + move |workspace, _: &OpenServerSettings, cx| { + let open_server_settings = workspace + .project() + .update(cx, |project, cx| project.open_server_settings(cx)); + + cx.spawn(|workspace, mut cx| async move { + let buffer = open_server_settings.await?; + + workspace + .update(&mut cx, |workspace, cx| { + workspace.open_path( + buffer + .read(cx) + .project_path(cx) + .expect("Settings file must have a location"), + None, + true, + cx, + ) + })? 
+ .await?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + }); + } +} + fn initialize_pane(workspace: &Workspace, pane: &View, cx: &mut ViewContext) { pane.update(cx, |pane, cx| { pane.toolbar().update(cx, |toolbar, cx| { @@ -638,7 +744,7 @@ fn initialize_pane(workspace: &Workspace, pane: &View, cx: &mut ViewContex }); } -fn about(_: &mut Workspace, _: &zed_actions::About, cx: &mut gpui::ViewContext) { +fn about(_: &mut Workspace, _: &zed_actions::About, cx: &mut ViewContext) { let release_channel = ReleaseChannel::global(cx).display_name(); let version = env!("CARGO_PKG_VERSION"); let message = format!("{release_channel} {version}"); @@ -656,6 +762,45 @@ fn test_panic(_: &TestPanic, _: &mut AppContext) { panic!("Ran the TestPanic action") } +fn install_cli(_: &mut Workspace, _: &install_cli::Install, cx: &mut ViewContext) { + const LINUX_PROMPT_DETAIL: &str = "If you installed Zed from our official release add ~/.local/bin to your PATH.\n\nIf you installed Zed from a different source like your package manager, then you may need to create an alias/symlink manually.\n\nDepending on your package manager, the CLI might be named zeditor, zedit, zed-editor or something else."; + + cx.spawn(|workspace, mut cx| async move { + if cfg!(any(target_os = "linux", target_os = "freebsd")) { + let prompt = cx.prompt( + PromptLevel::Warning, + "CLI should already be installed", + Some(LINUX_PROMPT_DETAIL), + &["Ok"], + ); + cx.background_executor().spawn(prompt).detach(); + return Ok(()); + } + let path = install_cli::install_cli(cx.deref()) + .await + .context("error creating CLI symlink")?; + + workspace.update(&mut cx, |workspace, cx| { + struct InstalledZedCli; + + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + format!( + "Installed `zed` to {}. 
You can launch {} from your terminal.", + path.to_string_lossy(), + ReleaseChannel::global(cx).display_name() + ), + ), + cx, + ) + })?; + register_zed_scheme(&cx).await.log_err(); + Ok(()) + }) + .detach_and_prompt_err("Error installing zed cli", cx, |_, _| None); +} + fn quit(_: &Quit, cx: &mut AppContext) { let should_confirm = WorkspaceSettings::get_global(cx).confirm_quit; cx.spawn(|mut cx| async move { @@ -823,8 +968,13 @@ pub fn handle_keymap_file_changes( }) .detach(); - cx.on_keyboard_layout_change(move |_| { - keyboard_layout_tx.unbounded_send(()).ok(); + let mut current_mapping = settings::get_key_equivalents(cx.keyboard_layout()); + cx.on_keyboard_layout_change(move |cx| { + let next_mapping = settings::get_key_equivalents(cx.keyboard_layout()); + if next_mapping != current_mapping { + current_mapping = next_mapping; + keyboard_layout_tx.unbounded_send(()).ok(); + } }) .detach(); @@ -3161,12 +3311,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#" - { - "base_keymap": "Atom" - } - "# - .into(), + &r#"{"base_keymap": "Atom"}"#.into(), Default::default(), ) .await @@ -3176,16 +3321,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#" - [ - { - "bindings": { - "backspace": "test1::A" - } - } - ] - "# - .into(), + &r#"[{"bindings": {"backspace": "test1::A"}}]"#.into(), Default::default(), ) .await @@ -3228,16 +3364,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#" - [ - { - "bindings": { - "backspace": "test1::B" - } - } - ] - "# - .into(), + &r#"[{"bindings": {"backspace": "test1::B"}}]"#.into(), Default::default(), ) .await @@ -3257,12 +3384,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#" - { - "base_keymap": "JetBrains" - } - "# - .into(), + &r#"{"base_keymap": "JetBrains"}"#.into(), Default::default(), ) .await @@ -3289,24 +3411,20 @@ mod tests { // From the Atom keymap use workspace::ActivatePreviousPane; // From the JetBrains keymap - use pane::ActivatePrevItem; + use diagnostics::Deploy; + workspace 
.update(cx, |workspace, _| { - workspace - .register_action(|_, _: &A, _| {}) - .register_action(|_, _: &B, _| {}); + workspace.register_action(|_, _: &A, _cx| {}); + workspace.register_action(|_, _: &B, _cx| {}); + workspace.register_action(|_, _: &Deploy, _cx| {}); }) .unwrap(); app_state .fs .save( "/settings.json".as_ref(), - &r#" - { - "base_keymap": "Atom" - } - "# - .into(), + &r#"{"base_keymap": "Atom"}"#.into(), Default::default(), ) .await @@ -3315,16 +3433,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#" - [ - { - "bindings": { - "backspace": "test2::A" - } - } - ] - "# - .into(), + &r#"[{"bindings": {"backspace": "test2::A"}}]"#.into(), Default::default(), ) .await @@ -3362,16 +3471,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#" - [ - { - "bindings": { - "backspace": null - } - } - ] - "# - .into(), + &r#"[{"bindings": {"backspace": null}}]"#.into(), Default::default(), ) .await @@ -3391,12 +3491,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#" - { - "base_keymap": "JetBrains" - } - "# - .into(), + &r#"{"base_keymap": "JetBrains"}"#.into(), Default::default(), ) .await @@ -3404,12 +3499,7 @@ mod tests { cx.background_executor.run_until_parked(); - assert_key_bindings_for( - workspace.into(), - cx, - vec![("[", &ActivatePrevItem)], - line!(), - ); + assert_key_bindings_for(workspace.into(), cx, vec![("6", &Deploy)], line!()); } #[gpui::test] @@ -3478,6 +3568,7 @@ mod tests { app_state.languages.add(markdown_language()); + vim_mode_setting::init(cx); theme::init(theme::LoadThemes::JustBase, cx); audio::init((), cx); channel::init(&app_state.client, app_state.user_store.clone(), cx); @@ -3490,6 +3581,7 @@ mod tests { language::init(cx); editor::init(cx); collab_ui::init(&app_state, cx); + git_ui::init(cx); project_panel::init((), cx); outline_panel::init((), cx); terminal_view::init(cx); @@ -3498,7 +3590,8 @@ mod tests { app_state.client.http_client().clone(), cx, ); - language_model::init( + language_model::init(cx); + 
language_models::init( app_state.user_store.clone(), app_state.client.clone(), app_state.fs.clone(), @@ -3506,11 +3599,7 @@ mod tests { ); let prompt_builder = assistant::init(app_state.fs.clone(), app_state.client.clone(), false, cx); - repl::init( - app_state.fs.clone(), - app_state.client.telemetry().clone(), - cx, - ); + repl::init(app_state.fs.clone(), cx); repl::notebook::init(cx); tasks_ui::init(cx); initialize_workspace(app_state.clone(), prompt_builder, cx); diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index 5c01724ba7..b3817f5578 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -23,7 +23,10 @@ pub fn app_menus() -> Vec { zed_actions::OpenDefaultKeymap, ), MenuItem::action("Open Project Settings", super::OpenProjectSettings), - MenuItem::action("Select Theme...", theme_selector::Toggle::default()), + MenuItem::action( + "Select Theme...", + zed_actions::theme_selector::Toggle::default(), + ), ], }), MenuItem::separator(), @@ -32,11 +35,14 @@ pub fn app_menus() -> Vec { items: vec![], }), MenuItem::separator(), - MenuItem::action("Extensions", extensions_ui::Extensions), + MenuItem::action("Extensions", zed_actions::Extensions), MenuItem::action("Install CLI", install_cli::Install), MenuItem::separator(), + #[cfg(target_os = "macos")] MenuItem::action("Hide Zed", super::Hide), + #[cfg(target_os = "macos")] MenuItem::action("Hide Others", super::HideOthers), + #[cfg(target_os = "macos")] MenuItem::action("Show All", super::ShowAll), MenuItem::action("Quit", Quit), ], @@ -50,7 +56,7 @@ pub fn app_menus() -> Vec { MenuItem::action("Open…", workspace::Open), MenuItem::action( "Open Recent...", - recent_projects::OpenRecent { + zed_actions::OpenRecent { create_new_window: true, }, ), @@ -146,11 +152,14 @@ pub fn app_menus() -> Vec { MenuItem::action("Back", workspace::GoBack), MenuItem::action("Forward", workspace::GoForward), MenuItem::separator(), - MenuItem::action("Command Palette...", 
command_palette::Toggle), + MenuItem::action("Command Palette...", zed_actions::command_palette::Toggle), MenuItem::separator(), MenuItem::action("Go to File...", workspace::ToggleFileFinder::default()), // MenuItem::action("Go to Symbol in Project", project_symbols::Toggle), - MenuItem::action("Go to Symbol in Editor...", editor::actions::ToggleOutline), + MenuItem::action( + "Go to Symbol in Editor...", + zed_actions::outline::ToggleOutline, + ), MenuItem::action("Go to Line/Column...", editor::actions::ToggleGoToLine), MenuItem::separator(), MenuItem::action("Go to Definition", editor::actions::GoToDefinition), @@ -176,7 +185,7 @@ pub fn app_menus() -> Vec { MenuItem::action("View Telemetry", zed_actions::OpenTelemetryLog), MenuItem::action("View Dependency Licenses", zed_actions::OpenLicenses), MenuItem::action("Show Welcome", workspace::Welcome), - MenuItem::action("Give Feedback...", feedback::GiveFeedback), + MenuItem::action("Give Feedback...", zed_actions::feedback::GiveFeedback), MenuItem::separator(), MenuItem::action( "Documentation", diff --git a/crates/zed/src/zed/inline_completion_registry.rs b/crates/zed/src/zed/inline_completion_registry.rs index aa0707d851..a2a59dd45c 100644 --- a/crates/zed/src/zed/inline_completion_registry.rs +++ b/crates/zed/src/zed/inline_completion_registry.rs @@ -1,19 +1,20 @@ use std::{cell::RefCell, rc::Rc, sync::Arc}; -use client::telemetry::Telemetry; +use client::Client; use collections::HashMap; use copilot::{Copilot, CopilotCompletionProvider}; use editor::{Editor, EditorMode}; +use feature_flags::{FeatureFlagAppExt, ZetaFeatureFlag}; use gpui::{AnyWindowHandle, AppContext, Context, ViewContext, WeakView}; -use language::language_settings::all_language_settings; +use language::language_settings::{all_language_settings, InlineCompletionProvider}; use settings::SettingsStore; use supermaven::{Supermaven, SupermavenCompletionProvider}; -pub fn init(telemetry: Arc, cx: &mut AppContext) { +pub fn init(client: Arc, cx: 
&mut AppContext) { let editors: Rc, AnyWindowHandle>>> = Rc::default(); cx.observe_new_views({ let editors = editors.clone(); - let telemetry = telemetry.clone(); + let client = client.clone(); move |editor: &mut Editor, cx: &mut ViewContext| { if editor.mode() != EditorMode::Full { return; @@ -34,7 +35,7 @@ pub fn init(telemetry: Arc, cx: &mut AppContext) { .borrow_mut() .insert(editor_handle, cx.window_handle()); let provider = all_language_settings(None, cx).inline_completions.provider; - assign_inline_completion_provider(editor, provider, &telemetry, cx); + assign_inline_completion_provider(editor, provider, &client, cx); } }) .detach(); @@ -43,25 +44,61 @@ pub fn init(telemetry: Arc, cx: &mut AppContext) { for (editor, window) in editors.borrow().iter() { _ = window.update(cx, |_window, cx| { _ = editor.update(cx, |editor, cx| { - assign_inline_completion_provider(editor, provider, &telemetry, cx); + assign_inline_completion_provider(editor, provider, &client, cx); }) }); } - cx.observe_global::(move |cx| { - let new_provider = all_language_settings(None, cx).inline_completions.provider; - if new_provider != provider { - provider = new_provider; - for (editor, window) in editors.borrow().iter() { - _ = window.update(cx, |_window, cx| { - _ = editor.update(cx, |editor, cx| { - assign_inline_completion_provider(editor, provider, &telemetry, cx); - }) - }); + if cx.has_flag::() { + cx.on_action(clear_zeta_edit_history); + } + + cx.observe_flag::({ + let editors = editors.clone(); + let client = client.clone(); + move |active, cx| { + let provider = all_language_settings(None, cx).inline_completions.provider; + assign_inline_completion_providers(&editors, provider, &client, cx); + if active && !cx.is_action_available(&zeta::ClearHistory) { + cx.on_action(clear_zeta_edit_history); } } }) .detach(); + + cx.observe_global::({ + let editors = editors.clone(); + let client = client.clone(); + move |cx| { + let new_provider = all_language_settings(None, 
cx).inline_completions.provider; + if new_provider != provider { + provider = new_provider; + assign_inline_completion_providers(&editors, provider, &client, cx) + } + } + }) + .detach(); +} + +fn clear_zeta_edit_history(_: &zeta::ClearHistory, cx: &mut AppContext) { + if let Some(zeta) = zeta::Zeta::global(cx) { + zeta.update(cx, |zeta, _| zeta.clear_history()); + } +} + +fn assign_inline_completion_providers( + editors: &Rc, AnyWindowHandle>>>, + provider: InlineCompletionProvider, + client: &Arc, + cx: &mut AppContext, +) { + for (editor, window) in editors.borrow().iter() { + _ = window.update(cx, |_window, cx| { + _ = editor.update(cx, |editor, cx| { + assign_inline_completion_provider(editor, provider, &client, cx); + }) + }); + } } fn register_backward_compatible_actions(editor: &mut Editor, cx: &ViewContext) { @@ -103,7 +140,7 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &ViewContext, + client: &Arc, cx: &mut ViewContext, ) { match provider { @@ -117,17 +154,27 @@ fn assign_inline_completion_provider( }); } } - let provider = cx.new_model(|_| { - CopilotCompletionProvider::new(copilot).with_telemetry(telemetry.clone()) - }); + let provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); editor.set_inline_completion_provider(Some(provider), cx); } } language::language_settings::InlineCompletionProvider::Supermaven => { if let Some(supermaven) = Supermaven::global(cx) { - let provider = cx.new_model(|_| { - SupermavenCompletionProvider::new(supermaven).with_telemetry(telemetry.clone()) - }); + let provider = cx.new_model(|_| SupermavenCompletionProvider::new(supermaven)); + editor.set_inline_completion_provider(Some(provider), cx); + } + } + language::language_settings::InlineCompletionProvider::Zeta => { + if cx.has_flag::() || cfg!(debug_assertions) { + let zeta = zeta::Zeta::register(client.clone(), cx); + if let Some(buffer) = editor.buffer().read(cx).as_singleton() { + if buffer.read(cx).file().is_some() { + 
zeta.update(cx, |zeta, cx| { + zeta.register_buffer(&buffer, cx); + }); + } + } + let provider = cx.new_model(|_| zeta::ZetaInlineCompletionProvider::new(zeta)); editor.set_inline_completion_provider(Some(provider), cx); } } diff --git a/crates/zed/src/zed/linux_prompts.rs b/crates/zed/src/zed/linux_prompts.rs index 1961a5f9cd..aa262a11b9 100644 --- a/crates/zed/src/zed/linux_prompts.rs +++ b/crates/zed/src/zed/linux_prompts.rs @@ -1,13 +1,15 @@ use gpui::{ div, AppContext, EventEmitter, FocusHandle, FocusableView, FontWeight, InteractiveElement, - IntoElement, ParentElement, PromptHandle, PromptLevel, PromptResponse, Render, - RenderablePromptHandle, Styled, ViewContext, VisualContext, WindowContext, + IntoElement, ParentElement, PromptHandle, PromptLevel, PromptResponse, Refineable, Render, + RenderablePromptHandle, Styled, TextStyleRefinement, View, ViewContext, VisualContext, + WindowContext, }; +use markdown::{Markdown, MarkdownStyle}; use settings::Settings; use theme::ThemeSettings; use ui::{ - h_flex, v_flex, ButtonCommon, ButtonStyle, Clickable, ElevationIndex, FluentBuilder, LabelSize, - TintColor, + h_flex, v_flex, ActiveTheme, ButtonCommon, ButtonStyle, Clickable, ElevationIndex, + FluentBuilder, LabelSize, TintColor, }; use workspace::ui::StyledExt; @@ -28,10 +30,27 @@ pub fn fallback_prompt_renderer( |cx| FallbackPromptRenderer { _level: level, message: message.to_string(), - detail: detail.map(ToString::to_string), actions: actions.iter().map(ToString::to_string).collect(), focus: cx.focus_handle(), active_action_id: 0, + detail: detail.filter(|text| !text.is_empty()).map(|text| { + cx.new_view(|cx| { + let settings = ThemeSettings::get_global(cx); + let mut base_text_style = cx.text_style(); + base_text_style.refine(&TextStyleRefinement { + font_family: Some(settings.ui_font.family.clone()), + font_size: Some(settings.ui_font_size.into()), + color: Some(ui::Color::Muted.color(cx)), + ..Default::default() + }); + let markdown_style = MarkdownStyle { 
+ base_text_style, + selection_background_color: { cx.theme().players().local().selection }, + ..Default::default() + }; + Markdown::new(text.to_string(), markdown_style, None, None, cx) + }) + }), } }); @@ -42,10 +61,10 @@ pub fn fallback_prompt_renderer( pub struct FallbackPromptRenderer { _level: PromptLevel, message: String, - detail: Option, actions: Vec, focus: FocusHandle, active_action_id: usize, + detail: Option>, } impl FallbackPromptRenderer { @@ -111,13 +130,11 @@ impl Render for FallbackPromptRenderer { .child(self.message.clone()) .text_color(ui::Color::Default.color(cx)), ) - .children(self.detail.clone().map(|detail| { - div() - .w_full() - .text_xs() - .text_color(ui::Color::Muted.color(cx)) - .child(detail) - })) + .children( + self.detail + .clone() + .map(|detail| div().w_full().text_xs().child(detail)), + ) .child(h_flex().justify_end().gap_2().children( self.actions.iter().enumerate().rev().map(|(ix, action)| { ui::Button::new(ix, action.clone()) diff --git a/crates/quick_action_bar/src/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs similarity index 60% rename from crates/quick_action_bar/src/quick_action_bar.rs rename to crates/zed/src/zed/quick_action_bar.rs index 7849620093..48a0be9ef9 100644 --- a/crates/quick_action_bar/src/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -1,14 +1,16 @@ +mod markdown_preview; +mod repl_menu; + use assistant::assistant_settings::AssistantSettings; use assistant::AssistantPanel; use editor::actions::{ AddSelectionAbove, AddSelectionBelow, DuplicateLineDown, GoToDiagnostic, GoToHunk, GoToPrevDiagnostic, GoToPrevHunk, MoveLineDown, MoveLineUp, SelectAll, SelectLargerSyntaxNode, - SelectNext, SelectSmallerSyntaxNode, ToggleGoToLine, ToggleOutline, + SelectNext, SelectSmallerSyntaxNode, ToggleGoToLine, }; use editor::{Editor, EditorSettings}; - use gpui::{ - Action, AnchorCorner, ClickEvent, ElementId, EventEmitter, FocusHandle, FocusableView, + Action, ClickEvent, Corner, 
ElementId, EventEmitter, FocusHandle, FocusableView, InteractiveElement, ParentElement, Render, Styled, Subscription, View, ViewContext, WeakView, }; use search::{buffer_search, BufferSearchBar}; @@ -17,13 +19,11 @@ use ui::{ prelude::*, ButtonStyle, ContextMenu, IconButton, IconButtonShape, IconName, IconSize, PopoverMenu, PopoverMenuHandle, Tooltip, }; +use vim_mode_setting::VimModeSetting; use workspace::{ item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, }; -use zed_actions::InlineAssist; - -mod repl_menu; -mod toggle_markdown_preview; +use zed_actions::{outline::ToggleOutline, InlineAssist}; pub struct QuickActionBar { _inlay_hints_enabled_subscription: Option, @@ -92,21 +92,27 @@ impl Render for QuickActionBar { inlay_hints_enabled, supports_inlay_hints, git_blame_inline_enabled, + show_git_blame_gutter, auto_signature_help_enabled, + inline_completions_enabled, ) = { let editor = editor.read(cx); let selection_menu_enabled = editor.selection_menu_enabled(cx); let inlay_hints_enabled = editor.inlay_hints_enabled(); let supports_inlay_hints = editor.supports_inlay_hints(cx); let git_blame_inline_enabled = editor.git_blame_inline_enabled(); + let show_git_blame_gutter = editor.show_git_blame_gutter(); let auto_signature_help_enabled = editor.auto_signature_help_enabled(cx); + let inline_completions_enabled = editor.inline_completions_enabled(cx); ( selection_menu_enabled, inlay_hints_enabled, supports_inlay_hints, git_blame_inline_enabled, + show_git_blame_gutter, auto_signature_help_enabled, + inline_completions_enabled, ) }; @@ -152,19 +158,20 @@ impl Render for QuickActionBar { let editor_selections_dropdown = selection_menu_enabled.then(|| { let focus = editor.focus_handle(cx); + PopoverMenu::new("editor-selections-dropdown") .trigger( IconButton::new("toggle_editor_selections_icon", IconName::CursorIBeam) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) - 
.selected(self.toggle_selections_handle.is_deployed()) + .toggle_state(self.toggle_selections_handle.is_deployed()) .when(!self.toggle_selections_handle.is_deployed(), |this| { this.tooltip(|cx| Tooltip::text("Selection Controls", cx)) }), ) .with_handle(self.toggle_selections_handle.clone()) - .anchor(AnchorCorner::TopRight) + .anchor(Corner::TopRight) .menu(move |cx| { let focus = focus.clone(); let menu = ContextMenu::build(cx, move |menu, _| { @@ -199,34 +206,78 @@ impl Render for QuickActionBar { }); let editor = editor.downgrade(); - let editor_settings_dropdown = PopoverMenu::new("editor-settings") - .trigger( - IconButton::new("toggle_editor_settings_icon", IconName::Sliders) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .style(ButtonStyle::Subtle) - .selected(self.toggle_settings_handle.is_deployed()) - .when(!self.toggle_settings_handle.is_deployed(), |this| { - this.tooltip(|cx| Tooltip::text("Editor Controls", cx)) - }), - ) - .anchor(AnchorCorner::TopRight) - .with_handle(self.toggle_settings_handle.clone()) - .menu(move |cx| { - let menu = ContextMenu::build(cx, |mut menu, _| { - if supports_inlay_hints { + let editor_settings_dropdown = { + let vim_mode_enabled = VimModeSetting::get_global(cx).0; + + PopoverMenu::new("editor-settings") + .trigger( + IconButton::new("toggle_editor_settings_icon", IconName::Sliders) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .style(ButtonStyle::Subtle) + .toggle_state(self.toggle_settings_handle.is_deployed()) + .when(!self.toggle_settings_handle.is_deployed(), |this| { + this.tooltip(|cx| Tooltip::text("Editor Controls", cx)) + }), + ) + .anchor(Corner::TopRight) + .with_handle(self.toggle_settings_handle.clone()) + .menu(move |cx| { + let menu = ContextMenu::build(cx, |mut menu, _| { + if supports_inlay_hints { + menu = menu.toggleable_entry( + "Inlay Hints", + inlay_hints_enabled, + IconPosition::Start, + Some(editor::actions::ToggleInlayHints.boxed_clone()), + { + let 
editor = editor.clone(); + move |cx| { + editor + .update(cx, |editor, cx| { + editor.toggle_inlay_hints( + &editor::actions::ToggleInlayHints, + cx, + ); + }) + .ok(); + } + }, + ); + } + menu = menu.toggleable_entry( - "Inlay Hints", - inlay_hints_enabled, + "Selection Menu", + selection_menu_enabled, IconPosition::Start, - Some(editor::actions::ToggleInlayHints.boxed_clone()), + Some(editor::actions::ToggleSelectionMenu.boxed_clone()), { let editor = editor.clone(); move |cx| { editor .update(cx, |editor, cx| { - editor.toggle_inlay_hints( - &editor::actions::ToggleInlayHints, + editor.toggle_selection_menu( + &editor::actions::ToggleSelectionMenu, + cx, + ) + }) + .ok(); + } + }, + ); + + menu = menu.toggleable_entry( + "Auto Signature Help", + auto_signature_help_enabled, + IconPosition::Start, + Some(editor::actions::ToggleAutoSignatureHelp.boxed_clone()), + { + let editor = editor.clone(); + move |cx| { + editor + .update(cx, |editor, cx| { + editor.toggle_auto_signature_help_menu( + &editor::actions::ToggleAutoSignatureHelp, cx, ); }) @@ -234,76 +285,94 @@ impl Render for QuickActionBar { } }, ); - } - menu = menu.toggleable_entry( - "Inline Git Blame", - git_blame_inline_enabled, - IconPosition::Start, - Some(editor::actions::ToggleGitBlameInline.boxed_clone()), - { - let editor = editor.clone(); - move |cx| { - editor - .update(cx, |editor, cx| { - editor.toggle_git_blame_inline( - &editor::actions::ToggleGitBlameInline, - cx, - ) - }) - .ok(); - } - }, - ); + menu = menu.toggleable_entry( + "Inline Completions", + inline_completions_enabled, + IconPosition::Start, + Some(editor::actions::ToggleInlineCompletions.boxed_clone()), + { + let editor = editor.clone(); + move |cx| { + editor + .update(cx, |editor, cx| { + editor.toggle_inline_completions( + &editor::actions::ToggleInlineCompletions, + cx, + ); + }) + .ok(); + } + }, + ); - menu = menu.toggleable_entry( - "Selection Menu", - selection_menu_enabled, - IconPosition::Start, - 
Some(editor::actions::ToggleSelectionMenu.boxed_clone()), - { - let editor = editor.clone(); - move |cx| { - editor - .update(cx, |editor, cx| { - editor.toggle_selection_menu( - &editor::actions::ToggleSelectionMenu, - cx, - ) - }) - .ok(); - } - }, - ); + menu = menu.separator(); - menu = menu.toggleable_entry( - "Auto Signature Help", - auto_signature_help_enabled, - IconPosition::Start, - Some(editor::actions::ToggleAutoSignatureHelp.boxed_clone()), - { - let editor = editor.clone(); - move |cx| { - editor - .update(cx, |editor, cx| { - editor.toggle_auto_signature_help_menu( - &editor::actions::ToggleAutoSignatureHelp, - cx, - ); - }) - .ok(); - } - }, - ); + menu = menu.toggleable_entry( + "Inline Git Blame", + git_blame_inline_enabled, + IconPosition::Start, + Some(editor::actions::ToggleGitBlameInline.boxed_clone()), + { + let editor = editor.clone(); + move |cx| { + editor + .update(cx, |editor, cx| { + editor.toggle_git_blame_inline( + &editor::actions::ToggleGitBlameInline, + cx, + ) + }) + .ok(); + } + }, + ); - menu - }); - Some(menu) - }); + menu = menu.toggleable_entry( + "Column Git Blame", + show_git_blame_gutter, + IconPosition::Start, + Some(editor::actions::ToggleGitBlame.boxed_clone()), + { + let editor = editor.clone(); + move |cx| { + editor + .update(cx, |editor, cx| { + editor.toggle_git_blame( + &editor::actions::ToggleGitBlame, + cx, + ) + }) + .ok(); + } + }, + ); + + menu = menu.separator(); + + menu = menu.toggleable_entry( + "Vim Mode", + vim_mode_enabled, + IconPosition::Start, + None, + { + move |cx| { + let new_value = !vim_mode_enabled; + VimModeSetting::override_global(VimModeSetting(new_value), cx); + cx.refresh(); + } + }, + ); + + menu + }); + Some(menu) + }) + }; h_flex() .id("quick action bar") - .gap(DynamicSpacing::Base06.rems(cx)) + .gap(DynamicSpacing::Base04.rems(cx)) .children(self.render_repl_menu(cx)) .children(self.render_toggle_markdown_preview(self.workspace.clone(), cx)) .children(search_button) @@ -361,7 +430,7 
@@ impl RenderOnce for QuickActionBarButton { .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) - .selected(self.toggled) + .toggle_state(self.toggled) .tooltip(move |cx| { Tooltip::for_action_in(tooltip.clone(), &*action, &self.focus_handle, cx) }) diff --git a/crates/quick_action_bar/src/toggle_markdown_preview.rs b/crates/zed/src/zed/quick_action_bar/markdown_preview.rs similarity index 98% rename from crates/quick_action_bar/src/toggle_markdown_preview.rs rename to crates/zed/src/zed/quick_action_bar/markdown_preview.rs index 527da3a568..5162cb0644 100644 --- a/crates/quick_action_bar/src/toggle_markdown_preview.rs +++ b/crates/zed/src/zed/quick_action_bar/markdown_preview.rs @@ -5,7 +5,7 @@ use markdown_preview::{ use ui::{prelude::*, text_for_keystroke, IconButtonShape, Tooltip}; use workspace::Workspace; -use crate::QuickActionBar; +use super::QuickActionBar; impl QuickActionBar { pub fn render_toggle_markdown_preview( diff --git a/crates/quick_action_bar/src/repl_menu.rs b/crates/zed/src/zed/quick_action_bar/repl_menu.rs similarity index 98% rename from crates/quick_action_bar/src/repl_menu.rs rename to crates/zed/src/zed/quick_action_bar/repl_menu.rs index d2649d4180..35a1f15771 100644 --- a/crates/quick_action_bar/src/repl_menu.rs +++ b/crates/zed/src/zed/quick_action_bar/repl_menu.rs @@ -1,5 +1,6 @@ use std::time::Duration; +use gpui::ElementId; use gpui::{percentage, Animation, AnimationExt, AnyElement, Transformation, View}; use picker::Picker; use repl::{ @@ -11,11 +12,9 @@ use ui::{ prelude::*, ButtonLike, ContextMenu, IconWithIndicator, Indicator, IntoElement, PopoverMenu, PopoverMenuHandle, Tooltip, }; - -use gpui::ElementId; use util::ResultExt; -use crate::QuickActionBar; +use super::QuickActionBar; const ZED_REPL_DOCUMENTATION: &str = "https://zed.dev/docs/repl"; @@ -310,6 +309,7 @@ impl QuickActionBar { worktree_id, ButtonLike::new("kernel-selector") .style(ButtonStyle::Subtle) + .size(ButtonSize::Compact) 
.child( h_flex() .w_full() @@ -354,12 +354,14 @@ impl QuickActionBar { let tooltip: SharedString = SharedString::from(format!("Setup Zed REPL for {}", language)); Some( h_flex() + .gap(DynamicSpacing::Base06.rems(cx)) .child(self.render_kernel_selector(cx)) .child( IconButton::new("toggle_repl_icon", IconName::ReplNeutral) - .size(ButtonSize::Compact) - .icon_color(Color::Muted) .style(ButtonStyle::Subtle) + .shape(ui::IconButtonShape::Square) + .icon_size(ui::IconSize::Small) + .icon_color(Color::Muted) .tooltip(move |cx| Tooltip::text(tooltip.clone(), cx)) .on_click(|_, cx| { cx.open_url(&format!("{}#installation", ZED_REPL_DOCUMENTATION)) @@ -386,7 +388,7 @@ fn session_state(session: View, cx: &WindowContext) -> ReplMenuState { indicator: None, kernel_name: kernel_name.clone(), kernel_language: kernel_language.clone(), - // todo!(): Technically not shutdown, but indeterminate + // TODO: Technically not shutdown, but indeterminate status: KernelStatus::Shutdown, // current_delta: Duration::default(), } @@ -402,7 +404,7 @@ fn session_state(session: View, cx: &WindowContext) -> ReplMenuState { status: session.kernel.status(), ..fill_fields() }, - Kernel::RunningKernel(kernel) => match &kernel.execution_state { + Kernel::RunningKernel(kernel) => match &kernel.execution_state() { ExecutionState::Idle => ReplMenuState { tooltip: format!("Run code on {} ({})", kernel_name, kernel_language).into(), indicator: Some(Indicator::dot().color(Color::Success)), diff --git a/crates/zed_actions/Cargo.toml b/crates/zed_actions/Cargo.toml index ee279cde65..1bf26dc4f0 100644 --- a/crates/zed_actions/Cargo.toml +++ b/crates/zed_actions/Cargo.toml @@ -10,4 +10,5 @@ workspace = true [dependencies] gpui.workspace = true +schemars.workspace = true serde.workspace = true diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 7ea5c923c2..6b2691cf76 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -1,5 +1,6 @@ use gpui::{actions, 
impl_actions}; -use serde::Deserialize; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; // If the zed binary doesn't use anything in this crate, it will be optimized away // and the actions won't initialize. So we just provide an empty initialization function @@ -32,6 +33,7 @@ actions!( Quit, OpenKeymap, About, + Extensions, OpenLicenses, OpenTelemetryLog, DecreaseBufferFontSize, @@ -43,9 +45,119 @@ actions!( ] ); +pub mod branches { + use gpui::actions; + + actions!(branches, [OpenRecent]); +} + +pub mod command_palette { + use gpui::actions; + + actions!(command_palette, [Toggle]); +} + +pub mod feedback { + use gpui::actions; + + actions!(feedback, [GiveFeedback]); +} + +pub mod theme_selector { + use gpui::impl_actions; + use serde::Deserialize; + + #[derive(PartialEq, Clone, Default, Debug, Deserialize)] + pub struct Toggle { + /// A list of theme names to filter the theme selector down to. + pub themes_filter: Option>, + } + + impl_actions!(theme_selector, [Toggle]); +} + #[derive(Clone, Default, Deserialize, PartialEq)] pub struct InlineAssist { pub prompt: Option, } impl_actions!(assistant, [InlineAssist]); + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct OpenRecent { + #[serde(default)] + pub create_new_window: bool, +} +gpui::impl_actions!(projects, [OpenRecent]); +gpui::actions!(projects, [OpenRemote]); + +/// Where to spawn the task in the UI. +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum RevealTarget { + /// In the central pane group, "main" editor area. + Center, + /// In the terminal dock, "regular" terminal items' place. + #[default] + Dock, +} + +/// Spawn a task with name or open tasks modal +#[derive(Debug, PartialEq, Clone, Deserialize)] +#[serde(untagged)] +pub enum Spawn { + /// Spawns a task by the name given. 
+ ByName { + task_name: String, + #[serde(default)] + reveal_target: Option, + }, + /// Spawns a task via modal's selection. + ViaModal { + /// Selected task's `reveal_target` property override. + #[serde(default)] + reveal_target: Option, + }, +} + +impl Spawn { + pub fn modal() -> Self { + Self::ViaModal { + reveal_target: None, + } + } +} + +/// Rerun last task +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct Rerun { + /// Controls whether the task context is reevaluated prior to execution of a task. + /// If it is not, environment variables such as ZED_COLUMN, ZED_FILE are gonna be the same as in the last execution of a task + /// If it is, these variables will be updated to reflect current state of editor at the time task::Rerun is executed. + /// default: false + #[serde(default)] + pub reevaluate_context: bool, + /// Overrides `allow_concurrent_runs` property of the task being reran. + /// Default: null + #[serde(default)] + pub allow_concurrent_runs: Option, + /// Overrides `use_new_terminal` property of the task being reran. + /// Default: null + #[serde(default)] + pub use_new_terminal: Option, + + /// If present, rerun the task with this ID, otherwise rerun the last task. + pub task_id: Option, +} + +impl_actions!(task, [Spawn, Rerun]); + +pub mod outline { + use std::sync::OnceLock; + + use gpui::{action_as, AnyView, WindowContext}; + + action_as!(outline, ToggleOutline as Toggle); + /// A pointer to outline::toggle function, exposed here to sewer the breadcrumbs <-> outline dependency. 
+ pub static TOGGLE_OUTLINE: OnceLock)> = OnceLock::new(); +} diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml new file mode 100644 index 0000000000..5db6fddbbb --- /dev/null +++ b/crates/zeta/Cargo.toml @@ -0,0 +1,64 @@ +[package] +name = "zeta" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" +exclude = ["fixtures"] + +[lints] +workspace = true + +[lib] +path = "src/zeta.rs" +doctest = false + +[features] +test-support = [] + +[dependencies] +anyhow.workspace = true +arrayvec.workspace = true +client.workspace = true +collections.workspace = true +editor.workspace = true +futures.workspace = true +gpui.workspace = true +http_client.workspace = true +indoc.workspace = true +inline_completion.workspace = true +language.workspace = true +language_models.workspace = true +log.workspace = true +menu.workspace = true +rpc.workspace = true +serde_json.workspace = true +settings.workspace = true +similar.workspace = true +telemetry.workspace = true +telemetry_events.workspace = true +theme.workspace = true +ui.workspace = true +uuid.workspace = true +workspace.workspace = true + +[dev-dependencies] +collections = { workspace = true, features = ["test-support"] } +client = { workspace = true, features = ["test-support"] } +clock = { workspace = true, features = ["test-support"] } +ctor.workspace = true +editor = { workspace = true, features = ["test-support"] } +env_logger.workspace = true +gpui = { workspace = true, features = ["test-support"] } +http_client = { workspace = true, features = ["test-support"] } +indoc.workspace = true +language = { workspace = true, features = ["test-support"] } +reqwest_client = { workspace = true, features = ["test-support"] } +rpc = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +theme = { workspace = true, features = ["test-support"] } +tree-sitter-go.workspace = true +tree-sitter-rust.workspace = true +workspace = { 
workspace = true, features = ["test-support"] } +worktree = { workspace = true, features = ["test-support"] } +call = { workspace = true, features = ["test-support"] } diff --git a/crates/zeta/LICENSE-GPL b/crates/zeta/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/zeta/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/zeta/src/rate_completion_modal.rs b/crates/zeta/src/rate_completion_modal.rs new file mode 100644 index 0000000000..99e1febd9b --- /dev/null +++ b/crates/zeta/src/rate_completion_modal.rs @@ -0,0 +1,637 @@ +use crate::{InlineCompletion, InlineCompletionRating, Zeta}; +use editor::Editor; +use gpui::{ + actions, prelude::*, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, + HighlightStyle, Model, StyledText, TextStyle, View, ViewContext, +}; +use language::{language_settings, OffsetRangeExt}; +use settings::Settings; +use std::time::Duration; +use theme::ThemeSettings; +use ui::{prelude::*, KeyBinding, List, ListItem, ListItemSpacing, Tooltip}; +use workspace::{ModalView, Workspace}; + +actions!( + zeta, + [ + RateCompletions, + ThumbsUp, + ThumbsDown, + ThumbsUpActiveCompletion, + ThumbsDownActiveCompletion, + NextEdit, + PreviousEdit, + FocusCompletions, + PreviewCompletion, + ] +); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(move |workspace: &mut Workspace, _cx| { + workspace.register_action(|workspace, _: &RateCompletions, cx| { + RateCompletionModal::toggle(workspace, cx); + }); + }) + .detach(); +} + +pub struct RateCompletionModal { + zeta: Model, + active_completion: Option, + selected_index: usize, + focus_handle: FocusHandle, + _subscription: gpui::Subscription, +} + +struct ActiveCompletion { + completion: InlineCompletion, + feedback_editor: View, +} + +impl RateCompletionModal { + pub fn toggle(workspace: &mut Workspace, cx: &mut ViewContext) { + if let Some(zeta) = Zeta::global(cx) { + workspace.toggle_modal(cx, |cx| 
RateCompletionModal::new(zeta, cx)); + } + } + + pub fn new(zeta: Model, cx: &mut ViewContext) -> Self { + let subscription = cx.observe(&zeta, |_, _, cx| cx.notify()); + Self { + zeta, + selected_index: 0, + focus_handle: cx.focus_handle(), + active_completion: None, + _subscription: subscription, + } + } + + fn dismiss(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { + cx.emit(DismissEvent); + } + + fn select_next(&mut self, _: &menu::SelectNext, cx: &mut ViewContext) { + self.selected_index += 1; + self.selected_index = usize::min( + self.selected_index, + self.zeta.read(cx).recent_completions().count(), + ); + cx.notify(); + } + + fn select_prev(&mut self, _: &menu::SelectPrev, cx: &mut ViewContext) { + self.selected_index = self.selected_index.saturating_sub(1); + cx.notify(); + } + + fn select_next_edit(&mut self, _: &NextEdit, cx: &mut ViewContext) { + let next_index = self + .zeta + .read(cx) + .recent_completions() + .skip(self.selected_index) + .enumerate() + .skip(1) // Skip straight to the next item + .find(|(_, completion)| !completion.edits.is_empty()) + .map(|(ix, _)| ix + self.selected_index); + + if let Some(next_index) = next_index { + self.selected_index = next_index; + cx.notify(); + } + } + + fn select_prev_edit(&mut self, _: &PreviousEdit, cx: &mut ViewContext) { + let zeta = self.zeta.read(cx); + let completions_len = zeta.recent_completions_len(); + + let prev_index = self + .zeta + .read(cx) + .recent_completions() + .rev() + .skip((completions_len - 1) - self.selected_index) + .enumerate() + .skip(1) // Skip straight to the previous item + .find(|(_, completion)| !completion.edits.is_empty()) + .map(|(ix, _)| self.selected_index - ix); + + if let Some(prev_index) = prev_index { + self.selected_index = prev_index; + cx.notify(); + } + cx.notify(); + } + + fn select_first(&mut self, _: &menu::SelectFirst, cx: &mut ViewContext) { + self.selected_index = 0; + cx.notify(); + } + + fn select_last(&mut self, _: &menu::SelectLast, cx: &mut 
ViewContext) { + self.selected_index = self.zeta.read(cx).recent_completions_len() - 1; + cx.notify(); + } + + fn thumbs_up(&mut self, _: &ThumbsUp, cx: &mut ViewContext) { + self.zeta.update(cx, |zeta, cx| { + let completion = zeta + .recent_completions() + .skip(self.selected_index) + .next() + .cloned(); + + if let Some(completion) = completion { + zeta.rate_completion( + &completion, + InlineCompletionRating::Positive, + "".to_string(), + cx, + ); + } + }); + self.select_next_edit(&Default::default(), cx); + cx.notify(); + } + + fn thumbs_up_active(&mut self, _: &ThumbsUpActiveCompletion, cx: &mut ViewContext) { + self.zeta.update(cx, |zeta, cx| { + if let Some(active) = &self.active_completion { + zeta.rate_completion( + &active.completion, + InlineCompletionRating::Positive, + active.feedback_editor.read(cx).text(cx), + cx, + ); + } + }); + + let current_completion = self + .active_completion + .as_ref() + .map(|completion| completion.completion.clone()); + self.select_completion(current_completion, false, cx); + self.select_next_edit(&Default::default(), cx); + self.confirm(&Default::default(), cx); + + cx.notify(); + } + + fn thumbs_down_active(&mut self, _: &ThumbsDownActiveCompletion, cx: &mut ViewContext) { + if let Some(active) = &self.active_completion { + if active.feedback_editor.read(cx).text(cx).is_empty() { + return; + } + + self.zeta.update(cx, |zeta, cx| { + zeta.rate_completion( + &active.completion, + InlineCompletionRating::Negative, + active.feedback_editor.read(cx).text(cx), + cx, + ); + }); + } + + let current_completion = self + .active_completion + .as_ref() + .map(|completion| completion.completion.clone()); + self.select_completion(current_completion, false, cx); + self.select_next_edit(&Default::default(), cx); + self.confirm(&Default::default(), cx); + + cx.notify(); + } + + fn focus_completions(&mut self, _: &FocusCompletions, cx: &mut ViewContext) { + cx.focus_self(); + cx.notify(); + } + + fn preview_completion(&mut self, _: 
&PreviewCompletion, cx: &mut ViewContext) { + let completion = self + .zeta + .read(cx) + .recent_completions() + .skip(self.selected_index) + .take(1) + .next() + .cloned(); + + self.select_completion(completion, false, cx); + } + + fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { + let completion = self + .zeta + .read(cx) + .recent_completions() + .skip(self.selected_index) + .take(1) + .next() + .cloned(); + + self.select_completion(completion, true, cx); + } + + pub fn select_completion( + &mut self, + completion: Option, + focus: bool, + cx: &mut ViewContext, + ) { + // Avoid resetting completion rating if it's already selected. + if let Some(completion) = completion.as_ref() { + self.selected_index = self + .zeta + .read(cx) + .recent_completions() + .enumerate() + .find(|(_, completion_b)| completion.id == completion_b.id) + .map(|(ix, _)| ix) + .unwrap_or(self.selected_index); + cx.notify(); + + if let Some(prev_completion) = self.active_completion.as_ref() { + if completion.id == prev_completion.completion.id { + if focus { + cx.focus_view(&prev_completion.feedback_editor); + } + return; + } + } + } + + self.active_completion = completion.map(|completion| ActiveCompletion { + completion, + feedback_editor: cx.new_view(|cx| { + let mut editor = Editor::multi_line(cx); + editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); + editor.set_show_line_numbers(false, cx); + editor.set_show_scrollbars(false, cx); + editor.set_show_git_diff_gutter(false, cx); + editor.set_show_code_actions(false, cx); + editor.set_show_runnables(false, cx); + editor.set_show_wrap_guides(false, cx); + editor.set_show_indent_guides(false, cx); + editor.set_show_inline_completions(Some(false), cx); + editor.set_placeholder_text("Add your feedback…", cx); + if focus { + cx.focus_self(); + } + editor + }), + }); + cx.notify(); + } + + fn render_active_completion(&mut self, cx: &mut ViewContext) -> Option { + let active_completion = 
self.active_completion.as_ref()?; + let completion_id = active_completion.completion.id; + + let mut diff = active_completion + .completion + .snapshot + .text_for_range(active_completion.completion.excerpt_range.clone()) + .collect::(); + + let mut delta = 0; + let mut diff_highlights = Vec::new(); + for (old_range, new_text) in active_completion.completion.edits.iter() { + let old_range = old_range.to_offset(&active_completion.completion.snapshot); + let old_start_in_text = + old_range.start - active_completion.completion.excerpt_range.start + delta; + let old_end_in_text = + old_range.end - active_completion.completion.excerpt_range.start + delta; + if old_start_in_text < old_end_in_text { + diff_highlights.push(( + old_start_in_text..old_end_in_text, + HighlightStyle { + background_color: Some(cx.theme().status().deleted_background), + strikethrough: Some(gpui::StrikethroughStyle { + thickness: px(1.), + color: Some(cx.theme().colors().text_muted), + }), + ..Default::default() + }, + )); + } + + if !new_text.is_empty() { + diff.insert_str(old_end_in_text, new_text); + diff_highlights.push(( + old_end_in_text..old_end_in_text + new_text.len(), + HighlightStyle { + background_color: Some(cx.theme().status().created_background), + ..Default::default() + }, + )); + delta += new_text.len(); + } + } + + let settings = ThemeSettings::get_global(cx).clone(); + let text_style = TextStyle { + color: cx.theme().colors().editor_foreground, + font_size: settings.buffer_font_size(cx).into(), + font_family: settings.buffer_font.family, + font_features: settings.buffer_font.features, + font_fallbacks: settings.buffer_font.fallbacks, + line_height: relative(settings.buffer_line_height.value()), + font_weight: settings.buffer_font.weight, + font_style: settings.buffer_font.style, + ..Default::default() + }; + + let rated = self.zeta.read(cx).is_completion_rated(completion_id); + let was_shown = self.zeta.read(cx).was_completion_shown(completion_id); + let feedback_empty = 
active_completion + .feedback_editor + .read(cx) + .text(cx) + .is_empty(); + + let border_color = cx.theme().colors().border; + let bg_color = cx.theme().colors().editor_background; + + let label_container = || h_flex().pl_1().gap_1p5(); + + Some( + v_flex() + .size_full() + .overflow_hidden() + .child( + div() + .id("diff") + .py_4() + .px_6() + .size_full() + .bg(bg_color) + .overflow_scroll() + .child(StyledText::new(diff).with_highlights(&text_style, diff_highlights)), + ) + .when_some((!rated).then(|| ()), |this, _| { + this.child( + h_flex() + .p_2() + .gap_2() + .border_y_1() + .border_color(border_color) + + .child( + Icon::new(IconName::Info) + .size(IconSize::XSmall) + .color(Color::Muted) + ) + .child( + div() + .w_full() + .pr_2() + .flex_wrap() + .child( + Label::new("Ensure you explain why this completion is negative or positive. In case it's negative, report what you expected instead.") + .size(LabelSize::Small) + .color(Color::Muted) + ) + ) + ) + }) + .when_some((!rated).then(|| ()), |this, _| { + this.child( + div() + .h_40() + .pt_1() + .bg(bg_color) + .child(active_completion.feedback_editor.clone()) + ) + }) + .child( + h_flex() + .p_1() + .h_8() + .max_h_8() + .border_t_1() + .border_color(border_color) + .max_w_full() + .justify_between() + .children(if rated { + Some( + label_container() + .child( + Icon::new(IconName::Check) + .size(IconSize::Small) + .color(Color::Success), + ) + .child(Label::new("Rated completion.").color(Color::Muted)), + ) + } else if active_completion.completion.edits.is_empty() { + Some( + label_container() + .child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child(Label::new("No edits produced.").color(Color::Muted)), + ) + } else if !was_shown { + Some( + label_container() + .child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child(Label::new("Completion wasn't shown because another valid one was already on screen.")), + ) + 
} else { + Some(label_container()) + }) + .child( + h_flex() + .gap_1() + .child( + Button::new("bad", "Bad Completion") + .key_binding(KeyBinding::for_action_in( + &ThumbsDown, + &self.focus_handle(cx), + cx, + )) + .style(ButtonStyle::Filled) + .icon(IconName::ThumbsDown) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .disabled(rated || feedback_empty) + .when(feedback_empty, |this| { + this.tooltip(|cx| { + Tooltip::text("Explain what's bad about it before reporting it", cx) + }) + }) + .on_click(cx.listener(move |this, _, cx| { + this.thumbs_down_active( + &ThumbsDownActiveCompletion, + cx, + ); + })), + ) + .child( + Button::new("good", "Good Completion") + .key_binding(KeyBinding::for_action_in( + &ThumbsUp, + &self.focus_handle(cx), + cx, + )) + .style(ButtonStyle::Filled) + .icon(IconName::ThumbsUp) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .disabled(rated) + .on_click(cx.listener(move |this, _, cx| { + this.thumbs_up_active(&ThumbsUpActiveCompletion, cx); + })), + ), + ), + ), + ) + } +} + +impl Render for RateCompletionModal { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let border_color = cx.theme().colors().border; + + h_flex() + .key_context("RateCompletionModal") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::dismiss)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::select_prev)) + .on_action(cx.listener(Self::select_prev_edit)) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_next_edit)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::thumbs_up)) + .on_action(cx.listener(Self::thumbs_up_active)) + .on_action(cx.listener(Self::thumbs_down_active)) + .on_action(cx.listener(Self::focus_completions)) + .on_action(cx.listener(Self::preview_completion)) + .bg(cx.theme().colors().elevated_surface_background) + .border_1() + 
.border_color(border_color) + .w(cx.viewport_size().width - px(320.)) + .h(cx.viewport_size().height - px(300.)) + .rounded_lg() + .shadow_lg() + .child( + v_flex() + .border_r_1() + .border_color(border_color) + .w_96() + .h_full() + .flex_shrink_0() + .overflow_hidden() + .child( + h_flex() + .px_2() + .py_1() + .justify_between() + .border_b_1() + .border_color(border_color) + .child( + Icon::new(IconName::ZedPredict) + .size(IconSize::Small) + ) + .child( + Label::new("From most recent to oldest") + .color(Color::Muted) + .size(LabelSize::Small), + ) + ) + .child( + div() + .id("completion_list") + .p_0p5() + .h_full() + .overflow_y_scroll() + .child( + List::new() + .empty_message( + div() + .p_2() + .child( + Label::new("No completions yet. Use the editor to generate some and rate them!") + .color(Color::Muted), + ) + .into_any_element(), + ) + .children(self.zeta.read(cx).recent_completions().cloned().enumerate().map( + |(index, completion)| { + let selected = + self.active_completion.as_ref().map_or(false, |selected| { + selected.completion.id == completion.id + }); + let rated = + self.zeta.read(cx).is_completion_rated(completion.id); + + ListItem::new(completion.id) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .focused(index == self.selected_index) + .toggle_state(selected) + .start_slot(if rated { + Icon::new(IconName::Check).color(Color::Success).size(IconSize::Small) + } else if completion.edits.is_empty() { + Icon::new(IconName::File).color(Color::Muted).size(IconSize::Small) + } else { + Icon::new(IconName::FileDiff).color(Color::Accent).size(IconSize::Small) + }) + .child( + v_flex() + .pl_1p5() + .child(Label::new(completion.path.to_string_lossy().to_string()).size(LabelSize::Small)) + .child(Label::new(format!("{} ago, {:.2?}", format_time_ago(completion.response_received_at.elapsed()), completion.latency())) + .color(Color::Muted) + .size(LabelSize::XSmall) + ) + ) + .on_click(cx.listener(move |this, _, cx| { + 
/// Render an elapsed duration as a coarse human-readable age, e.g. "1 minute",
/// "3 hours", "2 days". Anything under two minutes is reported as "1 minute".
fn format_time_ago(elapsed: Duration) -> String {
    let seconds = elapsed.as_secs();

    // Pick the largest unit that has fully elapsed, falling back to minutes
    // for very recent times.
    let (unit_secs, name) = if seconds >= 86400 {
        (86400, "day")
    } else if seconds >= 3600 {
        (3600, "hour")
    } else {
        (60, "minute")
    };

    // `.max(1)` keeps sub-minute ages at "1 minute" instead of "0 minutes".
    let count = (seconds / unit_secs).max(1);
    if count == 1 {
        format!("1 {name}")
    } else {
        format!("{count} {name}s")
    }
}
START_OF_FILE_MARKER: &'static str = "<|start_of_file|>"; +const EDITABLE_REGION_START_MARKER: &'static str = "<|editable_region_start|>"; +const EDITABLE_REGION_END_MARKER: &'static str = "<|editable_region_end|>"; +const BUFFER_CHANGE_GROUPING_INTERVAL: Duration = Duration::from_secs(1); + +actions!(zeta, [ClearHistory]); + +#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)] +pub struct InlineCompletionId(Uuid); + +impl From for gpui::ElementId { + fn from(value: InlineCompletionId) -> Self { + gpui::ElementId::Uuid(value.0) + } +} + +impl std::fmt::Display for InlineCompletionId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl InlineCompletionId { + fn new() -> Self { + Self(Uuid::new_v4()) + } +} + +#[derive(Clone)] +struct ZetaGlobal(Model); + +impl Global for ZetaGlobal {} + +#[derive(Clone)] +pub struct InlineCompletion { + id: InlineCompletionId, + path: Arc, + excerpt_range: Range, + edits: Arc<[(Range, String)]>, + snapshot: BufferSnapshot, + input_outline: Arc, + input_events: Arc, + input_excerpt: Arc, + output_excerpt: Arc, + request_sent_at: Instant, + response_received_at: Instant, +} + +impl InlineCompletion { + fn latency(&self) -> Duration { + self.response_received_at + .duration_since(self.request_sent_at) + } + + fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option, String)>> { + let mut edits = Vec::new(); + + let mut user_edits = new_snapshot + .edits_since::(&self.snapshot.version) + .peekable(); + for (model_old_range, model_new_text) in self.edits.iter() { + let model_offset_range = model_old_range.to_offset(&self.snapshot); + while let Some(next_user_edit) = user_edits.peek() { + if next_user_edit.old.end < model_offset_range.start { + user_edits.next(); + } else { + break; + } + } + + if let Some(user_edit) = user_edits.peek() { + if user_edit.old.start > model_offset_range.end { + edits.push((model_old_range.clone(), model_new_text.clone())); + } else if 
user_edit.old == model_offset_range { + let user_new_text = new_snapshot + .text_for_range(user_edit.new.clone()) + .collect::(); + + if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) { + if !model_suffix.is_empty() { + edits.push(( + new_snapshot.anchor_after(user_edit.new.end) + ..new_snapshot.anchor_before(user_edit.new.end), + model_suffix.into(), + )); + } + + user_edits.next(); + } else { + return None; + } + } else { + return None; + } + } else { + edits.push((model_old_range.clone(), model_new_text.clone())); + } + } + + if edits.is_empty() { + None + } else { + Some(edits) + } + } +} + +impl std::fmt::Debug for InlineCompletion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("InlineCompletion") + .field("id", &self.id) + .field("path", &self.path) + .field("edits", &self.edits) + .finish_non_exhaustive() + } +} + +pub struct Zeta { + client: Arc, + events: VecDeque, + registered_buffers: HashMap, + recent_completions: VecDeque, + rated_completions: HashSet, + shown_completions: HashSet, + llm_token: LlmApiToken, + _llm_token_subscription: Subscription, +} + +impl Zeta { + pub fn global(cx: &mut AppContext) -> Option> { + cx.try_global::().map(|global| global.0.clone()) + } + + pub fn register(client: Arc, cx: &mut AppContext) -> Model { + Self::global(cx).unwrap_or_else(|| { + let model = cx.new_model(|cx| Self::new(client, cx)); + cx.set_global(ZetaGlobal(model.clone())); + model + }) + } + + pub fn clear_history(&mut self) { + self.events.clear(); + } + + fn new(client: Arc, cx: &mut ModelContext) -> Self { + let refresh_llm_token_listener = language_models::RefreshLlmTokenListener::global(cx); + + Self { + client, + events: VecDeque::new(), + recent_completions: VecDeque::new(), + rated_completions: HashSet::default(), + shown_completions: HashSet::default(), + registered_buffers: HashMap::default(), + llm_token: LlmApiToken::default(), + _llm_token_subscription: cx.subscribe( + 
    /// Record a buffer-change event, coalescing rapid consecutive edits to the
    /// same buffer and bounding the history length.
    fn push_event(&mut self, event: Event) {
        // Upper bound on the edit-history window kept for prediction requests.
        const MAX_EVENT_COUNT: usize = 20;

        if let Some(Event::BufferChange {
            new_snapshot: last_new_snapshot,
            timestamp: last_timestamp,
            ..
        }) = self.events.back_mut()
        {
            // Coalesce edits for the same buffer when they happen one after the other.
            let Event::BufferChange {
                old_snapshot,
                new_snapshot,
                timestamp,
            } = &event;

            // Merge only if the new event directly continues the previous one:
            // close in time, same buffer, and starting from the exact version
            // the previous event ended at.
            if timestamp.duration_since(*last_timestamp) <= BUFFER_CHANGE_GROUPING_INTERVAL
                && old_snapshot.remote_id() == last_new_snapshot.remote_id()
                && old_snapshot.version == last_new_snapshot.version
            {
                *last_new_snapshot = new_snapshot.clone();
                *last_timestamp = *timestamp;
                return;
            }
        }

        self.events.push_back(event);
        if self.events.len() > MAX_EVENT_COUNT {
            self.events.pop_front();
        }
    }

    /// Start tracking `buffer` so its edits feed the event history. Idempotent:
    /// a buffer that is already registered is left untouched.
    pub fn register_buffer(&mut self, buffer: &Model<Buffer>, cx: &mut ModelContext<Self>) {
        let buffer_id = buffer.entity_id();
        let weak_buffer = buffer.downgrade();

        if let std::collections::hash_map::Entry::Vacant(entry) =
            self.registered_buffers.entry(buffer_id)
        {
            let snapshot = buffer.read(cx).snapshot();

            entry.insert(RegisteredBuffer {
                snapshot,
                _subscriptions: [
                    // Forward buffer events into our handler.
                    cx.subscribe(buffer, move |this, buffer, event, cx| {
                        this.handle_buffer_event(buffer, event, cx);
                    }),
                    // Drop our bookkeeping when the buffer is released.
                    cx.observe_release(buffer, move |this, _buffer, _cx| {
                        this.registered_buffers.remove(&weak_buffer.entity_id());
                    }),
                ],
            });
        };
    }

    /// React to buffer events; only `Edited` is relevant here — it feeds the
    /// change into the event history via `report_changes_for_buffer`.
    fn handle_buffer_event(
        &mut self,
        buffer: Model<Buffer>,
        event: &language::BufferEvent,
        cx: &mut ModelContext<Self>,
    ) {
        match event {
            language::BufferEvent::Edited => {
                self.report_changes_for_buffer(&buffer, cx);
            }
            _ => {}
        }
    }
&mut self, + buffer: &Model, + position: language::Anchor, + cx: &mut ModelContext, + perform_predict_edits: F, + ) -> Task> + where + F: FnOnce(Arc, LlmApiToken, PredictEditsParams) -> R + 'static, + R: Future> + Send + 'static, + { + let snapshot = self.report_changes_for_buffer(buffer, cx); + let point = position.to_point(&snapshot); + let offset = point.to_offset(&snapshot); + let excerpt_range = excerpt_range_for_position(point, &snapshot); + let events = self.events.clone(); + let path = snapshot + .file() + .map(|f| f.path().clone()) + .unwrap_or_else(|| Arc::from(Path::new("untitled"))); + + let client = self.client.clone(); + let llm_token = self.llm_token.clone(); + + cx.spawn(|this, mut cx| async move { + let request_sent_at = Instant::now(); + + let (input_events, input_excerpt, input_outline) = cx + .background_executor() + .spawn({ + let snapshot = snapshot.clone(); + let excerpt_range = excerpt_range.clone(); + async move { + let mut input_events = String::new(); + for event in events { + if !input_events.is_empty() { + input_events.push('\n'); + input_events.push('\n'); + } + input_events.push_str(&event.to_prompt()); + } + + let input_excerpt = prompt_for_excerpt(&snapshot, &excerpt_range, offset); + let input_outline = prompt_for_outline(&snapshot); + + (input_events, input_excerpt, input_outline) + } + }) + .await; + + log::debug!("Events:\n{}\nExcerpt:\n{}", input_events, input_excerpt); + + let body = PredictEditsParams { + input_events: input_events.clone(), + input_excerpt: input_excerpt.clone(), + outline: Some(input_outline.clone()), + }; + + let response = perform_predict_edits(client, llm_token, body).await?; + + let output_excerpt = response.output_excerpt; + log::debug!("completion response: {}", output_excerpt); + + let inline_completion = Self::process_completion_response( + output_excerpt, + &snapshot, + excerpt_range, + path, + input_outline, + input_events, + input_excerpt, + request_sent_at, + &cx, + ) + .await?; + + 
this.update(&mut cx, |this, cx| { + this.recent_completions + .push_front(inline_completion.clone()); + if this.recent_completions.len() > 50 { + let completion = this.recent_completions.pop_back().unwrap(); + this.shown_completions.remove(&completion.id); + this.rated_completions.remove(&completion.id); + } + cx.notify(); + })?; + + Ok(inline_completion) + }) + } + + // Generates several example completions of various states to fill the Zeta completion modal + #[cfg(any(test, feature = "test-support"))] + pub fn fill_with_fake_completions(&mut self, cx: &mut ModelContext) -> Task<()> { + let test_buffer_text = indoc::indoc! {r#"a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line + And maybe a short line + + Then a few lines + + and then another + "#}; + + let buffer = cx.new_model(|cx| Buffer::local(test_buffer_text, cx)); + let position = buffer.read(cx).anchor_before(Point::new(1, 0)); + + let completion_tasks = vec![ + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!("{EDITABLE_REGION_START_MARKER} +a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +[here's an edit] +And maybe a short line +Then a few lines +and then another +{EDITABLE_REGION_END_MARKER} + ", ), + }, + cx, + ), + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} +a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +And maybe a short line +[and another edit] +Then a few lines +and then another +{EDITABLE_REGION_END_MARKER} + "#), + }, + cx, + ), + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} +a 
longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +And maybe a short line + +Then a few lines + +and then another +{EDITABLE_REGION_END_MARKER} + "#), + }, + cx, + ), + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} +a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +And maybe a short line + +Then a few lines + +and then another +{EDITABLE_REGION_END_MARKER} + "#), + }, + cx, + ), + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} +a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +And maybe a short line +Then a few lines +[a third completion] +and then another +{EDITABLE_REGION_END_MARKER} + "#), + }, + cx, + ), + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} +a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +And maybe a short line +and then another +[fourth completion example] +{EDITABLE_REGION_END_MARKER} + "#), + }, + cx, + ), + self.fake_completion( + &buffer, + position, + PredictEditsResponse { + output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} +a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line +And maybe a short line +Then a few lines +and then another +[fifth and final completion] +{EDITABLE_REGION_END_MARKER} + "#), + }, + cx, + ), + ]; + + cx.spawn(|zeta, mut cx| async move { + for task in completion_tasks { + task.await.unwrap(); + } + + zeta.update(&mut cx, |zeta, _cx| { + zeta.recent_completions.get_mut(2).unwrap().edits = 
Arc::new([]); + zeta.recent_completions.get_mut(3).unwrap().edits = Arc::new([]); + }) + .ok(); + }) + } + + #[cfg(any(test, feature = "test-support"))] + pub fn fake_completion( + &mut self, + buffer: &Model, + position: language::Anchor, + response: PredictEditsResponse, + cx: &mut ModelContext, + ) -> Task> { + use std::future::ready; + + self.request_completion_impl(buffer, position, cx, |_, _, _| ready(Ok(response))) + } + + pub fn request_completion( + &mut self, + buffer: &Model, + position: language::Anchor, + cx: &mut ModelContext, + ) -> Task> { + self.request_completion_impl(buffer, position, cx, Self::perform_predict_edits) + } + + fn perform_predict_edits( + client: Arc, + llm_token: LlmApiToken, + body: PredictEditsParams, + ) -> impl Future> { + async move { + let http_client = client.http_client(); + let mut token = llm_token.acquire(&client).await?; + let mut did_retry = false; + + loop { + let request_builder = http_client::Request::builder(); + let request = request_builder + .method(Method::POST) + .uri( + http_client + .build_zed_llm_url("/predict_edits", &[])? 
+ .as_ref(), + ) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", token)) + .body(serde_json::to_string(&body)?.into())?; + + let mut response = http_client.send(request).await?; + + if response.status().is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + return Ok(serde_json::from_str(&body)?); + } else if !did_retry + && response + .headers() + .get(EXPIRED_LLM_TOKEN_HEADER_NAME) + .is_some() + { + did_retry = true; + token = llm_token.refresh(&client).await?; + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + return Err(anyhow!( + "error predicting edits.\nStatus: {:?}\nBody: {}", + response.status(), + body + )); + } + } + } + } + + #[allow(clippy::too_many_arguments)] + fn process_completion_response( + output_excerpt: String, + snapshot: &BufferSnapshot, + excerpt_range: Range, + path: Arc, + input_outline: String, + input_events: String, + input_excerpt: String, + request_sent_at: Instant, + cx: &AsyncAppContext, + ) -> Task> { + let snapshot = snapshot.clone(); + cx.background_executor().spawn(async move { + let content = output_excerpt.replace(CURSOR_MARKER, ""); + + let start_markers = content + .match_indices(EDITABLE_REGION_START_MARKER) + .collect::>(); + anyhow::ensure!( + start_markers.len() == 1, + "expected exactly one start marker, found {}", + start_markers.len() + ); + + let end_markers = content + .match_indices(EDITABLE_REGION_END_MARKER) + .collect::>(); + anyhow::ensure!( + end_markers.len() == 1, + "expected exactly one end marker, found {}", + end_markers.len() + ); + + let sof_markers = content + .match_indices(START_OF_FILE_MARKER) + .collect::>(); + anyhow::ensure!( + sof_markers.len() <= 1, + "expected at most one start-of-file marker, found {}", + sof_markers.len() + ); + + let codefence_start = start_markers[0].0; + let content = &content[codefence_start..]; + + let newline_ix = 
content.find('\n').context("could not find newline")?; + let content = &content[newline_ix + 1..]; + + let codefence_end = content + .rfind(&format!("\n{EDITABLE_REGION_END_MARKER}")) + .context("could not find end marker")?; + let new_text = &content[..codefence_end]; + + let old_text = snapshot + .text_for_range(excerpt_range.clone()) + .collect::(); + + let edits = Self::compute_edits(old_text, new_text, excerpt_range.start, &snapshot); + + Ok(InlineCompletion { + id: InlineCompletionId::new(), + path, + excerpt_range, + edits: edits.into(), + snapshot: snapshot.clone(), + input_outline: input_outline.into(), + input_events: input_events.into(), + input_excerpt: input_excerpt.into(), + output_excerpt: output_excerpt.into(), + request_sent_at, + response_received_at: Instant::now(), + }) + }) + } + + pub fn compute_edits( + old_text: String, + new_text: &str, + offset: usize, + snapshot: &BufferSnapshot, + ) -> Vec<(Range, String)> { + let diff = similar::TextDiff::from_words(old_text.as_str(), new_text); + + let mut edits: Vec<(Range, String)> = Vec::new(); + let mut old_start = offset; + for change in diff.iter_all_changes() { + let value = change.value(); + match change.tag() { + similar::ChangeTag::Equal => { + old_start += value.len(); + } + similar::ChangeTag::Delete => { + let old_end = old_start + value.len(); + if let Some((last_old_range, _)) = edits.last_mut() { + if last_old_range.end == old_start { + last_old_range.end = old_end; + } else { + edits.push((old_start..old_end, String::new())); + } + } else { + edits.push((old_start..old_end, String::new())); + } + old_start = old_end; + } + similar::ChangeTag::Insert => { + if let Some((last_old_range, last_new_text)) = edits.last_mut() { + if last_old_range.end == old_start { + last_new_text.push_str(value); + } else { + edits.push((old_start..old_start, value.into())); + } + } else { + edits.push((old_start..old_start, value.into())); + } + } + } + } + + edits + .into_iter() + .map(|(mut old_range, 
new_text)| { + let prefix_len = common_prefix( + snapshot.chars_for_range(old_range.clone()), + new_text.chars(), + ); + old_range.start += prefix_len; + let suffix_len = common_prefix( + snapshot.reversed_chars_for_range(old_range.clone()), + new_text[prefix_len..].chars().rev(), + ); + old_range.end = old_range.end.saturating_sub(suffix_len); + + let new_text = new_text[prefix_len..new_text.len() - suffix_len].to_string(); + ( + snapshot.anchor_after(old_range.start)..snapshot.anchor_before(old_range.end), + new_text, + ) + }) + .collect() + } + + pub fn is_completion_rated(&self, completion_id: InlineCompletionId) -> bool { + self.rated_completions.contains(&completion_id) + } + + pub fn was_completion_shown(&self, completion_id: InlineCompletionId) -> bool { + self.shown_completions.contains(&completion_id) + } + + pub fn completion_shown(&mut self, completion_id: InlineCompletionId) { + self.shown_completions.insert(completion_id); + } + + pub fn rate_completion( + &mut self, + completion: &InlineCompletion, + rating: InlineCompletionRating, + feedback: String, + cx: &mut ModelContext, + ) { + telemetry::event!( + "Inline Completion Rated", + rating, + input_events = completion.input_events, + input_excerpt = completion.input_excerpt, + input_outline = completion.input_outline, + output_excerpt = completion.output_excerpt, + feedback + ); + self.client.telemetry().flush_events(); + cx.notify(); + } + + pub fn recent_completions(&self) -> impl DoubleEndedIterator { + self.recent_completions.iter() + } + + pub fn recent_completions_len(&self) -> usize { + self.recent_completions.len() + } + + fn report_changes_for_buffer( + &mut self, + buffer: &Model, + cx: &mut ModelContext, + ) -> BufferSnapshot { + self.register_buffer(buffer, cx); + + let registered_buffer = self + .registered_buffers + .get_mut(&buffer.entity_id()) + .unwrap(); + let new_snapshot = buffer.read(cx).snapshot(); + + if new_snapshot.version != registered_buffer.snapshot.version { + let 
/// Byte length (UTF-8) of the longest shared prefix of two character streams.
fn common_prefix<T1: Iterator<Item = char>, T2: Iterator<Item = char>>(a: T1, b: T2) -> usize {
    let mut bytes = 0;
    for (ch_a, ch_b) in a.zip(b) {
        if ch_a != ch_b {
            break;
        }
        bytes += ch_a.len_utf8();
    }
    bytes
}
snapshot.text_for_range(offset..excerpt_range.end) { + prompt_excerpt.push_str(chunk); + } + write!(prompt_excerpt, "\n{EDITABLE_REGION_END_MARKER}").unwrap(); + + if point_range.end.row < snapshot.max_point().row + && !snapshot.is_line_blank(point_range.end.row + 1) + { + let extra_context_line_range = point_range.end + ..Point::new( + point_range.end.row + 1, + snapshot.line_len(point_range.end.row + 1), + ); + for chunk in snapshot.text_for_range(extra_context_line_range) { + prompt_excerpt.push_str(chunk); + } + } + + write!(prompt_excerpt, "\n```").unwrap(); + prompt_excerpt +} + +fn excerpt_range_for_position(point: Point, snapshot: &BufferSnapshot) -> Range { + const CONTEXT_LINES: u32 = 32; + + let mut context_lines_before = CONTEXT_LINES; + let mut context_lines_after = CONTEXT_LINES; + if point.row < CONTEXT_LINES { + context_lines_after += CONTEXT_LINES - point.row; + } else if point.row + CONTEXT_LINES > snapshot.max_point().row { + context_lines_before += (point.row + CONTEXT_LINES) - snapshot.max_point().row; + } + + let excerpt_start_row = point.row.saturating_sub(context_lines_before); + let excerpt_start = Point::new(excerpt_start_row, 0); + let excerpt_end_row = cmp::min(point.row + context_lines_after, snapshot.max_point().row); + let excerpt_end = Point::new(excerpt_end_row, snapshot.line_len(excerpt_end_row)); + excerpt_start.to_offset(snapshot)..excerpt_end.to_offset(snapshot) +} + +struct RegisteredBuffer { + snapshot: BufferSnapshot, + _subscriptions: [gpui::Subscription; 2], +} + +#[derive(Clone)] +enum Event { + BufferChange { + old_snapshot: BufferSnapshot, + new_snapshot: BufferSnapshot, + timestamp: Instant, + }, +} + +impl Event { + fn to_prompt(&self) -> String { + match self { + Event::BufferChange { + old_snapshot, + new_snapshot, + .. 
+ } => { + let mut prompt = String::new(); + + let old_path = old_snapshot + .file() + .map(|f| f.path().as_ref()) + .unwrap_or(Path::new("untitled")); + let new_path = new_snapshot + .file() + .map(|f| f.path().as_ref()) + .unwrap_or(Path::new("untitled")); + if old_path != new_path { + writeln!(prompt, "User renamed {:?} to {:?}\n", old_path, new_path).unwrap(); + } + + let diff = + similar::TextDiff::from_lines(&old_snapshot.text(), &new_snapshot.text()) + .unified_diff() + .to_string(); + if !diff.is_empty() { + write!( + prompt, + "User edited {:?}:\n```diff\n{}\n```", + new_path, diff + ) + .unwrap(); + } + + prompt + } + } + } +} + +#[derive(Debug, Clone)] +struct CurrentInlineCompletion { + buffer_id: EntityId, + completion: InlineCompletion, +} + +impl CurrentInlineCompletion { + fn should_replace_completion(&self, old_completion: &Self, snapshot: &BufferSnapshot) -> bool { + if self.buffer_id != old_completion.buffer_id { + return true; + } + + let Some(old_edits) = old_completion.completion.interpolate(&snapshot) else { + return true; + }; + let Some(new_edits) = self.completion.interpolate(&snapshot) else { + return false; + }; + + if old_edits.len() == 1 && new_edits.len() == 1 { + let (old_range, old_text) = &old_edits[0]; + let (new_range, new_text) = &new_edits[0]; + new_range == old_range && new_text.starts_with(old_text) + } else { + true + } + } +} + +struct PendingCompletion { + id: usize, + _task: Task>, +} + +pub struct ZetaInlineCompletionProvider { + zeta: Model, + pending_completions: ArrayVec, + next_pending_completion_id: usize, + current_completion: Option, +} + +impl ZetaInlineCompletionProvider { + pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(8); + + pub fn new(zeta: Model) -> Self { + Self { + zeta, + pending_completions: ArrayVec::new(), + next_pending_completion_id: 0, + current_completion: None, + } + } +} + +impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvider { + fn name() -> &'static 
str { + "zeta" + } + + fn display_name() -> &'static str { + "Zeta" + } + + fn show_completions_in_menu() -> bool { + true + } + + fn show_completions_in_normal_mode() -> bool { + true + } + + fn is_enabled( + &self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &AppContext, + ) -> bool { + let buffer = buffer.read(cx); + let file = buffer.file(); + let language = buffer.language_at(cursor_position); + let settings = all_language_settings(file, cx); + settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx) + } + + fn is_refreshing(&self) -> bool { + !self.pending_completions.is_empty() + } + + fn refresh( + &mut self, + buffer: Model, + position: language::Anchor, + debounce: bool, + cx: &mut ModelContext, + ) { + let pending_completion_id = self.next_pending_completion_id; + self.next_pending_completion_id += 1; + + let task = cx.spawn(|this, mut cx| async move { + if debounce { + cx.background_executor().timer(Self::DEBOUNCE_TIMEOUT).await; + } + + let completion_request = this.update(&mut cx, |this, cx| { + this.zeta.update(cx, |zeta, cx| { + zeta.request_completion(&buffer, position, cx) + }) + }); + + let mut completion = None; + if let Ok(completion_request) = completion_request { + completion = Some(CurrentInlineCompletion { + buffer_id: buffer.entity_id(), + completion: completion_request.await?, + }); + } + + this.update(&mut cx, |this, cx| { + if this.pending_completions[0].id == pending_completion_id { + this.pending_completions.remove(0); + } else { + this.pending_completions.clear(); + } + + if let Some(new_completion) = completion { + if let Some(old_completion) = this.current_completion.as_ref() { + let snapshot = buffer.read(cx).snapshot(); + if new_completion.should_replace_completion(&old_completion, &snapshot) { + this.zeta.update(cx, |zeta, _cx| { + zeta.completion_shown(new_completion.completion.id) + }); + this.current_completion = Some(new_completion); + } + } else { + this.zeta.update(cx, 
|zeta, _cx| { + zeta.completion_shown(new_completion.completion.id) + }); + this.current_completion = Some(new_completion); + } + } else { + this.current_completion = None; + } + + cx.notify(); + }) + }); + + // We always maintain at most two pending completions. When we already + // have two, we replace the newest one. + if self.pending_completions.len() <= 1 { + self.pending_completions.push(PendingCompletion { + id: pending_completion_id, + _task: task, + }); + } else if self.pending_completions.len() == 2 { + self.pending_completions.pop(); + self.pending_completions.push(PendingCompletion { + id: pending_completion_id, + _task: task, + }); + } + } + + fn cycle( + &mut self, + _buffer: Model, + _cursor_position: language::Anchor, + _direction: inline_completion::Direction, + _cx: &mut ModelContext, + ) { + // Right now we don't support cycling. + } + + fn accept(&mut self, _cx: &mut ModelContext) { + self.pending_completions.clear(); + } + + fn discard(&mut self, _cx: &mut ModelContext) { + self.pending_completions.clear(); + self.current_completion.take(); + } + + fn suggest( + &mut self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &mut ModelContext, + ) -> Option { + let CurrentInlineCompletion { + buffer_id, + completion, + .. + } = self.current_completion.as_mut()?; + + // Invalidate previous completion if it was generated for a different buffer. 
+ if *buffer_id != buffer.entity_id() { + self.current_completion.take(); + return None; + } + + let buffer = buffer.read(cx); + let Some(edits) = completion.interpolate(&buffer.snapshot()) else { + self.current_completion.take(); + return None; + }; + + let cursor_row = cursor_position.to_point(buffer).row; + let (closest_edit_ix, (closest_edit_range, _)) = + edits.iter().enumerate().min_by_key(|(_, (range, _))| { + let distance_from_start = cursor_row.abs_diff(range.start.to_point(buffer).row); + let distance_from_end = cursor_row.abs_diff(range.end.to_point(buffer).row); + cmp::min(distance_from_start, distance_from_end) + })?; + + let mut edit_start_ix = closest_edit_ix; + for (range, _) in edits[..edit_start_ix].iter().rev() { + let distance_from_closest_edit = + closest_edit_range.start.to_point(buffer).row - range.end.to_point(buffer).row; + if distance_from_closest_edit <= 1 { + edit_start_ix -= 1; + } else { + break; + } + } + + let mut edit_end_ix = closest_edit_ix + 1; + for (range, _) in &edits[edit_end_ix..] 
{ + let distance_from_closest_edit = + range.start.to_point(buffer).row - closest_edit_range.end.to_point(buffer).row; + if distance_from_closest_edit <= 1 { + edit_end_ix += 1; + } else { + break; + } + } + + Some(inline_completion::InlineCompletion { + edits: edits[edit_start_ix..edit_end_ix].to_vec(), + }) + } +} + +#[cfg(test)] +mod tests { + use client::test::FakeServer; + use clock::FakeSystemClock; + use gpui::TestAppContext; + use http_client::FakeHttpClient; + use indoc::indoc; + use language_models::RefreshLlmTokenListener; + use rpc::proto; + use settings::SettingsStore; + + use super::*; + + #[gpui::test] + fn test_inline_completion_basic_interpolation(cx: &mut AppContext) { + let buffer = cx.new_model(|cx| Buffer::local("Lorem ipsum dolor", cx)); + let completion = InlineCompletion { + edits: to_completion_edits( + [(2..5, "REM".to_string()), (9..11, "".to_string())], + &buffer, + cx, + ) + .into(), + path: Path::new("").into(), + snapshot: buffer.read(cx).snapshot(), + id: InlineCompletionId::new(), + excerpt_range: 0..0, + input_outline: "".into(), + input_events: "".into(), + input_excerpt: "".into(), + output_excerpt: "".into(), + request_sent_at: Instant::now(), + response_received_at: Instant::now(), + }; + + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(2..5, "REM".to_string()), (9..11, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(2..2, "REM".to_string()), (6..8, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.undo(cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(2..5, "REM".to_string()), (9..11, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], 
None, cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(3..3, "EM".to_string()), (7..9, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(4..4, "M".to_string()), (8..10, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(9..11, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(4..4, "M".to_string()), (8..10, "".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx)); + assert_eq!( + from_completion_edits( + &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(4..4, "M".to_string())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx)); + assert_eq!(completion.interpolate(&buffer.read(cx).snapshot()), None); + } + + #[gpui::test] + async fn test_inline_completion_end_of_buffer(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + client::init_settings(cx); + }); + + let buffer_content = "lorem\n"; + let completion_response = indoc! 
{" + ```animals.js + <|start_of_file|> + <|editable_region_start|> + lorem + ipsum + <|editable_region_end|> + ```"}; + + let http_client = FakeHttpClient::create(move |_| async move { + Ok(http_client::Response::builder() + .status(200) + .body( + serde_json::to_string(&PredictEditsResponse { + output_excerpt: completion_response.to_string(), + }) + .unwrap() + .into(), + ) + .unwrap()) + }); + + let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); + cx.update(|cx| { + RefreshLlmTokenListener::register(client.clone(), cx); + }); + let server = FakeServer::for_client(42, &client, cx).await; + + let zeta = cx.new_model(|cx| Zeta::new(client, cx)); + let buffer = cx.new_model(|cx| Buffer::local(buffer_content, cx)); + let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); + let completion_task = + zeta.update(cx, |zeta, cx| zeta.request_completion(&buffer, cursor, cx)); + + let token_request = server.receive::().await.unwrap(); + server.respond( + token_request.receipt(), + proto::GetLlmTokenResponse { token: "".into() }, + ); + + let completion = completion_task.await.unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.edit(completion.edits.iter().cloned(), None, cx) + }); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "lorem\nipsum" + ); + } + + fn to_completion_edits( + iterator: impl IntoIterator, String)>, + buffer: &Model, + cx: &AppContext, + ) -> Vec<(Range, String)> { + let buffer = buffer.read(cx); + iterator + .into_iter() + .map(|(range, text)| { + ( + buffer.anchor_after(range.start)..buffer.anchor_before(range.end), + text, + ) + }) + .collect() + } + + fn from_completion_edits( + editor_edits: &[(Range, String)], + buffer: &Model, + cx: &AppContext, + ) -> Vec<(Range, String)> { + let buffer = buffer.read(cx); + editor_edits + .iter() + .map(|(range, text)| { + ( + range.start.to_offset(buffer)..range.end.to_offset(buffer), + text.clone(), + ) + }) + 
.collect() + } + + #[ctor::ctor] + fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } + } +} diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index bc7ba52869..d807da8193 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -43,6 +43,8 @@ - [Inline Assistant](./assistant/inline-assistant.md) - [Commands](./assistant/commands.md) - [Prompts](./assistant/prompting.md) +- [Context Servers](./assistant/context-servers.md) + - [Model Context Protocol](./assistant/model-context-protocol.md) # Extensions @@ -51,7 +53,8 @@ - [Developing Extensions](./extensions/developing-extensions.md) - [Language Extensions](./extensions/languages.md) - [Theme Extensions](./extensions/themes.md) -- [Slash Commands](./extensions/slash-commands.md) +- [Slash Command Extensions](./extensions/slash-commands.md) +- [Context Server Extensions](./extensions/context-servers.md) # Language Support diff --git a/docs/src/assistant/assistant.md b/docs/src/assistant/assistant.md index ee4796ec02..94144882f0 100644 --- a/docs/src/assistant/assistant.md +++ b/docs/src/assistant/assistant.md @@ -15,3 +15,5 @@ This section covers various aspects of the Assistant: - [Using Commands](./commands.md): Explore slash commands that enhance the Assistant's capabilities and future extensibility. - [Prompting & Prompt Library](./prompting.md): Learn how to write and save prompts, how to use the Prompt Library, and how to edit prompt templates. + +- [Context Servers](./context-servers.md): Learn about context servers that enhance the Assistant's capabilities via the [Model Context Protocol](./model-context-protocol.md). 
diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index 2145bd9504..8e558007bf 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -192,6 +192,30 @@ The Zed Assistant comes pre-configured to use the latest version for common mode You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). OpenAI `o1` models should set `max_completion_tokens` as well to avoid incurring high reasoning token costs. Custom models will be listed in the model dropdown in the assistant panel. +### OpenAI API Compatible + +Zed supports using OpenAI compatible APIs by specifying a custom `endpoint` and `available_models` for the OpenAI provider. + +#### X.ai Grok + +Example configuration for using X.ai Grok with Zed: + +```json + "language_models": { + "openai": { + "api_url": "https://api.x.ai/v1", + "available_models": [ + { + "name": "grok-beta", + "display_name": "X.ai Grok (Beta)", + "max_tokens": 131072 + } + ], + "version": "1" + }, + } +``` + ### Advanced configuration {#advanced-configuration} #### Example Configuration diff --git a/docs/src/assistant/context-servers.md b/docs/src/assistant/context-servers.md new file mode 100644 index 0000000000..398442044c --- /dev/null +++ b/docs/src/assistant/context-servers.md @@ -0,0 +1,49 @@ +# Context Servers + +Context servers are a mechanism for pulling context into the Assistant from an external source. They are powered by the [Model Context Protocol](./model-context-protocol.md). + +Currently Zed supports context servers providing [slash commands](./commands.md) for use in the Assistant. + +## Installation + +Context servers can be installed via [extensions](../extensions/context-servers.md). 
+ +If you don't already have a context server, check out one of these: + +- [Postgres Context Server](https://github.com/zed-extensions/postgres-context-server) + +## Configuration + +Context servers may require some configuration in order to run or to change their behavior. + +You can configure each context server using the `context_servers` setting in your `settings.json`: + +```json +{ + "context_servers": { + "postgres-context-server": { + "settings": { + "database_url": "postgresql://postgres@localhost/my_database" + } + } +} +``` + +If desired, you may also provide a custom command to execute a context server: + +```json +{ + "context_servers": { + "my-context-server": { + "command": { + "path": "/path/to/my-context-server", + "args": ["run"], + "env": {} + }, + "settings": { + "enable_something": true + } + } + } +} +``` diff --git a/docs/src/assistant/model-context-protocol.md b/docs/src/assistant/model-context-protocol.md new file mode 100644 index 0000000000..74e16b59ff --- /dev/null +++ b/docs/src/assistant/model-context-protocol.md @@ -0,0 +1,21 @@ +# Model Context Protocol + +Zed uses the [Model Context Protocol](https://modelcontextprotocol.io/) to interact with [context servers](./context-servers.md): + +> The Model Context Protocol (MCP) is an open protocol that enables seamless integration between LLM applications and external data sources and tools. Whether you're building an AI-powered IDE, enhancing a chat interface, or creating custom AI workflows, MCP provides a standardized way to connect LLMs with the context they need. + +Check out the [Anthropic news post](https://www.anthropic.com/news/model-context-protocol) and the [Zed blog post](https://zed.dev/blog/mcp) for an introduction to MCP. + +## Try it out + +Want to try it for yourself? 
+ +The following context servers are available today as Zed extensions: + +- [Postgres Context Server](https://github.com/zed-extensions/postgres-context-server) + +## Bring your own context server + +If there's an existing context server you'd like to bring to Zed, check out the [context server extension docs](../extensions/context-servers.md) for how to make it available as an extension. + +If you are interested in building your own context server, check out the [Model Context Protocol docs](https://modelcontextprotocol.io/introduction#get-started-with-mcp) to get started. diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 3b9e72a08b..dce2fc5552 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -56,6 +56,8 @@ You can customize a wide range of settings for each language, including: - [`hard_tabs`](./configuring-zed.md#hard-tabs): Use tabs instead of spaces for indentation - [`preferred_line_length`](./configuring-zed.md#preferred-line-length): The recommended maximum line length - [`soft_wrap`](./configuring-zed.md#soft-wrap): How to wrap long lines of code +- [`show_completions_on_input`](./configuring-zed.md#show-completions-on-input): Whether or not to show completions as you type +- [`show_completion_documentation`](./configuring-zed.md#show-completion-documentation): Whether to display inline and alongside documentation for items in the completions menu These settings allow you to maintain specific coding styles across different languages and projects. diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index e6e6b662c0..ede2717cb9 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -133,6 +133,48 @@ Define extensions which should be installed (`true`) or never installed (`false` } ``` +## Restore on Startup + +- Description: Controls session restoration on startup. 
+- Setting: `restore_on_startup` +- Default: `last_session` + +**Options** + +1. Restore all workspaces that were open when quitting Zed: + +```json +{ + "restore_on_startup": "last_session" +} +``` + +2. Restore the workspace that was closed last: + +```json +{ + "restore_on_startup": "last_workspace" +} +``` + +3. Always start with an empty editor: + +```json +{ + "restore_on_startup": "none" +} +``` + +## Autoscroll on Clicks + +- Description: Whether to scroll when clicking near the edge of the visible text area. +- Setting: `autoscroll_on_clicks` +- Default: `false` + +**Options** + +`boolean` values + ## Auto Update - Description: Whether or not to automatically check for updates. @@ -425,6 +467,12 @@ List of `string` values "current_line_highlight": "all" ``` +## LSP Highlight Debounce + +- Description: The debounce delay before querying highlights from the language server based on the current cursor location. +- Setting: `lsp_highlight_debounce` +- Default: `75` + ## Cursor Blink - Description: Whether or not the cursor blinks. @@ -486,7 +534,11 @@ List of `string` values "git_diff": true, "search_results": true, "selected_symbol": true, - "diagnostics": true + "diagnostics": "all", + "axes": { + "horizontal": true, + "vertical": true, + }, }, ``` @@ -572,8 +624,81 @@ List of `string` values ### Diagnostics -- Description: Whether to show diagnostic indicators in the scrollbar. +- Description: Which diagnostic indicators to show in the scrollbar. - Setting: `diagnostics` +- Default: `all` + +**Options** + +1. Show all diagnostics: + +```json +{ + "diagnostics": "all" +} +``` + +2. Do not show any diagnostics: + +```json +{ + "diagnostics": "none" +} +``` + +3. Show only errors: + +```json +{ + "diagnostics": "error" +} +``` + +4. Show only errors and warnings: + +```json +{ + "diagnostics": "warning" +} +``` + +5. 
Show only errors, warnings, and information: + +```json +{ + "diagnostics": "information" +} +``` + +### Axes + +- Description: Forcefully enable or disable the scrollbar for each axis +- Setting: `axes` +- Default: + +```json +"scrollbar": { + "axes": { + "horizontal": true, + "vertical": true, + }, +} +``` + +#### Horizontal + +- Description: When false, forcefully disables the horizontal scrollbar. Otherwise, obey other settings. +- Setting: `horizontal` +- Default: `true` + +**Options** + +`boolean` values + +#### Vertical + +- Description: When false, forcefully disables the vertical scrollbar. Otherwise, obey other settings. +- Setting: `vertical` - Default: `true` **Options** @@ -624,7 +749,8 @@ List of `string` values "close_position": "right", "file_icons": false, "git_status": false, - "activate_on_close": "history" + "activate_on_close": "history", + "always_show_close_button": false }, ``` @@ -680,7 +806,7 @@ List of `string` values } ``` -2. Activate the neighbour tab (prefers the right one, if present): +2. Activate the right neighbour tab if present: ```json { @@ -688,6 +814,20 @@ List of `string` values } ``` +3. Activate the left neighbour tab if present: + +```json +{ + "activate_on_close": "left_neighbour" +} +``` + +### Always show the close button + +- Description: Whether to always show the close button on tabs. +- Setting: `always_show_close_button` +- Default: `false` + ## Editor Toolbar - Description: Whether or not to show various elements in the editor toolbar. 
@@ -977,6 +1117,7 @@ The result is still `)))` and not `))))))`, which is what it would be by default "**/.git", "**/.svn", "**/.hg", + "**/.jj", "**/CVS", "**/.DS_Store", "**/Thumbs.db", @@ -1318,19 +1459,19 @@ To override settings for a language, add an entry for that languages name to the The following settings can be overridden for each specific language: -- `enable_language_server` -- `ensure_final_newline_on_save` -- `format_on_save` -- `formatter` -- `hard_tabs` -- `preferred_line_length` -- `remove_trailing_whitespace_on_save` -- `show_inline_completions` -- `show_whitespaces` -- `soft_wrap` -- `tab_size` -- `use_autoclose` -- `always_treat_brackets_as_autoclosed` +- [`enable_language_server`](#enable-language-server) +- [`ensure_final_newline_on_save`](#ensure-final-newline-on-save) +- [`format_on_save`](#format-on-save) +- [`formatter`](#formatter) +- [`hard_tabs`](#hard-tabs) +- [`preferred_line_length`](#preferred-line-length) +- [`remove_trailing_whitespace_on_save`](#remove-trailing-whitespace-on-save) +- [`show_inline_completions`](#show-inline-completions) +- [`show_whitespaces`](#show-whitespaces) +- [`soft_wrap`](#soft-wrap) +- [`tab_size`](#tab-size) +- [`use_autoclose`](#use-autoclose) +- [`always_treat_brackets_as_autoclosed`](#always-treat-brackets-as-autoclosed) These values take in the same options as the root-level settings with the same name. @@ -1418,11 +1559,11 @@ Or to set a `socks5` proxy: ## File Finder -### Modal Width +### Modal Max Width -- Description: Width of the file finder modal. Can take one of a few values: `small`, `medium`, `large`, `xlarge`, and `full`. -- Setting: `modal_width` -- Default: `medium` +- Description: Max-width of the file finder modal. It can take one of these values: `small`, `medium`, `large`, `xlarge`, and `full`. 
+- Setting: `max_modal_width` +- Default: `small` ## Preferred Line Length @@ -1499,16 +1640,6 @@ Or to set a `socks5` proxy: `boolean` values -## Completion Documentation Debounce Delay - -- Description: The debounce delay before re-querying the language server for completion documentation when not included in original completion list. -- Setting: `completion_documentation_secondary_query_debounce` -- Default: `300` ms - -**Options** - -`integer` values - ## Show Inline Completions - Description: Whether to show inline completions as you type or manually by triggering `editor::ShowInlineCompletion`. @@ -1544,6 +1675,7 @@ Or to set a `socks5` proxy: 2. `prefer_line` (deprecated, same as `none`) 3. `editor_width` to wrap lines that overflow the editor width 4. `preferred_line_length` to wrap lines that overflow `preferred_line_length` config value +5. `bounded` to wrap lines at the minimum of `editor_width` and `preferred_line_length` ## Wrap Guides (Vertical Rulers) @@ -1628,7 +1760,7 @@ List of `integer` column numbers "button": false, "shell": {}, "toolbar": { - "title": true + "breadcrumbs": true }, "working_directory": "current_project_directory" } @@ -1946,7 +2078,7 @@ Disable with: ## Terminal: Toolbar -- Description: Whether or not to show various elements in the terminal toolbar. It only affects terminals placed in the editor pane. +- Description: Whether or not to show various elements in the terminal toolbar. - Setting: `toolbar` - Default: @@ -1954,7 +2086,7 @@ Disable with: { "terminal": { "toolbar": { - "title": true + "breadcrumbs": true } } } @@ -1962,7 +2094,13 @@ Disable with: **Options** -At the moment, only the `title` option is available, it controls displaying of the terminal title that can be changed via `PROMPT_COMMAND`. If the title is hidden, the terminal toolbar is not displayed. +At the moment, only the `breadcrumbs` option is available, it controls displaying of the terminal title that can be changed via `PROMPT_COMMAND`. 
+ +If the terminal title is empty, the breadcrumbs won't be shown. + +The shell running in the terminal needs to be configured to emit the title. + +Example command to set the title: `echo -e "\e]2;New Title\007";` ### Terminal: Button @@ -2124,6 +2262,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "button": true, "default_width": 240, "dock": "left", + "entry_spacing": "comfortable", "file_icons": true, "folder_icons": true, "git_status": true, @@ -2165,6 +2304,30 @@ Run the `theme selector: toggle` action in the command palette to see a current } ``` +### Entry Spacing + +- Description: Spacing between worktree entries +- Setting: `entry_spacing` +- Default: `comfortable` + +**Options** + +1. Comfortable entry spacing + +```json +{ + "entry_spacing": "comfortable" +} +``` + +2. Standard entry spacing + +```json +{ + "entry_spacing": "standard" +} +``` + ### Git Status - Description: Indicates newly created and updated files diff --git a/docs/src/development/linux.md b/docs/src/development/linux.md index 5dba44d2f0..1505f99e88 100644 --- a/docs/src/development/linux.md +++ b/docs/src/development/linux.md @@ -6,11 +6,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). ## Dependencies -- Install [Rust](https://www.rust-lang.org/tools/install). If it's already installed, make sure it's up-to-date: - - ```sh - rustup update - ``` +- Install [rustup](https://www.rust-lang.org/tools/install) - Install the necessary system libraries: diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 2fd076b0fa..b9e7e4acec 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -6,7 +6,8 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). 
## Dependencies -- Install [Rust](https://www.rust-lang.org/tools/install) +- Install [rustup](https://www.rust-lang.org/tools/install) + - Install [Xcode](https://apps.apple.com/us/app/xcode/id497799835?mt=12) from the macOS App Store, or from the [Apple Developer](https://developer.apple.com/download/all/) website. Note this requires a developer account. > Ensure you launch Xcode after installing, and install the macOS components, which is the default option. @@ -24,12 +25,6 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). sudo xcodebuild -license accept ``` -- Install the Rust wasm toolchain: - - ```sh - rustup target add wasm32-wasip1 - ``` - - Install `cmake` (required by [a dependency](https://docs.rs/wasmtime-c-api-impl/latest/wasmtime_c_api/)) ```sh @@ -129,3 +124,16 @@ Then clean and rebuild the project: cargo clean cargo run ``` + +## Tips & Tricks + +If you are building Zed a lot, you may find that macOS continually verifies new +builds which can add a few seconds to your iteration cycles. + +To fix this, you can: + +- Run `sudo spctl developer-mode enable-terminal` to enable the Developer Tools panel in System Settings. +- In System Settings, search for "Developer Tools" and add your terminal (e.g. iTerm or Ghostty) to the list under "Allow applications to use developer tools" +- Restart your terminal. + +Thanks to the nextest developers for publishing [this](https://nexte.st/docs/installation/macos/#gatekeeper). diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index f95cfb3ed0..4d1e565a57 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -8,21 +8,11 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). ## Dependencies -- Install [Rust](https://www.rust-lang.org/tools/install). 
If it's already installed, make sure it's up-to-date: - - ```sh - rustup update - ``` - -- Install the Rust wasm toolchain: - - ```sh - rustup target add wasm32-wasip1 - ``` +- Install [rustup](https://www.rust-lang.org/tools/install) - Install [Visual Studio](https://visualstudio.microsoft.com/downloads/) with the optional components `MSVC v*** - VS YYYY C++ x64/x86 build tools` and `MSVC v*** - VS YYYY C++ x64/x86 Spectre-mitigated libs (latest)` (`v***` is your VS version and `YYYY` is year when your VS was released. Pay attention to the architecture and change it to yours if needed.) - Install Windows 11 or 10 SDK depending on your system, but ensure that at least `Windows 10 SDK version 2104 (10.0.20348.0)` is installed on your machine. You can download it from the [Windows SDK Archive](https://developer.microsoft.com/windows/downloads/windows-sdk/) -- Install [CMake](https://cmake.org/download) +- Install [CMake](https://cmake.org/download) (required by [a dependency](https://docs.rs/wasmtime-c-api-impl/latest/wasmtime_c_api/)) ## Backend dependencies @@ -149,3 +139,5 @@ New-ItemProperty -Path "HKLM:\SYSTEM\CurrentControlSet\Control\FileSystem" -Name ``` For more information on this, please see [win32 docs](https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=powershell) + +(note that you will need to restart your system after enabling longpath support) diff --git a/docs/src/extensions/context-servers.md b/docs/src/extensions/context-servers.md new file mode 100644 index 0000000000..cb29690971 --- /dev/null +++ b/docs/src/extensions/context-servers.md @@ -0,0 +1,39 @@ +# Context Servers + +Extensions may provide [context servers](../assistant/context-servers.md) for use in the Assistant. + +## Example extension + +To see a working example of an extension that provides context servers, check out the [`postgres-context-server` extension](https://github.com/zed-extensions/postgres-context-server). 
+ +This extension can be [installed as a dev extension](./developing-extensions.html#developing-an-extension-locally) if you want to try it out for yourself. + +## Defining context servers + +A given extension may provide one or more context servers. Each context server must be registered in the `extension.toml`: + +```toml +[context_servers.my-context-server] +``` + +Then, in the Rust code for your extension, implement the `context_server_command` method on your extension: + +```rust +impl zed::Extension for MyExtension { + fn context_server_command( + &mut self, + context_server_id: &ContextServerId, + project: &zed::Project, + ) -> Result { + Ok(zed::Command { + command: get_path_to_context_server_executable()?, + args: get_args_for_context_server()?, + env: get_env_for_context_server()?, + }) + } +} +``` + +This method should return the command to start up a context server, along with any arguments or environment variables necessary for it to function. + +If you need to download the context server from an external source—like GitHub Releases or npm—you can also do this here. diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 36939d4f1e..c404d260a0 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -7,6 +7,17 @@ Extensions can add the following capabilities to Zed: - [Languages](./languages.md) - [Themes](./themes.md) - [Slash Commands](./slash-commands.md) +- [Context Servers](./context-servers.md) + +## Developing an Extension Locally + +Before starting to develop an extension for Zed, be sure to [install Rust via rustup](https://www.rust-lang.org/tools/install). + +When developing an extension, you can use it in Zed without needing to publish it by installing it as a _dev extension_. + +From the extensions page, click the `Install Dev Extension` button and select the directory containing your extension. 
+ +If you already have a published extension with the same name installed, your dev extension will override it. ## Directory Structure of a Zed Extension @@ -74,16 +85,6 @@ impl zed::Extension for MyExtension { zed::register_extension!(MyExtension); ``` -## Developing an Extension Locally - -Before starting to develop an extension for Zed, be sure to [install Rust via rustup](https://www.rust-lang.org/tools/install). - -When developing an extension, you can use it in Zed without needing to publish it by installing it as a _dev extension_. - -From the extensions page, click the `Install Dev Extension` button and select the directory containing your extension. - -If you already have a published extension with the same name installed, your dev extension will override it. - ## Publishing your extension To publish an extension, open a PR to [the `zed-industries/extensions` repo](https://github.com/zed-industries/extensions). diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index b7e0cb4482..40a41d36b7 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -20,26 +20,28 @@ path_suffixes = ["myl"] line_comments = ["# "] ``` -- `name` is the human readable name that will show up in the Select Language dropdown. -- `grammar` is the name of a grammar. Grammars are registered separately, described below. -- `path_suffixes` (optional) is an array of file suffixes that should be associated with this language. This supports glob patterns like `config/**/*.toml` where `**` matches 0 or more directories and `*` matches 0 or more characters. -- `line_comments` (optional) is an array of strings that are used to identify line comments in the language. +- `name` (required) is the human readable name that will show up in the Select Language dropdown. +- `grammar` (required) is the name of a grammar. Grammars are registered separately, described below. 
+- `path_suffixes` is an array of file suffixes that should be associated with this language. Unlike `file_types` in settings, this does not support glob patterns. +- `line_comments` is an array of strings that are used to identify line comments in the language. This is used for the `editor::ToggleComments` keybind: `{#kb editor::ToggleComments}` for toggling lines of code. +- `tab_size` defines the indentation/tab size used for this language (default is `4`). +- `hard_tabs` whether to indent with tabs (`true`) or spaces (`false`, the default). +- `first_line_pattern` is a regular expression, that in addition to `path_suffixes` (above) or `file_types` in settings can be used to match files which should use this language. For example Zed uses this to identify Shell Scripts by matching the [shebangs lines](https://github.com/zed-industries/zed/blob/main/crates/languages/src/bash/config.toml) in the first line of a script. ## Grammar @@ -49,10 +51,10 @@ Zed uses the [Tree-sitter](https://tree-sitter.github.io) parsing library to pro ```toml [grammars.gleam] repository = "https://github.com/gleam-lang/tree-sitter-gleam" -commit = "58b7cac8fc14c92b0677c542610d8738c373fa81" +rev = "58b7cac8fc14c92b0677c542610d8738c373fa81" ``` -The `repository` field must specify a repository where the Tree-sitter grammar should be loaded from, and the `commit` field must contain the SHA of the Git commit to use. An extension can provide multiple grammars by referencing multiple tree-sitter repositories. +The `repository` field must specify a repository where the Tree-sitter grammar should be loaded from, and the `rev` field must contain a Git revision to use, such as the SHA of a Git commit. An extension can provide multiple grammars by referencing multiple tree-sitter repositories. ## Tree-sitter Queries @@ -67,6 +69,7 @@ several features: - Syntax overrides - Text redactions - Runnable code detection +- Selecting classes, functions, etc. 
The following sections elaborate on how [Tree-sitter queries](https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax) enable these features in Zed, using [JSON syntax](https://www.json.org/json-en.html) as a guiding example. @@ -200,19 +203,19 @@ Here's an example from an `injections.scm` file for Markdown: ```scheme (fenced_code_block (info_string - (language) @language) - (code_fence_content) @content) + (language) @injection.language) + (code_fence_content) @injection.content) ((inline) @content - (#set! "language" "markdown-inline")) + (#set! injection.language "markdown-inline")) ``` This query identifies fenced code blocks, capturing the language specified in the info string and the content within the block. It also captures inline content and sets its language to "markdown-inline". -| Capture | Description | -| --------- | ---------------------------------------------------------- | -| @language | Captures the language identifier for a code block | -| @content | Captures the content to be treated as a different language | +| Capture | Description | +| ------------------- | ---------------------------------------------------------- | +| @injection.language | Captures the language identifier for a code block | +| @injection.content | Captures the content to be treated as a different language | Note that we couldn't use JSON as an example here because it doesn't support language injections. @@ -257,6 +260,44 @@ For example, in JavaScript, we also disable auto-closing of single quotes within (comment) @comment.inclusive ``` +### Text objects + +The `textobjects.scm` file defines rules for navigating by text objects. This was added in Zed v0.165 and is currently used only in Vim mode. + +Vim provides two levels of granularity for navigating around files. Section-by-section with `[]` etc., and method-by-method with `]m` etc. Even languages that don't support functions and classes can work well by defining similar concepts. 
For example CSS defines a rule-set as a method, and a media-query as a class. + +For languages with closures, these typically should not count as functions in Zed. This is best-effort however, as languages like Javascript do not syntactically differentiate syntactically between closures and top-level function declarations. + +For languages with declarations like C, provide queries that match `@class.around` or `@function.around`. The `if` and `ic` text objects will default to these if there is no inside. + +If you are not sure what to put in textobjects.scm, both [nvim-treesitter-textobjects](https://github.com/nvim-treesitter/nvim-treesitter-textobjects), and the [Helix editor](https://github.com/helix-editor/helix) have queries for many languages. You can refer to the Zed [built-in languages](https://github.com/zed-industries/zed/tree/main/crates/languages/src) to see how to adapt these. + +| Capture | Description | Vim mode | +| ---------------- | ----------------------------------------------------------------------- | ------------------------------------------------ | +| @function.around | An entire function definition or equivalent small section of a file. | `[m`, `]m`, `[M`,`]M` motions. `af` text object | +| @function.inside | The function body (the stuff within the braces). | `if` text object | +| @class.around | An entire class definition or equivalent large section of a file. | `[[`, `]]`, `[]`, `][` motions. `ac` text object | +| @class.inside | The contents of a class definition. | `ic` text object | +| @comment.around | An entire comment (e.g. 
all adjacent line comments, or a block comment) | `gc` text object | +| @comment.inside | The contents of a comment | `igc` text object (rarely supported) | + +For example: + +```scheme +; include only the content of the method in the function +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +; match function.around for declarations with no body +(function_signature_item) @function.around + +; join all adjacent comments into one +(comment)+ @comment.around +``` + ### Text redactions The `redactions.scm` file defines text redaction rules. When collaborating and sharing your screen, it makes sure that certain syntax nodes are rendered in a redacted mode to avoid them from leaking. @@ -322,12 +363,12 @@ TBD: `#set! tag` Zed uses the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) to provide advanced language support. -An extension may provide any number of language servers. To provide a language server from your extension, add an entry to your `extension.toml` with the name of your language server and the language it applies to: +An extension may provide any number of language servers. To provide a language server from your extension, add an entry to your `extension.toml` with the name of your language server and the language(s) it applies to: ```toml [language_servers.my-language] name = "My Language LSP" -language = "My Language" +languages = ["My Language"] ``` Then, in the Rust code for your extension, implement the `language_server_command` method on your extension: diff --git a/docs/src/extensions/themes.md b/docs/src/extensions/themes.md index 4737a99a3e..ecdbdace59 100644 --- a/docs/src/extensions/themes.md +++ b/docs/src/extensions/themes.md @@ -2,13 +2,13 @@ The `themes` directory in an extension should contain one or more theme files. -Each theme file should adhere to the JSON schema specified at [`https://zed.dev/schema/themes/v0.1.0.json`](https://zed.dev/schema/themes/v0.1.0.json). 
+Each theme file should adhere to the JSON schema specified at [`https://zed.dev/schema/themes/v0.2.0.json`](https://zed.dev/schema/themes/v0.2.0.json). See [this blog post](https://zed.dev/blog/user-themes-now-in-preview) for more details about creating themes. ## Theme JSON Structure -The structure of a Zed theme is defined in the [Zed Theme JSON Schema](https://zed.dev/schema/themes/v0.1.0.json). +The structure of a Zed theme is defined in the [Zed Theme JSON Schema](https://zed.dev/schema/themes/v0.2.0.json). A Zed theme consists of a Theme Family object including: diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index 22d4741d3f..e489fb2dc8 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -6,7 +6,7 @@ Welcome to Zed! We are excited to have you. Here is a jumping-off point to getti ### macOS -You can obtain the stable builds via the [download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview) After the first manual installation, Zed will periodically check for and install updates automatically for you. +Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. 
You can also install Zed stable via Homebrew: @@ -22,7 +22,7 @@ brew install --cask zed@preview ### Linux -For most people, the easiest way to install Zed is through our installation script: +For most Linux users, the easiest way to install Zed is through our installation script: ```sh curl -f https://zed.dev/install.sh | sh @@ -40,28 +40,22 @@ If this script is insufficient for your use case or you run into problems runnin ## Command Palette -The Command Palette is the main way to access functionality in Zed, and its keybinding is the first one you should make yourself familiar with. - -To open the Command Palette, use {#kb command_palette::Toggle}. - -The Command Palette allows you to access pretty much any functionality that's available in Zed. +The Command Palette is the main way to access pretty much any functionality that's available in Zed. Its keybinding is the first one you should make yourself familiar with. To open it, hit: {#kb command_palette::Toggle}. ![The opened Command Palette](https://zed.dev/img/features/command-palette.jpg) -Try it! Open the Command Palette and type in `new file`. You should see the list of commands being filtered down to `workspace: new file`. Hit return and you end up with a new buffer! +Try it! Open the Command Palette and type in `new file`. You should see the list of commands being filtered down to `workspace: new file`. Hit return and you end up with a new buffer. Any time you see instructions that include commands of the form `zed: ...` or `editor: ...` and so on that means you need to execute them in the Command Palette. ## Configure Zed -Use {#kb zed::OpenSettings} to open your custom settings to set things like fonts, formatting settings, per-language settings, and more. +To open your custom settings to set things like fonts, formatting settings, per-language settings, and more, use the {#kb zed::OpenSettings} keybinding. 
-On macOS, you can access the default configuration using the `Zed > Settings > Open Default Settings` menu item. See [Configuring Zed](./configuring-zed.md) for all available settings. - -On Linux, you can access the default configuration via the Command Palette. Open it with {#kb zed::OpenDefaultSettings} and type in `zed: open default settings` and then hit return. +To see all available settings, open the Command Palette with {#kb command_palette::Toggle} and search for "zed: open default settings". You can also check them all out in the [Configuring Zed](./configuring-zed.md) documentation. ## Set up your key bindings -On macOS, you can access the default key binding set using the `Zed > Settings > Open Default Key Bindings` menu item. Use cmd-k cmd-s|ctrl-k ctrl-s to open your custom keymap to add your key bindings. See Key Bindings for more info. +To open your custom keymap to add your key bindings, use the {#kb zed::OpenKeymap} keybinding. -On Linux, you can access the default key bindings via the Command Palette. Open it with ctrl-shift-p and type in `zed: open default keymap` and then hit return. +To access the default key binding set, open the Command Palette with {#kb command_palette::Toggle} and search for "zed: open default keymap". See [Key Bindings](./key-bindings.md) for more info. diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 68db517480..4d0a33ce55 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -130,7 +130,7 @@ When multiple keybindings have the same keystroke and are active at the same tim The other kind of conflict that arises is when you have two bindings, one of which is a prefix of the other. For example if you have `"ctrl-w":"editor::DeleteToNextWordEnd"` and `"ctrl-w left":"editor::DeleteToEndOfLine"`. -When this happens, and both bindings are active in the current context, Zed will wait for 1 second after you tupe `ctrl-w` to se if you're about to type `left`. 
If you don't type anything, or if you type a different key, then `DeleteToNextWordEnd` will be triggered. If you do, then `DeleteToEndOfLine` will be triggered. +When this happens, and both bindings are active in the current context, Zed will wait for 1 second after you type `ctrl-w` to see if you're about to type `left`. If you don't type anything, or if you type a different key, then `DeleteToNextWordEnd` will be triggered. If you do, then `DeleteToEndOfLine` will be triggered. ### Non-QWERTY keyboards @@ -146,20 +146,15 @@ Finally keyboards that support extended Latin alphabets (usually ISO keyboards) For example on a German QWERTZ keyboard, the `cmd->` shortcut is moved to `cmd-:` because `cmd->` is the system window switcher and this is where that shortcut is typed on a QWERTY keyboard. `cmd-+` stays the same because + is still typable without option, and as a result, `cmd-[` and `cmd-]` become `cmd-ö` and `cmd-ä`, moving out of the way of the `+` key. -If you are defining shortcuts in your personal keymap, you can opt-out of the key equivalent mapping by setting `use_layout_keys` to `true` in your keymap: +If you are defining shortcuts in your personal keymap, you can opt into the key equivalent mapping by setting `use_key_equivalents` to `true` in your keymap: ```json [ { + "use_key_equivalents": true, "bindings": { "ctrl->": "editor::Indent" // parsed as ctrl-: when a German QWERTZ keyboard is active } - }, - { - "use_layout_keys": true, - "bindings": { - "ctrl->": "editor::Indent" // remains ctrl-> when a German QWERTZ keyboard is active - } } ] ``` diff --git a/docs/src/languages/astro.md b/docs/src/languages/astro.md index 8553369c70..e140832e89 100644 --- a/docs/src/languages/astro.md +++ b/docs/src/languages/astro.md @@ -1,6 +1,6 @@ # Astro -Astro support is available through the [Astro extension](https://github.com/zed-industries/zed/tree/main/extensions/astro). 
+Astro support is available through the [Astro extension](https://github.com/zed-extensions/astro). - Tree Sitter: [virchau13/tree-sitter-astro](https://github.com/virchau13/tree-sitter-astro) - Language Server: [withastro/language-tools](https://github.com/withastro/language-tools) diff --git a/docs/src/languages/c.md b/docs/src/languages/c.md index ce4a27a412..cd7191e8ea 100644 --- a/docs/src/languages/c.md +++ b/docs/src/languages/c.md @@ -14,6 +14,16 @@ CompileFlags: Add: [-xc] ``` +By default clang and gcc by will recognize `*.C` and `*.H` (uppercase extensions) as C++ and not C and so Zed too follows this convention. If you are working with a C-only project (perhaps one with legacy uppercase pathing like `FILENAME.C`) you can override this behavior by adding this to your settings: + +```json +{ + "file_types": { + "C": ["C", "H"] + } +} +``` + ## Formatting By default Zed will use the `clangd` language server for formatting C code. The Clangd is the same as the `clang-format` CLI tool. To configure this you can add a `.clang-format` file. For example: @@ -31,7 +41,7 @@ You can trigger formatting via {#kb editor::Format} or the `editor: format` acti ```json "languages": { - "C" { + "C": { "format_on_save": "on", "tab_size": 2 } @@ -39,3 +49,17 @@ You can trigger formatting via {#kb editor::Format} or the `editor: format` acti ``` See [Clang-Format Style Options](https://clang.llvm.org/docs/ClangFormatStyleOptions.html) for a complete list of options. + +## Compile Commands + +For some projects Clangd requires a `compile_commands.json` file to properly analyze your project. This file contains the compilation database that tells clangd how your project should be built. 
+ +### CMake Compile Commands + +With CMake, you can generate `compile_commands.json` automatically by adding the following line to your `CMakeLists.txt`: + +```cmake +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) +``` + +After building your project, CMake will generate the `compile_commands.json` file in the build directory and clangd will automatically pick it up. diff --git a/docs/src/languages/clojure.md b/docs/src/languages/clojure.md index 85042618a5..3e3ddd8e1f 100644 --- a/docs/src/languages/clojure.md +++ b/docs/src/languages/clojure.md @@ -1,6 +1,6 @@ # Clojure -Clojure support is available through the [Clojure extension](https://github.com/zed-industries/zed/tree/main/extensions/clojure). +Clojure support is available through the [Clojure extension](https://github.com/zed-extensions/clojure). - Tree Sitter: [prcastro/tree-sitter-clojure](https://github.com/prcastro/tree-sitter-clojure) - Language Server: [clojure-lsp/clojure-lsp](https://github.com/clojure-lsp/clojure-lsp) diff --git a/docs/src/languages/cpp.md b/docs/src/languages/cpp.md index b14f16473d..0a9eede258 100644 --- a/docs/src/languages/cpp.md +++ b/docs/src/languages/cpp.md @@ -77,7 +77,7 @@ You can trigger formatting via {#kb editor::Format} or the `editor: format` acti ```json "languages": { - "C++" { + "C++": { "format_on_save": "on", "tab_size": 2 } @@ -98,3 +98,17 @@ Diagnostics: ``` For more advanced usage of clangd configuration file, take a look into their [official page](https://clangd.llvm.org/config.html). + +## Compile Commands + +For some projects Clangd requires a `compile_commands.json` file to properly analyze your project. This file contains the compilation database that tells clangd how your project should be built. 
+ +### CMake Compile Commands + +With CMake, you can generate `compile_commands.json` automatically by adding the following line to your `CMakeLists.txt`: + +```cmake +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) +``` + +After building your project, CMake will generate the `compile_commands.json` file in the build directory and clangd will automatically pick it up. diff --git a/docs/src/languages/dart.md b/docs/src/languages/dart.md index 32f312e5dd..9d008c4c2c 100644 --- a/docs/src/languages/dart.md +++ b/docs/src/languages/dart.md @@ -5,9 +5,22 @@ Dart support is available through the [Dart extension](https://github.com/zed-ex - Tree Sitter: [UserNobody14/tree-sitter-dart](https://github.com/UserNobody14/tree-sitter-dart) - Language Server: [dart language-server](https://github.com/dart-lang/sdk) +## Pre-requisites + +You will need to install the Dart SDK. + +You can install dart from [dart.dev/get-dart](https://dart.dev/get-dart) or via the [Flutter Version Management CLI (fvm)](https://fvm.app/documentation/getting-started/installation) + ## Configuration -The `dart` binary can be configured in a Zed settings file with: +The dart extension requires no configuration if you have `dart` in your path: + +```sh +which dart +dart --version +``` + +If you would like to use a specific dart binary or use dart via FVM you can specify the `dart` binary in your Zed settings.jsons file: ```json { @@ -22,7 +35,20 @@ The `dart` binary can be configured in a Zed settings file with: } ``` - +### Formatting + +Dart by-default uses a very conservative maximum line length (80). If you would like the dart LSP to permit a longer line length when auto-formatting, add the following to your Zed settings.json: + +```json +{ + "lsp": { + "dart": { + "settings": { + "lineLength": 140 + } + } + } +} +``` + +Please see the Dart documentation for more information on [dart language-server capabilities](https://github.com/dart-lang/sdk/blob/main/pkg/analysis_server/tool/lsp_spec/README.md). 
diff --git a/docs/src/languages/elm.md b/docs/src/languages/elm.md index 5da5dd1248..ee351f14ee 100644 --- a/docs/src/languages/elm.md +++ b/docs/src/languages/elm.md @@ -1,6 +1,6 @@ # Elm -Elm support is available through the [Elm extension](https://github.com/zed-industries/zed/tree/main/extensions/elm). +Elm support is available through the [Elm extension](https://github.com/zed-extensions/elm). - Tree Sitter: [elm-tooling/tree-sitter-elm](https://github.com/elm-tooling/tree-sitter-elm) - Language Server: [elm-tooling/elm-language-server](https://github.com/elm-tooling/elm-language-server) diff --git a/docs/src/languages/json.md b/docs/src/languages/json.md index 31fc8c0689..a149c911b7 100644 --- a/docs/src/languages/json.md +++ b/docs/src/languages/json.md @@ -30,8 +30,48 @@ To workaround this behavior you can add the following to your `.prettierrc` } ``` +## JSON Language Server + +Zed automatically out of the box supports JSON Schema validation of `package.json` and `tsconfig.json` files, but `json-language-server` can use JSON Schema definitions in project files, from the [JSON Schema Store](https://www.schemastore.org/json/) or other publicly available URLs for JSON validation. + +### Inline Schema Specification + +To specify a schema inline with your JSON files, add a `$schema` top level key linking to your json schema file. + +For example to for a `.luarc.json` for use with [lua-language-server](https://github.com/LuaLS/lua-language-server/): + +```json +{ + "$schema": "https://raw.githubusercontent.com/sumneko/vscode-lua/master/setting/schema.json", + "runtime.version": "Lua 5.4" +} +``` + +### Schema Specification via Settings + +You can alternatively associate JSON Schemas with file paths by via Zed LSP settings. 
+ +To + +```json +"lsp": { + "json-language-server": { + "settings": { + "json": { + "schemas": [ + { + "fileMatch": ["*/*.luarc.json"], + "url": "https://raw.githubusercontent.com/sumneko/vscode-lua/master/setting/schema.json" + } + ] + } + } + } +} +``` + +You can also pass any of the [supported settings](https://github.com/Microsoft/vscode/blob/main/extensions/json-language-features/server/README.md#settings) to json-language-server by specifying them in your Zed settings.json: + diff --git a/docs/src/languages/luau.md b/docs/src/languages/luau.md index c7abd0cae9..0f4f22cacc 100644 --- a/docs/src/languages/luau.md +++ b/docs/src/languages/luau.md @@ -1,11 +1,11 @@ # Luau -[Luau](https://luau-lang.org/) is a fast, small, safe, gradually typed embeddable scripting language derived from Lua. Luau was developed by Roblox and available under the MIT license. +[Luau](https://luau.org/) is a fast, small, safe, gradually typed, embeddable scripting language derived from Lua. Luau was developed by Roblox and is available under the MIT license. Luau language support in Zed is provided by the community-maintained [Luau extension](https://github.com/4teapo/zed-luau). Report issues to: [https://github.com/4teapo/zed-luau/issues](https://github.com/4teapo/zed-luau/issues) -- Tree Sitter: [tree-sitter-grammars/tree-sitter-luau](https://github.com/tree-sitter-grammars/tree-sitter-luau) +- Tree Sitter: [4teapo/tree-sitter-luau](https://github.com/4teapo/tree-sitter-luau) - Language Server: [JohnnyMorganz/luau-lsp](https://github.com/JohnnyMorganz/luau-lsp) ## Configuration diff --git a/docs/src/languages/ocaml.md b/docs/src/languages/ocaml.md index 10ffee86dd..65f7a68cda 100644 --- a/docs/src/languages/ocaml.md +++ b/docs/src/languages/ocaml.md @@ -1,6 +1,6 @@ # OCaml -OCaml support is available through the [OCaml extension](https://github.com/zed-industries/zed/tree/main/extensions/ocaml). 
+OCaml support is available through the [OCaml extension](https://github.com/zed-extensions/ocaml). - Tree Sitter: [tree-sitter/tree-sitter-ocaml](https://github.com/tree-sitter/tree-sitter-ocaml) - Language Server: [ocaml/ocaml-lsp](https://github.com/ocaml/ocaml-lsp) diff --git a/docs/src/languages/php.md b/docs/src/languages/php.md index f72cb3f6aa..3cdc4c84df 100644 --- a/docs/src/languages/php.md +++ b/docs/src/languages/php.md @@ -13,6 +13,18 @@ The PHP extension offers both `phpactor` and `intelephense` language server supp `phpactor` is enabled by default. +## Phpactor + +The Zed PHP Extension can install `phpactor` automatically but requires `php` to installed and available in your path: + +```sh +# brew install php # macOS +# sudo apt-get install php # Debian/Ubuntu +# yum install php # CentOS/RHEL +# pacman -S php # Arch Linux +which php +``` + ## Intelephense [Intelephense](https://intelephense.com/) is a [proprietary](https://github.com/bmewburn/vscode-intelephense/blob/master/LICENSE.txt#L29) language server for PHP operating under a freemium model. Certain features require purchase of a [premium license](https://intelephense.com/). To use these features you must place your [license.txt file](https://intelephense.com/faq.html) at `~/intelephense/licence.txt` inside your home directory. diff --git a/docs/src/languages/proto.md b/docs/src/languages/proto.md index 934080a1d7..777fd81b8a 100644 --- a/docs/src/languages/proto.md +++ b/docs/src/languages/proto.md @@ -1,9 +1,44 @@ # Proto -Proto/proto3 (Protocol Buffers definition language) support is available natively in Zed. +Proto/proto3 (Protocol Buffers definition language) support is available through the [Proto extension](https://github.com/zed-industries/zed/tree/main/extensions/proto). 
- Tree Sitter: [coder3101/tree-sitter-proto](https://github.com/coder3101/tree-sitter-proto) -- Language Server: [protols](https://github.com/coder3101/protols) +- Language Servers: [protobuf-language-server](https://github.com/lasorda/protobuf-language-server) + + diff --git a/docs/src/languages/r.md b/docs/src/languages/r.md index 934df61e59..472f84da1f 100644 --- a/docs/src/languages/r.md +++ b/docs/src/languages/r.md @@ -7,15 +7,54 @@ R support is available through the [R extension](https://github.com/ocsmit/zed-r ## Installation -- [Install R](https://cloud.r-project.org/) -- Install the R packages `languageserver` and `lintr`: +1. [Download and Install R](https://cloud.r-project.org/). +2. Install the R packages `languageserver` and `lintr`: ```R install.packages("languageserver") install.packages("lintr") ``` -- Install the [R extension](https://github.com/ocsmit/zed-r) through Zed's extensions +3. Install the [R Zed extension](https://github.com/ocsmit/zed-r) through Zed's extensions manager. + +For example on macOS: + +```sh +brew install --cask r +Rscript --version +Rscript -e 'options(repos = "https://cran.rstudio.com/"); install.packages("languageserver")' +Rscript -e 'options(repos = "https://cran.rstudio.com/"); install.packages("lintr")' +Rscript -e 'packageVersion("languageserver")' +Rscript -e 'packageVersion("lintr")' +``` + +## Ark Installation + +To use the Zed REPL with R you need to install [Ark](https://github.com/posit-dev/ark), an R Kernel for Jupyter applications. +You can down the latest version from the [Ark GitHub Releases](https://github.com/posit-dev/ark/releases) and then extract the `ark` binary to a directory in your `PATH`. 
+ +For example to install the latest non-debug build: + +```sh +# macOS +cd /tmp +curl -L -o ark-latest-darwin.zip \ + $(curl -s "https://api.github.com/repos/posit-dev/ark/releases/latest" | \ + jq -r '.assets[] | select(.name | contains("darwin-universal") and (contains("debug") | not)) | .browser_download_url') +unzip ark-latest-darwin.zip ark +sudo mv /tmp/ark /usr/local/bin/ +``` + +```sh +# Linux X86_64 +cd /tmp +curl -L -o ark-latest-linux.zip \ + $(curl -s "https://api.github.com/repos/posit-dev/ark/releases/latest" \ + | jq -r '.assets[] | select(.name | contains("linux-x64") and (contains("debug") | not)) | .browser_download_url' + ) +unzip ark-latest-linux.zip ark +sudo mv /tmp/ark /usr/local/bin/ +``` "] -autoclose_before = ";:.,=}])>" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string", "comment"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] }, - { start = "`", end = "`", close = true, newline = false, not_in = ["string"] }, - { start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] }, -] -word_characters = ["#", "$", "-"] -scope_opt_in_language_servers = ["tailwindcss-language-server"] -prettier_parser_name = "astro" -prettier_plugins = ["prettier-plugin-astro"] - -[overrides.string] -word_characters = ["-"] -opt_into_language_servers = ["tailwindcss-language-server"] diff --git a/extensions/astro/languages/astro/highlights.scm b/extensions/astro/languages/astro/highlights.scm deleted file mode 100644 index a565e22b6e..0000000000 --- a/extensions/astro/languages/astro/highlights.scm +++ /dev/null @@ -1,25 +0,0 @@ -(tag_name) @tag -(erroneous_end_tag_name) 
@keyword -(doctype) @tag.doctype -(attribute_name) @property -(attribute_value) @string -(comment) @comment - -[ - (attribute_value) - (quoted_attribute_value) -] @string - -"=" @operator - -[ - "{" - "}" -] @punctuation.bracket - -[ - "<" - ">" - "" -] @tag.delimiter diff --git a/extensions/astro/languages/astro/injections.scm b/extensions/astro/languages/astro/injections.scm deleted file mode 100644 index 4647715e2b..0000000000 --- a/extensions/astro/languages/astro/injections.scm +++ /dev/null @@ -1,21 +0,0 @@ -(frontmatter - (frontmatter_js_block) @content - (#set! "language" "typescript")) - -(attribute_interpolation - (attribute_js_expr) @content - (#set! "language" "typescript")) - -(html_interpolation - (permissible_text) @content - (#set! "language" "typescript")) - -(script_element - (raw_text) @content - (#set! "language" "typescript")) - -; TODO: add scss/less or more injections -; https://github.com/virchau13/tree-sitter-astro/blob/4be180759ec13651f72bacee65fa477c64222a1a/queries/injections.scm#L18-L27 -(style_element - (raw_text) @content - (#set! 
"language" "css")) diff --git a/extensions/astro/languages/astro/overrides.scm b/extensions/astro/languages/astro/overrides.scm deleted file mode 100644 index e84d1a3e60..0000000000 --- a/extensions/astro/languages/astro/overrides.scm +++ /dev/null @@ -1,6 +0,0 @@ -[ - (attribute_value) - (quoted_attribute_value) -] @string - -(comment) @comment.inclusive diff --git a/extensions/astro/src/astro.rs b/extensions/astro/src/astro.rs deleted file mode 100644 index 72c3646246..0000000000 --- a/extensions/astro/src/astro.rs +++ /dev/null @@ -1,168 +0,0 @@ -use std::collections::HashMap; -use std::{env, fs}; - -use serde::Deserialize; -use zed_extension_api::{self as zed, serde_json, Result}; - -const SERVER_PATH: &str = "node_modules/@astrojs/language-server/bin/nodeServer.js"; -const PACKAGE_NAME: &str = "@astrojs/language-server"; - -const TYPESCRIPT_PACKAGE_NAME: &str = "typescript"; - -/// The relative path to TypeScript's SDK. -const TYPESCRIPT_TSDK_PATH: &str = "node_modules/typescript/lib"; - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct PackageJson { - #[serde(default)] - dependencies: HashMap, - #[serde(default)] - dev_dependencies: HashMap, -} - -struct AstroExtension { - did_find_server: bool, - typescript_tsdk_path: String, -} - -impl AstroExtension { - fn server_exists(&self) -> bool { - fs::metadata(SERVER_PATH).map_or(false, |stat| stat.is_file()) - } - - fn server_script_path( - &mut self, - language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let server_exists = self.server_exists(); - if self.did_find_server && server_exists { - self.install_typescript_if_needed(worktree)?; - return Ok(SERVER_PATH.to_string()); - } - - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::CheckingForUpdate, - ); - let version = zed::npm_package_latest_version(PACKAGE_NAME)?; - - if !server_exists - || 
zed::npm_package_installed_version(PACKAGE_NAME)?.as_ref() != Some(&version) - { - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::Downloading, - ); - let result = zed::npm_install_package(PACKAGE_NAME, &version); - match result { - Ok(()) => { - if !self.server_exists() { - Err(format!( - "installed package '{PACKAGE_NAME}' did not contain expected path '{SERVER_PATH}'", - ))?; - } - } - Err(error) => { - if !self.server_exists() { - Err(error)?; - } - } - } - } - - self.install_typescript_if_needed(worktree)?; - self.did_find_server = true; - Ok(SERVER_PATH.to_string()) - } - - /// Returns whether a local copy of TypeScript exists in the worktree. - fn typescript_exists_for_worktree(&self, worktree: &zed::Worktree) -> Result { - let package_json = worktree.read_text_file("package.json")?; - let package_json: PackageJson = serde_json::from_str(&package_json) - .map_err(|err| format!("failed to parse package.json: {err}"))?; - - let dev_dependencies = &package_json.dev_dependencies; - let dependencies = &package_json.dependencies; - - // Since the extension is not allowed to read the filesystem within the project - // except through the worktree (which does not contains `node_modules`), we check - // the `package.json` to see if `typescript` is listed in the dependencies. 
- Ok(dev_dependencies.contains_key(TYPESCRIPT_PACKAGE_NAME) - || dependencies.contains_key(TYPESCRIPT_PACKAGE_NAME)) - } - - fn install_typescript_if_needed(&mut self, worktree: &zed::Worktree) -> Result<()> { - if self - .typescript_exists_for_worktree(worktree) - .unwrap_or_default() - { - println!("found local TypeScript installation at '{TYPESCRIPT_TSDK_PATH}'"); - return Ok(()); - } - - let installed_typescript_version = - zed::npm_package_installed_version(TYPESCRIPT_PACKAGE_NAME)?; - let latest_typescript_version = zed::npm_package_latest_version(TYPESCRIPT_PACKAGE_NAME)?; - - if installed_typescript_version.as_ref() != Some(&latest_typescript_version) { - println!("installing {TYPESCRIPT_PACKAGE_NAME}@{latest_typescript_version}"); - zed::npm_install_package(TYPESCRIPT_PACKAGE_NAME, &latest_typescript_version)?; - } else { - println!("typescript already installed"); - } - - self.typescript_tsdk_path = env::current_dir() - .unwrap() - .join(TYPESCRIPT_TSDK_PATH) - .to_string_lossy() - .to_string(); - - Ok(()) - } -} - -impl zed::Extension for AstroExtension { - fn new() -> Self { - Self { - did_find_server: false, - typescript_tsdk_path: TYPESCRIPT_TSDK_PATH.to_owned(), - } - } - - fn language_server_command( - &mut self, - language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let server_path = self.server_script_path(language_server_id, worktree)?; - Ok(zed::Command { - command: zed::node_binary_path()?, - args: vec![ - env::current_dir() - .unwrap() - .join(&server_path) - .to_string_lossy() - .to_string(), - "--stdio".to_string(), - ], - env: Default::default(), - }) - } - - fn language_server_initialization_options( - &mut self, - _language_server_id: &zed::LanguageServerId, - _worktree: &zed::Worktree, - ) -> Result> { - Ok(Some(serde_json::json!({ - "provideFormatter": true, - "typescript": { - "tsdk": self.typescript_tsdk_path - } - }))) - } -} - -zed::register_extension!(AstroExtension); diff --git 
a/extensions/clojure/Cargo.toml b/extensions/clojure/Cargo.toml deleted file mode 100644 index 4fa4aaee99..0000000000 --- a/extensions/clojure/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_clojure" -version = "0.0.3" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/clojure.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/clojure/LICENSE-APACHE b/extensions/clojure/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3..0000000000 --- a/extensions/clojure/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/clojure/extension.toml b/extensions/clojure/extension.toml deleted file mode 100644 index fc4d9548fe..0000000000 --- a/extensions/clojure/extension.toml +++ /dev/null @@ -1,15 +0,0 @@ -id = "clojure" -name = "Clojure" -description = "Clojure support." -version = "0.0.3" -schema_version = 1 -authors = ["Paulo Roberto de Oliveira Castro "] -repository = "https://github.com/zed-industries/zed" - -[language_servers.clojure-lsp] -name = "clojure-lsp" -language = "Clojure" - -[grammars.clojure] -repository = "https://github.com/prcastro/tree-sitter-clojure" -commit = "38b4f8d264248b2fd09575fbce66f7c22e8929d5" diff --git a/extensions/clojure/languages/clojure/brackets.scm b/extensions/clojure/languages/clojure/brackets.scm deleted file mode 100644 index 191fd9c084..0000000000 --- a/extensions/clojure/languages/clojure/brackets.scm +++ /dev/null @@ -1,3 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) diff --git a/extensions/clojure/languages/clojure/config.toml b/extensions/clojure/languages/clojure/config.toml deleted file mode 100644 index 998335e748..0000000000 --- a/extensions/clojure/languages/clojure/config.toml +++ /dev/null @@ -1,12 +0,0 @@ -name = "Clojure" -grammar = "clojure" -path_suffixes = ["clj", "cljs", "cljc", "cljd", "edn", 
"bb"] -line_comments = [";; "] -autoclose_before = "}])" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, -] -word_characters = ["-"] diff --git a/extensions/clojure/languages/clojure/highlights.scm b/extensions/clojure/languages/clojure/highlights.scm deleted file mode 100644 index f4abe3bcdd..0000000000 --- a/extensions/clojure/languages/clojure/highlights.scm +++ /dev/null @@ -1,41 +0,0 @@ -;; Literals - -(num_lit) @number - -[ - (char_lit) - (str_lit) -] @string - -[ - (bool_lit) - (nil_lit) -] @constant.builtin - -(kwd_lit) @constant - -;; Comments - -(comment) @comment - -;; Treat quasiquotation as operators for the purpose of highlighting. - -[ - "'" - "`" - "~" - "@" - "~@" -] @operator - - -(list_lit - . - (sym_lit) @function) - -(list_lit - . - (sym_lit) @keyword - (#match? 
@keyword - "^(do|if|let|var|fn|fn*|loop*|recur|throw|try|catch|finally|set!|new|quote|->|->>)$" - )) diff --git a/extensions/clojure/languages/clojure/indents.scm b/extensions/clojure/languages/clojure/indents.scm deleted file mode 100644 index 9a1cbad161..0000000000 --- a/extensions/clojure/languages/clojure/indents.scm +++ /dev/null @@ -1,3 +0,0 @@ -(_ "[" "]") @indent -(_ "{" "}") @indent -(_ "(" ")") @indent diff --git a/extensions/clojure/languages/clojure/outline.scm b/extensions/clojure/languages/clojure/outline.scm deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/extensions/clojure/src/clojure.rs b/extensions/clojure/src/clojure.rs deleted file mode 100644 index 4804f2e78a..0000000000 --- a/extensions/clojure/src/clojure.rs +++ /dev/null @@ -1,109 +0,0 @@ -use std::fs; -use zed_extension_api::{self as zed, LanguageServerId, Result}; - -struct ClojureExtension { - cached_binary_path: Option, -} - -impl ClojureExtension { - fn language_server_binary_path( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - if let Some(path) = worktree.which("clojure-lsp") { - return Ok(path); - } - - if let Some(path) = &self.cached_binary_path { - if fs::metadata(path).map_or(false, |stat| stat.is_file()) { - return Ok(path.clone()); - } - } - - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::CheckingForUpdate, - ); - let release = zed::latest_github_release( - "clojure-lsp/clojure-lsp", - zed::GithubReleaseOptions { - require_assets: true, - pre_release: false, - }, - )?; - - let (platform, arch) = zed::current_platform(); - let asset_name = format!( - "clojure-lsp-native-{os}-{arch}.zip", - os = match platform { - zed::Os::Mac => "macos", - zed::Os::Linux => "linux", - zed::Os::Windows => "windows", - }, - arch = match arch { - zed::Architecture::Aarch64 => "aarch64", - zed::Architecture::X8664 => "amd64", - zed::Architecture::X86 => - return 
Err(format!("unsupported architecture: {arch:?}")), - }, - ); - - let asset = release - .assets - .iter() - .find(|asset| asset.name == asset_name) - .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?; - - let version_dir = format!("clojure-lsp-{}", release.version); - let binary_path = format!("{version_dir}/clojure-lsp"); - - if !fs::metadata(&binary_path).map_or(false, |stat| stat.is_file()) { - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::Downloading, - ); - - zed::download_file( - &asset.download_url, - &version_dir, - zed::DownloadedFileType::Zip, - ) - .map_err(|e| format!("failed to download file: {e}"))?; - - let entries = - fs::read_dir(".").map_err(|e| format!("failed to list working directory {e}"))?; - for entry in entries { - let entry = entry.map_err(|e| format!("failed to load directory entry {e}"))?; - if entry.file_name().to_str() != Some(&version_dir) { - fs::remove_dir_all(entry.path()).ok(); - } - } - } - - self.cached_binary_path = Some(binary_path.clone()); - Ok(binary_path) - } -} - -impl zed::Extension for ClojureExtension { - fn new() -> Self { - Self { - cached_binary_path: None, - } - } - - fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - Ok(zed::Command { - command: self.language_server_binary_path(language_server_id, worktree)?, - args: Vec::new(), - env: Default::default(), - }) - } -} - -zed::register_extension!(ClojureExtension); diff --git a/extensions/csharp/Cargo.toml b/extensions/csharp/Cargo.toml index f123980cd6..daac33a960 100644 --- a/extensions/csharp/Cargo.toml +++ b/extensions/csharp/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_csharp" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/csharp/extension.toml b/extensions/csharp/extension.toml index 640624cb5f..8813ad0606 100644 --- 
a/extensions/csharp/extension.toml +++ b/extensions/csharp/extension.toml @@ -1,7 +1,7 @@ id = "csharp" name = "C#" description = "C# support." -version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = ["fminkowski "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/csharp/languages/csharp/indents.scm b/extensions/csharp/languages/csharp/indents.scm new file mode 100644 index 0000000000..acb44a5e1e --- /dev/null +++ b/extensions/csharp/languages/csharp/indents.scm @@ -0,0 +1,3 @@ +(_ "{" "}" @end) @indent +(_ "[" "]" @end) @indent +(_ "(" ")" @end) @indent diff --git a/extensions/elixir/Cargo.toml b/extensions/elixir/Cargo.toml index 139d21f1c5..cbc610cdc1 100644 --- a/extensions/elixir/Cargo.toml +++ b/extensions/elixir/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_elixir" -version = "0.1.1" +version = "0.1.2" edition = "2021" publish = false license = "Apache-2.0" @@ -13,4 +13,4 @@ path = "src/elixir.rs" crate-type = ["cdylib"] [dependencies] -zed_extension_api = "0.1.0" +zed_extension_api = "0.2.0" diff --git a/extensions/elixir/extension.toml b/extensions/elixir/extension.toml index ba8a1f6687..05ab75fbef 100644 --- a/extensions/elixir/extension.toml +++ b/extensions/elixir/extension.toml @@ -1,7 +1,7 @@ id = "elixir" name = "Elixir" description = "Elixir support." -version = "0.1.1" +version = "0.1.3" schema_version = 1 authors = ["Marshall Bowers "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/elixir/languages/elixir/highlights.scm b/extensions/elixir/languages/elixir/highlights.scm index 0e779d195c..69e962ae39 100644 --- a/extensions/elixir/languages/elixir/highlights.scm +++ b/extensions/elixir/languages/elixir/highlights.scm @@ -67,6 +67,8 @@ quoted_start: _ @string.special quoted_end: _ @string.special) @string.special +(identifier) @variable + ( (identifier) @comment.unused (#match? 
@comment.unused "^_") diff --git a/extensions/elixir/languages/elixir/injections.scm b/extensions/elixir/languages/elixir/injections.scm index 4de229f104..009c82505b 100644 --- a/extensions/elixir/languages/elixir/injections.scm +++ b/extensions/elixir/languages/elixir/injections.scm @@ -2,6 +2,6 @@ ((sigil (sigil_name) @_sigil_name - (quoted_content) @content) + (quoted_content) @injection.content) (#eq? @_sigil_name "H") - (#set! language "heex")) + (#set! injection.language "heex")) diff --git a/extensions/elixir/languages/elixir/textobjects.scm b/extensions/elixir/languages/elixir/textobjects.scm new file mode 100644 index 0000000000..da111a1165 --- /dev/null +++ b/extensions/elixir/languages/elixir/textobjects.scm @@ -0,0 +1,46 @@ +(call + target: ((identifier) @_identifier + (#any-of? @_identifier "defmodule" "defprotocol" "defimpl")) + (do_block + "do" + (_)* @class.inside + "end")) @class.around + +(anonymous_function + (stab_clause + right: (body) @function.inside)) @function.around + +(call + target: ((identifier) @_identifier + (#any-of? @_identifier "def" "defmacro" "defmacrop" "defn" "defnp" "defp")) + (do_block + "do" + (_)* @function.inside + "end")) @function.around + +(call + target: ((identifier) @_identifier + (#any-of? @_identifier "def" "defmacro" "defmacrop" "defn" "defnp" "defp")) + (arguments + (_) + (keywords + (pair + value: (_) @function.inside)))) @function.around + +(call + target: ((identifier) @_identifier + (#any-of? @_identifier "defdelegate" "defguard" "defguardp"))) @function.around + +(comment) @comment.around + +(unary_operator + operator: "@" + operand: (call + target: ((identifier) @_identifier + (#any-of? 
@_identifier "moduledoc" "typedoc" "shortdoc" "doc")) + (arguments + [ + (keywords) @comment.inside + (string + (quoted_content) @comment.inside) + ]))) @comment.around diff --git a/extensions/elixir/languages/heex/injections.scm b/extensions/elixir/languages/heex/injections.scm index b503bcb28d..96c1d7c8c0 100644 --- a/extensions/elixir/languages/heex/injections.scm +++ b/extensions/elixir/languages/heex/injections.scm @@ -4,10 +4,10 @@ (partial_expression_value) (expression_value) (ending_expression_value) - ] @content) - (#set! language "elixir") - (#set! combined) + ] @injection.content) + (#set! injection.language "elixir") + (#set! injection.combined) ) -((expression (expression_value) @content) - (#set! language "elixir")) +((expression (expression_value) @injection.content) + (#set! injection.language "elixir")) diff --git a/extensions/elixir/src/language_servers/elixir_ls.rs b/extensions/elixir/src/language_servers/elixir_ls.rs index 72c705ffbc..9d35d86f74 100644 --- a/extensions/elixir/src/language_servers/elixir_ls.rs +++ b/extensions/elixir/src/language_servers/elixir_ls.rs @@ -107,36 +107,85 @@ impl ElixirLs { } pub fn label_for_completion(&self, completion: Completion) -> Option { + let name = &completion.label; + let detail = completion + .detail + .filter(|detail| detail != "alias") + .map(|detail| format!(": {detail}")) + .unwrap_or("".to_string()); + + let detail_span = CodeLabelSpan::literal(detail, Some("comment.unused".to_string())); + match completion.kind? 
{ - CompletionKind::Module - | CompletionKind::Class - | CompletionKind::Interface - | CompletionKind::Struct => { - let name = completion.label; + CompletionKind::Module | CompletionKind::Class | CompletionKind::Struct => { let defmodule = "defmodule "; - let code = format!("{defmodule}{name}"); + let alias = completion + .label_details + .and_then(|details| details.description) + .filter(|description| description.starts_with("alias")) + .map(|description| format!(" ({description})")) + .unwrap_or("".to_string()); + + let code = format!("{defmodule}{name}{alias}"); + let name_start = defmodule.len(); + let name_end = name_start + name.len(); Some(CodeLabel { code, - spans: vec![CodeLabelSpan::code_range( - defmodule.len()..defmodule.len() + name.len(), - )], + spans: vec![ + CodeLabelSpan::code_range(name_start..name_end), + detail_span, + CodeLabelSpan::code_range(name_end..(name_end + alias.len())), + ], filter_range: (0..name.len()).into(), }) } + CompletionKind::Interface => Some(CodeLabel { + code: name.to_string(), + spans: vec![CodeLabelSpan::code_range(0..name.len()), detail_span], + filter_range: (0..name.len()).into(), + }), + CompletionKind::Field => Some(CodeLabel { + code: name.to_string(), + spans: vec![ + CodeLabelSpan::literal(name, Some("function".to_string())), + detail_span, + ], + filter_range: (0..name.len()).into(), + }), CompletionKind::Function | CompletionKind::Constant => { - let name = completion.label; + let detail = completion + .label_details + .clone() + .and_then(|details| details.detail) + .unwrap_or("".to_string()); + + let description = completion + .label_details + .clone() + .and_then(|details| details.description) + .map(|description| format!(" ({description})")) + .unwrap_or("".to_string()); + let def = "def "; - let code = format!("{def}{name}"); + let code = format!("{def}{name}{detail}{description}"); + + let name_start = def.len(); + let name_end = name_start + name.len(); + let detail_end = name_end + detail.len(); + let 
description_end = detail_end + description.len(); Some(CodeLabel { code, - spans: vec![CodeLabelSpan::code_range(def.len()..def.len() + name.len())], + spans: vec![ + CodeLabelSpan::code_range(name_start..name_end), + CodeLabelSpan::code_range(name_end..detail_end), + CodeLabelSpan::code_range(detail_end..description_end), + ], filter_range: (0..name.len()).into(), }) } CompletionKind::Operator => { - let name = completion.label; let def_a = "def a "; let code = format!("{def_a}{name} b"); diff --git a/extensions/elm/Cargo.toml b/extensions/elm/Cargo.toml deleted file mode 100644 index f28fc70311..0000000000 --- a/extensions/elm/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_elm" -version = "0.0.1" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/elm.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/elm/LICENSE-APACHE b/extensions/elm/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3..0000000000 --- a/extensions/elm/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/elm/extension.toml b/extensions/elm/extension.toml deleted file mode 100644 index 0c77cf6478..0000000000 --- a/extensions/elm/extension.toml +++ /dev/null @@ -1,15 +0,0 @@ -id = "elm" -name = "Elm" -description = "Elm support." 
-version = "0.0.1" -schema_version = 1 -authors = ["Quinn Wilton ", "Andrey Kuzmin "] -repository = "https://github.com/zed-industries/zed" - -[language_servers.elm-language-server] -name = "elm-language-server" -language = "Elm" - -[grammars.elm] -repository = "https://github.com/elm-tooling/tree-sitter-elm" -commit = "09dbf221d7491dc8d8839616b27c21b9c025c457" diff --git a/extensions/elm/languages/elm/config.toml b/extensions/elm/languages/elm/config.toml deleted file mode 100644 index a142035cc0..0000000000 --- a/extensions/elm/languages/elm/config.toml +++ /dev/null @@ -1,13 +0,0 @@ -name = "Elm" -grammar = "elm" -path_suffixes = ["elm"] -line_comments = ["-- "] -block_comment = ["{- ", " -}"] -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] }, -] -tab_size = 2 diff --git a/extensions/elm/languages/elm/highlights.scm b/extensions/elm/languages/elm/highlights.scm deleted file mode 100644 index 5723c7eecb..0000000000 --- a/extensions/elm/languages/elm/highlights.scm +++ /dev/null @@ -1,72 +0,0 @@ -[ - "if" - "then" - "else" - "let" - "in" - (case) - (of) - (backslash) - (as) - (port) - (exposing) - (alias) - (import) - (module) - (type) - (arrow) - ] @keyword - -[ - (eq) - (operator_identifier) - (colon) -] @operator - -(type_annotation(lower_case_identifier) @function) -(port_annotation(lower_case_identifier) @function) -(function_declaration_left(lower_case_identifier) @function.definition) - -(function_call_expr - target: (value_expr - name: (value_qid (lower_case_identifier) @function))) - -(exposed_value(lower_case_identifier) @function) -(exposed_type(upper_case_identifier) @type) - -(field_access_expr(value_expr(value_qid)) @identifier) 
-(lower_pattern) @variable -(record_base_identifier) @identifier - -[ - "(" - ")" -] @punctuation.bracket - -[ - "|" - "," -] @punctuation.delimiter - -(number_constant_expr) @constant - -(type_declaration(upper_case_identifier) @type) -(type_ref) @type -(type_alias_declaration name: (upper_case_identifier) @type) - -(value_expr(upper_case_qid(upper_case_identifier)) @type) - -[ - (line_comment) - (block_comment) -] @comment - -(string_escape) @string.escape - -[ - (open_quote) - (close_quote) - (regular_string_part) - (open_char) - (close_char) -] @string diff --git a/extensions/elm/languages/elm/injections.scm b/extensions/elm/languages/elm/injections.scm deleted file mode 100644 index 0567320675..0000000000 --- a/extensions/elm/languages/elm/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((glsl_content) @content - (#set! "language" "glsl")) diff --git a/extensions/elm/languages/elm/outline.scm b/extensions/elm/languages/elm/outline.scm deleted file mode 100644 index 1d7d5a70b0..0000000000 --- a/extensions/elm/languages/elm/outline.scm +++ /dev/null @@ -1,22 +0,0 @@ -(type_declaration - (type) @context - (upper_case_identifier) @name) @item - -(type_alias_declaration - (type) @context - (alias) @context - name: (upper_case_identifier) @name) @item - -(type_alias_declaration - typeExpression: - (type_expression - part: (record_type - (field_type - name: (lower_case_identifier) @name) @item))) - -(union_variant - name: (upper_case_identifier) @name) @item - -(value_declaration - functionDeclarationLeft: - (function_declaration_left(lower_case_identifier) @name)) @item diff --git a/extensions/elm/src/elm.rs b/extensions/elm/src/elm.rs deleted file mode 100644 index b2c3d404ab..0000000000 --- a/extensions/elm/src/elm.rs +++ /dev/null @@ -1,113 +0,0 @@ -use std::{env, fs}; -use zed::{ - serde_json::{self, Value}, - settings::LspSettings, -}; -use zed_extension_api::{self as zed, Result}; - -const SERVER_PATH: &str = 
"node_modules/@elm-tooling/elm-language-server/out/node/index.js"; -const PACKAGE_NAME: &str = "@elm-tooling/elm-language-server"; - -struct ElmExtension { - did_find_server: bool, -} - -impl ElmExtension { - fn server_exists(&self) -> bool { - fs::metadata(SERVER_PATH).map_or(false, |stat| stat.is_file()) - } - - fn server_script_path(&mut self, server_id: &zed::LanguageServerId) -> Result { - let server_exists = self.server_exists(); - if self.did_find_server && server_exists { - return Ok(SERVER_PATH.to_string()); - } - - zed::set_language_server_installation_status( - server_id, - &zed::LanguageServerInstallationStatus::CheckingForUpdate, - ); - let version = zed::npm_package_latest_version(PACKAGE_NAME)?; - - if !server_exists - || zed::npm_package_installed_version(PACKAGE_NAME)?.as_ref() != Some(&version) - { - zed::set_language_server_installation_status( - server_id, - &zed::LanguageServerInstallationStatus::Downloading, - ); - let result = zed::npm_install_package(PACKAGE_NAME, &version); - match result { - Ok(()) => { - if !self.server_exists() { - Err(format!( - "installed package '{PACKAGE_NAME}' did not contain expected path '{SERVER_PATH}'", - ))?; - } - } - Err(error) => { - if !self.server_exists() { - Err(error)?; - } - } - } - } - - self.did_find_server = true; - Ok(SERVER_PATH.to_string()) - } -} - -impl zed::Extension for ElmExtension { - fn new() -> Self { - Self { - did_find_server: false, - } - } - - fn language_server_command( - &mut self, - server_id: &zed::LanguageServerId, - _worktree: &zed::Worktree, - ) -> Result { - let server_path = self.server_script_path(server_id)?; - Ok(zed::Command { - command: zed::node_binary_path()?, - args: vec![ - env::current_dir() - .unwrap() - .join(&server_path) - .to_string_lossy() - .to_string(), - "--stdio".to_string(), - ], - env: Default::default(), - }) - } - - fn language_server_workspace_configuration( - &mut self, - server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result> { 
- // elm-language-server expects workspace didChangeConfiguration notification - // params to be the same as lsp initialization_options - let initialization_options = LspSettings::for_worktree(server_id.as_ref(), worktree)? - .initialization_options - .clone() - .unwrap_or_default(); - - Ok(Some(match initialization_options.clone().as_object_mut() { - Some(op) => { - // elm-language-server requests workspace configuration - // for the `elmLS` section, so we have to nest - // another copy of initialization_options there - op.insert("elmLS".into(), initialization_options); - serde_json::to_value(op).unwrap_or_default() - } - None => initialization_options, - })) - } -} - -zed::register_extension!(ElmExtension); diff --git a/extensions/erlang/Cargo.toml b/extensions/erlang/Cargo.toml index 5067344896..ca354e0cbc 100644 --- a/extensions/erlang/Cargo.toml +++ b/extensions/erlang/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_erlang" -version = "0.1.0" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/erlang/extension.toml b/extensions/erlang/extension.toml index 8dd2628fd2..f6e903ccf9 100644 --- a/extensions/erlang/extension.toml +++ b/extensions/erlang/extension.toml @@ -1,7 +1,7 @@ id = "erlang" name = "Erlang" description = "Erlang support." 
-version = "0.1.0" +version = "0.1.1" schema_version = 1 authors = ["Dairon M ", "Fabian Bergström "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/erlang/languages/erlang/textobjects.scm b/extensions/erlang/languages/erlang/textobjects.scm new file mode 100644 index 0000000000..e802a2f362 --- /dev/null +++ b/extensions/erlang/languages/erlang/textobjects.scm @@ -0,0 +1,6 @@ +(function_clause + body: (_ "->" (_)* @function.inside)) @function.around + +(type_alias ty: (_) @class.inside) @class.around + +(comment)+ @comment.around diff --git a/extensions/haskell/Cargo.toml b/extensions/haskell/Cargo.toml index 0b69075a20..c106a0dd1b 100644 --- a/extensions/haskell/Cargo.toml +++ b/extensions/haskell/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_haskell" -version = "0.1.1" +version = "0.1.2" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/haskell/extension.toml b/extensions/haskell/extension.toml index 2ef30cb3d5..003687136e 100644 --- a/extensions/haskell/extension.toml +++ b/extensions/haskell/extension.toml @@ -1,7 +1,7 @@ id = "haskell" name = "Haskell" description = "Haskell support." 
-version = "0.1.1" +version = "0.1.2" schema_version = 1 authors = [ "Pocæus ", diff --git a/extensions/haskell/languages/haskell/highlights.scm b/extensions/haskell/languages/haskell/highlights.scm index aca744f5cd..3ffc6512b8 100644 --- a/extensions/haskell/languages/haskell/highlights.scm +++ b/extensions/haskell/languages/haskell/highlights.scm @@ -18,7 +18,7 @@ (integer) @number (exp_negation) @number (exp_literal (float)) @float -(char) @character +(char) @string (string) @string (con_unit) @symbol ; unit, as in () diff --git a/extensions/haskell/languages/haskell/textobjects.scm b/extensions/haskell/languages/haskell/textobjects.scm new file mode 100644 index 0000000000..4302397467 --- /dev/null +++ b/extensions/haskell/languages/haskell/textobjects.scm @@ -0,0 +1,12 @@ +(comment)+ @comment.around + +[ + (adt) + (type_alias) + (newtype) +] @class.around + +(record_fields "{" (_)* @class.inside "}") + +((signature)? (function)+) @function.around +(function rhs:(_) @function.inside) diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index 6bb0c23374..aa020037a7 100644 --- a/extensions/html/languages/html/highlights.scm +++ b/extensions/html/languages/html/highlights.scm @@ -1,7 +1,7 @@ (tag_name) @tag (erroneous_end_tag_name) @keyword (doctype) @tag.doctype -(attribute_name) @property +(attribute_name) @attribute (attribute_value) @string (comment) @comment diff --git a/extensions/html/languages/html/injections.scm b/extensions/html/languages/html/injections.scm index 9084e373f2..1c31b2a0a9 100644 --- a/extensions/html/languages/html/injections.scm +++ b/extensions/html/languages/html/injections.scm @@ -1,7 +1,7 @@ (script_element - (raw_text) @content - (#set! "language" "javascript")) + (raw_text) @injection.content + (#set! injection.language "javascript")) (style_element - (raw_text) @content - (#set! "language" "css")) + (raw_text) @injection.content + (#set! 
injection.language "css")) diff --git a/extensions/lua/Cargo.toml b/extensions/lua/Cargo.toml index f577ce1871..8eec6ed62f 100644 --- a/extensions/lua/Cargo.toml +++ b/extensions/lua/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_lua" -version = "0.1.0" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/lua/extension.toml b/extensions/lua/extension.toml index 82026f48ba..52120cdfa2 100644 --- a/extensions/lua/extension.toml +++ b/extensions/lua/extension.toml @@ -1,7 +1,7 @@ id = "lua" name = "Lua" description = "Lua support." -version = "0.1.0" +version = "0.1.1" schema_version = 1 authors = ["Max Brunsfeld "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/lua/languages/lua/textobjects.scm b/extensions/lua/languages/lua/textobjects.scm new file mode 100644 index 0000000000..1f8bf66059 --- /dev/null +++ b/extensions/lua/languages/lua/textobjects.scm @@ -0,0 +1,7 @@ +(function_definition + body: (_) @function.inside) @function.around + +(function_declaration + body: (_) @function.inside) @function.around + +(comment)+ @comment.around diff --git a/extensions/ocaml/Cargo.toml b/extensions/ocaml/Cargo.toml deleted file mode 100644 index 6df98bec4c..0000000000 --- a/extensions/ocaml/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_ocaml" -version = "0.1.0" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/ocaml.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/ocaml/LICENSE-APACHE b/extensions/ocaml/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3..0000000000 --- a/extensions/ocaml/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/ocaml/extension.toml b/extensions/ocaml/extension.toml deleted file mode 100644 index bff7c380b5..0000000000 --- a/extensions/ocaml/extension.toml +++ /dev/null @@ 
-1,25 +0,0 @@ -id = "ocaml" -name = "OCaml" -description = "OCaml support." -version = "0.1.0" -schema_version = 1 -authors = ["Rashid Almheiri <69181766+huwaireb@users.noreply.github.com>"] -repository = "https://github.com/zed-industries/zed" - -[language_servers.ocamllsp] -name = "ocamllsp" -languages = ["OCaml", "OCaml Interface"] - -[grammars.ocaml] -repository = "https://github.com/tree-sitter/tree-sitter-ocaml" -commit = "0b12614ded3ec7ed7ab7933a9ba4f695ba4c342e" -path = "grammars/ocaml" - -[grammars.ocaml_interface] -repository = "https://github.com/tree-sitter/tree-sitter-ocaml" -commit = "0b12614ded3ec7ed7ab7933a9ba4f695ba4c342e" -path = "grammars/interface" - -[grammars.dune] -repository = "https://github.com/WHForks/tree-sitter-dune" -commit = "b3f7882e1b9a1d8811011bf6f0de1c74c9c93949" diff --git a/extensions/ocaml/languages/dune/config.toml b/extensions/ocaml/languages/dune/config.toml deleted file mode 100644 index b4f79850b6..0000000000 --- a/extensions/ocaml/languages/dune/config.toml +++ /dev/null @@ -1,8 +0,0 @@ -name = "Dune" -grammar = "dune" -path_suffixes = ["dune", "dune-project"] -brackets = [ - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } -] -tab_size = 2 diff --git a/extensions/ocaml/languages/dune/highlights.scm b/extensions/ocaml/languages/dune/highlights.scm deleted file mode 100644 index e7a21cd2c5..0000000000 --- a/extensions/ocaml/languages/dune/highlights.scm +++ /dev/null @@ -1,5 +0,0 @@ -(stanza_name) @function -(field_name) @property -(quoted_string) @string -(multiline_string) @string -(action_name) @keyword diff --git a/extensions/ocaml/languages/dune/injections.scm b/extensions/ocaml/languages/dune/injections.scm deleted file mode 100644 index 654b5b2c13..0000000000 --- a/extensions/ocaml/languages/dune/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((ocaml_syntax) @injection.content - (#set! 
injection.language "ocaml")) diff --git a/extensions/ocaml/languages/ocaml-interface/brackets.scm b/extensions/ocaml/languages/ocaml-interface/brackets.scm deleted file mode 100644 index e7e8145eba..0000000000 --- a/extensions/ocaml/languages/ocaml-interface/brackets.scm +++ /dev/null @@ -1,2 +0,0 @@ -("(" @open ")" @close) -("{" @open "}" @close) diff --git a/extensions/ocaml/languages/ocaml-interface/config.toml b/extensions/ocaml/languages/ocaml-interface/config.toml deleted file mode 100644 index a4378ec3ed..0000000000 --- a/extensions/ocaml/languages/ocaml-interface/config.toml +++ /dev/null @@ -1,12 +0,0 @@ -name = "OCaml Interface" -code_fence_block_name = "ocaml" -grammar = "ocaml_interface" -path_suffixes = ["mli"] -block_comment = ["(* ", " *)"] -autoclose_before = ";,=)}" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true } -] -tab_size = 2 diff --git a/extensions/ocaml/languages/ocaml-interface/highlights.scm b/extensions/ocaml/languages/ocaml-interface/highlights.scm deleted file mode 120000 index e6f0d00d1d..0000000000 --- a/extensions/ocaml/languages/ocaml-interface/highlights.scm +++ /dev/null @@ -1 +0,0 @@ -../ocaml/highlights.scm \ No newline at end of file diff --git a/extensions/ocaml/languages/ocaml-interface/indents.scm b/extensions/ocaml/languages/ocaml-interface/indents.scm deleted file mode 100644 index 0de50a48bb..0000000000 --- a/extensions/ocaml/languages/ocaml-interface/indents.scm +++ /dev/null @@ -1,21 +0,0 @@ -[ - (type_binding) - - (value_specification) - (method_specification) - - (external) - (field_declaration) -] @indent - -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent - -(_ "object" @start "end" @end) @indent - -(signature - "sig" @start - "end" @end) @indent - -";;" @outdent diff --git a/extensions/ocaml/languages/ocaml-interface/outline.scm 
b/extensions/ocaml/languages/ocaml-interface/outline.scm deleted file mode 100644 index b8539d4cd0..0000000000 --- a/extensions/ocaml/languages/ocaml-interface/outline.scm +++ /dev/null @@ -1,48 +0,0 @@ -(module_type_definition - "module" @context - "type" @context - name: (_) @name) @item - -(module_definition - "module" @context - (module_binding name: (_) @name)) @item - -(type_definition - "type" @context - (type_binding name: (_) @name)) @item - -(class_definition - "class" @context - (class_binding - "virtual"? @context - name: (_) @name)) @item - -(class_type_definition - "class" @context - "type" @context - (class_type_binding - "virtual"? @context - name: (_) @name)) @item - -(instance_variable_definition - "val" @context - "method"? @context - name: (_) @name) @item - -(method_specification - "method" @context - "virtual"? @context - (method_name) @name) @item - -(value_specification - "val" @context - (value_name) @name) @item - -(external - "external" @context - (value_name) @name) @item - -(exception_definition - "exception" @context - (constructor_declaration - (constructor_name) @name)) @item diff --git a/extensions/ocaml/languages/ocaml/brackets.scm b/extensions/ocaml/languages/ocaml/brackets.scm deleted file mode 100644 index 1f5ee9bfa3..0000000000 --- a/extensions/ocaml/languages/ocaml/brackets.scm +++ /dev/null @@ -1,5 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("[|" @open "|]" @close) -("{" @open "}" @close) -("\"" @open "\"" @close) diff --git a/extensions/ocaml/languages/ocaml/config.toml b/extensions/ocaml/languages/ocaml/config.toml deleted file mode 100644 index 7d5b4348d6..0000000000 --- a/extensions/ocaml/languages/ocaml/config.toml +++ /dev/null @@ -1,14 +0,0 @@ -name = "OCaml" -grammar = "ocaml" -path_suffixes = ["ml"] -block_comment = ["(* ", " *)"] -autoclose_before = ";,=)}]" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "{|", end = "|", close = true, newline = true, not_in = 
["string"] }, - { start = "[", end = "]", close = true, newline = true }, - { start = "[|", end = "|", close = true, newline = true, not_in = ["string"] }, - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } -] -tab_size = 2 diff --git a/extensions/ocaml/languages/ocaml/highlights.scm b/extensions/ocaml/languages/ocaml/highlights.scm deleted file mode 100644 index 8029d3cc22..0000000000 --- a/extensions/ocaml/languages/ocaml/highlights.scm +++ /dev/null @@ -1,158 +0,0 @@ -; Modules -;-------- - -[(module_name) (module_type_name)] @title - -; Types -;------ - -[(class_name) (class_type_name) (type_constructor)] @type - -(tag) @variant ;; Polymorphic Variants -(constructor_name) @constructor ;; Exceptions, variants and the like - -; Functions -;---------- - -(let_binding - pattern: (value_name) @function - (parameter)) - -(let_binding - pattern: (value_name) @function - body: [(fun_expression) (function_expression)]) - -(value_specification (value_name) @function) - -(external (value_name) @function) - -(method_name) @function - -(infix_expression - left: (value_path (value_name) @function) - operator: (concat_operator) @operator - (#eq? @operator "@@")) - -(infix_expression - operator: (rel_operator) @operator - right: (value_path (value_name) @function) - (#eq? 
@operator "|>")) - -(application_expression - function: (value_path (value_name) @function)) - -; Variables -;---------- - -(value_pattern) @variable - -(type_variable) @variable.special - -; Properties -;----------- - -[(field_name) (instance_variable_name)] @property - -; Labels -;------- - -[(label_name) (parameter)] @label - -(parameter - pattern: (value_pattern) @label) -; despite the above rule, we should still label value_pattern as a variable -; when a label name is present -(parameter - (label_name) - pattern: (value_pattern) @variable) - -; Constants -;---------- - -(boolean) @boolean - -[(number) (signed_number)] @number - -[(string) (character)] @string - -(quoted_string "{" @string "}" @string) @string -(quoted_string_content) @string - - -(escape_sequence) @string.escape - -[ - (conversion_specification) - (pretty_printing_indication) -] @punctuation.special - -; Operators -;---------- - -(match_expression (match_operator) @keyword) - -(value_definition [(let_operator) (let_and_operator)] @keyword) - -[ - (prefix_operator) - (sign_operator) - (pow_operator) - (mult_operator) - (add_operator) - (concat_operator) - (rel_operator) - (and_operator) - (or_operator) - (assign_operator) - (hash_operator) - (indexing_operator) - (let_operator) - (let_and_operator) - (match_operator) -] @operator - -["*" "#" "::" "<-"] @operator - -; Keywords -;--------- - -[ - "and" "as" "assert" "begin" "class" "constraint" "do" "done" "downto" "else" - "end" "exception" "external" "for" "fun" "function" "functor" "if" "in" - "include" "inherit" "initializer" "lazy" "let" "match" "method" "module" - "mutable" "new" "nonrec" "object" "of" "open" "private" "rec" "sig" "struct" - "then" "to" "try" "type" "val" "virtual" "when" "while" "with" -] @keyword - -; Punctuation -;------------ - -["(" ")" "[" "]" "{" "}" "[|" "|]" "[<" "[>" "[@@" "[@" "[%"] @punctuation.bracket - -(object_type ["<" ">"] @punctuation.bracket) - -[ - "," "." ";" ":" "=" "|" "~" "?" "+" "-" "!" 
">" "&" - "->" ";;" ":>" "+=" ":=" ".." -] @punctuation.delimiter - -; Attributes -;----------- - -[ - (attribute) - (item_attribute) - (floating_attribute) - (extension) - (item_extension) - (quoted_extension) - (quoted_item_extension) - -] @attribute - -(attribute_id) @tag - -; Comments -;--------- - -[(comment) (line_number_directive) (directive) (shebang)] @comment diff --git a/extensions/ocaml/languages/ocaml/indents.scm b/extensions/ocaml/languages/ocaml/indents.scm deleted file mode 100644 index 319d2fd971..0000000000 --- a/extensions/ocaml/languages/ocaml/indents.scm +++ /dev/null @@ -1,45 +0,0 @@ -[ - (let_binding) - (type_binding) - - (method_definition) - - (external) - (value_specification) - (method_specification) - - (match_case) - - (function_expression) - - (field_declaration) - (field_expression) - - (application_expression) -] @indent - -(_ "[" "]" @end) @indent -(_ "[|" "|]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent - -(_ "object" @start "end" @end) @indent - -(structure - "struct" @start - "end" @end) @indent - -(signature - "sig" @start - "end" @end) @indent - -(parenthesized_expression - "begin" @start - "end") @indent - -(do_clause - "do" @start - "done" @end) @indent - -";;" @outdent diff --git a/extensions/ocaml/languages/ocaml/outline.scm b/extensions/ocaml/languages/ocaml/outline.scm deleted file mode 100644 index c7f39c219b..0000000000 --- a/extensions/ocaml/languages/ocaml/outline.scm +++ /dev/null @@ -1,59 +0,0 @@ -(_structure_item/value_definition - "let" @context - (let_binding - pattern: (_) @name)) @item - -(_structure_item/exception_definition - "exception" @context - (constructor_declaration - (constructor_name) @name)) @item - -(_structure_item/module_definition - "module" @context - (module_binding - name: (module_name) @name)) @item - -(module_type_definition - "module" @context - "type" @context - name: (_) @name) @item - -(type_definition - "type" @context - (type_binding 
name: (_) @name)) @item - -(value_specification - "val" @context - (value_name) @name) @item - -(class_definition - "class" @context - (class_binding - "virtual"? @context - name: (_) @name)) @item - -(class_type_definition - "class" @context - "type" @context - (class_type_binding - "virtual"? @context - name: (_) @name)) @item - -(instance_variable_definition - "val" @context - "method"? @context - name: (_) @name) @item - -(method_specification - "method" @context - "virtual"? @context - (method_name) @name) @item - -(method_definition - "method" @context - "virtual"? @context - name: (_) @name) @item - -(external - "external" @context - (value_name) @name) @item diff --git a/extensions/ocaml/src/ocaml.rs b/extensions/ocaml/src/ocaml.rs deleted file mode 100644 index 94e6d55e17..0000000000 --- a/extensions/ocaml/src/ocaml.rs +++ /dev/null @@ -1,219 +0,0 @@ -use std::ops::Range; -use zed::lsp::{Completion, CompletionKind, Symbol, SymbolKind}; -use zed::{CodeLabel, CodeLabelSpan}; -use zed_extension_api::{self as zed, Result}; - -const OPERATOR_CHAR: [char; 17] = [ - '~', '!', '?', '%', '<', ':', '.', '$', '&', '*', '+', '-', '/', '=', '>', '@', '^', -]; - -struct OcamlExtension; - -impl zed::Extension for OcamlExtension { - fn new() -> Self { - Self - } - - fn language_server_command( - &mut self, - _language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let path = worktree.which("ocamllsp").ok_or_else(|| { - "ocamllsp (ocaml-language-server) must be installed manually.".to_string() - })?; - - Ok(zed::Command { - command: path, - args: Vec::new(), - env: worktree.shell_env(), - }) - } - - fn label_for_completion( - &self, - _language_server_id: &zed::LanguageServerId, - completion: Completion, - ) -> Option { - let name = &completion.label; - let detail = completion.detail.as_ref().map(|s| s.replace('\n', " ")); - - match completion.kind.zip(detail) { - Some((CompletionKind::Constructor | CompletionKind::EnumMember, detail)) => 
{ - let (argument, return_t) = detail - .split_once("->") - .map_or((None, detail.as_str()), |(arg, typ)| { - (Some(arg.trim()), typ.trim()) - }); - - let type_decl = "type t = "; - let type_of = argument.map(|_| " of ").unwrap_or_default(); - let argument = argument.unwrap_or_default(); - let terminator = "\n"; - let let_decl = "let _ "; - let let_colon = ": "; - let let_suffix = " = ()"; - let code = format!( - "{type_decl}{name}{type_of}{argument}{terminator}{let_decl}{let_colon}{return_t}{let_suffix}" - ); - - let name_start = type_decl.len(); - let argument_end = name_start + name.len() + type_of.len() + argument.len(); - let colon_start = argument_end + terminator.len() + let_decl.len(); - let return_type_end = code.len() - let_suffix.len(); - Some(CodeLabel { - code, - spans: vec![ - CodeLabelSpan::code_range(name_start..argument_end), - CodeLabelSpan::code_range(colon_start..return_type_end), - ], - filter_range: (0..name.len()).into(), - }) - } - - Some((CompletionKind::Field, detail)) => { - let filter_range_start = if name.starts_with(['~', '?']) { 1 } else { 0 }; - - let record_prefix = "type t = { "; - let record_suffix = "; }"; - let code = format!("{record_prefix}{name} : {detail}{record_suffix}"); - - Some(CodeLabel { - spans: vec![CodeLabelSpan::code_range( - record_prefix.len()..code.len() - record_suffix.len(), - )], - code, - filter_range: (filter_range_start..name.len()).into(), - }) - } - - Some((CompletionKind::Value, detail)) => { - let let_prefix = "let "; - let suffix = " = ()"; - let (l_paren, r_paren) = if name.contains(OPERATOR_CHAR) { - ("( ", " )") - } else { - ("", "") - }; - let code = format!("{let_prefix}{l_paren}{name}{r_paren} : {detail}{suffix}"); - - let name_start = let_prefix.len() + l_paren.len(); - let name_end = name_start + name.len(); - let type_annotation_start = name_end + r_paren.len(); - let type_annotation_end = code.len() - suffix.len(); - - Some(CodeLabel { - spans: vec![ - 
CodeLabelSpan::code_range(name_start..name_end), - CodeLabelSpan::code_range(type_annotation_start..type_annotation_end), - ], - filter_range: (0..name.len()).into(), - code, - }) - } - - Some((CompletionKind::Method, detail)) => { - let method_decl = "class c : object method "; - let end = " end"; - let code = format!("{method_decl}{name} : {detail}{end}"); - - Some(CodeLabel { - spans: vec![CodeLabelSpan::code_range( - method_decl.len()..code.len() - end.len(), - )], - code, - filter_range: (0..name.len()).into(), - }) - } - - Some((kind, _)) => { - let highlight_name = match kind { - CompletionKind::Module | CompletionKind::Interface => "title", - CompletionKind::Keyword => "keyword", - CompletionKind::TypeParameter => "type", - _ => return None, - }; - - Some(CodeLabel { - spans: vec![(CodeLabelSpan::literal(name, Some(highlight_name.to_string())))], - filter_range: (0..name.len()).into(), - code: String::new(), - }) - } - _ => None, - } - } - - fn label_for_symbol( - &self, - _language_server_id: &zed::LanguageServerId, - symbol: Symbol, - ) -> Option { - let name = &symbol.name; - - let (code, filter_range, display_range) = match symbol.kind { - SymbolKind::Property => { - let code = format!("type t = {{ {}: (); }}", name); - let filter_range: Range = 0..name.len(); - let display_range = 11..11 + name.len(); - (code, filter_range, display_range) - } - SymbolKind::Function - if name.contains(OPERATOR_CHAR) - || (name.starts_with("let") && name.contains(OPERATOR_CHAR)) => - { - let code = format!("let ( {name} ) () = ()"); - - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end + 1; - (code, filter_range, display_range) - } - SymbolKind::Function => { - let code = format!("let {name} () = ()"); - - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (code, filter_range, display_range) - } - SymbolKind::Constructor => { - let code = format!("type t = {name}"); - let filter_range = 0..name.len(); - let 
display_range = 9..9 + name.len(); - (code, filter_range, display_range) - } - SymbolKind::Module => { - let code = format!("module {name} = struct end"); - let filter_range = 7..7 + name.len(); - let display_range = 0..filter_range.end; - (code, filter_range, display_range) - } - SymbolKind::Class => { - let code = format!("class {name} = object end"); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (code, filter_range, display_range) - } - SymbolKind::Method => { - let code = format!("class c = object method {name} = () end"); - let filter_range = 0..name.len(); - let display_range = 17..24 + name.len(); - (code, filter_range, display_range) - } - SymbolKind::String => { - let code = format!("type {name} = T"); - let filter_range = 5..5 + name.len(); - let display_range = 0..filter_range.end; - (code, filter_range, display_range) - } - _ => return None, - }; - - Some(CodeLabel { - code, - spans: vec![CodeLabelSpan::code_range(display_range)], - filter_range: filter_range.into(), - }) - } -} - -zed::register_extension!(OcamlExtension); diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md index 405356dc53..337c24325b 100644 --- a/extensions/perplexity/README.md +++ b/extensions/perplexity/README.md @@ -38,6 +38,6 @@ Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter: This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable. -To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. 
+To obtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Reload close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`. diff --git a/extensions/perplexity/extension.toml b/extensions/perplexity/extension.toml index 205f8a5cc2..474d9ee981 100644 --- a/extensions/perplexity/extension.toml +++ b/extensions/perplexity/extension.toml @@ -3,7 +3,7 @@ name = "Perplexity" version = "0.1.0" description = "Ask questions to Perplexity AI directly from Zed" authors = ["Zed Industries "] -repository = "https://github.com/zed-industries/zed-perplexity" +repository = "https://github.com/zed-industries/zed" schema_version = 1 [slash_commands.perplexity] diff --git a/extensions/php/Cargo.toml b/extensions/php/Cargo.toml index a78c133e8e..8bf6a523f4 100644 --- a/extensions/php/Cargo.toml +++ b/extensions/php/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_php" -version = "0.2.2" +version = "0.2.3" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/php/extension.toml b/extensions/php/extension.toml index eec2fe5d39..a2bc1d921e 100644 --- a/extensions/php/extension.toml +++ b/extensions/php/extension.toml @@ -1,7 +1,7 @@ id = "php" name = "PHP" description = "PHP support." 
-version = "0.2.2" +version = "0.2.3" schema_version = 1 authors = ["Piotr Osiewicz "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/php/languages/php/injections.scm b/extensions/php/languages/php/injections.scm index 122d0b377a..fdf5559130 100644 --- a/extensions/php/languages/php/injections.scm +++ b/extensions/php/languages/php/injections.scm @@ -1,9 +1,9 @@ -((text) @content - (#set! "language" "html") - (#set! "combined")) +((text) @injection.content + (#set! injection.language "html") + (#set! injection.combined)) -((comment) @content - (#match? @content "^/\\*\\*[^*]") - (#set! "language" "phpdoc")) +((comment) @injection.content + (#match? @injection.content "^/\\*\\*[^*]") + (#set! injection.language "phpdoc")) -((heredoc_body) (heredoc_end) @language) @content +((heredoc_body) (heredoc_end) @injection.language) @injection.content diff --git a/extensions/php/languages/php/textobjects.scm b/extensions/php/languages/php/textobjects.scm new file mode 100644 index 0000000000..d86a0c1252 --- /dev/null +++ b/extensions/php/languages/php/textobjects.scm @@ -0,0 +1,45 @@ +(function_definition + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + +(method_declaration + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + +(method_declaration) @function.around + +(class_declaration + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(interface_declaration + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(trait_declaration + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(enum_declaration + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(namespace_definition + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(comment)+ @comment.around diff --git a/extensions/prisma/Cargo.toml b/extensions/prisma/Cargo.toml index e5a261266a..68256bd1cc 100644 --- a/extensions/prisma/Cargo.toml +++ b/extensions/prisma/Cargo.toml @@ -1,6 +1,6 @@ [package] name = 
"zed_prisma" -version = "0.0.3" +version = "0.0.4" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/prisma/extension.toml b/extensions/prisma/extension.toml index 449f990d2f..22b2bd9f2b 100644 --- a/extensions/prisma/extension.toml +++ b/extensions/prisma/extension.toml @@ -1,7 +1,7 @@ id = "prisma" name = "Prisma" description = "Prisma support." -version = "0.0.3" +version = "0.0.4" schema_version = 1 authors = ["Matthew Gramigna "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/prisma/languages/prisma/textobjects.scm b/extensions/prisma/languages/prisma/textobjects.scm new file mode 100644 index 0000000000..0158c90786 --- /dev/null +++ b/extensions/prisma/languages/prisma/textobjects.scm @@ -0,0 +1,25 @@ +(model_declaration + (statement_block + "{" + (_)* @class.inside + "}")) @class.around + +(datasource_declaration + (statement_block + "{" + (_)* @class.inside + "}")) @class.around + +(generator_declaration + (statement_block + "{" + (_)* @class.inside + "}")) @class.around + +(enum_declaration + (enum_block + "{" + (_)* @class.inside + "}")) @class.around + +(developer_comment)+ @comment.around diff --git a/extensions/proto/Cargo.toml b/extensions/proto/Cargo.toml index 215a09f896..03c9bc5626 100644 --- a/extensions/proto/Cargo.toml +++ b/extensions/proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_proto" -version = "0.2.0" +version = "0.2.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml index f26aee7dde..232602faf7 100644 --- a/extensions/proto/extension.toml +++ b/extensions/proto/extension.toml @@ -1,7 +1,7 @@ id = "proto" name = "Proto" description = "Protocol Buffers support." 
-version = "0.2.0" +version = "0.2.1" schema_version = 1 authors = ["Zed Industries "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/proto/languages/proto/textobjects.scm b/extensions/proto/languages/proto/textobjects.scm new file mode 100644 index 0000000000..90ea84282d --- /dev/null +++ b/extensions/proto/languages/proto/textobjects.scm @@ -0,0 +1,18 @@ +(message (message_body + "{" + (_)* @class.inside + "}")) @class.around +(enum (enum_body + "{" + (_)* @class.inside + "}")) @class.around +(service + "service" + (_) + "{" + (_)* @class.inside + "}") @class.around + +(rpc) @function.around + +(comment)+ @comment.around diff --git a/extensions/purescript/languages/purescript/highlights.scm b/extensions/purescript/languages/purescript/highlights.scm index b6e969af78..fe2db3115d 100644 --- a/extensions/purescript/languages/purescript/highlights.scm +++ b/extensions/purescript/languages/purescript/highlights.scm @@ -18,7 +18,7 @@ (integer) @number (exp_negation) @number (exp_literal (number)) @float -(char) @character +(char) @string [ (string) (triple_quote_string) diff --git a/extensions/racket/LICENSE-APACHE b/extensions/racket/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3..0000000000 --- a/extensions/racket/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/racket/extension.toml b/extensions/racket/extension.toml deleted file mode 100644 index 8e59742685..0000000000 --- a/extensions/racket/extension.toml +++ /dev/null @@ -1,11 +0,0 @@ -id = "racket" -name = "Racket" -description = "Racket support." 
-version = "0.0.1" -schema_version = 1 -authors = ["Mikayla Maki "] -repository = "https://github.com/zed-industries/zed" - -[grammars.racket] -repository = "https://github.com/zed-industries/tree-sitter-racket" -commit = "beb29de2b668110e69df0c6d51649c3e2c91d863" diff --git a/extensions/racket/languages/racket/brackets.scm b/extensions/racket/languages/racket/brackets.scm deleted file mode 100644 index 191fd9c084..0000000000 --- a/extensions/racket/languages/racket/brackets.scm +++ /dev/null @@ -1,3 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) diff --git a/extensions/racket/languages/racket/config.toml b/extensions/racket/languages/racket/config.toml deleted file mode 100644 index d5975a36e5..0000000000 --- a/extensions/racket/languages/racket/config.toml +++ /dev/null @@ -1,10 +0,0 @@ -name = "Racket" -grammar = "racket" -path_suffixes = ["rkt"] -line_comments = ["; "] -autoclose_before = "])" -brackets = [ - { start = "[", end = "]", close = true, newline = false }, - { start = "(", end = ")", close = true, newline = false }, - { start = "\"", end = "\"", close = true, newline = false }, -] diff --git a/extensions/racket/languages/racket/highlights.scm b/extensions/racket/languages/racket/highlights.scm deleted file mode 100644 index 0b462a9c26..0000000000 --- a/extensions/racket/languages/racket/highlights.scm +++ /dev/null @@ -1,39 +0,0 @@ -["(" ")" "[" "]" "{" "}"] @punctuation.bracket - -[(string) - (here_string) - (byte_string)] @string -(regex) @string.regex -(escape_sequence) @string.escape - -[(comment) - (block_comment) - (sexp_comment)] @comment - -(symbol) @variable - -(number) @number -(character) @constant.builtin -(boolean) @constant.builtin -(keyword) @constant -(quote . (symbol)) @constant - -(extension) @keyword -(lang_name) @variable.special - -((symbol) @operator - (#match? @operator "^(\\+|-|\\*|/|=|>|<|>=|<=)$")) - -(list - . - (symbol) @function) - -(list - . - (symbol) @keyword - (#match? 
@keyword - "^(unit-from-context|for/last|syntax-case|match-let\\*-values|define-for-syntax|define/subexpression-pos-prop|set-field!|class-field-accessor|invoke-unit|#%stratified-body|for\\*/and|for\\*/weak-set|flat-rec-contract|for\\*/stream|planet|for/mutable-seteqv|log-error|delay|#%declare|prop:dict/contract|->d|lib|override\\*|define-local-member-name|send-generic|for\\*/hasheq|define-syntax|submod|except|include-at/relative-to/reader|public\\*|define-member-name|define/public|let\\*|for/and|for\\*/first|for|delay/strict|define-values-for-export|==|match-define-values|for/weak-seteq|for\\*/async|for/stream|for/weak-seteqv|set!-values|lambda|for\\*/product|augment-final\\*|pubment\\*|command-line|contract|case|struct-field-index|contract-struct|unless|for/hasheq|for/seteqv|with-method|define-values-for-syntax|for-template|pubment|for\\*/list|syntax-case\\*|init-field|define-serializable-class|=>|for/foldr/derived|letrec-syntaxes|overment\\*|unquote-splicing|_|inherit-field|for\\*|stream-lazy|match-lambda\\*|contract-pos/neg-doubling|unit/c|match-define|for\\*/set|unit/s|nor|#%expression|class/c|this%|place/context|super-make-object|when|set!|parametric->/c|syntax-id-rules|include/reader|compound-unit|override-final|get-field|gen:dict|for\\*/seteqv|for\\*/hash|#%provide|combine-out|link|with-contract-continuation-mark|define-struct/derived|stream\\*|λ|rename-out|define-serializable-class\\*|augment|define/augment|let|define-signature-form|letrec-syntax|abstract|define-namespace-anchor|#%module-begin|#%top-interaction|for\\*/weak-seteqv|do|define/subexpression-pos-prop/name|absent|send/apply|with-handlers\\*|all-from-out|provide-signature-elements|gen:stream|define/override-final|for\\*/mutable-seteqv|rename|quasisyntax/loc|instantiate|for/list|extends|include-at/relative-to|mixin|define/pubment|#%plain-lambda|except-out|#%plain-module-begin|init|for\\*/last|relative-in|define-unit/new-import-export|->dm|member-name-key|nand|interface\\*|struct|define/override|else
|define/augment-final|failure-cont|open|log-info|define/final-prop|all-defined-out|for/sum|for\\*/sum|recursive-contract|define|define-logger|match\\*|log-debug|rename-inner|->|struct/derived|unit|class\\*|prefix-out|any|define/overment|define-signature|match-letrec-values|let-syntaxes|for/mutable-set|define/match|cond|super-instantiate|define-contract-struct|import|hash/dc|define-custom-set-types|public-final|for/vector|for-label|prefix-in|for\\*/foldr/derived|define-unit-binding|object-contract|syntax-rules|augride|for\\*/mutable-seteq|quasisyntax|inner|for-syntax|overment|send/keyword-apply|generic|let\\*-values|->m|define-values|struct-copy|init-depend|struct/ctc|match-lambda|#%printing-module-begin|match\\*/derived|case->m|this|file|stream-cons|inspect|field|for/weak-set|struct\\*|gen:custom-write|thunk\\*|combine-in|unquote|for/lists|define/private|for\\*/foldr|define-unit/s|with-continuation-mark|begin|prefix|quote-syntax/prune|object/c|interface|match/derived|for/hasheqv|current-contract-region|define-compound-unit|override|define/public-final|recontract-out|let/cc|augride\\*|inherit|send|define-values/invoke-unit|for/mutable-seteq|#%datum|for/first|match-let\\*|invoke-unit/infer|define/contract|syntax/loc|for\\*/hasheqv|define-sequence-syntax|let/ec|for/product|for\\*/fold/derived|define-syntax-rule|lazy|unconstrained-domain->|augment-final|private|class|define-splicing-for-clause-syntax|for\\*/fold|prompt-tag/c|contract-out|match/values|public-final\\*|case-lambda|for/fold|unsyntax|for/set|begin0|#%require|time|public|define-struct|include|define-values/invoke-unit/infer|only-space-in|struct/c|only-meta-in|unit/new-import-export|place|begin-for-syntax|shared|inherit/super|quote|for/or|struct/contract|export|inherit/inner|struct-out|let-syntax|augment\\*|for\\*/vector|rename-in|match-let|define-unit|:do-in|~@|for\\*/weak-seteq|private\\*|and|except-in|log-fatal|gen:equal\\+hash|provide|require|thunk|invariant-assertion|define-match-expander|init-rest|->\\*|
class/derived|super-new|for/fold/derived|for\\*/mutable-set|match-lambda\\*\\*|only|with-contract|~\\?|opt/c|let-values|delay/thread|->i|for/foldr|for-meta|only-in|send\\+|\\.\\.\\.|struct-guard/c|->\\*m|gen:set|struct/dc|define-syntaxes|if|parameterize|module\\*|module|send\\*|#%variable-reference|compound-unit/infer|#%plain-app|for/hash|contracted|case->|match|for\\*/lists|#%app|letrec-values|log-warning|super|define/augride|local-require|provide/contract|define-struct/contract|match-let-values|quote-syntax|for\\*/seteq|define-compound-unit/infer|parameterize\\*|values/drop|for/seteq|tag|stream|delay/idle|module\\+|define-custom-hash-types|cons/dc|define-module-boundary-contract|or|protect-out|define-opt/c|implies|letrec-syntaxes\\+values|for\\*/or|unsyntax-splicing|override-final\\*|for/async|parameterize-break|syntax|place\\*|for-space|quasiquote|with-handlers|delay/sync|define-unit-from-context|match-letrec|#%top|define-unit/contract|delay/name|new|field-bound\\?|letrec|class-field-mutator|with-syntax|flat-murec-contract|rename-super|local)$" - )) - -((symbol) @comment - (#match? @comment "^#[cC][iIsS]$")) diff --git a/extensions/racket/languages/racket/indents.scm b/extensions/racket/languages/racket/indents.scm deleted file mode 100644 index 9a1cbad161..0000000000 --- a/extensions/racket/languages/racket/indents.scm +++ /dev/null @@ -1,3 +0,0 @@ -(_ "[" "]") @indent -(_ "{" "}") @indent -(_ "(" ")") @indent diff --git a/extensions/racket/languages/racket/outline.scm b/extensions/racket/languages/racket/outline.scm deleted file mode 100644 index 6001548303..0000000000 --- a/extensions/racket/languages/racket/outline.scm +++ /dev/null @@ -1,10 +0,0 @@ -(list - . - (symbol) @start-symbol @context - . - [ - (symbol) @name - (list . (symbol) @name) - ] - (#match? 
@start-symbol "^define") -) @item diff --git a/extensions/terraform/languages/hcl/injections.scm b/extensions/terraform/languages/hcl/injections.scm index 8617e9fc2e..f06f6b970c 100644 --- a/extensions/terraform/languages/hcl/injections.scm +++ b/extensions/terraform/languages/hcl/injections.scm @@ -1,6 +1,6 @@ ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/hcl/injections.scm (heredoc_template - (template_literal) @content - (heredoc_identifier) @language - (#downcase! @language)) + (template_literal) @injection.content + (heredoc_identifier) @injection.language + (#downcase! @injection.language)) diff --git a/extensions/terraform/languages/terraform-vars/highlights.scm b/extensions/terraform/languages/terraform-vars/highlights.scm index f123c3232d..ca39758ba6 100644 --- a/extensions/terraform/languages/terraform-vars/highlights.scm +++ b/extensions/terraform/languages/terraform-vars/highlights.scm @@ -147,7 +147,7 @@ (#any-of? @variable "workspace"))) ; Terraform specific keywords -; FIXME: ideally only for identifiers under a `variable` block to minimize false positives +; TODO: ideally only for identifiers under a `variable` block to minimize false positives ((identifier) @type (#any-of? @type "bool" "string" "number" "object" "tuple" "list" "map" "set" "any")) diff --git a/extensions/terraform/languages/terraform-vars/injections.scm b/extensions/terraform/languages/terraform-vars/injections.scm index b41ee95d40..49e7d8d325 100644 --- a/extensions/terraform/languages/terraform-vars/injections.scm +++ b/extensions/terraform/languages/terraform-vars/injections.scm @@ -1,9 +1,9 @@ ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/hcl/injections.scm (heredoc_template - (template_literal) @content - (heredoc_identifier) @language - (#downcase! @language)) + (template_literal) @injection.content + (heredoc_identifier) @injection.language + (#downcase! 
@injection.language)) ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/injections.scm ; inherits: hcl diff --git a/extensions/terraform/languages/terraform/highlights.scm b/extensions/terraform/languages/terraform/highlights.scm index f123c3232d..ca39758ba6 100644 --- a/extensions/terraform/languages/terraform/highlights.scm +++ b/extensions/terraform/languages/terraform/highlights.scm @@ -147,7 +147,7 @@ (#any-of? @variable "workspace"))) ; Terraform specific keywords -; FIXME: ideally only for identifiers under a `variable` block to minimize false positives +; TODO: ideally only for identifiers under a `variable` block to minimize false positives ((identifier) @type (#any-of? @type "bool" "string" "number" "object" "tuple" "list" "map" "set" "any")) diff --git a/extensions/terraform/languages/terraform/injections.scm b/extensions/terraform/languages/terraform/injections.scm index b41ee95d40..49e7d8d325 100644 --- a/extensions/terraform/languages/terraform/injections.scm +++ b/extensions/terraform/languages/terraform/injections.scm @@ -1,9 +1,9 @@ ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/hcl/injections.scm (heredoc_template - (template_literal) @content - (heredoc_identifier) @language - (#downcase! @language)) + (template_literal) @injection.content + (heredoc_identifier) @injection.language + (#downcase! 
@injection.language)) ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/injections.scm ; inherits: hcl diff --git a/extensions/toml/Cargo.toml b/extensions/toml/Cargo.toml index 3aa7b69224..85d933e236 100644 --- a/extensions/toml/Cargo.toml +++ b/extensions/toml/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_toml" -version = "0.1.1" +version = "0.1.2" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/toml/extension.toml b/extensions/toml/extension.toml index 15db5c464d..a8b9250226 100644 --- a/extensions/toml/extension.toml +++ b/extensions/toml/extension.toml @@ -1,7 +1,7 @@ id = "toml" name = "TOML" description = "TOML support." -version = "0.1.1" +version = "0.1.2" schema_version = 1 authors = [ "Max Brunsfeld ", diff --git a/extensions/toml/languages/toml/config.toml b/extensions/toml/languages/toml/config.toml index d5c1172d84..f62290d9e9 100644 --- a/extensions/toml/languages/toml/config.toml +++ b/extensions/toml/languages/toml/config.toml @@ -1,6 +1,6 @@ name = "TOML" grammar = "toml" -path_suffixes = ["Cargo.lock", "toml", "Pipfile"] +path_suffixes = ["Cargo.lock", "toml", "Pipfile", "uv.lock"] line_comments = ["# "] autoclose_before = ",]}" brackets = [ diff --git a/extensions/toml/languages/toml/textobjects.scm b/extensions/toml/languages/toml/textobjects.scm new file mode 100644 index 0000000000..f5b4856e27 --- /dev/null +++ b/extensions/toml/languages/toml/textobjects.scm @@ -0,0 +1,6 @@ +(comment)+ @comment +(table "[" (_) "]" + (_)* @class.inside) @class.around + +(table_array_element "[[" (_) "]]" + (_)* @class.inside) @class.around diff --git a/extensions/zig/Cargo.toml b/extensions/zig/Cargo.toml index 63f3c5c007..e29542d27e 100644 --- a/extensions/zig/Cargo.toml +++ b/extensions/zig/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_zig" -version = "0.3.1" +version = "0.3.2" edition = "2021" publish = false license = "Apache-2.0" diff --git 
a/extensions/zig/extension.toml b/extensions/zig/extension.toml index bcd4f58555..380300683b 100644 --- a/extensions/zig/extension.toml +++ b/extensions/zig/extension.toml @@ -1,7 +1,7 @@ id = "zig" name = "Zig" description = "Zig support." -version = "0.3.1" +version = "0.3.2" schema_version = 1 authors = ["Allan Calix "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/zig/languages/zig/highlights.scm b/extensions/zig/languages/zig/highlights.scm index 20c256eb63..c4be851e88 100644 --- a/extensions/zig/languages/zig/highlights.scm +++ b/extensions/zig/languages/zig/highlights.scm @@ -249,7 +249,7 @@ ; Literals -(character) @character +(character) @string ([ (string) diff --git a/extensions/zig/languages/zig/textobjects.scm b/extensions/zig/languages/zig/textobjects.scm new file mode 100644 index 0000000000..b08df97ea9 --- /dev/null +++ b/extensions/zig/languages/zig/textobjects.scm @@ -0,0 +1,27 @@ +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(test_declaration + (block + "{" + (_)* @function.inside + "}")) @function.around + +(variable_declaration + (struct_declaration + "struct" + "{" + [(_) ","]* @class.inside + "}")) @class.around + +(variable_declaration + (enum_declaration + "enum" + "{" + (_)* @class.inside + "}")) @class.around + +(comment)+ @comment.around diff --git a/flake.lock b/flake.lock index 5666e73569..c0cf3f726c 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1727060013, - "narHash": "sha256-/fC5YlJy4IoAW9GhkJiwyzk0K/gQd9Qi4rRcoweyG9E=", + "lastModified": 1734324364, + "narHash": "sha256-omYTR59TdH0AumP1cfh49fBnWZ52HjfdNfaLzCMZBx0=", "owner": "ipetkov", "repo": "crane", - "rev": "6b40cc876c929bfe1e3a24bf538ce3b5622646ba", + "rev": "60d7623f1320470bf2fdb92fd2dca1e9a27b98ce", "type": "github" }, "original": { @@ -15,34 +15,13 @@ "type": "github" } }, - "fenix": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ], - 
"rust-analyzer-src": "rust-analyzer-src" - }, - "locked": { - "lastModified": 1727073227, - "narHash": "sha256-1kmkEQmFfGVuPBasqSZrNThqyMDV1SzTalQdRZxtDRs=", - "owner": "nix-community", - "repo": "fenix", - "rev": "88cc292eb3c689073c784d6aecc0edbd47e12881", - "type": "github" - }, - "original": { - "owner": "nix-community", - "repo": "fenix", - "type": "github" - } - }, "flake-compat": { "locked": { - "lastModified": 1696426674, - "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", + "lastModified": 1733328505, + "narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=", "owner": "edolstra", "repo": "flake-compat", - "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", + "rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec", "type": "github" }, "original": { @@ -53,11 +32,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726937504, - "narHash": "sha256-bvGoiQBvponpZh8ClUcmJ6QnsNKw0EMrCQJARK3bI1c=", + "lastModified": 1734119587, + "narHash": "sha256-AKU6qqskl0yf2+JdRdD0cfxX4b9x3KKV5RqA6wijmPM=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "9357f4f23713673f310988025d9dc261c20e70c6", + "rev": "3566ab7246670a43abd2ffa913cc62dad9cdf7d5", "type": "github" }, "original": { @@ -70,25 +49,28 @@ "root": { "inputs": { "crane": "crane", - "fenix": "fenix", "flake-compat": "flake-compat", - "nixpkgs": "nixpkgs" + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" } }, - "rust-analyzer-src": { - "flake": false, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, "locked": { - "lastModified": 1726443025, - "narHash": "sha256-nCmG4NJpwI0IoIlYlwtDwVA49yuspA2E6OhfCOmiArQ=", - "owner": "rust-lang", - "repo": "rust-analyzer", - "rev": "94b526fc86eaa0e90fb4d54a5ba6313aa1e9b269", + "lastModified": 1734316514, + "narHash": "sha256-0aLx44yMblcOGpfFXKCzp2GhU5JaE6OTvdU+JYrXiUc=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "83ee8ff74d6294a7657320f16814754c4594127b", "type": "github" }, "original": { - "owner": "rust-lang", - "ref": 
"nightly", - "repo": "rust-analyzer", + "owner": "oxalica", + "repo": "rust-overlay", "type": "github" } } diff --git a/flake.nix b/flake.nix index 2ee86c4466..f797227fba 100644 --- a/flake.nix +++ b/flake.nix @@ -3,60 +3,65 @@ inputs = { nixpkgs.url = "github:NixOS/nixpkgs?ref=nixos-unstable"; - fenix = { - url = "github:nix-community/fenix"; + rust-overlay = { + url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; crane.url = "github:ipetkov/crane"; flake-compat.url = "github:edolstra/flake-compat"; }; - outputs = { - nixpkgs, - crane, - fenix, - ... - }: let - systems = ["x86_64-linux" "aarch64-linux"]; + outputs = + { + nixpkgs, + rust-overlay, + crane, + ... + }: + let + systems = [ + "x86_64-linux" + "x86_64-darwin" + "aarch64-linux" + "aarch64-darwin" + ]; - overlays = { - fenix = fenix.overlays.default; - rust-toolchain = final: prev: { - rustToolchain = final.fenix.stable.toolchain; - }; - zed-editor = final: prev: { - zed-editor = final.callPackage ./nix/build.nix { - craneLib = (crane.mkLib final).overrideToolchain final.rustToolchain; - rustPlatform = final.makeRustPlatform { - inherit (final.rustToolchain) cargo rustc; + overlays = { + rust-overlay = rust-overlay.overlays.default; + rust-toolchain = final: prev: { + rustToolchain = final.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml; + }; + zed-editor = final: prev: { + zed-editor = final.callPackage ./nix/build.nix { + crane = crane.mkLib final; + rustToolchain = final.rustToolchain; }; }; }; - }; - mkPkgs = system: - import nixpkgs { - inherit system; - overlays = builtins.attrValues overlays; - }; + mkPkgs = + system: + import nixpkgs { + inherit system; + overlays = builtins.attrValues overlays; + }; - forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f (mkPkgs system)); - in { - packages = forAllSystems (pkgs: { - zed-editor = pkgs.zed-editor; - default = pkgs.zed-editor; - }); + forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f (mkPkgs system)); + 
in + { + packages = forAllSystems (pkgs: { + zed-editor = pkgs.zed-editor; + default = pkgs.zed-editor; + }); - devShells = forAllSystems (pkgs: { - default = import ./nix/shell.nix {inherit pkgs;}; - }); + devShells = forAllSystems (pkgs: { + default = import ./nix/shell.nix { inherit pkgs; }; + }); - formatter = forAllSystems (pkgs: pkgs.alejandra); + formatter = forAllSystems (pkgs: pkgs.nixfmt-rfc-style); - overlays = - overlays - // { + overlays = overlays // { default = nixpkgs.lib.composeManyExtensions (builtins.attrValues overlays); }; - }; + }; } diff --git a/legal/subprocessors.md b/legal/subprocessors.md index 286d1fc800..db55f84457 100644 --- a/legal/subprocessors.md +++ b/legal/subprocessors.md @@ -10,13 +10,14 @@ This page provides information about the Subprocessors Zed has engaged to provid | Cloudflare | Cloud Infrastructure | Worldwide | | Vercel | Cloud Infrastructure | United States | | DigitalOcean | Cloud Infrastructure | United States | +| AWS | Cloud Infrastructure | United States | | ConvertKit | Email Marketing | United States | | Axiom | Analytics | United States | -| ClickHouse | Analytics | United States | +| Snowflake | Analytics | United States | | Metabase | Analytics | United States | | GitHub | Authentication | United States | | LiveKit | Audio Conferencing | United States | | Anthropic | AI Services | United States | | OpenAI | AI Services | United States | -**DATE: August 19, 2024** +**DATE: December 9, 2024** diff --git a/nix/build.nix b/nix/build.nix index 4782c9a56f..e78025dffd 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -1,10 +1,10 @@ { lib, - craneLib, - rustPlatform, + crane, + rustToolchain, + fetchpatch, clang, - llvmPackages_18, - mold-wrapped, + cmake, copyDesktopItems, curl, perl, @@ -22,47 +22,64 @@ wayland, libglvnd, xorg, + stdenv, makeFontsConf, vulkan-loader, envsubst, - stdenvAdapters, + cargo-about, + cargo-bundle, + git, + apple-sdk_15, + darwinMinVersionHook, + makeWrapper, + nodejs_22, nix-gitignore, - 
withGLES ? false, - cmake, -}: let - includeFilter = path: type: let - baseName = baseNameOf (toString path); - parentDir = dirOf path; - inRootDir = type == "directory" && parentDir == ../.; - in - !(inRootDir && (baseName == "docs" || baseName == ".github" || baseName == "script" || baseName == ".git" || baseName == "target")); - src = lib.cleanSourceWith { - src = nix-gitignore.gitignoreSource [] ../.; + withGLES ? false, +}: + +assert withGLES -> stdenv.hostPlatform.isLinux; + +let + includeFilter = + path: type: + let + baseName = baseNameOf (toString path); + parentDir = dirOf path; + inRootDir = type == "directory" && parentDir == ../.; + in + !( + inRootDir + && (baseName == "docs" || baseName == ".github" || baseName == ".git" || baseName == "target") + ); + craneLib = crane.overrideToolchain rustToolchain; + commonSrc = lib.cleanSourceWith { + src = nix-gitignore.gitignoreSource [ ] ../.; filter = includeFilter; name = "source"; }; + commonArgs = rec { + pname = "zed-editor"; + version = "nightly"; - stdenv = stdenvAdapters.useMoldLinker llvmPackages_18.stdenv; + src = commonSrc; - commonArgs = - craneLib.crateNameFromCargoToml {cargoToml = ../crates/zed/Cargo.toml;} - // { - inherit src stdenv; - - nativeBuildInputs = [ + nativeBuildInputs = + [ clang + cmake copyDesktopItems curl - mold-wrapped perl pkg-config protobuf - rustPlatform.bindgenHook - cmake - ]; + cargo-about + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ makeWrapper ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ cargo-bundle ]; - buildInputs = [ + buildInputs = + [ curl fontconfig freetype @@ -71,73 +88,161 @@ sqlite zlib zstd - + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ alsa-lib libxkbcommon wayland xorg.libxcb + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ + apple-sdk_15 + (darwinMinVersionHook "10.15") ]; + env = { ZSTD_SYS_USE_PKG_CONFIG = true; FONTCONFIG_FILE = makeFontsConf { fontDirectories = [ - "../assets/fonts/zed-mono" - "../assets/fonts/zed-sans" + 
"${src}/assets/fonts/plex-mono" + "${src}/assets/fonts/plex-sans" ]; }; - ZED_UPDATE_EXPLANATION = "zed has been installed using nix. Auto-updates have thus been disabled."; + ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. Auto-updates have thus been disabled."; + RELEASE_VERSION = version; }; - + }; cargoArtifacts = craneLib.buildDepsOnly commonArgs; - - gpu-lib = - if withGLES - then libglvnd - else vulkan-loader; - - zed = craneLib.buildPackage (commonArgs - // { - inherit cargoArtifacts; - cargoExtraArgs = "--package=zed --package=cli"; - buildFeatures = ["gpui/runtime_shaders"]; - doCheck = false; - - RUSTFLAGS = - if withGLES - then "--cfg gles" - else ""; - - postFixup = '' - patchelf --add-rpath ${gpu-lib}/lib $out/libexec/* - patchelf --add-rpath ${wayland}/lib $out/libexec/* - ''; - - postInstall = '' - mkdir -p $out/bin $out/libexec - mv $out/bin/zed $out/libexec/zed-editor - mv $out/bin/cli $out/bin/zed - - install -D crates/zed/resources/app-icon@2x.png $out/share/icons/hicolor/1024x1024@2x/apps/zed.png - install -D crates/zed/resources/app-icon.png $out/share/icons/hicolor/512x512/apps/zed.png - - export DO_STARTUP_NOTIFY="true" - export APP_CLI="zed" - export APP_ICON="zed" - export APP_NAME="Zed" - export APP_ARGS="%U" - mkdir -p "$out/share/applications" - ${lib.getExe envsubst} < "crates/zed/resources/zed.desktop.in" > "$out/share/applications/dev.zed.Zed.desktop" - ''; - }); in - zed - // { - meta = with lib; { +craneLib.buildPackage ( + commonArgs + // rec { + inherit cargoArtifacts; + + patches = + [ + # Zed uses cargo-install to install cargo-about during the script execution. + # We provide cargo-about ourselves and can skip this step. + # Until https://github.com/zed-industries/zed/issues/19971 is fixed, + # we also skip any crate for which the license cannot be determined. 
+ (fetchpatch { + url = "https://raw.githubusercontent.com/NixOS/nixpkgs/1fd02d90c6c097f91349df35da62d36c19359ba7/pkgs/by-name/ze/zed-editor/0001-generate-licenses.patch"; + hash = "sha256-cLgqLDXW1JtQ2OQFLd5UolAjfy7bMoTw40lEx2jA2pk="; + }) + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ + # Livekit requires Swift 6 + # We need this until livekit-rust sdk is used + (fetchpatch { + url = "https://raw.githubusercontent.com/NixOS/nixpkgs/1fd02d90c6c097f91349df35da62d36c19359ba7/pkgs/by-name/ze/zed-editor/0002-disable-livekit-darwin.patch"; + hash = "sha256-whZ7RaXv8hrVzWAveU3qiBnZSrvGNEHTuyNhxgMIo5w="; + }) + ]; + + cargoExtraArgs = "--package=zed --package=cli --features=gpui/runtime_shaders"; + + dontUseCmakeConfigure = true; + preBuild = '' + bash script/generate-licenses + ''; + + postFixup = lib.optionalString stdenv.hostPlatform.isLinux '' + patchelf --add-rpath ${gpu-lib}/lib $out/libexec/* + patchelf --add-rpath ${wayland}/lib $out/libexec/* + wrapProgram $out/libexec/zed-editor --suffix PATH : ${lib.makeBinPath [ nodejs_22 ]} + ''; + + RUSTFLAGS = if withGLES then "--cfg gles" else ""; + gpu-lib = if withGLES then libglvnd else vulkan-loader; + + preCheck = '' + export HOME=$(mktemp -d); + ''; + + cargoTestExtraArgs = + "-- " + + lib.concatStringsSep " " ( + [ + # Flaky: unreliably fails on certain hosts (including Hydra) + "--skip=zed::tests::test_window_edit_state_restoring_enabled" + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ + # Fails on certain hosts (including Hydra) for unclear reason + "--skip=test_open_paths_action" + ] + ); + + installPhase = + if stdenv.hostPlatform.isDarwin then + '' + runHook preInstall + + # cargo-bundle expects the binary in target/release + mv target/release/zed target/release/zed + + pushd crates/zed + + # Note that this is GNU sed, while Zed's bundle-mac uses BSD sed + sed -i "s/package.metadata.bundle-stable/package.metadata.bundle/" Cargo.toml + export CARGO_BUNDLE_SKIP_BUILD=true + app_path=$(cargo bundle 
--release | xargs) + + # We're not using the fork of cargo-bundle, so we must manually append plist extensions + # Remove closing tags from Info.plist (last two lines) + head -n -2 $app_path/Contents/Info.plist > Info.plist + # Append extensions + cat resources/info/*.plist >> Info.plist + # Add closing tags + printf "\n\n" >> Info.plist + mv Info.plist $app_path/Contents/Info.plist + + popd + + mkdir -p $out/Applications $out/bin + # Zed expects git next to its own binary + ln -s ${git}/bin/git $app_path/Contents/MacOS/git + mv target/release/cli $app_path/Contents/MacOS/cli + mv $app_path $out/Applications/ + + # Physical location of the CLI must be inside the app bundle as this is used + # to determine which app to start + ln -s $out/Applications/Zed.app/Contents/MacOS/cli $out/bin/zed + + runHook postInstall + '' + else + '' + runHook preInstall + + mkdir -p $out/bin $out/libexec + cp target/release/zed $out/libexec/zed-editor + cp target/release/cli $out/bin/zed + + install -D ${commonSrc}/crates/zed/resources/app-icon@2x.png $out/share/icons/hicolor/1024x1024@2x/apps/zed.png + install -D ${commonSrc}/crates/zed/resources/app-icon.png $out/share/icons/hicolor/512x512/apps/zed.png + + # extracted from https://github.com/zed-industries/zed/blob/v0.141.2/script/bundle-linux (envsubst) + # and https://github.com/zed-industries/zed/blob/v0.141.2/script/install.sh (final desktop file name) + ( + export DO_STARTUP_NOTIFY="true" + export APP_CLI="zed" + export APP_ICON="zed" + export APP_NAME="Zed" + export APP_ARGS="%U" + mkdir -p "$out/share/applications" + ${lib.getExe envsubst} < "crates/zed/resources/zed.desktop.in" > "$out/share/applications/dev.zed.Zed.desktop" + ) + + runHook postInstall + ''; + + meta = { description = "High-performance, multiplayer code editor from the creators of Atom and Tree-sitter"; homepage = "https://zed.dev"; changelog = "https://zed.dev/releases/preview"; - license = licenses.gpl3Only; + license = lib.licenses.gpl3Only; mainProgram = 
"zed"; - platforms = platforms.linux; + platforms = lib.platforms.linux ++ lib.platforms.darwin; }; } +) diff --git a/nix/shell.nix b/nix/shell.nix index e0b4018778..acdbc82ca5 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -1,51 +1,64 @@ -{pkgs ? import {}}: let - stdenv = pkgs.stdenvAdapters.useMoldLinker pkgs.llvmPackages_18.stdenv; +{ + pkgs ? import { }, +}: +let + inherit (pkgs) lib; in - if pkgs.stdenv.isDarwin - then - # See https://github.com/NixOS/nixpkgs/issues/320084 - throw "zed: nix dev-shell isn't supported on darwin yet." - else let - buildInputs = with pkgs; [ - curl - fontconfig - freetype - libgit2 - openssl - sqlite - zlib - zstd - alsa-lib - libxkbcommon - wayland - xorg.libxcb - vulkan-loader - rustToolchain +pkgs.mkShell rec { + packages = + [ + pkgs.clang + pkgs.curl + pkgs.cmake + pkgs.perl + pkgs.pkg-config + pkgs.protobuf + pkgs.rustPlatform.bindgenHook + pkgs.rust-analyzer + ] + ++ lib.optionals pkgs.stdenv.hostPlatform.isLinux [ + pkgs.mold ]; - in - pkgs.mkShell.override {inherit stdenv;} { - nativeBuildInputs = with pkgs; [ - clang - curl - cmake - perl - pkg-config - protobuf - rustPlatform.bindgenHook - ]; - inherit buildInputs; + buildInputs = + [ + pkgs.curl + pkgs.fontconfig + pkgs.freetype + pkgs.libgit2 + pkgs.openssl + pkgs.sqlite + pkgs.zlib + pkgs.zstd + pkgs.rustToolchain + ] + ++ lib.optionals pkgs.stdenv.hostPlatform.isLinux [ + pkgs.alsa-lib + pkgs.libxkbcommon + pkgs.wayland + pkgs.xorg.libxcb + pkgs.vulkan-loader + ] + ++ lib.optional pkgs.stdenv.hostPlatform.isDarwin pkgs.apple-sdk_15; - shellHook = '' - export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath buildInputs}:$LD_LIBRARY_PATH" - export PROTOC="${pkgs.protobuf}/bin/protoc" - ''; + # We set SDKROOT and DEVELOPER_DIR to the Xcode ones instead of the nixpkgs ones, + # because we need Swift 6.0 and nixpkgs doesn't have it. 
+ # Xcode is required for development anyways + shellHook = + '' + export LD_LIBRARY_PATH="${lib.makeLibraryPath buildInputs}:$LD_LIBRARY_PATH" + export PROTOC="${pkgs.protobuf}/bin/protoc" + '' + + lib.optionalString pkgs.stdenv.hostPlatform.isDarwin '' + export SDKROOT="/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk"; + export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"; + ''; - FONTCONFIG_FILE = pkgs.makeFontsConf { - fontDirectories = [ - "./assets/fonts/zed-mono" - "./assets/fonts/zed-sans" - ]; - }; - ZSTD_SYS_USE_PKG_CONFIG = true; - } + FONTCONFIG_FILE = pkgs.makeFontsConf { + fontDirectories = [ + "./assets/fonts/zed-mono" + "./assets/fonts/zed-sans" + ]; + }; + ZSTD_SYS_USE_PKG_CONFIG = true; +} diff --git a/script/analyze_highlights.py b/script/analyze_highlights.py index 1fd16f2c0f..09a6419653 100644 --- a/script/analyze_highlights.py +++ b/script/analyze_highlights.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ This script analyzes all the highlight.scm files in our embedded languages and extensions. It counts the number of unique instances of @{name} and the languages in which they are used. 
diff --git a/script/bump-zed-minor-versions b/script/bump-zed-minor-versions index bec3a58887..fa30530184 100755 --- a/script/bump-zed-minor-versions +++ b/script/bump-zed-minor-versions @@ -28,6 +28,7 @@ minor_branch_name="v${major}.${minor}.x" prev_minor_branch_name="v${major}.${prev_minor}.x" next_minor_branch_name="v${major}.${next_minor}.x" preview_tag_name="v${major}.${minor}.${patch}-pre" +bump_main_branch_name="set-minor-version-to-${major}.${next_minor}" git fetch origin ${prev_minor_branch_name}:${prev_minor_branch_name} git fetch origin --tags @@ -65,7 +66,7 @@ git checkout -q ${prev_minor_branch_name} git clean -q -dff stable_tag_name="v$(script/get-crate-version zed)" if git show-ref --quiet refs/tags/${stable_tag_name}; then - echo "tag ${preview_tag_name} already exists" + echo "tag ${stable_tag_name} already exists" exit 1 fi old_prev_minor_sha=$(git rev-parse HEAD) @@ -83,29 +84,40 @@ git tag ${preview_tag_name} echo "Preparing main for version ${next_minor_branch_name}..." 
git checkout -q main git clean -q -dff -old_main_sha=$(git rev-parse HEAD) +git checkout -q -b ${bump_main_branch_name} cargo set-version --package zed --bump minor cargo check -q + git commit -q --all --message "${next_minor_branch_name} dev" -cat <" exit 1 fi diff --git a/script/create-draft-release b/script/create-draft-release new file mode 100755 index 0000000000..95b1a1450a --- /dev/null +++ b/script/create-draft-release @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +preview="" +if [[ "$GITHUB_REF_NAME" == *"-pre" ]]; then + preview="-p" +fi + +gh release create -t "$GITHUB_REF_NAME" -d "$GITHUB_REF_NAME" -F "$1" $preview diff --git a/script/deploy-postgrest b/script/deploy-postgrest index 14fbd50e30..2a0b21a991 100755 --- a/script/deploy-postgrest +++ b/script/deploy-postgrest @@ -3,7 +3,7 @@ set -eu source script/lib/deploy-helpers.sh -if [[ $# < 1 ]]; then +if [[ $# != 1 ]]; then echo "Usage: $0 (postgrest not needed on preview or nightly)" exit 1 fi diff --git a/script/draft-release-notes b/script/draft-release-notes index 287997ff79..1ef276718d 100755 --- a/script/draft-release-notes +++ b/script/draft-release-notes @@ -19,24 +19,45 @@ async function main() { process.exit(1); } - let priorVersion = [parts[0], parts[1], parts[2] - 1].join("."); - let suffix = ""; - - if (channel == "preview") { - suffix = "-pre"; - if (parts[2] == 0) { - priorVersion = [parts[0], parts[1] - 1, 0].join("."); - } - } else if (!ensureTag(`v${priorVersion}`)) { - console.log("Copy the release notes from preview."); + // currently we can only draft notes for patch releases. + if (parts[2] == 0) { process.exit(0); } + let priorVersion = [parts[0], parts[1], parts[2] - 1].join("."); + let suffix = channel == "preview" ? 
"-pre" : ""; let [tag, priorTag] = [`v${version}${suffix}`, `v${priorVersion}${suffix}`]; - if (!ensureTag(tag) || !ensureTag(priorTag)) { - console.log("Could not draft release notes, missing a tag:", tag, priorTag); - process.exit(0); + try { + execFileSync("rm", ["-rf", "target/shallow_clone"]); + execFileSync("git", [ + "clone", + "https://github.com/zed-industries/zed", + "target/shallow_clone", + "--filter=tree:0", + "--no-checkout", + "--branch", + tag, + "--depth", + 100, + ]); + execFileSync("git", [ + "-C", + "target/shallow_clone", + "rev-parse", + "--verify", + tag, + ]); + execFileSync("git", [ + "-C", + "target/shallow_clone", + "rev-parse", + "--verify", + priorTag, + ]); + } catch (e) { + console.error(e.stderr.toString()); + process.exit(1); } const newCommits = getCommits(priorTag, tag); @@ -64,16 +85,18 @@ async function main() { } console.log(releaseNotes.join("\n") + "\n"); - console.log(""); } function getCommits(oldTag, newTag) { const pullRequestNumbers = execFileSync( "git", - ["log", `${oldTag}..${newTag}`, "--format=DIVIDER\n%H|||%B"], + [ + "-C", + "target/shallow_clone", + "log", + `${oldTag}..${newTag}`, + "--format=DIVIDER\n%H|||%B", + ], { encoding: "utf8" }, ) .replace(/\r\n/g, "\n") @@ -103,18 +126,3 @@ function getCommits(oldTag, newTag) { return pullRequestNumbers; } - -function ensureTag(tag) { - try { - execFileSync("git", ["rev-parse", "--verify", tag]); - return true; - } catch (e) { - try { - execFileSync("git"[("fetch", "origin", "--shallow-exclude", tag)]); - execFileSync("git"[("fetch", "origin", "--deepen", "1")]); - return true; - } catch (e) { - return false; - } - } -} diff --git a/script/get-crate-version b/script/get-crate-version index b6346b32ec..0a35e4d49d 100755 --- a/script/get-crate-version +++ b/script/get-crate-version @@ -2,7 +2,7 @@ set -eu -if [[ $# < 1 ]]; then +if [[ $# -ne 1 ]]; then echo "Usage: $0 " >&2 exit 1 fi @@ -14,4 +14,4 @@ cargo metadata \ --format-version=1 \ | jq \ --raw-output \ - 
".packages[] | select(.name == \"${CRATE_NAME}\") | .version" \ No newline at end of file + ".packages[] | select(.name == \"${CRATE_NAME}\") | .version" diff --git a/script/get-released-version b/script/get-released-version index e1f4783f8a..357de7c240 100755 --- a/script/get-released-version +++ b/script/get-released-version @@ -18,4 +18,4 @@ case $channel in ;; esac -curl -s https://zed.dev/api/releases/latest?asset=Zed.dmg$query | jq -r .version +curl -s "https://zed.dev/api/releases/latest?asset=zed&os=macos&arch=aarch64$query" | jq -r .version diff --git a/script/import-themes b/script/import-themes index ce9ce9ef12..8f07df2ef3 100755 --- a/script/import-themes +++ b/script/import-themes @@ -1,3 +1,3 @@ #!/bin/bash -cargo run -p theme_importer +cargo run -p theme_importer -- "$@" diff --git a/script/install-cmake b/script/install-cmake index 71b5aaeeef..3a28aae1b8 100755 --- a/script/install-cmake +++ b/script/install-cmake @@ -35,7 +35,7 @@ CMAKE_VERSION="${CMAKE_VERSION:-${1:-3.30.4}}" if [ "$(whoami)" = root ]; then SUDO=; else SUDO="$(command -v sudo || command -v doas || true)"; fi -if cmake --version | grep -q "$CMAKE_VERSION"; then +if cmake --version 2>/dev/null | grep -q "$CMAKE_VERSION"; then echo "CMake $CMAKE_VERSION is already installed." 
exit 0 elif [ -e /usr/local/bin/cmake ]; then @@ -51,7 +51,7 @@ elif [ -e /etc/lsb-release ] && grep -qP 'DISTRIB_ID=Ubuntu' /etc/lsb-release; t echo "deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" \ | $SUDO tee /etc/apt/sources.list.d/kitware.list >/dev/null $SUDO apt-get update - $SUDO apt-get install -y kitware-archive-keyring cmake==$CMAKE_VERSION + $SUDO apt-get install -y kitware-archive-keyring cmake else arch="$(uname -m)" if [ "$arch" != "x86_64" ] && [ "$arch" != "aarch64" ]; then diff --git a/script/install.sh b/script/install.sh index 3f2c690779..9cd21119b7 100755 --- a/script/install.sh +++ b/script/install.sh @@ -125,7 +125,7 @@ linux() { desktop_file_path="$HOME/.local/share/applications/${appid}.desktop" cp "$HOME/.local/zed$suffix.app/share/applications/zed$suffix.desktop" "${desktop_file_path}" sed -i "s|Icon=zed|Icon=$HOME/.local/zed$suffix.app/share/icons/hicolor/512x512/apps/zed.png|g" "${desktop_file_path}" - sed -i "s|Exec=zed|Exec=$HOME/.local/zed$suffix.app/libexec/zed-editor|g" "${desktop_file_path}" + sed -i "s|Exec=zed|Exec=$HOME/.local/zed$suffix.app/bin/zed|g" "${desktop_file_path}" } macos() { diff --git a/script/kube-shell b/script/kube-shell index 9181dc959c..0ca77acdd0 100755 --- a/script/kube-shell +++ b/script/kube-shell @@ -1,6 +1,6 @@ #!/bin/bash -if [[ $# < 1 ]]; then +if [[ $# -ne 1 ]]; then echo "Usage: $0 [production|staging|...]" exit 1 fi @@ -8,4 +8,4 @@ fi export ZED_KUBE_NAMESPACE=$1 pod=$(kubectl --namespace=${ZED_KUBE_NAMESPACE} get pods --selector=app=zed --output=jsonpath='{.items[*].metadata.name}') -exec kubectl --namespace $ZED_KUBE_NAMESPACE exec --tty --stdin $pod -- /bin/bash \ No newline at end of file +exec kubectl --namespace $ZED_KUBE_NAMESPACE exec --tty --stdin $pod -- /bin/bash diff --git a/script/language-extension-version b/script/language-extension-version index fc5c448736..d547886087 100755 --- 
a/script/language-extension-version +++ b/script/language-extension-version @@ -26,4 +26,3 @@ fi sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$EXTENSION_TOML" sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$CARGO_TOML" -cargo check diff --git a/script/licenses/zed-licenses.toml b/script/licenses/zed-licenses.toml index 2f0ea892dd..483bd30493 100644 --- a/script/licenses/zed-licenses.toml +++ b/script/licenses/zed-licenses.toml @@ -116,6 +116,12 @@ license = "MIT" path = 'LICENSE' checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' +[pet-pixi.clarify] +license = "MIT" +[[pet-pixi.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + [pet-poetry.clarify] license = "MIT" [[pet-poetry.clarify.git]] diff --git a/script/linux b/script/linux index eecf70f90e..650c1cfbbb 100755 --- a/script/linux +++ b/script/linux @@ -37,6 +37,7 @@ if [[ -n $apt ]]; then cmake clang jq + netcat-openbsd git curl gettext-base @@ -67,6 +68,7 @@ yum=$(command -v yum || true) if [[ -n $dnf ]] || [[ -n $yum ]]; then pkg_cmd="${dnf:-${yum}}" deps=( + musl-gcc gcc clang cmake @@ -83,28 +85,30 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then tar ) # perl used for building openssl-sys crate. 
See: https://docs.rs/openssl/latest/openssl/ - if grep -qP '^ID="(fedora)' /etc/os-release; then + # openbsd-netcat is unavailable in RHEL8/9 (and nmap-ncat doesn't support sockets) + if grep -qP '^ID="?(fedora)' /etc/os-release; then deps+=( perl-FindBin perl-IPC-Cmd perl-File-Compare perl-File-Copy + netcat mold ) - elif grep -qP '^ID="(rhel|rocky|alma|centos|ol)' /etc/os-release; then + elif grep -qP '^ID="?(rhel|rocky|alma|centos|ol)' /etc/os-release; then deps+=( perl-interpreter ) fi # gcc-c++ is g++ on RHEL8 and 8.x clones - if grep -qP '^ID="(rhel|rocky|alma|centos|ol)' /etc/os-release \ - && grep -qP '^VERSION_ID="8' /etc/os-release; then + if grep -qP '^ID="?(rhel|rocky|alma|centos|ol)' /etc/os-release \ + && grep -qP '^VERSION_ID="?(8)' /etc/os-release; then deps+=( gcc-c++ ) else deps+=( g++ ) fi # libxkbcommon-x11-devel is in a non-default repo on RHEL 8.x/9.x (except on AmazonLinux) - if grep -qP '^VERSION_ID="(8|9)' /etc/os-release && grep -qP '^ID="(rhel|rocky|centos|alma|ol)' /etc/os-release; then + if grep -qP '^VERSION_ID="?(8|9)' /etc/os-release && grep -qP '^ID="?(rhel|rocky|centos|alma|ol)' /etc/os-release; then $maysudo dnf install -y 'dnf-command(config-manager)' if grep -qP '^PRETTY_NAME="(AlmaLinux 8|Rocky Linux 8)' /etc/os-release; then $maysudo dnf config-manager --set-enabled powertools @@ -119,7 +123,7 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then fi fi - $maysudo $pkg_cmd install -y "${deps[@]}" + $maysudo "$pkg_cmd" install -y "${deps[@]}" finalize exit 0 fi @@ -144,6 +148,7 @@ if [[ -n $zyp ]]; then libzstd-devel make mold + netcat-openbsd openssl-devel sqlite3-devel tar @@ -162,12 +167,14 @@ if [[ -n $pacman ]]; then deps=( gcc clang + musl cmake alsa-lib fontconfig wayland libgit2 libxkbcommon-x11 + openbsd-netcat openssl zstd pkgconf @@ -197,6 +204,7 @@ if [[ -n $xbps ]]; then libxcb-devel libxkbcommon-devel libzstd-devel + openbsd-netcat openssl-devel wayland-devel vulkan-loader @@ -221,6 +229,7 @@ if [[ -n $emerge ]]; then 
media-libs/alsa-lib media-libs/fontconfig media-libs/vulkan-loader + net-analyzer/openbsd-netcat x11-libs/libxcb x11-libs/libxkbcommon sys-devel/mold diff --git a/script/metal-debug b/script/metal-debug index 6fc18e5ebd..de8476f3e3 100755 --- a/script/metal-debug +++ b/script/metal-debug @@ -10,4 +10,4 @@ export GPUProfilerEnabled="YES" export METAL_DEBUG_ERROR_MODE=0 export LD_LIBRARY_PATH="/Applications/Xcode.app/Contents/Developer/../SharedFrameworks/" -cargo run $@ +cargo run "$@" diff --git a/script/patches/use-cross-platform-livekit.patch b/script/patches/use-cross-platform-livekit.patch new file mode 100644 index 0000000000..e875b9d2b0 --- /dev/null +++ b/script/patches/use-cross-platform-livekit.patch @@ -0,0 +1,59 @@ +diff --git a/crates/call/Cargo.toml b/crates/call/Cargo.toml +index 9ba10e56ba..bb69440691 100644 +--- a/crates/call/Cargo.toml ++++ b/crates/call/Cargo.toml +@@ -41,10 +41,10 @@ serde_derive.workspace = true + telemetry.workspace = true + util.workspace = true + +-[target.'cfg(target_os = "macos")'.dependencies] ++[target.'cfg(any())'.dependencies] + livekit_client_macos = { workspace = true } + +-[target.'cfg(not(target_os = "macos"))'.dependencies] ++[target.'cfg(all())'.dependencies] + livekit_client = { workspace = true } + + [dev-dependencies] +diff --git a/crates/call/src/call.rs b/crates/call/src/call.rs +index 5e212d35b7..a8f9e8f43e 100644 +--- a/crates/call/src/call.rs ++++ b/crates/call/src/call.rs +@@ -1,13 +1,13 @@ + pub mod call_settings; + +-#[cfg(target_os = "macos")] ++#[cfg(any())] + mod macos; + +-#[cfg(target_os = "macos")] ++#[cfg(any())] + pub use macos::*; + +-#[cfg(not(target_os = "macos"))] ++#[cfg(all())] + mod cross_platform; + +-#[cfg(not(target_os = "macos"))] ++#[cfg(all())] + pub use cross_platform::*; +diff --git a/crates/workspace/src/shared_screen.rs b/crates/workspace/src/shared_screen.rs +index 1d17cfa145..f845234987 100644 +--- a/crates/workspace/src/shared_screen.rs ++++ 
b/crates/workspace/src/shared_screen.rs +@@ -1,11 +1,11 @@ +-#[cfg(target_os = "macos")] ++#[cfg(any())] + mod macos; + +-#[cfg(target_os = "macos")] ++#[cfg(any())] + pub use macos::*; + +-#[cfg(not(target_os = "macos"))] ++#[cfg(all())] + mod cross_platform; + +-#[cfg(not(target_os = "macos"))] ++#[cfg(all())] + pub use cross_platform::*; diff --git a/script/shellcheck-scripts b/script/shellcheck-scripts new file mode 100755 index 0000000000..d42b31d02f --- /dev/null +++ b/script/shellcheck-scripts @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -euo pipefail + +mode=${1:-error} +[[ "$mode" =~ ^(error|warning)$ ]] || { echo "Usage: $0 [error|warning]"; exit 1; } + +cd "$(dirname "$0")/.." || exit 1 + +find script -maxdepth 1 -type f -print0 | + xargs -0 grep -l -E '^#!(/bin/|/usr/bin/env )(sh|bash|dash)' | + xargs -r shellcheck -x -S "$mode" -C diff --git a/script/symbolicate b/script/symbolicate index 0022ebc539..5a3c17fed3 100755 --- a/script/symbolicate +++ b/script/symbolicate @@ -2,40 +2,64 @@ set -eu if [[ $# -eq 0 ]] || [[ "$1" == "--help" ]]; then - echo "Usage: $(basename $0) " - echo "This script symbolicates the provided .ips file using the appropriate dSYM file from digital ocean" + echo "Usage: $(basename $0) " + echo "This script symbolicates the provided .ips file or .json panic report using the appropriate debug symbols from DigitalOcean" echo "" exit 1 fi -ips_file=$1; +input_file=$1; -version=$(cat $ips_file | head -n 1 | jq -r .app_version) -bundle_id=$(cat $ips_file | head -n 1 | jq -r .bundleID) -cpu_type=$(cat $ips_file | tail -n+2 | jq -r .cpuType) +if [[ "$input_file" == *.json ]]; then + version=$(cat $input_file | jq -r .app_version) + channel=$(cat $input_file | jq -r .release_channel) + target_triple=$(cat $input_file | jq -r .target) -which symbolicate >/dev/null || cargo install symbolicate + which llvm-symbolizer rustfilt >/dev/null || echo Need to install llvm-symbolizer and rustfilt + + echo $channel; + + mkdir -p 
target/dsyms/$channel + + dsym="$channel/zed-$version-$target_triple.dbg" + if [[ ! -f target/dsyms/$dsym ]]; then + echo "Downloading $dsym..." + curl -o target/dsyms/$dsym.gz "https://zed-debug-symbols.nyc3.digitaloceanspaces.com/$dsym.gz" + gunzip target/dsyms/$dsym.gz + fi + + cat $input_file | jq -r .backtrace[] | sed s'/.*+//' | llvm-symbolizer --no-demangle --obj=target/dsyms/$dsym | rustfilt + +else # ips file + + version=$(cat $input_file | head -n 1 | jq -r .app_version) + bundle_id=$(cat $input_file | head -n 1 | jq -r .bundleID) + cpu_type=$(cat $input_file | tail -n+2 | jq -r .cpuType) + + which symbolicate >/dev/null || cargo install symbolicate + + arch="x86_64-apple-darwin" + if [[ "$cpu_type" == *ARM-64* ]]; then + arch="aarch64-apple-darwin" + fi + echo $bundle_id; + + channel="stable" + if [[ "$bundle_id" == *Nightly* ]]; then + channel="nightly" + elif [[ "$bundle_id" == *Preview* ]]; then + channel="preview" + fi + + mkdir -p target/dsyms/$channel + + dsym="$channel/Zed-$version-$arch.dwarf" + if [[ ! -f target/dsyms/$dsym ]]; then + echo "Downloading $dsym..." + curl -o target/dsyms/$dsym.gz "https://zed-debug-symbols.nyc3.digitaloceanspaces.com/$channel/Zed-$version-$arch.dwarf.gz" + gunzip target/dsyms/$dsym.gz + fi + + symbolicate $input_file target/dsyms/$dsym -arch="x86_64-apple-darwin" -if [[ "$cpu_type" == *ARM-64* ]]; then - arch="aarch64-apple-darwin" fi -echo $bundle_id; - -channel="stable" -if [[ "$bundle_id" == *Nightly* ]]; then - channel="nightly" -elif [[ "$bundle_id" == *Preview* ]]; then - channel="preview" -fi - -mkdir -p target/dsyms/$channel - -dsym="$channel/Zed-$version-$arch.dwarf" -if [[ ! -f target/dsyms/$dsym ]]; then - echo "Downloading $dsym..." 
- curl -o target/dsyms/$dsym.gz "https://zed-debug-symbols.nyc3.digitaloceanspaces.com/$channel/Zed-$version-$arch.dwarf.gz" - gunzip target/dsyms/$dsym.gz -fi - -symbolicate $ips_file target/dsyms/$dsym diff --git a/script/uninstall.sh b/script/uninstall.sh new file mode 100644 index 0000000000..3e460b8186 --- /dev/null +++ b/script/uninstall.sh @@ -0,0 +1,158 @@ +#!/usr/bin/env sh +set -eu + +# Uninstalls Zed that was installed using the install.sh script + +check_remaining_installations() { + platform="$(uname -s)" + if [ "$platform" = "Darwin" ]; then + # Check for any Zed variants in /Applications + remaining=$(ls -d /Applications/Zed*.app 2>/dev/null | wc -l) + [ "$remaining" -eq 0 ] + else + # Check for any Zed variants in ~/.local + remaining=$(ls -d "$HOME/.local/zed"*.app 2>/dev/null | wc -l) + [ "$remaining" -eq 0 ] + fi +} + +prompt_remove_preferences() { + printf "Do you want to keep your Zed preferences? [Y/n] " + read -r response + case "$response" in + [nN]|[nN][oO]) + rm -rf "$HOME/.config/zed" + echo "Preferences removed." + ;; + *) + echo "Preferences kept." + ;; + esac +} + +main() { + platform="$(uname -s)" + channel="${ZED_CHANNEL:-stable}" + + if [ "$platform" = "Darwin" ]; then + platform="macos" + elif [ "$platform" = "Linux" ]; then + platform="linux" + else + echo "Unsupported platform $platform" + exit 1 + fi + + "$platform" + + echo "Zed has been uninstalled" +} + +linux() { + suffix="" + if [ "$channel" != "stable" ]; then + suffix="-$channel" + fi + + appid="" + db_suffix="stable" + case "$channel" in + stable) + appid="dev.zed.Zed" + db_suffix="stable" + ;; + nightly) + appid="dev.zed.Zed-Nightly" + db_suffix="nightly" + ;; + preview) + appid="dev.zed.Zed-Preview" + db_suffix="preview" + ;; + dev) + appid="dev.zed.Zed-Dev" + db_suffix="dev" + ;; + *) + echo "Unknown release channel: ${channel}. Using stable app ID." 
+ appid="dev.zed.Zed" + db_suffix="stable" + ;; + esac + + # Remove the app directory + rm -rf "$HOME/.local/zed$suffix.app" + + # Remove the binary symlink + rm -f "$HOME/.local/bin/zed" + + # Remove the .desktop file + rm -f "$HOME/.local/share/applications/${appid}.desktop" + + # Remove the database directory for this channel + rm -rf "$HOME/.local/share/zed/db/0-$db_suffix" + + # Remove socket file + rm -f "$HOME/.local/share/zed/zed-$db_suffix.sock" + + # Remove the entire Zed directory if no installations remain + if check_remaining_installations; then + rm -rf "$HOME/.local/share/zed" + prompt_remove_preferences + fi + + rm -rf $HOME/.zed_server +} + +macos() { + app="Zed.app" + db_suffix="stable" + app_id="dev.zed.Zed" + case "$channel" in + nightly) + app="Zed Nightly.app" + db_suffix="nightly" + app_id="dev.zed.Zed-Nightly" + ;; + preview) + app="Zed Preview.app" + db_suffix="preview" + app_id="dev.zed.Zed-Preview" + ;; + dev) + app="Zed Dev.app" + db_suffix="dev" + app_id="dev.zed.Zed-Dev" + ;; + esac + + # Remove the app bundle + if [ -d "/Applications/$app" ]; then + rm -rf "/Applications/$app" + fi + + # Remove the binary symlink + rm -f "$HOME/.local/bin/zed" + + # Remove the database directory for this channel + rm -rf "$HOME/Library/Application Support/Zed/db/0-$db_suffix" + + # Remove app-specific files and directories + rm -rf "$HOME/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/$app_id.sfl"* + rm -rf "$HOME/Library/Caches/$app_id" + rm -rf "$HOME/Library/HTTPStorages/$app_id" + rm -rf "$HOME/Library/Preferences/$app_id.plist" + rm -rf "$HOME/Library/Saved Application State/$app_id.savedState" + + # Remove the entire Zed directory if no installations remain + if check_remaining_installations; then + rm -rf "$HOME/Library/Application Support/Zed" + rm -rf "$HOME/Library/Logs/Zed" + + prompt_remove_preferences + fi + + rm -rf $HOME/.zed_server +} + +main "$@" diff --git 
a/script/update-json-schemas b/script/update-json-schemas new file mode 100755 index 0000000000..182e0ff03b --- /dev/null +++ b/script/update-json-schemas @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -euo pipefail + +cd "$(dirname "$0")/.." || exit 1 +cd crates/languages/src/json/schemas +files=( + "tsconfig.json" + "package.json" +) +for file in "${files[@]}"; do + curl -sL -o "$file" "https://raw.githubusercontent.com/SchemaStore/schemastore/master/src/schemas/json/$file" +done + +HASH="$(curl -s 'https://api.github.com/repos/SchemaStore/schemastore/commits/HEAD' | jq -r '.sha')" +SHORT_HASH="${HASH:0:7}" +DATE="$(curl -s 'https://api.github.com/repos/SchemaStore/schemastore/commits/HEAD' |jq -r .commit.author.date | cut -c1-10)" +echo +echo "Updated JSON schemas to [SchemaStore/schemastore@$SHORT_HASH](https://github.com/SchemaStore/schemastore/tree/$HASH) ($DATE)" +echo +for file in "${files[@]}"; do + echo "- [$file](https://github.com/SchemaStore/schemastore/commits/master/src/schemas/json/$file)" \ + "@ [$SHORT_HASH](https://raw.githubusercontent.com/SchemaStore/schemastore/$HASH/src/schemas/json/$file)" +done +echo diff --git a/script/upload-nightly b/script/upload-nightly index 61b73d4e56..87ad712ae4 100755 --- a/script/upload-nightly +++ b/script/upload-nightly @@ -19,12 +19,12 @@ if [[ -n "${1:-}" ]]; then target="$1" else echo "Error: Target '$1' is not allowed" - echo "Usage: $0 [${allowed_targets[@]}]" + echo "Usage: $0 [${allowed_targets[*]}]" exit 1 fi else echo "Error: Target is not specified" -echo "Usage: $0 [${allowed_targets[@]}]" +echo "Usage: $0 [${allowed_targets[*]}]" exit 1 fi echo "Uploading nightly for target: $target" @@ -43,7 +43,6 @@ case "$target" in macos) upload_to_blob_store $bucket_name "target/aarch64-apple-darwin/release/Zed.dmg" "nightly/Zed-aarch64.dmg" upload_to_blob_store $bucket_name "target/x86_64-apple-darwin/release/Zed.dmg" "nightly/Zed-x86_64.dmg" - upload_to_blob_store $bucket_name "target/release/Zed.dmg" 
"nightly/Zed.dmg" upload_to_blob_store $bucket_name "target/latest-sha" "nightly/latest-sha" rm -f "target/aarch64-apple-darwin/release/Zed.dmg" "target/x86_64-apple-darwin/release/Zed.dmg" "target/release/Zed.dmg" rm -f "target/latest-sha" diff --git a/typos.toml b/typos.toml index 0682d0a3a9..6e4cb4a7b8 100644 --- a/typos.toml +++ b/typos.toml @@ -22,14 +22,12 @@ extend-exclude = [ # Stripe IDs are flagged as typos. "crates/collab/src/db/tests/processed_stripe_event_tests.rs", # Not our typos. - "crates/live_kit_server/", + "crates/livekit_server/", # Vim makes heavy use of partial typing tables. "crates/vim/", # Editor and file finder rely on partial typing and custom in-string syntax. "crates/file_finder/src/file_finder_tests.rs", "crates/editor/src/editor_tests.rs", - # Clojure uses .edn filename extension, which is not a misspelling of "end". - "extensions/clojure/languages/clojure/config.toml", # There are some names in the test data that are incorrectly flagged as typos. "crates/git/test_data/blame_incremental_complex", "crates/git/test_data/golden/blame_incremental_complex.json", @@ -43,6 +41,8 @@ extend-exclude = [ "docs/theme/css/", # Spellcheck triggers on `|Fixe[sd]|` regex part. "script/danger/dangerfile.ts", + # Hashes are not typos + "script/patches/use-cross-platform-livekit.patch" ] [default] @@ -52,9 +52,6 @@ extend-ignore-re = [ '"ba"', # :/ crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql "COLUMN enviroment", - # Typo in ClickHouse column name. - # crates/collab/src/api/events.rs - "rename = \"sesssion_id\"", "doas", # ProtoLS crate with tree-sitter Protobuf grammar. "protols",