From 65961b80fc9bc3100f65b3600a5a09a07737f182 Mon Sep 17 00:00:00 2001 From: thataboy Date: Sat, 7 Sep 2024 07:25:41 -0700 Subject: [PATCH 001/270] Add a way to configure default search options (#17179) Closes https://github.com/zed-industries/zed/issues/4646 ```json // Search options to enable by default when opening new project and buffer searches. "search": { "whole_word": false, "case_sensitive": false, "include_ignored": false, "regex": false } ``` Release Notes: - Added `search` settings section to configure default options enabled in buffer and project searches ([#4646](https://github.com/zed-industries/zed/issues/4646)) --------- Co-authored-by: Kirill Bulatov --- assets/settings/default.json | 7 +++++ crates/editor/src/editor.rs | 4 ++- crates/editor/src/editor_settings.rs | 41 +++++++++++++++++++++++++ crates/search/src/buffer_search.rs | 20 +++++++++--- crates/search/src/project_search.rs | 6 ++-- crates/search/src/search.rs | 12 ++++++++ crates/vim/src/test/vim_test_context.rs | 2 +- crates/zed/src/zed.rs | 1 + 8 files changed, 85 insertions(+), 8 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 2bf4611b90..65254afb7c 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -279,6 +279,13 @@ "relative_line_numbers": false, // If 'search_wrap' is disabled, search result do not wrap around the end of the file. "search_wrap": true, + // Search options to enable by default when opening new project and buffer searches. + "search": { + "whole_word": false, + "case_sensitive": false, + "include_ignored": false, + "regex": false + }, // When to populate a new search's query based on the text under the cursor. 
// This setting can take the following three values: // diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d009a40748..cb4ae63afc 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -59,7 +59,9 @@ use convert_case::{Case, Casing}; use debounced_delay::DebouncedDelay; use display_map::*; pub use display_map::{DisplayPoint, FoldPlaceholder}; -pub use editor_settings::{CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine}; +pub use editor_settings::{ + CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, +}; pub use editor_settings_controls::*; use element::LineWithInvisibles; pub use element::{ diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 04403b1547..8dadc0154b 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -28,6 +28,8 @@ pub struct EditorSettings { #[serde(default)] pub double_click_in_multibuffer: DoubleClickInMultibuffer, pub search_wrap: bool, + #[serde(default)] + pub search: SearchSettings, pub auto_signature_help: bool, pub show_signature_help_after_edits: bool, pub jupyter: Jupyter, @@ -156,6 +158,40 @@ pub enum ScrollBeyondLastLine { VerticalScrollMargin, } +/// Default options for buffer and project search items. 
+#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct SearchSettings { + #[serde(default)] + pub whole_word: bool, + #[serde(default)] + pub case_sensitive: bool, + #[serde(default)] + pub include_ignored: bool, + #[serde(default)] + pub regex: bool, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct SearchSettingsContent { + pub whole_word: Option, + pub case_sensitive: Option, + pub include_ignored: Option, + pub regex: Option, +} + +impl Settings for SearchSettings { + const KEY: Option<&'static str> = Some("search"); + + type FileContent = SearchSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} + #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct EditorSettingsContent { /// Whether the cursor blinks in the editor. @@ -251,6 +287,11 @@ pub struct EditorSettingsContent { /// Default: true pub search_wrap: Option, + /// Defaults to use when opening a new buffer and project search items. + /// + /// Default: nothing is enabled + pub search: Option, + /// Whether to automatically show a signature help pop-up or not. 
/// /// Default: false diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index bea470dedd..ba10a45282 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -9,7 +9,7 @@ use any_vec::AnyVec; use collections::HashMap; use editor::{ actions::{Tab, TabPrev}, - DisplayPoint, Editor, EditorElement, EditorSettings, EditorStyle, + DisplayPoint, Editor, EditorElement, EditorSettings, EditorStyle, SearchSettings, }; use futures::channel::oneshot; use gpui::{ @@ -22,7 +22,7 @@ use project::{ search_history::{SearchHistory, SearchHistoryCursor}, }; use serde::Deserialize; -use settings::Settings; +use settings::{Settings, SettingsStore}; use std::sync::Arc; use theme::ThemeSettings; @@ -96,6 +96,7 @@ pub struct BufferSearchBar { scroll_handle: ScrollHandle, editor_scroll_handle: ScrollHandle, editor_needed_width: Pixels, + _subscriptions: Vec, } impl BufferSearchBar { @@ -505,6 +506,12 @@ impl BufferSearchBar { cx.subscribe(&replacement_editor, Self::on_replacement_editor_event) .detach(); + let search_options = SearchOptions::from_settings(&SearchSettings::get_global(cx)); + + let settings_subscription = cx.observe_global::(move |this, cx| { + this.default_options = SearchOptions::from_settings(&SearchSettings::get_global(cx)); + }); + Self { query_editor, query_editor_focused: false, @@ -514,8 +521,8 @@ impl BufferSearchBar { active_searchable_item_subscription: None, active_match_index: None, searchable_items_with_matches: Default::default(), - default_options: SearchOptions::NONE, - search_options: SearchOptions::NONE, + default_options: search_options, + search_options, pending_search: None, query_contains_error: false, dismissed: true, @@ -530,6 +537,7 @@ impl BufferSearchBar { scroll_handle: ScrollHandle::new(), editor_scroll_handle: ScrollHandle::new(), editor_needed_width: px(0.), + _subscriptions: vec![settings_subscription], } } @@ -602,6 +610,9 @@ impl BufferSearchBar { let Some(handle) = 
self.active_searchable_item.as_ref() else { return false; }; + if self.default_options != self.search_options { + self.search_options = self.default_options; + } self.dismissed = false; handle.search_bar_visibility_changed(true, cx); @@ -1203,6 +1214,7 @@ mod tests { language::init(cx); Project::init_settings(cx); theme::init(theme::LoadThemes::JustBase, cx); + crate::init(cx); }); } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index f4e08af6da..c43d4ed454 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -668,7 +668,9 @@ impl ProjectSearchView { let (mut options, filters_enabled) = if let Some(settings) = settings { (settings.search_options, settings.filters_enabled) } else { - (SearchOptions::NONE, false) + let search_options = + SearchOptions::from_settings(&EditorSettings::get_global(cx).search); + (search_options, false) }; { @@ -3537,7 +3539,7 @@ pub mod tests { editor::init(cx); workspace::init_settings(cx); Project::init_settings(cx); - super::init(cx); + crate::init(cx); }); } diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index 0466930f90..f8d8da87c5 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -1,8 +1,10 @@ use bitflags::bitflags; pub use buffer_search::BufferSearchBar; +use editor::SearchSettings; use gpui::{actions, Action, AppContext, IntoElement}; use project::search::SearchQuery; pub use project_search::ProjectSearchView; +use settings::Settings; use ui::{prelude::*, Tooltip}; use ui::{ButtonStyle, IconButton}; use workspace::notifications::NotificationId; @@ -13,6 +15,7 @@ pub mod project_search; pub(crate) mod search_bar; pub fn init(cx: &mut AppContext) { + SearchSettings::register(cx); menu::init(); buffer_search::init(cx); project_search::init(cx); @@ -93,6 +96,15 @@ impl SearchOptions { options } + pub fn from_settings(settings: &SearchSettings) -> SearchOptions { + let mut options = 
SearchOptions::NONE; + options.set(SearchOptions::WHOLE_WORD, settings.whole_word); + options.set(SearchOptions::CASE_SENSITIVE, settings.case_sensitive); + options.set(SearchOptions::INCLUDE_IGNORED, settings.include_ignored); + options.set(SearchOptions::REGEX, settings.regex); + options + } + pub fn as_button( &self, active: bool, diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index 5ae4d517b0..c985f68e70 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -16,12 +16,12 @@ impl VimTestContext { return; } cx.update(|cx| { - search::init(cx); let settings = SettingsStore::test(cx); cx.set_global(settings); release_channel::init(SemanticVersion::default(), cx); command_palette::init(cx); crate::init(cx); + search::init(cx); }); } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 74bdfb666b..9ec43d607a 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3442,6 +3442,7 @@ mod tests { ); tasks_ui::init(cx); initialize_workspace(app_state.clone(), prompt_builder, cx); + search::init(cx); app_state }) } From 63188b6754276cf424aed2c1cc9947738d9c6116 Mon Sep 17 00:00:00 2001 From: saahityaedams Date: Sat, 7 Sep 2024 20:21:02 +0530 Subject: [PATCH 002/270] Fix parenthesis matching for file links in terminal (#17512) Closes #17391 Release Notes: - Fixed parenthesis matching for file links in terminal ([#17391](https://github.com/zed-industries/zed/issues/17391)) --- crates/terminal/src/terminal.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 224e660f5f..bd4dfed0fa 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -425,7 +425,7 @@ impl TerminalBuilder { // Optional suffix matches MSBuild diagnostic suffixes for path parsing in PathLikeWithPosition // 
https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks let word_regex = - RegexSearch::new(r#"[\$\+\w.\[\]:/\\@\-~]+(?:\((?:\d+|\d+,\d+)\))?"#).unwrap(); + RegexSearch::new(r#"[\$\+\w.\[\]:/\\@\-~()]+(?:\((?:\d+|\d+,\d+)\))?"#).unwrap(); let terminal = Terminal { task, From 894866da9478003e77458e7a849353312d4c282c Mon Sep 17 00:00:00 2001 From: thataboy Date: Sat, 7 Sep 2024 16:58:28 -0700 Subject: [PATCH 003/270] Refactor SearchSettings (#17550) Related to #17179. Simplify handling of search settings since there is no requirement to watch for settings.json changes and update search panels while they are opened. Attn: @SomeoneToIgnore Per our discussion. Ran test on search crate. Ran `cargo fmt`. Release Notes: - N/A --- crates/editor/src/editor_settings.rs | 21 --------------------- crates/search/src/buffer_search.rs | 15 ++++++--------- crates/search/src/search.rs | 2 -- 3 files changed, 6 insertions(+), 32 deletions(-) diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 8dadc0154b..2614e4ea30 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -171,27 +171,6 @@ pub struct SearchSettings { pub regex: bool, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct SearchSettingsContent { - pub whole_word: Option, - pub case_sensitive: Option, - pub include_ignored: Option, - pub regex: Option, -} - -impl Settings for SearchSettings { - const KEY: Option<&'static str> = Some("search"); - - type FileContent = SearchSettingsContent; - - fn load( - sources: SettingsSources, - _: &mut gpui::AppContext, - ) -> anyhow::Result { - sources.json_merge() - } -} - #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct EditorSettingsContent { /// Whether the cursor blinks in the editor. 
diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index ba10a45282..3a7cccfbb9 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -9,7 +9,7 @@ use any_vec::AnyVec; use collections::HashMap; use editor::{ actions::{Tab, TabPrev}, - DisplayPoint, Editor, EditorElement, EditorSettings, EditorStyle, SearchSettings, + DisplayPoint, Editor, EditorElement, EditorSettings, EditorStyle, }; use futures::channel::oneshot; use gpui::{ @@ -22,7 +22,7 @@ use project::{ search_history::{SearchHistory, SearchHistoryCursor}, }; use serde::Deserialize; -use settings::{Settings, SettingsStore}; +use settings::Settings; use std::sync::Arc; use theme::ThemeSettings; @@ -96,7 +96,6 @@ pub struct BufferSearchBar { scroll_handle: ScrollHandle, editor_scroll_handle: ScrollHandle, editor_needed_width: Pixels, - _subscriptions: Vec, } impl BufferSearchBar { @@ -506,11 +505,7 @@ impl BufferSearchBar { cx.subscribe(&replacement_editor, Self::on_replacement_editor_event) .detach(); - let search_options = SearchOptions::from_settings(&SearchSettings::get_global(cx)); - - let settings_subscription = cx.observe_global::(move |this, cx| { - this.default_options = SearchOptions::from_settings(&SearchSettings::get_global(cx)); - }); + let search_options = SearchOptions::from_settings(&EditorSettings::get_global(cx).search); Self { query_editor, @@ -537,7 +532,6 @@ impl BufferSearchBar { scroll_handle: ScrollHandle::new(), editor_scroll_handle: ScrollHandle::new(), editor_needed_width: px(0.), - _subscriptions: vec![settings_subscription], } } @@ -610,6 +604,9 @@ impl BufferSearchBar { let Some(handle) = self.active_searchable_item.as_ref() else { return false; }; + + self.default_options = SearchOptions::from_settings(&EditorSettings::get_global(cx).search); + if self.default_options != self.search_options { self.search_options = self.default_options; } diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs 
index f8d8da87c5..b99672c532 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -4,7 +4,6 @@ use editor::SearchSettings; use gpui::{actions, Action, AppContext, IntoElement}; use project::search::SearchQuery; pub use project_search::ProjectSearchView; -use settings::Settings; use ui::{prelude::*, Tooltip}; use ui::{ButtonStyle, IconButton}; use workspace::notifications::NotificationId; @@ -15,7 +14,6 @@ pub mod project_search; pub(crate) mod search_bar; pub fn init(cx: &mut AppContext) { - SearchSettings::register(cx); menu::init(); buffer_search::init(cx); project_search::init(cx); From 89ae97e5e99f68851075c8e02386d7ea180ccd37 Mon Sep 17 00:00:00 2001 From: Vishal Bhavsar Date: Sun, 8 Sep 2024 10:38:00 -0400 Subject: [PATCH 004/270] vim: Revert 'Y' to yank to end of line (#17563) Closes https://github.com/zed-industries/zed/issues/17549 --- assets/keymaps/vim.json | 2 +- crates/vim/src/normal.rs | 23 ----------------------- crates/vim/test_data/test_shift_y.json | 4 ---- 3 files changed, 1 insertion(+), 28 deletions(-) delete mode 100644 crates/vim/test_data/test_shift_y.json diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 8bb5ac72fe..f863e8488a 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -214,7 +214,7 @@ "shift-d": "vim::DeleteToEndOfLine", "shift-j": "vim::JoinLines", "y": ["vim::PushOperator", "Yank"], - "shift-y": "vim::YankToEndOfLine", + "shift-y": "vim::YankLine", "i": "vim::InsertBefore", "shift-i": "vim::InsertFirstNonWhitespace", "a": "vim::InsertAfter", diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index c9d3a7a472..ae560acc29 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -52,7 +52,6 @@ actions!( DeleteToEndOfLine, Yank, YankLine, - YankToEndOfLine, ChangeCase, ConvertToUpperCase, ConvertToLowerCase, @@ -77,7 +76,6 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { Vim::action(editor, cx, Vim::convert_to_upper_case); 
Vim::action(editor, cx, Vim::convert_to_lower_case); Vim::action(editor, cx, Vim::yank_line); - Vim::action(editor, cx, Vim::yank_to_end_of_line); Vim::action(editor, cx, Vim::toggle_comments); Vim::action(editor, cx, Vim::paste); @@ -428,18 +426,6 @@ impl Vim { self.yank_motion(motion::Motion::CurrentLine, count, cx) } - fn yank_to_end_of_line(&mut self, _: &YankToEndOfLine, cx: &mut ViewContext) { - self.record_current_action(cx); - let count = self.take_count(cx); - self.yank_motion( - motion::Motion::EndOfLine { - display_lines: false, - }, - count, - cx, - ) - } - fn toggle_comments(&mut self, _: &ToggleComments, cx: &mut ViewContext) { self.record_current_action(cx); self.update_editor(cx, |_, editor, cx| { @@ -1408,15 +1394,6 @@ mod test { ); } - #[gpui::test] - async fn test_shift_y(cx: &mut gpui::TestAppContext) { - let mut cx = NeovimBackedTestContext::new(cx).await; - - cx.set_shared_state("helˇlo\n").await; - cx.simulate_shared_keystrokes("shift-y").await; - cx.shared_clipboard().await.assert_eq("lo"); - } - #[gpui::test] async fn test_r(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_shift_y.json b/crates/vim/test_data/test_shift_y.json deleted file mode 100644 index 53038a69e9..0000000000 --- a/crates/vim/test_data/test_shift_y.json +++ /dev/null @@ -1,4 +0,0 @@ -{"Put":{"state":"helˇlo\n"}} -{"Key":"shift-y"} -{"Get":{"state":"helˇlo\n","mode":"Normal"}} -{"ReadRegister":{"name":"\"","value":"lo"}} From 657be0aa3e3308ac33d2bac745ac9bfab4b9ef18 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Sun, 8 Sep 2024 08:50:35 -0600 Subject: [PATCH 005/270] vim doc tweaks (#17564) Release Notes: - N/A --- docs/src/vim.md | 25 ++++++++++++++++--------- typos.toml | 1 + 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/docs/src/vim.md b/docs/src/vim.md index 089602916b..8656cfc7ba 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -96,8 +96,6 @@ Finally, vim mode's 
search and replace functionality is backed by Zed's. This me You can edit your personal key bindings with `:keymap`. For vim-specific shortcuts, you may find the following template a good place to start. -> **Note:** We made some breaking changes in Zed version `0.145.0`. For older versions, see [the previous version of this document](https://github.com/zed-industries/zed/blob/c67aeaa9c58619a58708722ac7d7a78c75c29336/docs/src/vim.md#L90). - ```json [ { @@ -106,6 +104,12 @@ For vim-specific shortcuts, you may find the following template a good place to // put key-bindings here if you want them to work in normal & visual mode } }, + { + "context": "vim_mode == normal && !menu", + "bindings": { + // "shift-y": ["workspace::SendKeystrokes", "y $"] // use nvim's Y behavior + } + }, { "context": "vim_mode == insert", "bindings": { @@ -162,7 +166,7 @@ Vim mode allows you to enable Zed’s command palette with `:`. This means that Additionally vim mode contains a number of aliases for popular vim commands to ensure that muscle memory works. For example `:w` will save the file. -We do not (yet) emulate the full power of vim’s command line, in particular we special case specific patterns instead of using vim's range selection syntax, and we do not support arguments to commands yet. Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. +We do not (yet) emulate the full power of vim’s command line, in particular we we do not support arguments to commands yet. Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. 
@@ -191,6 +195,12 @@ Currently supported vim-specific commands: :cc, :ll to open the errors page +# handling git diff +:dif[fupdate] + to view the diff under the cursor ("d o" in normal mode) +:rev[ert] + to revert the diff under the cursor ("d p" in normal mode) + # jump to position : to jump to a line number @@ -200,11 +210,8 @@ Currently supported vim-specific commands: to jump to next/prev line matching foo # replacement (/g is always assumed and Zed uses different regex syntax to vim) -:%s/foo/bar/ +:[range]s/foo/bar/ to replace instances of foo with bar -:X,Ys/foo/bar/ - to limit replacement between line X and Y - other ranges are not yet implemented # editing :j[oin] @@ -213,18 +220,18 @@ Currently supported vim-specific commands: to delete the current line (no range is yet supported) :s[ort] [i] to sort the current selection (with i, case-insensitively) +:y[ank] ``` As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. 
For example: ``` -:diff Toggle Hunk [Diff] :diffs Toggle all Hunk [Diffs] -:revert Revert Selected Hunks :cpp [C]o[p]y [P]ath to file :crp [C]opy [r]elative [P]ath :reveal [Reveal] in finder :zlog Open [Z]ed Log +:clank [C]ancel [lan]guage server work[k] ``` ## Settings diff --git a/typos.toml b/typos.toml index ad18f890e5..2bbb4907a7 100644 --- a/typos.toml +++ b/typos.toml @@ -46,6 +46,7 @@ extend-exclude = [ [default] extend-ignore-re = [ 'cl\[ist]', + '\[lan\]guage', '"ba"', ":ba\\|z", # :/ crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql From 174e12568669e70abb15934597a3f5df335eaf08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Elan=20Ruusam=C3=A4e?= Date: Sun, 8 Sep 2024 19:04:44 +0300 Subject: [PATCH 006/270] doc: vim.md: Remove duplicate we we (#17565) ...also adds some puncuation --- docs/src/vim.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/vim.md b/docs/src/vim.md index 8656cfc7ba..d4e41b5819 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -164,9 +164,9 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. -Additionally vim mode contains a number of aliases for popular vim commands to ensure that muscle memory works. For example `:w` will save the file. +Additionally, vim mode contains a number of aliases for popular vim commands to ensure that muscle memory works. For example `:w` will save the file. -We do not (yet) emulate the full power of vim’s command line, in particular we we do not support arguments to commands yet. Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. +We do not (yet) emulate the full power of vim’s command line, in particular, we do not support arguments to commands yet. 
Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. From 66ef31882341852229c74996867916fbd4a2fe2a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 9 Sep 2024 13:22:16 +0200 Subject: [PATCH 007/270] project panel: select autofolded entries (#17520) Closes #17252 Release Notes: - Intermediate auto-folded project entries can now be selected and acted upon (removed, renamed, cut, pasted). --- crates/project_panel/src/project_panel.rs | 238 +++++++++++++++--- .../src/components/label/highlighted_label.rs | 5 + crates/ui/src/components/label/label.rs | 5 + crates/ui/src/components/label/label_like.rs | 22 +- 4 files changed, 238 insertions(+), 32 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 9d73557761..be945cde77 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -60,11 +60,15 @@ pub struct ProjectPanel { scroll_handle: UniformListScrollHandle, focus_handle: FocusHandle, visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, + /// Maps from leaf project entry ID to the currently selected ancestor. + /// Relevant only for auto-fold dirs, where a single project panel entry may actually consist of several + /// project entries (and all non-leaf nodes are guaranteed to be directories). 
+ ancestors: HashMap, last_worktree_root_id: Option, last_external_paths_drag_over_entry: Option, expanded_dir_ids: HashMap>, unfolded_dir_ids: HashSet, - // Currently selected entry in a file tree + // Currently selected leaf entry (see auto-folding for a definition of that) in a file tree selection: Option, marked_entries: BTreeSet, context_menu: Option<(View, Point, Subscription)>, @@ -96,7 +100,7 @@ enum ClipboardEntry { } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct EntryDetails { +struct EntryDetails { filename: String, icon: Option, path: Arc, @@ -111,18 +115,19 @@ pub struct EntryDetails { is_cut: bool, git_status: Option, is_private: bool, + is_auto_folded: bool, worktree_id: WorktreeId, canonical_path: Option>, } #[derive(PartialEq, Clone, Default, Debug, Deserialize)] -pub struct Delete { +struct Delete { #[serde(default)] pub skip_prompt: bool, } #[derive(PartialEq, Clone, Default, Debug, Deserialize)] -pub struct Trash { +struct Trash { #[serde(default)] pub skip_prompt: bool, } @@ -155,6 +160,18 @@ actions!( ] ); +#[derive(Debug, Default)] +struct FoldedAncestors { + current_ancestor_depth: usize, + ancestors: Vec, +} + +impl FoldedAncestors { + fn max_ancestor_depth(&self) -> usize { + self.ancestors.len() + } +} + pub fn init_settings(cx: &mut AppContext) { ProjectPanelSettings::register(cx); } @@ -277,6 +294,7 @@ impl ProjectPanel { scroll_handle: UniformListScrollHandle::new(), focus_handle, visible_entries: Default::default(), + ancestors: Default::default(), last_worktree_root_id: Default::default(), last_external_paths_drag_over_entry: None, expanded_dir_ids: Default::default(), @@ -457,7 +475,7 @@ impl ProjectPanel { entry_id, }); - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let auto_fold_dirs = ProjectPanelSettings::get_global(cx).auto_fold_dirs; let is_root = Some(entry) == worktree.root_entry(); let is_dir = entry.is_dir(); @@ -583,6 +601,13 @@ impl 
ProjectPanel { fn expand_selected_entry(&mut self, _: &ExpandSelectedEntry, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { + if folded_ancestors.current_ancestor_depth > 0 { + folded_ancestors.current_ancestor_depth -= 1; + cx.notify(); + return; + } + } if entry.is_dir() { let worktree_id = worktree.id(); let entry_id = entry.id; @@ -611,6 +636,13 @@ impl ProjectPanel { fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext) { if let Some((worktree, mut entry)) = self.selected_entry(cx) { + if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { + if folded_ancestors.current_ancestor_depth < folded_ancestors.max_ancestor_depth() { + folded_ancestors.current_ancestor_depth += 1; + cx.notify(); + return; + } + } let worktree_id = worktree.id(); let expanded_dir_ids = if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) { @@ -943,6 +975,17 @@ impl ProjectPanel { } } + fn unflatten_entry_id(&self, leaf_entry_id: ProjectEntryId) -> ProjectEntryId { + if let Some(ancestors) = self.ancestors.get(&leaf_entry_id) { + ancestors + .ancestors + .get(ancestors.current_ancestor_depth) + .copied() + .unwrap_or(leaf_entry_id) + } else { + leaf_entry_id + } + } fn rename(&mut self, _: &Rename, cx: &mut ViewContext) { if let Some(SelectedEntry { worktree_id, @@ -950,6 +993,7 @@ impl ProjectPanel { }) = self.selection { if let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) { + let entry_id = self.unflatten_entry_id(entry_id); if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) { self.edit_state = Some(EditState { worktree_id, @@ -1161,7 +1205,7 @@ impl ProjectPanel { } fn select_parent(&mut self, _: &SelectParent, cx: &mut ViewContext) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { if let 
Some(parent) = entry.path.parent() { if let Some(parent_entry) = worktree.entry_for_path(parent) { self.selection = Some(SelectedEntry { @@ -1447,13 +1491,13 @@ impl ProjectPanel { } fn reveal_in_finder(&mut self, _: &RevealInFileManager, cx: &mut ViewContext) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { cx.reveal_path(&worktree.abs_path().join(&entry.path)); } } fn open_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let abs_path = worktree.abs_path().join(&entry.path); let working_directory = if entry.is_dir() { Some(abs_path) @@ -1476,7 +1520,7 @@ impl ProjectPanel { _: &NewSearchInDirectory, cx: &mut ViewContext, ) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { if entry.is_dir() { let include_root = self.project.read(cx).visible_worktrees(cx).count() > 1; let dir_path = if include_root { @@ -1596,15 +1640,36 @@ impl ProjectPanel { // Returns list of entries that should be affected by an operation. // When currently selected entry is not marked, it's treated as the only marked entry. 
fn marked_entries(&self) -> BTreeSet { - let Some(selection) = self.selection else { + let Some(mut selection) = self.selection else { return Default::default(); }; if self.marked_entries.contains(&selection) { - self.marked_entries.clone() + self.marked_entries + .iter() + .copied() + .map(|mut entry| { + entry.entry_id = self.resolve_entry(entry.entry_id); + entry + }) + .collect() } else { + selection.entry_id = self.resolve_entry(selection.entry_id); BTreeSet::from_iter([selection]) } } + + fn resolve_entry(&self, id: ProjectEntryId) -> ProjectEntryId { + self.ancestors + .get(&id) + .and_then(|ancestors| { + if ancestors.current_ancestor_depth == 0 { + return None; + } + ancestors.ancestors.get(ancestors.current_ancestor_depth) + }) + .copied() + .unwrap_or(id) + } pub fn selected_entry<'a>( &self, cx: &'a AppContext, @@ -1613,6 +1678,21 @@ impl ProjectPanel { Some((worktree.read(cx), entry)) } + /// Compared to selected_entry, this function resolves to the currently + /// selected subentry if dir auto-folding is enabled. 
+ fn selected_sub_entry<'a>( + &self, + cx: &'a AppContext, + ) -> Option<(&'a Worktree, &'a project::Entry)> { + let (worktree, mut entry) = self.selected_entry_handle(cx)?; + + let worktree = worktree.read(cx); + let resolved_id = self.resolve_entry(entry.id); + if resolved_id != entry.id { + entry = worktree.entry_for_id(resolved_id)?; + } + Some((worktree, entry)) + } fn selected_entry_handle<'a>( &self, cx: &'a AppContext, @@ -1655,6 +1735,7 @@ impl ProjectPanel { .and_then(|worktree| worktree.read(cx).root_entry()) .map(|entry| entry.id); + let old_ancestors = std::mem::take(&mut self.ancestors); self.visible_entries.clear(); for worktree in project.visible_worktrees(cx) { let snapshot = worktree.read(cx).snapshot(); @@ -1688,25 +1769,42 @@ impl ProjectPanel { let mut visible_worktree_entries = Vec::new(); let mut entry_iter = snapshot.entries(true, 0); + let mut auto_folded_ancestors = vec![]; while let Some(entry) = entry_iter.entry() { - if auto_collapse_dirs - && entry.kind.is_dir() - && !self.unfolded_dir_ids.contains(&entry.id) - { - if let Some(root_path) = snapshot.root_entry() { - let mut child_entries = snapshot.child_entries(&entry.path); - if let Some(child) = child_entries.next() { - if entry.path != root_path.path - && child_entries.next().is_none() - && child.kind.is_dir() - { - entry_iter.advance(); - continue; + if auto_collapse_dirs && entry.kind.is_dir() { + auto_folded_ancestors.push(entry.id); + if !self.unfolded_dir_ids.contains(&entry.id) { + if let Some(root_path) = snapshot.root_entry() { + let mut child_entries = snapshot.child_entries(&entry.path); + if let Some(child) = child_entries.next() { + if entry.path != root_path.path + && child_entries.next().is_none() + && child.kind.is_dir() + { + entry_iter.advance(); + + continue; + } } } } + let depth = old_ancestors + .get(&entry.id) + .map(|ancestor| ancestor.current_ancestor_depth) + .unwrap_or_default(); + let mut ancestors = std::mem::take(&mut auto_folded_ancestors); + if 
ancestors.len() > 1 { + ancestors.reverse(); + self.ancestors.insert( + entry.id, + FoldedAncestors { + current_ancestor_depth: depth, + ancestors, + }, + ); + } } - + auto_folded_ancestors.clear(); visible_worktree_entries.push(entry.clone()); if Some(entry.id) == new_entry_parent_id { visible_worktree_entries.push(Entry { @@ -1999,6 +2097,7 @@ impl ProjectPanel { .map_or(false, |e| e.is_cut() && e.items().contains(&selection)), git_status: status, is_private: entry.is_private, + is_auto_folded: difference > 1, worktree_id: *worktree_id, canonical_path: entry.canonical_path.clone(), }; @@ -2008,6 +2107,15 @@ impl ProjectPanel { entry.id == NEW_ENTRY_ID } else { entry.id == edit_state.entry_id + || self + .ancestors + .get(&entry.id) + .is_some_and(|auto_folded_dirs| { + auto_folded_dirs + .ancestors + .iter() + .any(|entry_id| *entry_id == edit_state.entry_id) + }) }; if is_edited_entry { @@ -2102,6 +2210,7 @@ impl ProjectPanel { active_selection: selection, marked_selections: selections, }; + let is_auto_folded = details.is_auto_folded; div() .id(entry_id.to_proto() as usize) .on_drag_move::(cx.listener( @@ -2202,11 +2311,78 @@ impl ProjectPanel { if let (Some(editor), true) = (Some(&self.filename_editor), show_editor) { h_flex().h_6().w_full().child(editor.clone()) } else { - h_flex().h_6().child( - Label::new(file_name) - .single_line() - .color(filename_text_color), - ) + h_flex().h_6().map(|this| { + if is_auto_folded && is_active { + let folded_ancestors = self.ancestors.get(&entry_id).unwrap(); + let Some(part_to_highlight) = Path::new(&file_name) + .ancestors() + .nth(folded_ancestors.current_ancestor_depth) + else { + return this; + }; + + let suffix = Path::new(&file_name) + .strip_prefix(part_to_highlight) + .ok() + .filter(|suffix| !suffix.as_os_str().is_empty()); + let prefix = part_to_highlight + .parent() + .filter(|prefix| !prefix.as_os_str().is_empty()); + let Some(part_to_highlight) = part_to_highlight + .file_name() + .and_then(|name| 
name.to_str().map(String::from)) + else { + return this; + }; + + this.children(prefix.and_then(|prefix| { + Some( + h_flex() + .child( + Label::new(prefix.to_str().map(String::from)?) + .single_line() + .color(filename_text_color), + ) + .child( + Label::new(std::path::MAIN_SEPARATOR_STR) + .single_line() + .color(filename_text_color), + ), + ) + })) + .child( + Label::new(part_to_highlight) + .single_line() + .color(filename_text_color) + .underline(true), + ) + .children( + suffix.and_then(|suffix| { + Some( + h_flex() + .child( + Label::new(std::path::MAIN_SEPARATOR_STR) + .single_line() + .color(filename_text_color), + ) + .child( + Label::new( + suffix.to_str().map(String::from)?, + ) + .single_line() + .color(filename_text_color), + ), + ) + }), + ) + } else { + this.child( + Label::new(file_name) + .single_line() + .color(filename_text_color), + ) + } + }) } .ml_1(), ) @@ -2551,7 +2727,7 @@ impl Render for ProjectPanel { .child( uniform_list(cx.view().clone(), "entries", items_count, { |this, range, cx| { - let mut items = Vec::new(); + let mut items = Vec::with_capacity(range.end - range.start); this.for_each_visible_entry(range, cx, |id, details, cx| { items.push(this.render_entry(id, details, cx)); }); diff --git a/crates/ui/src/components/label/highlighted_label.rs b/crates/ui/src/components/label/highlighted_label.rs index ab71ffdc4c..6b170bb810 100644 --- a/crates/ui/src/components/label/highlighted_label.rs +++ b/crates/ui/src/components/label/highlighted_label.rs @@ -58,6 +58,11 @@ impl LabelCommon for HighlightedLabel { self.base = self.base.alpha(alpha); self } + + fn underline(mut self, underline: bool) -> Self { + self.base = self.base.underline(underline); + self + } } pub fn highlight_ranges( diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index f29e4656e9..898a59de77 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -170,6 +170,11 @@ impl 
LabelCommon for Label { self.base = self.base.alpha(alpha); self } + + fn underline(mut self, underline: bool) -> Self { + self.base = self.base.underline(underline); + self + } } impl RenderOnce for Label { diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index a59c93e31d..bc2fae15a7 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -1,4 +1,4 @@ -use gpui::{relative, AnyElement, FontWeight, StyleRefinement, Styled}; +use gpui::{relative, AnyElement, FontWeight, StyleRefinement, Styled, UnderlineStyle}; use settings::Settings; use smallvec::SmallVec; use theme::ThemeSettings; @@ -42,6 +42,9 @@ pub trait LabelCommon { /// Sets the italic property of the label. fn italic(self, italic: bool) -> Self; + /// Sets the underline property of the label + fn underline(self, underline: bool) -> Self; + /// Sets the alpha property of the label, overwriting the alpha value of the color. 
fn alpha(self, alpha: f32) -> Self; } @@ -57,6 +60,7 @@ pub struct LabelLike { italic: bool, children: SmallVec<[AnyElement; 2]>, alpha: Option, + underline: bool, } impl Default for LabelLike { @@ -77,6 +81,7 @@ impl LabelLike { italic: false, children: SmallVec::new(), alpha: None, + underline: false, } } } @@ -123,6 +128,11 @@ impl LabelCommon for LabelLike { self } + fn underline(mut self, underline: bool) -> Self { + self.underline = underline; + self + } + fn alpha(mut self, alpha: f32) -> Self { self.alpha = Some(alpha); self @@ -165,6 +175,16 @@ impl RenderOnce for LabelLike { this.line_height(relative(1.)) }) .when(self.italic, |this| this.italic()) + .when(self.underline, |mut this| { + this.text_style() + .get_or_insert_with(Default::default) + .underline = Some(UnderlineStyle { + thickness: px(1.), + color: None, + wavy: false, + }); + this + }) .text_color(color) .font_weight(self.weight.unwrap_or(settings.ui_font.weight)) .children(self.children) From fcf79c0f1d6ec5153fef7ee3191450e50749dff9 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 9 Sep 2024 15:01:26 +0200 Subject: [PATCH 008/270] assistant: Support copy/pasting creases (#17490) https://github.com/user-attachments/assets/78a2572d-8e8f-4206-9680-dcd884e7bbbd Release Notes: - Added support for copying and pasting slash commands in the assistant panel --------- Co-authored-by: Thorsten --- crates/assistant/src/assistant_panel.rs | 239 ++++++++++++++++---- crates/editor/src/display_map/crease_map.rs | 92 +++++++- 2 files changed, 283 insertions(+), 48 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index fd5f62e188..0828b9b991 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -26,8 +26,8 @@ use collections::{BTreeSet, HashMap, HashSet}; use editor::{ actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt}, display_map::{ - BlockDisposition, BlockId, 
BlockProperties, BlockStyle, Crease, CustomBlockId, FoldId, - RenderBlock, ToDisplayPoint, + BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease, CreaseMetadata, + CustomBlockId, FoldId, RenderBlock, ToDisplayPoint, }, scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor}, Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, RowExt, ToOffset as _, ToPoint, @@ -54,13 +54,13 @@ use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::{Project, ProjectLspAdapterDelegate, Worktree}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; +use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; use smol::stream::StreamExt; use std::{ borrow::Cow, cmp, collections::hash_map, - fmt::Write, ops::{ControlFlow, Range}, path::PathBuf, sync::Arc, @@ -2491,20 +2491,26 @@ impl ContextEditor { .unwrap(); let buffer_row = MultiBufferRow(start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); - creases.push(Crease::new( - start..end, - FoldPlaceholder { - render: render_fold_icon_button( - cx.view().downgrade(), - section.icon, - section.label.clone(), - ), - constrain_width: false, - merge_adjacent: false, - }, - render_slash_command_output_toggle, - |_, _, _| Empty.into_any_element(), - )); + creases.push( + Crease::new( + start..end, + FoldPlaceholder { + render: render_fold_icon_button( + cx.view().downgrade(), + section.icon, + section.label.clone(), + ), + constrain_width: false, + merge_adjacent: false, + }, + render_slash_command_output_toggle, + |_, _, _| Empty.into_any_element(), + ) + .with_metadata(CreaseMetadata { + icon: section.icon, + label: section.label, + }), + ); } editor.insert_creases(creases, cx); @@ -3318,39 +3324,113 @@ impl ContextEditor { } fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext) { - let editor = self.editor.read(cx); - let context = self.context.read(cx); - if editor.selections.count() == 1 { - let selection = 
editor.selections.newest::(cx); - let mut copied_text = String::new(); - let mut spanned_messages = 0; - for message in context.messages(cx) { - if message.offset_range.start >= selection.range().end { - break; - } else if message.offset_range.end >= selection.range().start { - let range = cmp::max(message.offset_range.start, selection.range().start) - ..cmp::min(message.offset_range.end, selection.range().end); - if !range.is_empty() { - spanned_messages += 1; - write!(&mut copied_text, "## {}\n\n", message.role).unwrap(); - for chunk in context.buffer().read(cx).text_for_range(range) { - copied_text.push_str(chunk); - } - copied_text.push('\n'); - } - } - } - - if spanned_messages > 1 { - cx.write_to_clipboard(ClipboardItem::new_string(copied_text)); - return; - } + if self.editor.read(cx).selections.count() == 1 { + let (copied_text, metadata) = self.get_clipboard_contents(cx); + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + copied_text, + metadata, + )); + cx.stop_propagation(); + return; } cx.propagate(); } - fn paste(&mut self, _: &editor::actions::Paste, cx: &mut ViewContext) { + fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext) { + if self.editor.read(cx).selections.count() == 1 { + let (copied_text, metadata) = self.get_clipboard_contents(cx); + + self.editor.update(cx, |editor, cx| { + let selections = editor.selections.all::(cx); + + editor.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections); + }); + this.insert("", cx); + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + copied_text, + metadata, + )); + }); + }); + + cx.stop_propagation(); + return; + } + + cx.propagate(); + } + + fn get_clipboard_contents(&mut self, cx: &mut ViewContext) -> (String, CopyMetadata) { + let creases = self.editor.update(cx, |editor, cx| { + let selection = editor.selections.newest::(cx); + let selection_start = editor.selections.newest::(cx).start; + let 
snapshot = editor.buffer().read(cx).snapshot(cx); + editor.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + .crease_snapshot + .creases_in_range( + MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1), + &snapshot, + ) + .filter_map(|crease| { + if let Some(metadata) = &crease.metadata { + let start = crease + .range + .start + .to_offset(&snapshot) + .saturating_sub(selection_start); + let end = crease + .range + .end + .to_offset(&snapshot) + .saturating_sub(selection_start); + + let range_relative_to_selection = start..end; + + if range_relative_to_selection.is_empty() { + None + } else { + Some(SelectedCreaseMetadata { + range_relative_to_selection, + crease: metadata.clone(), + }) + } + } else { + None + } + }) + .collect::>() + }) + }); + + let context = self.context.read(cx); + let selection = self.editor.read(cx).selections.newest::(cx); + let mut text = String::new(); + for message in context.messages(cx) { + if message.offset_range.start >= selection.range().end { + break; + } else if message.offset_range.end >= selection.range().start { + let range = cmp::max(message.offset_range.start, selection.range().start) + ..cmp::min(message.offset_range.end, selection.range().end); + if !range.is_empty() { + for chunk in context.buffer().read(cx).text_for_range(range) { + text.push_str(chunk); + } + text.push('\n'); + } + } + } + + (text, CopyMetadata { creases }) + } + + fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext) { + cx.stop_propagation(); + let images = if let Some(item) = cx.read_from_clipboard() { item.into_entries() .filter_map(|entry| { @@ -3365,9 +3445,62 @@ impl ContextEditor { Vec::new() }; + let metadata = if let Some(item) = cx.read_from_clipboard() { + item.entries().first().and_then(|entry| { + if let ClipboardEntry::String(text) = entry { + text.metadata_json::() + } else { + None + } + }) + } else { + None + }; + if images.is_empty() { - // If we didn't find any valid 
image data to paste, propagate to let normal pasting happen. - cx.propagate(); + self.editor.update(cx, |editor, cx| { + let paste_position = editor.selections.newest::(cx).head(); + editor.paste(action, cx); + + if let Some(metadata) = metadata { + let buffer = editor.buffer().read(cx).snapshot(cx); + + let mut buffer_rows_to_fold = BTreeSet::new(); + let weak_editor = cx.view().downgrade(); + editor.insert_creases( + metadata.creases.into_iter().map(|metadata| { + let start = buffer.anchor_after( + paste_position + metadata.range_relative_to_selection.start, + ); + let end = buffer.anchor_before( + paste_position + metadata.range_relative_to_selection.end, + ); + + let buffer_row = MultiBufferRow(start.to_point(&buffer).row); + buffer_rows_to_fold.insert(buffer_row); + Crease::new( + start..end, + FoldPlaceholder { + constrain_width: false, + render: render_fold_icon_button( + weak_editor.clone(), + metadata.crease.icon, + metadata.crease.label.clone(), + ), + merge_adjacent: false, + }, + render_slash_command_output_toggle, + |_, _, _| Empty.into_any(), + ) + .with_metadata(metadata.crease.clone()) + }), + cx, + ); + for buffer_row in buffer_rows_to_fold.into_iter().rev() { + editor.fold_at(&FoldAt { buffer_row }, cx); + } + } + }); } else { let mut image_positions = Vec::new(); self.editor.update(cx, |editor, cx| { @@ -4037,6 +4170,17 @@ fn render_fold_icon_button( }) } +#[derive(Debug, Clone, Serialize, Deserialize)] +struct CopyMetadata { + creases: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct SelectedCreaseMetadata { + range_relative_to_selection: Range, + crease: CreaseMetadata, +} + impl EventEmitter for ContextEditor {} impl EventEmitter for ContextEditor {} @@ -4062,6 +4206,7 @@ impl Render for ContextEditor { .capture_action(cx.listener(ContextEditor::cancel)) .capture_action(cx.listener(ContextEditor::save)) .capture_action(cx.listener(ContextEditor::copy)) + .capture_action(cx.listener(ContextEditor::cut)) 
.capture_action(cx.listener(ContextEditor::paste)) .capture_action(cx.listener(ContextEditor::cycle_message_role)) .capture_action(cx.listener(ContextEditor::confirm_command)) diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 9aa2728dca..10ee125b32 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -1,10 +1,11 @@ use collections::HashMap; use gpui::{AnyElement, IntoElement}; use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToPoint}; +use serde::{Deserialize, Serialize}; use std::{cmp::Ordering, ops::Range, sync::Arc}; use sum_tree::{Bias, SeekTarget, SumTree}; use text::Point; -use ui::WindowContext; +use ui::{IconName, SharedString, WindowContext}; use crate::FoldPlaceholder; @@ -49,6 +50,31 @@ impl CreaseSnapshot { None } + pub fn creases_in_range<'a>( + &'a self, + range: Range, + snapshot: &'a MultiBufferSnapshot, + ) -> impl '_ + Iterator { + let start = snapshot.anchor_before(Point::new(range.start.0, 0)); + let mut cursor = self.creases.cursor::(); + cursor.seek(&start, Bias::Left, snapshot); + + std::iter::from_fn(move || { + while let Some(item) = cursor.item() { + cursor.next(snapshot); + let crease_start = item.crease.range.start.to_point(snapshot); + let crease_end = item.crease.range.end.to_point(snapshot); + if crease_end.row > range.end.0 { + continue; + } + if crease_start.row >= range.start.0 && crease_end.row < range.end.0 { + return Some(&item.crease); + } + } + None + }) + } + pub fn crease_items_with_offsets( &self, snapshot: &MultiBufferSnapshot, @@ -87,6 +113,14 @@ pub struct Crease { pub placeholder: FoldPlaceholder, pub render_toggle: RenderToggleFn, pub render_trailer: RenderTrailerFn, + pub metadata: Option, +} + +/// Metadata about a [`Crease`], that is used for serialization. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct CreaseMetadata { + pub icon: IconName, + pub label: SharedString, } impl Crease { @@ -124,8 +158,14 @@ impl Crease { render_trailer: Arc::new(move |row, folded, cx| { render_trailer(row, folded, cx).into_any_element() }), + metadata: None, } } + + pub fn with_metadata(mut self, metadata: CreaseMetadata) -> Self { + self.metadata = Some(metadata); + self + } } impl std::fmt::Debug for Crease { @@ -304,4 +344,54 @@ mod test { .query_row(MultiBufferRow(3), &snapshot) .is_none()); } + + #[gpui::test] + fn test_creases_in_range(cx: &mut AppContext) { + let text = "line1\nline2\nline3\nline4\nline5\nline6\nline7"; + let buffer = MultiBuffer::build_simple(text, cx); + let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + let mut crease_map = CreaseMap::default(); + + let creases = [ + Crease::new( + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + Crease::new( + snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + Crease::new( + snapshot.anchor_before(Point::new(5, 0))..snapshot.anchor_after(Point::new(5, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + ]; + crease_map.insert(creases, &snapshot); + + let crease_snapshot = crease_map.snapshot(); + + let range = MultiBufferRow(0)..MultiBufferRow(7); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 3); + + let range = MultiBufferRow(2)..MultiBufferRow(5); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 1); + assert_eq!(creases[0].range.start.to_point(&snapshot).row, 3); + + let 
range = MultiBufferRow(0)..MultiBufferRow(2); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 1); + assert_eq!(creases[0].range.start.to_point(&snapshot).row, 1); + + let range = MultiBufferRow(6)..MultiBufferRow(7); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 0); + } } From dd257b8412c3c8005c710185dc8fc2eed3cc67ba Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:14:07 +0200 Subject: [PATCH 009/270] project panel: Do not allow collapsing auto-folded directory past the list of ancestors (#17594) Closes #ISSUE Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index be945cde77..9add77c864 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -637,7 +637,9 @@ impl ProjectPanel { fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext) { if let Some((worktree, mut entry)) = self.selected_entry(cx) { if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { - if folded_ancestors.current_ancestor_depth < folded_ancestors.max_ancestor_depth() { + if folded_ancestors.current_ancestor_depth + 1 + < folded_ancestors.max_ancestor_depth() + { folded_ancestors.current_ancestor_depth += 1; cx.notify(); return; From 12dde176084c9950b78093c1e7fb13e5988292ba Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 9 Sep 2024 18:31:55 +0200 Subject: [PATCH 010/270] assistant panel: automatically insert selections (#17589) Addresses parts of feedback from https://www.jacobcolling.com/friction-log/zed-friction-log Release Notes: - "Assistant::NewContext" now automatically does quote 
selection as well - "Assistant::QuoteSelection" now handles multicursor selections, inserting multiple excerpts. --- crates/assistant/src/assistant_panel.rs | 226 +++++++++++++----------- 1 file changed, 120 insertions(+), 106 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 0828b9b991..82888b498a 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -939,9 +939,16 @@ impl AssistantPanel { cx: &mut ViewContext, ) { if let Some(panel) = workspace.panel::(cx) { - panel.update(cx, |panel, cx| { - panel.new_context(cx); - }); + let did_create_context = panel + .update(cx, |panel, cx| { + panel.new_context(cx)?; + + Some(()) + }) + .is_some(); + if did_create_context { + ContextEditor::quote_selection(workspace, &Default::default(), cx); + } } } @@ -3186,87 +3193,93 @@ impl ContextEditor { return; }; - let selection = editor.update(cx, |editor, cx| editor.selections.newest_adjusted(cx)); - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); - let range = editor::ToOffset::to_offset(&selection.start, &buffer) - ..editor::ToOffset::to_offset(&selection.end, &buffer); - let selected_text = buffer.text_for_range(range.clone()).collect::(); - if selected_text.is_empty() { - return; - } - - let start_language = buffer.language_at(range.start); - let end_language = buffer.language_at(range.end); - let language_name = if start_language == end_language { - start_language.map(|language| language.code_fence_block_name()) - } else { - None - }; - let language_name = language_name.as_deref().unwrap_or(""); - - let filename = buffer - .file_at(selection.start) - .map(|file| file.full_path(cx)); - - let text = if language_name == "markdown" { - selected_text - .lines() - .map(|line| format!("> {}", line)) - .collect::>() - .join("\n") - } else { - let start_symbols = buffer - .symbols_containing(selection.start, None) - .map(|(_, symbols)| 
symbols); - let end_symbols = buffer - .symbols_containing(selection.end, None) - .map(|(_, symbols)| symbols); - - let outline_text = - if let Some((start_symbols, end_symbols)) = start_symbols.zip(end_symbols) { - Some( - start_symbols - .into_iter() - .zip(end_symbols) - .take_while(|(a, b)| a == b) - .map(|(a, _)| a.text) - .collect::>() - .join(" > "), - ) + let mut creases = vec![]; + editor.update(cx, |editor, cx| { + let selections = editor.selections.all_adjusted(cx); + let buffer = editor.buffer().read(cx).snapshot(cx); + for selection in selections { + let range = editor::ToOffset::to_offset(&selection.start, &buffer) + ..editor::ToOffset::to_offset(&selection.end, &buffer); + let selected_text = buffer.text_for_range(range.clone()).collect::(); + if selected_text.is_empty() { + continue; + } + let start_language = buffer.language_at(range.start); + let end_language = buffer.language_at(range.end); + let language_name = if start_language == end_language { + start_language.map(|language| language.code_fence_block_name()) } else { None }; + let language_name = language_name.as_deref().unwrap_or(""); + let filename = buffer + .file_at(selection.start) + .map(|file| file.full_path(cx)); + let text = if language_name == "markdown" { + selected_text + .lines() + .map(|line| format!("> {}", line)) + .collect::>() + .join("\n") + } else { + let start_symbols = buffer + .symbols_containing(selection.start, None) + .map(|(_, symbols)| symbols); + let end_symbols = buffer + .symbols_containing(selection.end, None) + .map(|(_, symbols)| symbols); - let line_comment_prefix = start_language - .and_then(|l| l.default_scope().line_comment_prefixes().first().cloned()); + let outline_text = if let Some((start_symbols, end_symbols)) = + start_symbols.zip(end_symbols) + { + Some( + start_symbols + .into_iter() + .zip(end_symbols) + .take_while(|(a, b)| a == b) + .map(|(a, _)| a.text) + .collect::>() + .join(" > "), + ) + } else { + None + }; - let fence = 
codeblock_fence_for_path( - filename.as_deref(), - Some(selection.start.row..selection.end.row), - ); + let line_comment_prefix = start_language + .and_then(|l| l.default_scope().line_comment_prefixes().first().cloned()); - if let Some((line_comment_prefix, outline_text)) = line_comment_prefix.zip(outline_text) - { - let breadcrumb = format!("{line_comment_prefix}Excerpt from: {outline_text}\n"); - format!("{fence}{breadcrumb}{selected_text}\n```") - } else { - format!("{fence}{selected_text}\n```") + let fence = codeblock_fence_for_path( + filename.as_deref(), + Some(selection.start.row..selection.end.row), + ); + + if let Some((line_comment_prefix, outline_text)) = + line_comment_prefix.zip(outline_text) + { + let breadcrumb = + format!("{line_comment_prefix}Excerpt from: {outline_text}\n"); + format!("{fence}{breadcrumb}{selected_text}\n```") + } else { + format!("{fence}{selected_text}\n```") + } + }; + let crease_title = if let Some(path) = filename { + let start_line = selection.start.row + 1; + let end_line = selection.end.row + 1; + if start_line == end_line { + format!("{}, Line {}", path.display(), start_line) + } else { + format!("{}, Lines {} to {}", path.display(), start_line, end_line) + } + } else { + "Quoted selection".to_string() + }; + creases.push((text, crease_title)); } - }; - - let crease_title = if let Some(path) = filename { - let start_line = selection.start.row + 1; - let end_line = selection.end.row + 1; - if start_line == end_line { - format!("{}, Line {}", path.display(), start_line) - } else { - format!("{}, Lines {} to {}", path.display(), start_line, end_line) - } - } else { - "Quoted selection".to_string() - }; - + }); + if creases.is_empty() { + return; + } // Activate the panel if !panel.focus_handle(cx).contains_focused(cx) { workspace.toggle_panel_focus::(cx); @@ -3283,39 +3296,40 @@ impl ContextEditor { context.update(cx, |context, cx| { context.editor.update(cx, |editor, cx| { editor.insert("\n", cx); + for (text, 
crease_title) in creases { + let point = editor.selections.newest::(cx).head(); + let start_row = MultiBufferRow(point.row); - let point = editor.selections.newest::(cx).head(); - let start_row = MultiBufferRow(point.row); + editor.insert(&text, cx); - editor.insert(&text, cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + let anchor_before = snapshot.anchor_after(point); + let anchor_after = editor + .selections + .newest_anchor() + .head() + .bias_left(&snapshot); - let snapshot = editor.buffer().read(cx).snapshot(cx); - let anchor_before = snapshot.anchor_after(point); - let anchor_after = editor - .selections - .newest_anchor() - .head() - .bias_left(&snapshot); + editor.insert("\n", cx); - editor.insert("\n", cx); - - let fold_placeholder = quote_selection_fold_placeholder( - crease_title, - cx.view().downgrade(), - ); - let crease = Crease::new( - anchor_before..anchor_after, - fold_placeholder, - render_quote_selection_output_toggle, - |_, _, _| Empty.into_any(), - ); - editor.insert_creases(vec![crease], cx); - editor.fold_at( - &FoldAt { - buffer_row: start_row, - }, - cx, - ); + let fold_placeholder = quote_selection_fold_placeholder( + crease_title, + cx.view().downgrade(), + ); + let crease = Crease::new( + anchor_before..anchor_after, + fold_placeholder, + render_quote_selection_output_toggle, + |_, _, _| Empty.into_any(), + ); + editor.insert_creases(vec![crease], cx); + editor.fold_at( + &FoldAt { + buffer_row: start_row, + }, + cx, + ); + } }) }); }; From 59be07ad90206fdba94dbdc66154831b24b9478a Mon Sep 17 00:00:00 2001 From: Fernando Tagawa Date: Mon, 9 Sep 2024 18:27:45 -0300 Subject: [PATCH 011/270] x11: Implement Drag and Drop (#17491) Closes #16225 Release Notes: - x11: Implemented Drag and Drop. 
--- crates/gpui/src/platform/linux/x11/client.rs | 210 ++++++++++++++++++- crates/gpui/src/platform/linux/x11/window.rs | 17 ++ typos.toml | 4 +- 3 files changed, 226 insertions(+), 5 deletions(-) diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index f127d312d2..0909d09f25 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,3 +1,4 @@ +use core::str; use std::cell::RefCell; use std::collections::HashSet; use std::ops::Deref; @@ -9,6 +10,8 @@ use calloop::generic::{FdWrapper, Generic}; use calloop::{EventLoop, LoopHandle, RegistrationToken}; use collections::HashMap; +use http_client::Url; +use smallvec::SmallVec; use util::ResultExt; use x11rb::connection::{Connection, RequestConnection}; @@ -17,9 +20,13 @@ use x11rb::errors::ConnectionError; use x11rb::protocol::randr::ConnectionExt as _; use x11rb::protocol::xinput::ConnectionExt; use x11rb::protocol::xkb::ConnectionExt as _; -use x11rb::protocol::xproto::{ChangeWindowAttributesAux, ConnectionExt as _, KeyPressEvent}; +use x11rb::protocol::xproto::{ + AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, ConnectionExt as _, + EventMask, KeyPressEvent, +}; use x11rb::protocol::{randr, render, xinput, xkb, xproto, Event}; use x11rb::resource_manager::Database; +use x11rb::wrapper::ConnectionExt as _; use x11rb::xcb_ffi::XCBConnection; use xim::{x11rb::X11rbClient, Client}; use xim::{AttributeName, InputStyle}; @@ -30,8 +37,8 @@ use crate::platform::linux::LinuxClient; use crate::platform::{LinuxCommon, PlatformWindow}; use crate::{ modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, - DisplayId, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform, PlatformDisplay, - PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, + DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, 
Platform, + PlatformDisplay, PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, }; use super::{button_of_key, modifiers_from_state, pressed_button_from_mask}; @@ -101,6 +108,14 @@ struct XKBStateNotiy { locked_layout: LayoutIndex, } +#[derive(Debug, Default)] +pub struct Xdnd { + other_window: xproto::Window, + drag_type: u32, + retrieved: bool, + position: Point, +} + pub struct X11ClientState { pub(crate) loop_handle: LoopHandle<'static, X11Client>, pub(crate) event_loop: Option>, @@ -142,6 +157,7 @@ pub struct X11ClientState { pub(crate) common: LinuxCommon, pub(crate) clipboard: x11_clipboard::Clipboard, pub(crate) clipboard_item: Option, + pub(crate) xdnd_state: Xdnd, } #[derive(Clone)] @@ -423,6 +439,7 @@ impl X11Client { clipboard, clipboard_item: None, + xdnd_state: Xdnd::default(), }))) } @@ -611,7 +628,7 @@ impl X11Client { match event { Event::ClientMessage(event) => { let window = self.get_window(event.window)?; - let [atom, _arg1, arg2, arg3, _arg4] = event.data.as_data32(); + let [atom, arg1, arg2, arg3, arg4] = event.data.as_data32(); let mut state = self.0.borrow_mut(); if atom == state.atoms.WM_DELETE_WINDOW { @@ -627,6 +644,106 @@ impl X11Client { hi: arg3 as i32, }) } + + if event.type_ == state.atoms.XdndEnter { + state.xdnd_state.other_window = atom; + if (arg1 & 0x1) == 0x1 { + state.xdnd_state.drag_type = xdnd_get_supported_atom( + &state.xcb_connection, + &state.atoms, + state.xdnd_state.other_window, + ); + } else { + if let Some(atom) = [arg2, arg3, arg4] + .into_iter() + .find(|atom| xdnd_is_atom_supported(*atom, &state.atoms)) + { + state.xdnd_state.drag_type = atom; + } + } + } else if event.type_ == state.atoms.XdndLeave { + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Pending { + position: state.xdnd_state.position, + })); + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Exited {})); + state.xdnd_state = Xdnd::default(); + } else if event.type_ == state.atoms.XdndPosition { + if let 
Ok(pos) = state + .xcb_connection + .query_pointer(event.window) + .unwrap() + .reply() + { + state.xdnd_state.position = + Point::new(Pixels(pos.win_x as f32), Pixels(pos.win_y as f32)); + } + if !state.xdnd_state.retrieved { + state + .xcb_connection + .convert_selection( + event.window, + state.atoms.XdndSelection, + state.xdnd_state.drag_type, + state.atoms.XDND_DATA, + arg3, + ) + .unwrap(); + } + xdnd_send_status( + &state.xcb_connection, + &state.atoms, + event.window, + state.xdnd_state.other_window, + arg4, + ); + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Pending { + position: state.xdnd_state.position, + })); + } else if event.type_ == state.atoms.XdndDrop { + xdnd_send_finished( + &state.xcb_connection, + &state.atoms, + event.window, + state.xdnd_state.other_window, + ); + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Submit { + position: state.xdnd_state.position, + })); + state.xdnd_state = Xdnd::default(); + } + } + Event::SelectionNotify(event) => { + let window = self.get_window(event.requestor)?; + let mut state = self.0.borrow_mut(); + let property = state.xcb_connection.get_property( + false, + event.requestor, + state.atoms.XDND_DATA, + AtomEnum::ANY, + 0, + 1024, + ); + if property.as_ref().log_err().is_none() { + return Some(()); + } + if let Ok(reply) = property.unwrap().reply() { + match str::from_utf8(&reply.value) { + Ok(file_list) => { + let paths: SmallVec<[_; 2]> = file_list + .lines() + .filter_map(|path| Url::parse(path).log_err()) + .filter_map(|url| url.to_file_path().log_err()) + .collect(); + let input = PlatformInput::FileDrop(FileDropEvent::Entered { + position: state.xdnd_state.position, + paths: crate::ExternalPaths(paths), + }); + window.handle_input(input); + state.xdnd_state.retrieved = true; + } + Err(_) => {} + } + } } Event::ConfigureNotify(event) => { let bounds = Bounds { @@ -1179,6 +1296,16 @@ impl LinuxClient for X11Client { state.scale_factor, state.common.appearance, )?; + state + 
.xcb_connection + .change_property32( + xproto::PropMode::REPLACE, + x_window, + state.atoms.XdndAware, + state.atoms.XA_ATOM, + &[5], + ) + .unwrap(); let screen_resources = state .xcb_connection @@ -1540,3 +1667,78 @@ fn check_gtk_frame_extents_supported( supported_atoms.contains(&atoms._GTK_FRAME_EXTENTS) } + +fn xdnd_is_atom_supported(atom: u32, atoms: &XcbAtoms) -> bool { + return atom == atoms.TEXT + || atom == atoms.STRING + || atom == atoms.UTF8_STRING + || atom == atoms.TEXT_PLAIN + || atom == atoms.TEXT_PLAIN_UTF8 + || atom == atoms.TextUriList; +} + +fn xdnd_get_supported_atom( + xcb_connection: &XCBConnection, + supported_atoms: &XcbAtoms, + target: xproto::Window, +) -> u32 { + let property = xcb_connection + .get_property( + false, + target, + supported_atoms.XdndTypeList, + AtomEnum::ANY, + 0, + 1024, + ) + .unwrap(); + if let Ok(reply) = property.reply() { + if let Some(atoms) = reply.value32() { + for atom in atoms { + if xdnd_is_atom_supported(atom, &supported_atoms) { + return atom; + } + } + } + } + return 0; +} + +fn xdnd_send_finished( + xcb_connection: &XCBConnection, + atoms: &XcbAtoms, + source: xproto::Window, + target: xproto::Window, +) { + let message = ClientMessageEvent { + format: 32, + window: target, + type_: atoms.XdndFinished, + data: ClientMessageData::from([source, 1, atoms.XdndActionCopy, 0, 0]), + sequence: 0, + response_type: xproto::CLIENT_MESSAGE_EVENT, + }; + xcb_connection + .send_event(false, target, EventMask::default(), message) + .unwrap(); +} + +fn xdnd_send_status( + xcb_connection: &XCBConnection, + atoms: &XcbAtoms, + source: xproto::Window, + target: xproto::Window, + action: u32, +) { + let message = ClientMessageEvent { + format: 32, + window: target, + type_: atoms.XdndStatus, + data: ClientMessageData::from([source, 1, 0, 0, action]), + sequence: 0, + response_type: xproto::CLIENT_MESSAGE_EVENT, + }; + xcb_connection + .send_event(false, target, EventMask::default(), message) + .unwrap(); +} diff --git 
a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index f1aa10f311..b0cf82d605 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -32,7 +32,24 @@ use std::{ use super::{X11Display, XINPUT_MASTER_DEVICE}; x11rb::atom_manager! { pub XcbAtoms: AtomsCookie { + XA_ATOM, + XdndAware, + XdndStatus, + XdndEnter, + XdndLeave, + XdndPosition, + XdndSelection, + XdndDrop, + XdndFinished, + XdndTypeList, + XdndActionCopy, + TextUriList: b"text/uri-list", UTF8_STRING, + TEXT, + STRING, + TEXT_PLAIN_UTF8: b"text/plain;charset=utf-8", + TEXT_PLAIN: b"text/plain", + XDND_DATA, WM_PROTOCOLS, WM_DELETE_WINDOW, WM_CHANGE_STATE, diff --git a/typos.toml b/typos.toml index 2bbb4907a7..1b5c82b906 100644 --- a/typos.toml +++ b/typos.toml @@ -56,6 +56,8 @@ extend-ignore-re = [ "rename = \"sesssion_id\"", "doas", # ProtoLS crate with tree-sitter Protobuf grammar. - "protols" + "protols", + # x11rb SelectionNotifyEvent struct field + "requestor" ] check-filename = true From f92d0de58d1d14faf1eb1f68d8cdff3792613927 Mon Sep 17 00:00:00 2001 From: Taras Martyniuk Date: Tue, 10 Sep 2024 00:31:57 +0300 Subject: [PATCH 012/270] terraform: Update indents (#17200) Closes #15988 Fixed indent configuration for terraform/HCL Release Notes: - N/A https://github.com/user-attachments/assets/09b44ac9-ef09-463c-876d-0fbcdd1f09c9 --------- Co-authored-by: Marshall Bowers --- extensions/terraform/languages/hcl/config.toml | 1 + extensions/terraform/languages/hcl/indents.scm | 8 +++++--- extensions/terraform/languages/terraform-vars/config.toml | 1 + extensions/terraform/languages/terraform-vars/indents.scm | 8 +++++--- extensions/terraform/languages/terraform/indents.scm | 8 +++++--- 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/extensions/terraform/languages/hcl/config.toml b/extensions/terraform/languages/hcl/config.toml index 891b2f38d4..be7e601e01 100644 --- 
a/extensions/terraform/languages/hcl/config.toml +++ b/extensions/terraform/languages/hcl/config.toml @@ -12,3 +12,4 @@ brackets = [ { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] }, { start = "/*", end = " */", close = true, newline = false, not_in = ["comment", "string"] }, ] +tab_size = 2 diff --git a/extensions/terraform/languages/hcl/indents.scm b/extensions/terraform/languages/hcl/indents.scm index 74edb66bdf..be12af1d0c 100644 --- a/extensions/terraform/languages/hcl/indents.scm +++ b/extensions/terraform/languages/hcl/indents.scm @@ -6,6 +6,8 @@ (function_call) ] @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent -(_ "{" "}" @end) @indent +[ + "]" + "}" + ")" +] @outdent diff --git a/extensions/terraform/languages/terraform-vars/config.toml b/extensions/terraform/languages/terraform-vars/config.toml index 12ed7e236c..4d803ee36e 100644 --- a/extensions/terraform/languages/terraform-vars/config.toml +++ b/extensions/terraform/languages/terraform-vars/config.toml @@ -12,3 +12,4 @@ brackets = [ { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] }, { start = "/*", end = " */", close = true, newline = false, not_in = ["comment", "string"] }, ] +tab_size = 2 diff --git a/extensions/terraform/languages/terraform-vars/indents.scm b/extensions/terraform/languages/terraform-vars/indents.scm index 95ad93df1d..b9ba0ad56a 100644 --- a/extensions/terraform/languages/terraform-vars/indents.scm +++ b/extensions/terraform/languages/terraform-vars/indents.scm @@ -6,9 +6,11 @@ (function_call) ] @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent -(_ "{" "}" @end) @indent +[ + "]" + "}" + ")" +] @outdent ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/indents.scm ; inherits: hcl diff --git a/extensions/terraform/languages/terraform/indents.scm b/extensions/terraform/languages/terraform/indents.scm index 
95ad93df1d..b9ba0ad56a 100644 --- a/extensions/terraform/languages/terraform/indents.scm +++ b/extensions/terraform/languages/terraform/indents.scm @@ -6,9 +6,11 @@ (function_call) ] @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent -(_ "{" "}" @end) @indent +[ + "]" + "}" + ")" +] @outdent ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/indents.scm ; inherits: hcl From c1193875e8e93746379514e9ce4fa3db5a0b6503 Mon Sep 17 00:00:00 2001 From: Sergio Nonide <60042926+senonide@users.noreply.github.com> Date: Mon, 9 Sep 2024 23:44:02 +0200 Subject: [PATCH 013/270] Fix blurry cursor on Wayland at a scale other than 100% (#17496) Closes #13258 Release Notes: - Fixed blurry mouse cursor on wayland when the screen scale is other than 100% Before: ![Screenshot from 2024-09-06 14-38-30](https://github.com/user-attachments/assets/e4553503-ecea-4b53-b80d-43732d34fa62) After: ![Screenshot from 2024-09-06 14-38-56](https://github.com/user-attachments/assets/ce563d3a-2b44-44b9-9f59-f0042609924e) --- .../gpui/src/platform/linux/wayland/client.rs | 3 +- .../gpui/src/platform/linux/wayland/cursor.rs | 33 +++++++++++++++---- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 67cd1dcbd4..57c43a7e46 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -476,7 +476,8 @@ impl WaylandClient { .as_ref() .map(|primary_selection_manager| primary_selection_manager.get_device(&seat, &qh, ())); - let mut cursor = Cursor::new(&conn, &globals, 24); + // FIXME: Determine the scaling factor dynamically by the compositor + let mut cursor = Cursor::new(&conn, &globals, 24, 2); handle .insert_source(XDPEventSource::new(&common.background_executor), { diff --git a/crates/gpui/src/platform/linux/wayland/cursor.rs 
b/crates/gpui/src/platform/linux/wayland/cursor.rs index 6a52765042..ea29eee73c 100644 --- a/crates/gpui/src/platform/linux/wayland/cursor.rs +++ b/crates/gpui/src/platform/linux/wayland/cursor.rs @@ -11,6 +11,7 @@ pub(crate) struct Cursor { theme_name: Option, surface: WlSurface, size: u32, + scale: u32, shm: WlShm, connection: Connection, } @@ -23,7 +24,7 @@ impl Drop for Cursor { } impl Cursor { - pub fn new(connection: &Connection, globals: &Globals, size: u32) -> Self { + pub fn new(connection: &Connection, globals: &Globals, size: u32, scale: u32) -> Self { Self { theme: CursorTheme::load(&connection, globals.shm.clone(), size).log_err(), theme_name: None, @@ -31,6 +32,7 @@ impl Cursor { shm: globals.shm.clone(), connection: connection.clone(), size, + scale, } } @@ -38,14 +40,18 @@ impl Cursor { if let Some(size) = size { self.size = size; } - if let Some(theme) = - CursorTheme::load_from_name(&self.connection, self.shm.clone(), theme_name, self.size) - .log_err() + if let Some(theme) = CursorTheme::load_from_name( + &self.connection, + self.shm.clone(), + theme_name, + self.size * self.scale, + ) + .log_err() { self.theme = Some(theme); self.theme_name = Some(theme_name.to_string()); } else if let Some(theme) = - CursorTheme::load(&self.connection, self.shm.clone(), self.size).log_err() + CursorTheme::load(&self.connection, self.shm.clone(), self.size * self.scale).log_err() { self.theme = Some(theme); self.theme_name = None; @@ -91,9 +97,22 @@ impl Cursor { let (width, height) = buffer.dimensions(); let (hot_x, hot_y) = buffer.hotspot(); - wl_pointer.set_cursor(serial_id, Some(&self.surface), hot_x as i32, hot_y as i32); + let scaled_width = width / self.scale; + let scaled_height = height / self.scale; + let scaled_hot_x = hot_x / self.scale; + let scaled_hot_y = hot_y / self.scale; + + self.surface.set_buffer_scale(self.scale as i32); + + wl_pointer.set_cursor( + serial_id, + Some(&self.surface), + scaled_hot_x as i32, + scaled_hot_y as i32, + ); 
self.surface.attach(Some(&buffer), 0, 0); - self.surface.damage(0, 0, width as i32, height as i32); + self.surface + .damage(0, 0, scaled_width as i32, scaled_height as i32); self.surface.commit(); } } else { From da9601c69851b2b9c2997611d4834307a2d6b5f9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 9 Sep 2024 17:49:40 -0400 Subject: [PATCH 014/270] markdown: Handle definition lists in parser (#17617) Resolves https://github.com/zed-industries/zed/issues/17607. This PR makes it so the Markdown parser can handle Markdown containing definition lists. Note that this is just parser support, we aren't yet doing anything with the definition lists themselves. Release Notes: - N/A --- crates/markdown/src/parser.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index cb83b2356e..3dd11be983 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -232,6 +232,10 @@ pub enum MarkdownTag { /// A metadata block. 
MetadataBlock(MetadataBlockKind), + + DefinitionList, + DefinitionListTitle, + DefinitionListDefinition, } #[derive(Clone, Debug, PartialEq)] @@ -317,11 +321,9 @@ impl From> for MarkdownTag { }, pulldown_cmark::Tag::HtmlBlock => MarkdownTag::HtmlBlock, pulldown_cmark::Tag::MetadataBlock(kind) => MarkdownTag::MetadataBlock(kind), - pulldown_cmark::Tag::DefinitionList - | pulldown_cmark::Tag::DefinitionListTitle - | pulldown_cmark::Tag::DefinitionListDefinition => { - unimplemented!("definition lists are not yet supported") - } + pulldown_cmark::Tag::DefinitionList => MarkdownTag::DefinitionList, + pulldown_cmark::Tag::DefinitionListTitle => MarkdownTag::DefinitionListTitle, + pulldown_cmark::Tag::DefinitionListDefinition => MarkdownTag::DefinitionListDefinition, } } } From f71cb14d7a3c876b1a5a69949a5a198ed3bfbd5a Mon Sep 17 00:00:00 2001 From: ZZzzaaKK <66885975+ZZzzaaKK@users.noreply.github.com> Date: Mon, 9 Sep 2024 23:52:43 +0200 Subject: [PATCH 015/270] Add ',' to word chars for line wrapping (#17590) Closes #16407 Adds ',' to the is_word_char() matches for line wrapping, so that commas aren't wrapped to the start of a new line. Release Notes: - N/A --- crates/gpui/src/text_system/line_wrapper.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 31e852afdf..3d38ca315c 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -153,7 +153,7 @@ impl LineWrapper { matches!(c, '\u{0400}'..='\u{04FF}') || // Some other known special characters that should be treated as word characters, // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, `2^3`, `a~b`, etc. - matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~') || + matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',') || // Characters that used in URL, e.g. 
`https://github.com/zed-industries/zed?a=1&b=2` for better wrapping a long URL. matches!(c, '/' | ':' | '?' | '&' | '=') || // `⋯` character is special used in Zed, to keep this at the end of the line. From 2fc74a1b7132db1d9c20b1405b4bd3b9d030230f Mon Sep 17 00:00:00 2001 From: Kenichi Kamiya Date: Tue, 10 Sep 2024 19:50:43 +0900 Subject: [PATCH 016/270] Update doc comments with tabs.file_icons default (#17629) The diff only contains doc comments changes, however I expect this also fixes generating JSON Schema which generated by [schemars](https://github.com/GREsau/schemars/blob/092dc17ae4831d42974653588cebcc089d07493e/docs/examples/6-doc_comments.md). This default value is actually true at first. 1818fef32f24f24f082c6f34a4c3100add6d328c However, it was changed in the following commit. bf7e474bbcc2fadf002adb273e2584c77c1573e3 Closes #17628 Release Notes: - N/A --- crates/workspace/src/item.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 422ed1f165..935f0268b6 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -79,7 +79,7 @@ pub struct ItemSettingsContent { close_position: Option, /// Whether to show the file icon for a tab. 
/// - /// Default: true + /// Default: false file_icons: Option, } From 56bc3c36ad1140562c4ded64536c65f17e0d173f Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 12:55:46 +0200 Subject: [PATCH 017/270] project search: make sorting comparator comply with Ord preconditions (#17604) Closes #17493 /cc @SomeoneToIgnore /cc @ConradIrwin Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 38 +++++++++--------- crates/util/src/paths.rs | 42 +++++++++++--------- crates/util/src/util.rs | 48 +++++++++++------------ 3 files changed, 65 insertions(+), 63 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 9add77c864..32ccd47a89 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -3519,9 +3519,9 @@ mod tests { " > .git", " > a", " v b", - " > [EDITOR: ''] <== selected", " > 3", " > 4", + " > [EDITOR: ''] <== selected", " a-different-filename.tar.gz", " > C", " .dockerignore", @@ -3542,10 +3542,10 @@ mod tests { " > .git", " > a", " v b", - " > [PROCESSING: 'new-dir']", - " > 3 <== selected", + " > 3", " > 4", - " a-different-filename.tar.gz", + " > [PROCESSING: 'new-dir']", + " a-different-filename.tar.gz <== selected", " > C", " .dockerignore", ] @@ -3559,10 +3559,10 @@ mod tests { " > .git", " > a", " v b", - " > 3 <== selected", + " > 3", " > 4", " > new-dir", - " a-different-filename.tar.gz", + " a-different-filename.tar.gz <== selected", " > C", " .dockerignore", ] @@ -3576,10 +3576,10 @@ mod tests { " > .git", " > a", " v b", - " > [EDITOR: '3'] <== selected", + " > 3", " > 4", " > new-dir", - " a-different-filename.tar.gz", + " [EDITOR: 'a-different-filename.tar.gz'] <== selected", " > C", " .dockerignore", ] @@ -3594,10 +3594,10 @@ mod tests { " > .git", " > a", " v b", - " > 3 <== selected", + " > 3", " > 4", " > new-dir", - " a-different-filename.tar.gz", + " 
a-different-filename.tar.gz <== selected", " > C", " .dockerignore", ] @@ -3844,8 +3844,8 @@ mod tests { &[ // "v root1", - " one.two.txt <== selected", - " one.txt", + " one.txt <== selected", + " one.two.txt", ] ); @@ -3862,9 +3862,9 @@ mod tests { &[ // "v root1", - " one.two copy.txt <== selected", - " one.two.txt", " one.txt", + " one copy.txt <== selected", + " one.two.txt", ] ); @@ -3878,10 +3878,10 @@ mod tests { &[ // "v root1", - " one.two copy 1.txt <== selected", - " one.two copy.txt", - " one.two.txt", " one.txt", + " one copy.txt", + " one copy 1.txt <== selected", + " one.two.txt", ] ); } @@ -4074,8 +4074,8 @@ mod tests { " > b", " four.txt", " one.txt", - " three copy.txt <== selected", " three.txt", + " three copy.txt <== selected", " two.txt", ] ); @@ -4105,8 +4105,8 @@ mod tests { " > b", " four.txt", " one.txt", - " three copy.txt", " three.txt", + " three copy.txt", " two.txt", ] ); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 3143cb49e3..cd5beedf47 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -9,9 +9,8 @@ use std::{ use globset::{Glob, GlobSet, GlobSetBuilder}; use regex::Regex; use serde::{Deserialize, Serialize}; -use unicase::UniCase; -use crate::{maybe, NumericPrefixWithSuffix}; +use crate::NumericPrefixWithSuffix; /// Returns the path to the user's home directory. 
pub fn home_dir() -> &'static PathBuf { @@ -282,34 +281,29 @@ pub fn compare_paths( let a_is_file = components_a.peek().is_none() && a_is_file; let b_is_file = components_b.peek().is_none() && b_is_file; let ordering = a_is_file.cmp(&b_is_file).then_with(|| { - let maybe_numeric_ordering = maybe!({ - let path_a = Path::new(component_a.as_os_str()); - let num_and_remainder_a = if a_is_file { + let path_a = Path::new(component_a.as_os_str()); + let num_and_remainder_a = NumericPrefixWithSuffix::from_numeric_prefixed_str( + if a_is_file { path_a.file_stem() } else { path_a.file_name() } .and_then(|s| s.to_str()) - .and_then(NumericPrefixWithSuffix::from_numeric_prefixed_str)?; + .unwrap_or_default(), + ); - let path_b = Path::new(component_b.as_os_str()); - let num_and_remainder_b = if b_is_file { + let path_b = Path::new(component_b.as_os_str()); + let num_and_remainder_b = NumericPrefixWithSuffix::from_numeric_prefixed_str( + if b_is_file { path_b.file_stem() } else { path_b.file_name() } .and_then(|s| s.to_str()) - .and_then(NumericPrefixWithSuffix::from_numeric_prefixed_str)?; + .unwrap_or_default(), + ); - num_and_remainder_a.partial_cmp(&num_and_remainder_b) - }); - - maybe_numeric_ordering.unwrap_or_else(|| { - let name_a = UniCase::new(component_a.as_os_str().to_string_lossy()); - let name_b = UniCase::new(component_b.as_os_str().to_string_lossy()); - - name_a.cmp(&name_b) - }) + num_and_remainder_a.cmp(&num_and_remainder_b) }); if !ordering.is_eq() { return ordering; @@ -350,6 +344,18 @@ mod tests { (Path::new("test_dirs/1.46/bar_2"), true), ] ); + let mut paths = vec![ + (Path::new("root1/one.txt"), true), + (Path::new("root1/one.two.txt"), true), + ]; + paths.sort_by(|&a, &b| compare_paths(a, b)); + assert_eq!( + paths, + vec![ + (Path::new("root1/one.txt"), true), + (Path::new("root1/one.two.txt"), true), + ] + ); } #[test] diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 25ef363035..40a5cf6212 100644 --- a/crates/util/src/util.rs +++ 
b/crates/util/src/util.rs @@ -644,27 +644,27 @@ impl RangeExt for RangeInclusive { /// This is useful for turning regular alphanumerically sorted sequences as `1-abc, 10, 11-def, .., 2, 21-abc` /// into `1-abc, 2, 10, 11-def, .., 21-abc` #[derive(Debug, PartialEq, Eq)] -pub struct NumericPrefixWithSuffix<'a>(i32, &'a str); +pub struct NumericPrefixWithSuffix<'a>(Option, &'a str); impl<'a> NumericPrefixWithSuffix<'a> { - pub fn from_numeric_prefixed_str(str: &'a str) -> Option { + pub fn from_numeric_prefixed_str(str: &'a str) -> Self { let i = str.chars().take_while(|c| c.is_ascii_digit()).count(); let (prefix, remainder) = str.split_at(i); - match prefix.parse::() { - Ok(prefix) => Some(NumericPrefixWithSuffix(prefix, remainder)), - Err(_) => None, - } + let prefix = prefix.parse().ok(); + Self(prefix, remainder) } } - impl Ord for NumericPrefixWithSuffix<'_> { fn cmp(&self, other: &Self) -> Ordering { - let NumericPrefixWithSuffix(num_a, remainder_a) = self; - let NumericPrefixWithSuffix(num_b, remainder_b) = other; - num_a - .cmp(num_b) - .then_with(|| UniCase::new(remainder_a).cmp(&UniCase::new(remainder_b))) + match (self.0, other.0) { + (None, None) => UniCase::new(self.1).cmp(&UniCase::new(other.1)), + (None, Some(_)) => Ordering::Greater, + (Some(_), None) => Ordering::Less, + (Some(a), Some(b)) => a + .cmp(&b) + .then_with(|| UniCase::new(self.1).cmp(&UniCase::new(other.1))), + } } } @@ -737,66 +737,62 @@ mod tests { let target = "1a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, "a")) + NumericPrefixWithSuffix(Some(1), "a") ); let target = "12ab"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, "ab")) + NumericPrefixWithSuffix(Some(12), "ab") ); let target = "12_ab"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, "_ab")) + NumericPrefixWithSuffix(Some(12), "_ab") ); let target = 
"1_2ab"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, "_2ab")) + NumericPrefixWithSuffix(Some(1), "_2ab") ); let target = "1.2"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, ".2")) + NumericPrefixWithSuffix(Some(1), ".2") ); let target = "1.2_a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, ".2_a")) + NumericPrefixWithSuffix(Some(1), ".2_a") ); let target = "12.2_a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, ".2_a")) + NumericPrefixWithSuffix(Some(12), ".2_a") ); let target = "12a.2_a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, "a.2_a")) + NumericPrefixWithSuffix(Some(12), "a.2_a") ); } #[test] fn test_numeric_prefix_with_suffix() { let mut sorted = vec!["1-abc", "10", "11def", "2", "21-abc"]; - sorted.sort_by_key(|s| { - NumericPrefixWithSuffix::from_numeric_prefixed_str(s).unwrap_or_else(|| { - panic!("Cannot convert string `{s}` into NumericPrefixWithSuffix") - }) - }); + sorted.sort_by_key(|s| NumericPrefixWithSuffix::from_numeric_prefixed_str(s)); assert_eq!(sorted, ["1-abc", "2", "10", "11def", "21-abc"]); for numeric_prefix_less in ["numeric_prefix_less", "aaa", "~™£"] { assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(numeric_prefix_less), - None, + NumericPrefixWithSuffix(None, numeric_prefix_less), "String without numeric prefix `{numeric_prefix_less}` should not be converted into NumericPrefixWithSuffix" ) } From 75256bdfe1b054688d79a0b6fb4900888ea16279 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 14:41:02 +0200 Subject: [PATCH 018/270] lsp: Add support for workspace/workspaceFolders request (#17639) Related to: #17574 Release Notes: - N/A --- 
crates/project/src/lsp_store.rs | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 5b4dbd1dea..1d9ca98c06 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -40,7 +40,7 @@ use lsp::{ CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, LanguageServer, LanguageServerBinary, LanguageServerId, LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, - ServerStatus, SymbolKind, TextEdit, WorkDoneProgressCancelParams, + ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, }; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -4774,6 +4774,30 @@ impl LspStore { }) .detach(); + let id = language_server.server_id(); + language_server + .on_request::({ + let this = this.clone(); + move |_, mut cx| { + let this = this.clone(); + async move { + let Some(server) = + this.update(&mut cx, |this, _| this.language_server_for_id(id))? + else { + return Ok(None); + }; + let root = server.root_path(); + let Ok(uri) = Url::from_file_path(&root) else { + return Ok(None); + }; + Ok(Some(vec![WorkspaceFolder { + uri, + name: Default::default(), + }])) + } + } + }) + .detach(); // Even though we don't have handling for these requests, respond to them to // avoid stalling any language server like `gopls` which waits for a response // to these requests when initializing. From 929eff815cd7a03811536375b71de110ae53db60 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 14:50:23 +0200 Subject: [PATCH 019/270] project panel: Get rid of unwrap in autofolding code (#17641) @WeetHet spotted a crash in recently-introduced project panel autofolding that relates to unwrapping. 
Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 32ccd47a89..56d524cdc7 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -115,7 +115,6 @@ struct EntryDetails { is_cut: bool, git_status: Option, is_private: bool, - is_auto_folded: bool, worktree_id: WorktreeId, canonical_path: Option>, } @@ -2099,7 +2098,6 @@ impl ProjectPanel { .map_or(false, |e| e.is_cut() && e.items().contains(&selection)), git_status: status, is_private: entry.is_private, - is_auto_folded: difference > 1, worktree_id: *worktree_id, canonical_path: entry.canonical_path.clone(), }; @@ -2212,7 +2210,6 @@ impl ProjectPanel { active_selection: selection, marked_selections: selections, }; - let is_auto_folded = details.is_auto_folded; div() .id(entry_id.to_proto() as usize) .on_drag_move::(cx.listener( @@ -2314,8 +2311,9 @@ impl ProjectPanel { h_flex().h_6().w_full().child(editor.clone()) } else { h_flex().h_6().map(|this| { - if is_auto_folded && is_active { - let folded_ancestors = self.ancestors.get(&entry_id).unwrap(); + if let Some(folded_ancestors) = + is_active.then(|| self.ancestors.get(&entry_id)).flatten() + { let Some(part_to_highlight) = Path::new(&file_name) .ancestors() .nth(folded_ancestors.current_ancestor_depth) From bf64c0899f076b8a62071ffae22fb9b35af667c0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 10 Sep 2024 10:22:12 -0400 Subject: [PATCH 020/270] go: Fix regression by restoring regex to match tests (#17645) This fixes a regression that snuck in with #17108. When running a single test with `go test` the regex wouldn't be used anymore. This restores the old behavior. Release Notes: - Fixed a regression when running Go tests. A recent change dropped the regex used to match single test names when using `go test` in tasks to run tests. 
That could lead to more or the wrong tests being run. This restores the old behavior. --- crates/languages/src/go.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index c22a4e3eda..a528f4f70c 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -518,7 +518,13 @@ impl ContextProvider for GoContextProvider { GO_PACKAGE_TASK_VARIABLE.template_value(), VariableName::Symbol.template_value(), ), - command: format!("go test -run {}", VariableName::Symbol.template_value(),), + command: "go".into(), + args: vec![ + "test".into(), + GO_PACKAGE_TASK_VARIABLE.template_value(), + "-run".into(), + format!("^{}\\$", VariableName::Symbol.template_value(),), + ], tags: vec!["go-test".to_owned()], cwd: package_cwd.clone(), ..TaskTemplate::default() From 93b3520c11cd645d595f6158bc0cc178238105c5 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:03:44 -0400 Subject: [PATCH 021/270] assistant: Prevent possible execution of generated terminal commands (#17647) Closes #17424 Release Notes: - Fixed an issue where commands generated by the terminal command could sometimes be executed without confirmation --- crates/assistant/src/terminal_inline_assistant.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 479925b060..bb3f9d36bf 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -988,7 +988,7 @@ impl TerminalTransaction { pub fn push(&mut self, hunk: String, cx: &mut AppContext) { // Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal - let input = hunk.replace(CARRIAGE_RETURN, " "); + let input = Self::sanitize_input(hunk); self.terminal .update(cx, |terminal, _| terminal.input(input)); } @@ -1003,6 +1003,10 @@ impl 
TerminalTransaction { terminal.input(CARRIAGE_RETURN.to_string()) }); } + + fn sanitize_input(input: String) -> String { + input.replace(['\r', '\n'], "") + } } pub struct Codegen { From a078cb104c01d59442bc52f192e14807723f1278 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 11:16:27 -0400 Subject: [PATCH 022/270] Disable definition lists in Markdown (#17648) This PR disables definition list support in `pulldown_cmark`, as it is has been causing a number of issues. I opened an issue upstream with the panic we were seeing: https://github.com/pulldown-cmark/pulldown-cmark/issues/957. Release Notes: - N/A --- crates/language/src/markdown.rs | 2 ++ crates/markdown/src/parser.rs | 5 ++++- crates/markdown_preview/src/markdown_parser.rs | 4 +++- crates/rich_text/src/rich_text.rs | 4 +++- 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/crates/language/src/markdown.rs b/crates/language/src/markdown.rs index 98b9ba53b1..b9393a16ab 100644 --- a/crates/language/src/markdown.rs +++ b/crates/language/src/markdown.rs @@ -166,6 +166,7 @@ pub async fn parse_markdown_block( let mut list_stack = Vec::new(); let mut options = pulldown_cmark::Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); options.remove(pulldown_cmark::Options::ENABLE_YAML_STYLE_METADATA_BLOCKS); for event in Parser::new_ext(markdown, options) { @@ -384,6 +385,7 @@ public: void format(const int &, const std::tm &, int &dest) "#; let mut options = pulldown_cmark::Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); options.remove(pulldown_cmark::Options::ENABLE_YAML_STYLE_METADATA_BLOCKS); let parser = pulldown_cmark::Parser::new_ext(input, options); diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 3dd11be983..7d349e29ef 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -5,10 +5,13 @@ use pulldown_cmark::{Alignment, HeadingLevel, LinkType, 
MetadataBlockKind, Optio use std::ops::Range; pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { + let mut options = Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); + let mut events = Vec::new(); let mut within_link = false; let mut within_metadata = false; - for (pulldown_event, mut range) in Parser::new_ext(text, Options::all()).into_offset_iter() { + for (pulldown_event, mut range) in Parser::new_ext(text, options).into_offset_iter() { if within_metadata { if let pulldown_cmark::Event::End(pulldown_cmark::TagEnd::MetadataBlock { .. }) = pulldown_event diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 4a607f4d72..7e503fb609 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -11,7 +11,9 @@ pub async fn parse_markdown( file_location_directory: Option, language_registry: Option>, ) -> ParsedMarkdown { - let options = Options::all(); + let mut options = Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); + let parser = Parser::new_ext(markdown_input, options); let parser = MarkdownParser::new( parser.into_offset_iter().collect(), diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs index 2c4b2ca8ee..80b7786c24 100644 --- a/crates/rich_text/src/rich_text.rs +++ b/crates/rich_text/src/rich_text.rs @@ -195,7 +195,9 @@ pub fn render_markdown_mut( let mut current_language = None; let mut list_stack = Vec::new(); - let options = Options::all(); + let mut options = Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); + for (event, source_range) in Parser::new_ext(block, options).into_offset_iter() { let prev_len = text.len(); match event { From a7ac37156c98bf3a48c9506b8c65284089b76b72 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:23:50 -0400 Subject: [PATCH 023/270] assistant: 
Fix configuration page showing incorrect Anthropic API key label (#17650) Release Notes: - N/A --- crates/language_model/src/provider/anthropic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 37ee2faf40..eac4ad3021 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -695,7 +695,7 @@ impl Render for ConfigurationView { ) .child( Label::new( - "You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed.", + format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."), ) .size(LabelSize::Small), ) From d5498c52f8205d8e02e0a489ae3707960476a425 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:45:21 -0400 Subject: [PATCH 024/270] assistant: Fix terminal inline assistant not showing retry on error (#17651) Release Notes: - Fixed an issue where a failed inline assistant prompt could not be restarted --- crates/assistant/src/terminal_inline_assistant.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index bb3f9d36bf..61a8813f6c 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -465,7 +465,8 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let buttons = match &self.codegen.read(cx).status { + let status = &self.codegen.read(cx).status; + let buttons = match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ -516,7 +517,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click(cx.listener(|_, _, cx| 
cx.emit(PromptEditorEvent::CancelRequested))); - if self.edited_since_done { + let has_error = matches!(status, CodegenStatus::Error(_)); + if has_error || self.edited_since_done { vec![ cancel, IconButton::new("restart", IconName::RotateCw) From 5f61e3140f14f0411f4a39dbeb59af57b6810555 Mon Sep 17 00:00:00 2001 From: Eric Andres Date: Tue, 10 Sep 2024 09:49:04 -0600 Subject: [PATCH 025/270] Fix vim surround behavior around text objects (#17603) Performing `ysa")` on `"Hello World"` should produce `("Hello World")`. Instead it places the parens inside the quotes (i.e. `"(Hello World)"`). This PR fixes the behavior by preserving the `around` flag from the operator sequence. Closes #12976 and partially fixes #13841 Release Notes: - Fixed the behavior of surrounding a text object in vim. --- crates/vim/src/normal.rs | 2 +- crates/vim/src/surrounds.rs | 40 ++++++++++++++++++++++++++++++++++--- 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index ae560acc29..8198c0da53 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -248,7 +248,7 @@ impl Vim { } Some(Operator::AddSurrounds { target: None }) => { waiting_operator = Some(Operator::AddSurrounds { - target: Some(SurroundsType::Object(object)), + target: Some(SurroundsType::Object(object, around)), }); } Some(Operator::ToggleComments) => self.toggle_comments_object(object, around, cx), diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index 137801c3ee..81025103fb 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -13,7 +13,7 @@ use ui::ViewContext; #[derive(Clone, Debug, PartialEq, Eq)] pub enum SurroundsType { Motion(Motion), - Object(Object), + Object(Object, bool), Selection, } @@ -59,8 +59,8 @@ impl Vim { for selection in &display_selections { let range = match &target { - SurroundsType::Object(object) => { - object.range(&display_map, selection.clone(), false) + 
SurroundsType::Object(object, around) => { + object.range(&display_map, selection.clone(), *around) } SurroundsType::Motion(motion) => { motion @@ -697,6 +697,40 @@ mod test { the lazy dog."}, Mode::Normal, ); + + // test add surrounds around object + cx.set_state( + indoc! {" + The [quˇick] brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); + cx.simulate_keystrokes("y s a ] )"); + cx.assert_state( + indoc! {" + The ˇ([quick]) brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); + + // test add surrounds inside object + cx.set_state( + indoc! {" + The [quˇick] brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); + cx.simulate_keystrokes("y s i ] )"); + cx.assert_state( + indoc! {" + The [ˇ(quick)] brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); } #[gpui::test] From 0b0cd9005e3f40865cd37ce32a4938570755ba16 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:58:16 -0400 Subject: [PATCH 026/270] assistant: Fix file slash command not allowing to select multiple files when pressing tab (#17652) Release Notes: - Allow to add multiple files in a single `/file` command when pressing tab --- crates/assistant/src/slash_command/file_command.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index c253e5b91c..e5d8f1b2d6 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -164,11 +164,7 @@ impl SlashCommand for FileSlashCommand { Some(ArgumentCompletion { label, new_text: text, - after_completion: if path_match.is_dir { - AfterCompletion::Compose - } else { - AfterCompletion::Run - }, + after_completion: AfterCompletion::Compose, replace_previous_arguments: false, }) }) From 85f4c96feff44d2070b2b520de9f4620ecc84b02 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 12:40:54 -0400 Subject: [PATCH 
027/270] Ubuntu 22 (Linux arm runner) fixes (#17643) Our GitHub Actions Linux ARM hosted runner was running Ubuntu 20 was EOL'd. This gets builds working on the Ubuntu 22 Linux ARM runner which have spun to replace the EOL'd one. It pushes forward our Glibc requirement for Linux ARM users (was >= 2.29, now >= 2.35; sorry!) but also uses a newer version of clang/llvm (was 10, now 15; yay!). --- .github/workflows/ci.yml | 6 +++--- docs/src/linux.md | 16 +++++++++++----- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37e80e5a8d..02bec28714 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -339,7 +339,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - bundle-linux-aarch64: + bundle-linux-aarch64: # this runs on ubuntu22.04 timeout-minutes: 60 name: Create arm64 Linux bundle runs-on: @@ -360,8 +360,8 @@ jobs: - name: Set up Clang run: | sudo apt-get update - sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev - echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH + sudo apt-get install -y llvm-15 clang-15 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev + echo "/usr/lib/llvm-15/bin" >> $GITHUB_PATH - uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1 with: diff --git a/docs/src/linux.md b/docs/src/linux.md index 2b9a66d51e..812a3707d0 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -12,15 +12,21 @@ We also offer a preview build of Zed which receives updates about a week ahead o curl -f https://zed.dev/install.sh | ZED_CHANNEL=preview sh ``` -The Zed installed by the script does not work on systems that: +The Zed installed by the script works best on systems that: -- have no Vulkan 
compatible GPU available (for example Linux on an M-series macBook) -- have no system-wide glibc (for example on NixOS or Alpine by default) -- have a glibc older than version 2.29 (for example Amazon Linux 2 or Ubuntu 18 and earlier) -- use an architecture other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) +- have a Vulkan compatible GPU available (for example Linux on an M-series macBook) +- have a system-wide glibc (NixOS and Alpine do not by default) + - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer; Amazon Linux >2023) + - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). +You will need to build from source for: + +- architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) +- Amazon Linux 2 on x86_64 +- Rocky Linux 9.3 + ## Other ways to install Zed on Linux Zed is open source, and [you can install from source](./development/linux.md). 
From fb9d01b0d5c23a1f057bac06c86d5dbaa9a7c39d Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 13:41:06 -0400 Subject: [PATCH 028/270] assistant: Add display_name for OpenAI and Gemini (#17508) --- crates/assistant/src/assistant_settings.rs | 2 ++ crates/google_ai/src/google_ai.rs | 11 +++++++++-- crates/language_model/src/provider/cloud.rs | 2 ++ crates/language_model/src/provider/google.rs | 2 ++ crates/language_model/src/provider/open_ai.rs | 8 +++----- crates/language_model/src/settings.rs | 2 ++ crates/open_ai/src/open_ai.rs | 6 +++++- docs/src/assistant/configuration.md | 17 +++++++++-------- 8 files changed, 34 insertions(+), 16 deletions(-) diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index d57c1f19b6..3e326886d5 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -160,10 +160,12 @@ impl AssistantSettingsContent { .filter_map(|model| match model { OpenAiModel::Custom { name, + display_name, max_tokens, max_output_tokens, } => Some(open_ai::AvailableModel { name, + display_name, max_tokens, max_output_tokens, }), diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index f0803b4029..f1dcedf5b3 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -304,7 +304,12 @@ pub enum Model { #[serde(rename = "gemini-1.5-flash")] Gemini15Flash, #[serde(rename = "custom")] - Custom { name: String, max_tokens: usize }, + Custom { + name: String, + /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + display_name: Option, + max_tokens: usize, + }, } impl Model { @@ -320,7 +325,9 @@ impl Model { match self { Model::Gemini15Pro => "Gemini 1.5 Pro", Model::Gemini15Flash => "Gemini 1.5 Flash", - Model::Custom { name, .. } => name, + Self::Custom { + name, display_name, .. 
+ } => display_name.as_ref().unwrap_or(name), } } diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 3db155393d..0de7fb3feb 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -254,11 +254,13 @@ impl LanguageModelProvider for CloudLanguageModelProvider { }), AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, }), AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, }), }; diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs index 1b24e8eda9..fc4a7a7a34 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_model/src/provider/google.rs @@ -37,6 +37,7 @@ pub struct GoogleSettings { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct AvailableModel { name: String, + display_name: Option, max_tokens: usize, } @@ -170,6 +171,7 @@ impl LanguageModelProvider for GoogleLanguageModelProvider { model.name.clone(), google_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, }, ); diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 6b1790c1a1..15d84f6cca 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -40,6 +40,7 @@ pub struct OpenAiSettings { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct AvailableModel { pub name: String, + pub display_name: Option, pub max_tokens: usize, pub max_output_tokens: Option, } @@ -171,6 +172,7 @@ impl LanguageModelProvider 
for OpenAiLanguageModelProvider { model.name.clone(), open_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, }, @@ -368,11 +370,7 @@ pub fn count_open_ai_tokens( }) .collect::>(); - if let open_ai::Model::Custom { .. } = model { - tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) - } else { - tiktoken_rs::num_tokens_from_messages(model.id(), &messages) - } + tiktoken_rs::num_tokens_from_messages(model.id(), &messages) }) .boxed() } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 8d3838d236..0059ed56c4 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -175,12 +175,14 @@ impl OpenAiSettingsContent { .filter_map(|model| match model { open_ai::Model::Custom { name, + display_name, max_tokens, max_output_tokens, } => Some(provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens, + display_name, }), _ => None, }) diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 6be5327c04..5b621d6bb8 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -68,6 +68,8 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, + /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + display_name: Option, max_tokens: usize, max_output_tokens: Option, }, @@ -103,7 +105,9 @@ impl Model { Self::FourTurbo => "gpt-4-turbo", Self::FourOmni => "gpt-4o", Self::FourOmniMini => "gpt-4o-mini", - Self::Custom { name, .. } => name, + Self::Custom { + name, display_name, .. 
+ } => display_name.as_ref().unwrap_or(name), } } diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index eaf5ed13b4..0fd242c619 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -77,7 +77,7 @@ You can use Gemini 1.5 Pro/Flash with the Zed assistant by choosing it via the m 1. Go the Google AI Studio site and [create an API key](https://aistudio.google.com/app/apikey). 2. Open the configuration view (`assistant: show configuration`) and navigate to the Google AI section -3. Enter your Google AI API key +3. Enter your Google AI API key and press enter. The Google AI API key will be saved in your keychain. @@ -85,7 +85,7 @@ Zed will also use the `GOOGLE_AI_API_KEY` environment variable if it's defined. #### Google AI custom models {#google-ai-custom-models} -You can add custom models to the Google AI provider by adding the following to your Zed `settings.json`: +By default Zed will use `stable` versions of models, but you can use specific versions of models, including [experimental models](https://ai.google.dev/gemini-api/docs/models/experimental-models) with the Google AI provider by adding the following to your Zed `settings.json`: ```json { @@ -93,8 +93,9 @@ You can add custom models to the Google AI provider by adding the following to y "google": { "available_models": [ { - "name": "custom-model", - "max_tokens": 128000 + "name": "gemini-1.5-flash-latest", + "display_name": "Gemini 1.5 Flash (Latest)", + "max_tokens": 1000000 } ] } @@ -164,16 +165,16 @@ Zed will also use the `OPENAI_API_KEY` environment variable if it's defined. #### OpenAI Custom Models {#openai-custom-models} -You can add custom models to the OpenAI provider, by adding the following to your Zed `settings.json`: +The Zed Assistant comes pre-configured to use the latest version for common models (GPT-3.5 Turbo, GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o mini). 
If you wish to use alternate models, perhaps a preview release or a dated model release, you can do so by adding the following to your Zed `settings.json`: ```json { "language_models": { "openai": { - "version": "1", "available_models": [ { - "name": "custom-model", + "provider": "openai", + "name": "gpt-4o-2024-08-06", "max_tokens": 128000 } ] @@ -182,7 +183,7 @@ You can add custom models to the OpenAI provider, by adding the following to you } ``` -Custom models will be listed in the model dropdown in the assistant panel. +You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). Custom models will be listed in the model dropdown in the assistant panel. ### Advanced configuration {#advanced-configuration} From 06142f975b13f78455c31fae54bced9b82f308a3 Mon Sep 17 00:00:00 2001 From: Niklas Haas Date: Tue, 10 Sep 2024 20:26:48 +0200 Subject: [PATCH 029/270] Use the configured UI font size for the inline assistant (#17542) --- crates/assistant/src/inline_assistant.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index bfd85d2525..7bd74ccabf 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1921,7 +1921,7 @@ impl PromptEditor { font_family: settings.ui_font.family.clone(), font_features: settings.ui_font.features.clone(), font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), + font_size: settings.ui_font_size.into(), font_weight: settings.ui_font.weight, line_height: relative(1.3), ..Default::default() From ae3880e71a86ef1ddc6218d4a87d683a95672c68 Mon Sep 17 00:00:00 2001 From: KorigamiK <72932688+KorigamiK@users.noreply.github.com> Date: Wed, 11 Sep 2024 00:06:36 +0530 Subject: [PATCH 030/270] Add ability to open files with system default application (#17231) --- 
assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 2 +- assets/keymaps/vim.json | 1 + crates/gpui/src/app.rs | 5 +++++ crates/gpui/src/platform.rs | 1 + crates/gpui/src/platform/linux/platform.rs | 13 +++++++++++++ crates/gpui/src/platform/mac/platform.rs | 14 ++++++++++++++ crates/gpui/src/platform/test/platform.rs | 4 ++++ crates/gpui/src/platform/windows/platform.rs | 13 +++++++++++++ crates/project_panel/src/project_panel.rs | 10 ++++++++++ 10 files changed, 63 insertions(+), 1 deletion(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index bd2ade4246..3c627d7803 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -553,6 +553,7 @@ "ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }], "ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-ctrl-r": "project_panel::RevealInFileManager", + "ctrl-shift-enter": "project_panel::OpenWithSystem", "alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index dec5cbd9f3..ed6ece0556 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -563,8 +563,8 @@ "cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }], "cmd-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-cmd-r": "project_panel::RevealInFileManager", + "ctrl-shift-enter": "project_panel::OpenWithSystem", "cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }], - "alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index f863e8488a..54905b2267 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -493,6 +493,7 @@ "v": "project_panel::OpenPermanent", "p": 
"project_panel::Open", "x": "project_panel::RevealInFileManager", + "s": "project_panel::OpenWithSystem", "shift-g": "menu::SelectLast", "g g": "menu::SelectFirst", "-": "project_panel::SelectParent", diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 2157f97634..564b893489 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -657,6 +657,11 @@ impl AppContext { self.platform.reveal_path(path) } + /// Opens the specified path with the system's default application. + pub fn open_with_system(&self, path: &Path) { + self.platform.open_with_system(path) + } + /// Returns whether the user has configured scrollbars to auto-hide at the platform level. pub fn should_auto_hide_scrollbars(&self) -> bool { self.platform.should_auto_hide_scrollbars() diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index cb54d9d47a..680c813078 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -149,6 +149,7 @@ pub(crate) trait Platform: 'static { ) -> oneshot::Receiver>>>; fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver>>; fn reveal_path(&self, path: &Path); + fn open_with_system(&self, path: &Path); fn on_quit(&self, callback: Box); fn on_reopen(&self, callback: Box); diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index 0aa17e534a..a0bd6b1d33 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -351,6 +351,19 @@ impl Platform for P { self.reveal_path(path.to_owned()); } + fn open_with_system(&self, path: &Path) { + let executor = self.background_executor().clone(); + let path = path.to_owned(); + executor + .spawn(async move { + let _ = std::process::Command::new("xdg-open") + .arg(path) + .spawn() + .expect("Failed to open file with xdg-open"); + }) + .detach(); + } + fn on_quit(&self, callback: Box) { self.with_common(|common| { common.callbacks.quit = Some(callback); diff --git 
a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index d03d8f0571..5873d8fe39 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -718,6 +718,20 @@ impl Platform for MacPlatform { } } + fn open_with_system(&self, path: &Path) { + let path = path.to_path_buf(); + self.0 + .lock() + .background_executor + .spawn(async move { + std::process::Command::new("open") + .arg(path) + .spawn() + .expect("Failed to open file"); + }) + .detach(); + } + fn on_quit(&self, callback: Box) { self.0.lock().quit = Some(callback); } diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index 58ca694d89..3258ae9af5 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -318,6 +318,10 @@ impl Platform for TestPlatform { fn register_url_scheme(&self, _: &str) -> Task> { unimplemented!() } + + fn open_with_system(&self, _path: &Path) { + unimplemented!() + } } #[cfg(target_os = "windows")] diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 2dcaf72ef2..f8b3924e62 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -400,6 +400,19 @@ impl Platform for WindowsPlatform { .detach(); } + fn open_with_system(&self, path: &Path) { + let executor = self.background_executor().clone(); + let path = path.to_owned(); + executor + .spawn(async move { + let _ = std::process::Command::new("cmd") + .args(&["/c", "start", "", path.to_str().expect("path to string")]) + .spawn() + .expect("Failed to open file"); + }) + .detach(); + } + fn on_quit(&self, callback: Box) { self.state.borrow_mut().callbacks.quit = Some(callback); } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 56d524cdc7..c77a2170dd 100644 --- a/crates/project_panel/src/project_panel.rs +++ 
b/crates/project_panel/src/project_panel.rs @@ -146,6 +146,7 @@ actions!( CopyRelativePath, Duplicate, RevealInFileManager, + OpenWithSystem, Cut, Paste, Rename, @@ -500,6 +501,7 @@ impl ProjectPanel { .when(cfg!(not(target_os = "macos")), |menu| { menu.action("Reveal in File Manager", Box::new(RevealInFileManager)) }) + .action("Open in Default App", Box::new(OpenWithSystem)) .action("Open in Terminal", Box::new(OpenInTerminal)) .when(is_dir, |menu| { menu.separator() @@ -1497,6 +1499,13 @@ impl ProjectPanel { } } + fn open_system(&mut self, _: &OpenWithSystem, cx: &mut ViewContext) { + if let Some((worktree, entry)) = self.selected_entry(cx) { + let abs_path = worktree.abs_path().join(&entry.path); + cx.open_with_system(&abs_path); + } + } + fn open_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let abs_path = worktree.abs_path().join(&entry.path); @@ -2711,6 +2720,7 @@ impl Render for ProjectPanel { }) .when(project.is_local_or_ssh(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) + .on_action(cx.listener(Self::open_system)) .on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( From 1b627925d3874b8461212945e8b41a28c5022aad Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Tue, 10 Sep 2024 14:40:51 -0400 Subject: [PATCH 031/270] v0.154.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f34f055ad..f9d3240d68 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14197,7 +14197,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.153.0" +version = "0.154.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index de8bc1f767..ad02d4f388 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition = "2021" name = "zed" -version = "0.153.0" +version = "0.154.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From a23e381096c623951212608119fc497101e281f1 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 15:25:57 -0400 Subject: [PATCH 032/270] assistant: Pass up tool results in LLM request messages (#17656) This PR makes it so we pass up the tool results in the `tool_results` field in the request message to the LLM. This required reworking how we track non-text content in the context editor. We also removed serialization of images in context history, as we were never deserializing it, and thus it was unneeded. Release Notes: - N/A --------- Co-authored-by: Antonio --- crates/assistant/src/assistant_panel.rs | 39 ++- crates/assistant/src/context.rs | 391 ++++++++++++------------ crates/paths/src/paths.rs | 6 - 3 files changed, 220 insertions(+), 216 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 82888b498a..22843d41cd 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -11,7 +11,7 @@ use crate::{ }, slash_command_picker, terminal_inline_assistant::TerminalInlineAssistant, - Assist, CacheStatus, ConfirmCommand, Context, ContextEvent, ContextId, ContextStore, + Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand, @@ -46,6 +46,7 @@ use indexed_docs::IndexedDocsStore; use language::{ language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, }; +use language_model::LanguageModelToolUse; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, 
LanguageModelProviderId, LanguageModelRegistry, Role, @@ -1995,6 +1996,20 @@ impl ContextEditor { let buffer_row = MultiBufferRow(start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); + self.context.update(cx, |context, cx| { + context.insert_content( + Content::ToolUse { + range: tool_use.source_range.clone(), + tool_use: LanguageModelToolUse { + id: tool_use.id.to_string(), + name: tool_use.name.clone(), + input: tool_use.input.clone(), + }, + }, + cx, + ); + }); + Crease::new( start..end, placeholder, @@ -3538,7 +3553,7 @@ impl ContextEditor { let image_id = image.id(); context.insert_image(image, cx); for image_position in image_positions.iter() { - context.insert_image_anchor(image_id, image_position.text_anchor, cx); + context.insert_image_content(image_id, image_position.text_anchor, cx); } } }); @@ -3553,11 +3568,23 @@ impl ContextEditor { let new_blocks = self .context .read(cx) - .images(cx) - .filter_map(|image| { + .contents(cx) + .filter_map(|content| { + if let Content::Image { + anchor, + render_image, + .. 
+ } = content + { + Some((anchor, render_image)) + } else { + None + } + }) + .filter_map(|(anchor, render_image)| { const MAX_HEIGHT_IN_LINES: u32 = 8; - let anchor = buffer.anchor_in_excerpt(excerpt_id, image.anchor).unwrap(); - let image = image.render_image.clone(); + let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap(); + let image = render_image.clone(); anchor.is_valid(&buffer).then(|| BlockProperties { position: anchor, height: MAX_HEIGHT_IN_LINES, diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 1bf846369b..e43ec203e9 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -17,7 +17,6 @@ use feature_flags::{FeatureFlag, FeatureFlagAppExt}; use fs::{Fs, RemoveOptions}; use futures::{ future::{self, Shared}, - stream::FuturesUnordered, FutureExt, StreamExt, }; use gpui::{ @@ -29,10 +28,11 @@ use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, P use language_model::{ LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent, LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelRequestTool, MessageContent, Role, StopReason, + LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, + StopReason, }; use open_ai::Model as OpenAiModel; -use paths::{context_images_dir, contexts_dir}; +use paths::contexts_dir; use project::Project; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; @@ -377,23 +377,8 @@ impl MessageMetadata { } } -#[derive(Clone, Debug)] -pub struct MessageImage { - image_id: u64, - image: Shared>>, -} - -impl PartialEq for MessageImage { - fn eq(&self, other: &Self) -> bool { - self.image_id == other.image_id - } -} - -impl Eq for MessageImage {} - #[derive(Clone, Debug)] pub struct Message { - pub image_offsets: SmallVec<[(usize, MessageImage); 1]>, pub offset_range: Range, pub index_range: Range, pub anchor_range: Range, 
@@ -403,60 +388,43 @@ pub struct Message { pub cache: Option, } -impl Message { - fn to_request_message(&self, buffer: &Buffer) -> Option { - let mut content = Vec::new(); - - let mut range_start = self.offset_range.start; - for (image_offset, message_image) in self.image_offsets.iter() { - if *image_offset != range_start { - if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) { - content.push(text); - } - } - - if let Some(image) = message_image.image.clone().now_or_never().flatten() { - content.push(language_model::MessageContent::Image(image)); - } - - range_start = *image_offset; - } - - if range_start != self.offset_range.end { - if let Some(text) = - Self::collect_text_content(buffer, range_start..self.offset_range.end) - { - content.push(text); - } - } - - if content.is_empty() { - return None; - } - - Some(LanguageModelRequestMessage { - role: self.role, - content, - cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor), - }) - } - - fn collect_text_content(buffer: &Buffer, range: Range) -> Option { - let text: String = buffer.text_for_range(range.clone()).collect(); - if text.trim().is_empty() { - None - } else { - Some(MessageContent::Text(text)) - } - } +#[derive(Debug, Clone)] +pub enum Content { + Image { + anchor: language::Anchor, + image_id: u64, + render_image: Arc, + image: Shared>>, + }, + ToolUse { + range: Range, + tool_use: LanguageModelToolUse, + }, + ToolResult { + range: Range, + tool_use_id: Arc, + }, } -#[derive(Clone, Debug)] -pub struct ImageAnchor { - pub anchor: language::Anchor, - pub image_id: u64, - pub render_image: Arc, - pub image: Shared>>, +impl Content { + fn range(&self) -> Range { + match self { + Self::Image { anchor, .. } => *anchor..*anchor, + Self::ToolUse { range, .. } | Self::ToolResult { range, .. 
} => range.clone(), + } + } + + fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering { + let self_range = self.range(); + let other_range = other.range(); + if self_range.end.cmp(&other_range.start, buffer).is_lt() { + Ordering::Less + } else if self_range.start.cmp(&other_range.end, buffer).is_gt() { + Ordering::Greater + } else { + Ordering::Equal + } + } } struct PendingCompletion { @@ -501,7 +469,7 @@ pub struct Context { pending_tool_uses_by_id: HashMap, PendingToolUse>, message_anchors: Vec, images: HashMap, Shared>>)>, - image_anchors: Vec, + contents: Vec, messages_metadata: HashMap, summary: Option, pending_summary: Task>, @@ -595,7 +563,7 @@ impl Context { pending_ops: Vec::new(), operations: Vec::new(), message_anchors: Default::default(), - image_anchors: Default::default(), + contents: Default::default(), images: Default::default(), messages_metadata: Default::default(), pending_slash_commands: Vec::new(), @@ -659,11 +627,6 @@ impl Context { id: message.id, start: message.offset_range.start, metadata: self.messages_metadata[&message.id].clone(), - image_offsets: message - .image_offsets - .iter() - .map(|image_offset| (image_offset.0, image_offset.1.image_id)) - .collect(), }) .collect(), summary: self @@ -1957,6 +1920,14 @@ impl Context { output_range }); + this.insert_content( + Content::ToolResult { + range: anchor_range.clone(), + tool_use_id: tool_use_id.clone(), + }, + cx, + ); + cx.emit(ContextEvent::ToolFinished { tool_use_id, output_range: anchor_range, @@ -2038,6 +2009,7 @@ impl Context { let stream_completion = async { let request_start = Instant::now(); let mut events = stream.await?; + let mut stop_reason = StopReason::EndTurn; while let Some(event) = events.next().await { if response_latency.is_none() { @@ -2050,7 +2022,7 @@ impl Context { .message_anchors .iter() .position(|message| message.id == assistant_message_id)?; - let event_to_emit = this.buffer.update(cx, |buffer, cx| { + this.buffer.update(cx, |buffer, cx| { let 
message_old_end_offset = this.message_anchors[message_ix + 1..] .iter() .find(|message| message.start.is_valid(buffer)) @@ -2059,13 +2031,9 @@ impl Context { }); match event { - LanguageModelCompletionEvent::Stop(reason) => match reason { - StopReason::ToolUse => { - return Some(ContextEvent::UsePendingTools); - } - StopReason::EndTurn => {} - StopReason::MaxTokens => {} - }, + LanguageModelCompletionEvent::Stop(reason) => { + stop_reason = reason; + } LanguageModelCompletionEvent::Text(chunk) => { buffer.edit( [( @@ -2116,14 +2084,9 @@ impl Context { ); } } - - None }); cx.emit(ContextEvent::StreamedCompletion); - if let Some(event) = event_to_emit { - cx.emit(event); - } Some(()) })?; @@ -2136,13 +2099,14 @@ impl Context { this.update_cache_status_for_completion(cx); })?; - anyhow::Ok(()) + anyhow::Ok(stop_reason) }; let result = stream_completion.await; this.update(&mut cx, |this, cx| { let error_message = result + .as_ref() .err() .map(|error| error.to_string().trim().to_string()); @@ -2170,6 +2134,16 @@ impl Context { error_message, ); } + + if let Ok(stop_reason) = result { + match stop_reason { + StopReason::ToolUse => { + cx.emit(ContextEvent::UsePendingTools); + } + StopReason::EndTurn => {} + StopReason::MaxTokens => {} + } + } }) .ok(); } @@ -2186,18 +2160,94 @@ impl Context { pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest { let buffer = self.buffer.read(cx); - let request_messages = self - .messages(cx) - .filter(|message| message.status == MessageStatus::Done) - .filter_map(|message| message.to_request_message(&buffer)) - .collect(); - LanguageModelRequest { - messages: request_messages, + let mut contents = self.contents(cx).peekable(); + + fn collect_text_content(buffer: &Buffer, range: Range) -> Option { + let text: String = buffer.text_for_range(range.clone()).collect(); + if text.trim().is_empty() { + None + } else { + Some(text) + } + } + + let mut completion_request = LanguageModelRequest { + messages: Vec::new(), 
tools: Vec::new(), stop: Vec::new(), temperature: 1.0, + }; + for message in self.messages(cx) { + if message.status != MessageStatus::Done { + continue; + } + + let mut offset = message.offset_range.start; + let mut request_message = LanguageModelRequestMessage { + role: message.role, + content: Vec::new(), + cache: message + .cache + .as_ref() + .map_or(false, |cache| cache.is_anchor), + }; + + while let Some(content) = contents.peek() { + if content + .range() + .end + .cmp(&message.anchor_range.end, buffer) + .is_lt() + { + let content = contents.next().unwrap(); + let range = content.range().to_offset(buffer); + request_message.content.extend( + collect_text_content(buffer, offset..range.start).map(MessageContent::Text), + ); + + match content { + Content::Image { image, .. } => { + if let Some(image) = image.clone().now_or_never().flatten() { + request_message + .content + .push(language_model::MessageContent::Image(image)); + } + } + Content::ToolUse { tool_use, .. } => { + request_message + .content + .push(language_model::MessageContent::ToolUse(tool_use.clone())); + } + Content::ToolResult { tool_use_id, .. 
} => { + request_message.content.push( + language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use_id.to_string(), + is_error: false, + content: collect_text_content(buffer, range.clone()) + .unwrap_or_default(), + }, + ), + ); + } + } + + offset = range.end; + } else { + break; + } + } + + request_message.content.extend( + collect_text_content(buffer, offset..message.offset_range.end) + .map(MessageContent::Text), + ); + + completion_request.messages.push(request_message); } + + completion_request } pub fn cancel_last_assist(&mut self, cx: &mut ModelContext) -> bool { @@ -2335,42 +2385,50 @@ impl Context { Some(()) } - pub fn insert_image_anchor( + pub fn insert_image_content( &mut self, image_id: u64, anchor: language::Anchor, cx: &mut ModelContext, - ) -> bool { - cx.emit(ContextEvent::MessagesEdited); - - let buffer = self.buffer.read(cx); - let insertion_ix = match self - .image_anchors - .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer)) - { - Ok(ix) => ix, - Err(ix) => ix, - }; - + ) { if let Some((render_image, image)) = self.images.get(&image_id) { - self.image_anchors.insert( - insertion_ix, - ImageAnchor { + self.insert_content( + Content::Image { anchor, image_id, image: image.clone(), render_image: render_image.clone(), }, + cx, ); - - true - } else { - false } } - pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator { - self.image_anchors.iter().cloned() + pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext) { + let buffer = self.buffer.read(cx); + let insertion_ix = match self + .contents + .binary_search_by(|probe| probe.cmp(&content, buffer)) + { + Ok(ix) => { + self.contents.remove(ix); + ix + } + Err(ix) => ix, + }; + self.contents.insert(insertion_ix, content); + cx.emit(ContextEvent::MessagesEdited); + } + + pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { + let buffer = self.buffer.read(cx); + self.contents + 
.iter() + .filter(|content| { + let range = content.range(); + range.start.is_valid(buffer) && range.end.is_valid(buffer) + }) + .cloned() } pub fn split_message( @@ -2533,22 +2591,14 @@ impl Context { return; } - let messages = self - .messages(cx) - .filter_map(|message| message.to_request_message(self.buffer.read(cx))) - .chain(Some(LanguageModelRequestMessage { - role: Role::User, - content: vec![ - "Summarize the context into a short title without punctuation.".into(), - ], - cache: false, - })); - let request = LanguageModelRequest { - messages: messages.collect(), - tools: Vec::new(), - stop: Vec::new(), - temperature: 1.0, - }; + let mut request = self.to_completion_request(cx); + request.messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![ + "Summarize the context into a short title without punctuation.".into(), + ], + cache: false, + }); self.pending_summary = cx.spawn(|this, mut cx| { async move { @@ -2648,10 +2698,8 @@ impl Context { cx: &'a AppContext, ) -> impl 'a + Iterator { let buffer = self.buffer.read(cx); - let messages = message_anchors.enumerate(); - let images = self.image_anchors.iter(); - Self::messages_from_iters(buffer, &self.messages_metadata, messages, images) + Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate()) } pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { @@ -2662,10 +2710,8 @@ impl Context { buffer: &'a Buffer, metadata: &'a HashMap, messages: impl Iterator + 'a, - images: impl Iterator + 'a, ) -> impl 'a + Iterator { let mut messages = messages.peekable(); - let mut images = images.peekable(); iter::from_fn(move || { if let Some((start_ix, message_anchor)) = messages.next() { @@ -2686,22 +2732,6 @@ impl Context { let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX); let message_end = message_end_anchor.to_offset(buffer); - let mut image_offsets = SmallVec::new(); - while let Some(image_anchor) = images.peek() { - if 
image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() { - image_offsets.push(( - image_anchor.anchor.to_offset(buffer), - MessageImage { - image_id: image_anchor.image_id, - image: image_anchor.image.clone(), - }, - )); - images.next(); - } else { - break; - } - } - return Some(Message { index_range: start_ix..end_ix, offset_range: message_start..message_end, @@ -2710,7 +2740,6 @@ impl Context { role: metadata.role, status: metadata.status.clone(), cache: metadata.cache.clone(), - image_offsets, }); } None @@ -2748,9 +2777,6 @@ impl Context { })?; if let Some(summary) = summary { - this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))? - .await; - let context = this.read_with(&cx, |this, cx| this.serialize(cx))?; let mut discriminant = 1; let mut new_path; @@ -2790,45 +2816,6 @@ impl Context { }); } - pub fn serialize_images(&self, fs: Arc, cx: &AppContext) -> Task<()> { - let mut images_to_save = self - .images - .iter() - .map(|(id, (_, llm_image))| { - let fs = fs.clone(); - let llm_image = llm_image.clone(); - let id = *id; - async move { - if let Some(llm_image) = llm_image.await { - let path: PathBuf = - context_images_dir().join(&format!("{}.png.base64", id)); - if fs - .metadata(path.as_path()) - .await - .log_err() - .flatten() - .is_none() - { - fs.atomic_write(path, llm_image.source.to_string()) - .await - .log_err(); - } - } - } - }) - .collect::>(); - cx.background_executor().spawn(async move { - if fs - .create_dir(context_images_dir().as_ref()) - .await - .log_err() - .is_some() - { - while let Some(_) = images_to_save.next().await {} - } - }) - } - pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext) { let timestamp = self.next_timestamp(); let summary = self.summary.get_or_insert(ContextSummary::default()); @@ -2914,9 +2901,6 @@ pub struct SavedMessage { pub id: MessageId, pub start: usize, pub metadata: MessageMetadata, - #[serde(default)] - // This is defaulted for backwards compatibility 
with JSON files created before August 2024. We didn't always have this field. - pub image_offsets: Vec<(usize, u64)>, } #[derive(Serialize, Deserialize)] @@ -3102,7 +3086,6 @@ impl SavedContextV0_3_0 { timestamp, cache: None, }, - image_offsets: Vec::new(), }) }) .collect(), diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 4d6a3b6d92..b80bef5f2d 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -170,12 +170,6 @@ pub fn contexts_dir() -> &'static PathBuf { }) } -/// Returns the path within the contexts directory where images from contexts are stored. -pub fn context_images_dir() -> &'static PathBuf { - static CONTEXT_IMAGES_DIR: OnceLock = OnceLock::new(); - CONTEXT_IMAGES_DIR.get_or_init(|| contexts_dir().join("images")) -} - /// Returns the path to the contexts directory. /// /// This is where the prompts for use with the Assistant are stored. From bd1ff476b987ed2570f90b76e8c028d7e7951543 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 15:38:27 -0400 Subject: [PATCH 033/270] Revert tokenizer for custom OpenAI models (#17660) Fix for custom openai models tokenizer settings. --- crates/language_model/src/provider/open_ai.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 15d84f6cca..a7a962e925 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -370,7 +370,11 @@ pub fn count_open_ai_tokens( }) .collect::>(); - tiktoken_rs::num_tokens_from_messages(model.id(), &messages) + if let open_ai::Model::Custom { .. 
} = model { + tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) + } else { + tiktoken_rs::num_tokens_from_messages(model.id(), &messages) + } }) .boxed() } From 130f19d8f953ca43bf6c833ac0e0c7362a170850 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 10 Sep 2024 15:50:51 -0400 Subject: [PATCH 034/270] Correctly merge settings for vtsls (#17657) Release Notes: - Fixed vtsls initialization_options in project settings files --- crates/languages/src/vtsls.rs | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index d38ee85f31..3dca82688c 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -7,14 +7,14 @@ use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::project_settings::{BinarySettings, ProjectSettings}; use serde_json::{json, Value}; -use settings::Settings; +use settings::{Settings, SettingsLocation}; use std::{ any::Any, ffi::OsString, path::{Path, PathBuf}, sync::Arc, }; -use util::{maybe, ResultExt}; +use util::{maybe, merge_json_value_into, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -274,17 +274,29 @@ impl LspAdapter for VtslsLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(SERVER_NAME) - .and_then(|s| s.initialization_options.clone()) + ProjectSettings::get( + Some(SettingsLocation { + worktree_id: adapter.worktree_id(), + path: adapter.worktree_root_path(), + }), + cx, + ) + .lsp + .get(SERVER_NAME) + .and_then(|s| s.initialization_options.clone()) })?; if let Some(options) = override_options { return Ok(options); } - self.initialization_options(adapter) + let mut initialization_options = self + .initialization_options(adapter) .await - .map(|o| o.unwrap()) + .map(|o| o.unwrap())?; + + if let 
Some(override_options) = override_options { + merge_json_value_into(override_options, &mut initialization_options) + } + Ok(initialization_options) } fn language_ids(&self) -> HashMap { From 36eb1c15eaf7bff72bf120e1fccc7a73326ad8e9 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 10 Sep 2024 15:51:01 -0400 Subject: [PATCH 035/270] use ssh lsp store (#17655) Release Notes: - ssh remoting: Added support for booting langauge servers (in limited circumstances) --------- Co-authored-by: Mikayla --- crates/assistant/src/assistant_panel.rs | 15 +- crates/assistant/src/inline_assistant.rs | 4 +- crates/assistant/src/prompts.rs | 6 +- crates/collab/src/tests/integration_tests.rs | 10 +- .../remote_editing_collaboration_tests.rs | 2 +- crates/editor/src/clangd_ext.rs | 2 +- crates/editor/src/editor.rs | 2 +- crates/editor/src/editor_tests.rs | 10 +- crates/editor/src/items.rs | 4 +- crates/editor/src/rust_analyzer_ext.rs | 2 +- .../src/test/editor_lsp_test_context.rs | 2 +- crates/extension/src/extension_lsp_adapter.rs | 1 - crates/extension/src/extension_manifest.rs | 8 +- crates/extension/src/extension_store.rs | 5 +- crates/extension/src/extension_store_test.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 4 +- crates/gpui/src/app.rs | 6 + crates/language/src/buffer_tests.rs | 56 +- crates/language/src/language.rs | 59 +- crates/language/src/language_registry.rs | 211 ++- crates/language/src/language_settings.rs | 12 +- .../src/active_buffer_language.rs | 4 +- .../src/language_selector.rs | 2 +- crates/language_tools/src/lsp_log.rs | 2 +- crates/language_tools/src/syntax_tree_view.rs | 2 +- crates/languages/src/rust.rs | 2 +- crates/languages/src/yaml.rs | 2 +- crates/lsp/src/lsp.rs | 10 + .../src/markdown_preview_view.rs | 2 +- .../project/src/lsp_command/signature_help.rs | 2 +- crates/project/src/lsp_store.rs | 1368 ++++++++++++----- crates/project/src/project.rs | 122 +- crates/project/src/project_settings.rs | 2 +- crates/project/src/project_tests.rs | 14 +- 
crates/project/src/task_inventory.rs | 4 +- crates/proto/proto/zed.proto | 37 +- crates/proto/src/proto.rs | 7 +- crates/quick_action_bar/src/repl_menu.rs | 2 +- crates/recent_projects/src/ssh_connections.rs | 21 +- crates/remote/src/ssh_session.rs | 10 +- crates/remote_server/src/headless_project.rs | 29 +- .../remote_server/src/remote_editing_tests.rs | 122 +- crates/repl/src/repl_editor.rs | 8 +- crates/worktree/src/worktree.rs | 7 + crates/zed/src/zed.rs | 20 +- 45 files changed, 1553 insertions(+), 671 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 22843d41cd..7eebc97b1d 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -53,7 +53,8 @@ use language_model::{ }; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; -use project::{Project, ProjectLspAdapterDelegate, Worktree}; +use project::lsp_store::ProjectLspAdapterDelegate; +use project::{Project, Worktree}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; @@ -5340,9 +5341,17 @@ fn make_lsp_adapter_delegate( .worktrees(cx) .next() .ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?; + let fs = if project.is_local() { + Some(project.fs().clone()) + } else { + None + }; + let http_client = project.client().http_client().clone(); project.lsp_store().update(cx, |lsp_store, cx| { - Ok(ProjectLspAdapterDelegate::new(lsp_store, &worktree, cx) - as Arc) + Ok( + ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, cx) + as Arc, + ) }) }) } diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 7bd74ccabf..051db0f247 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2377,7 +2377,7 @@ impl Codegen { // If Markdown or No Language is Known, 
increase the randomness for more creative output // If Code, decrease temperature to get more deterministic outputs let temperature = if let Some(language) = language_name.clone() { - if language.as_ref() == "Markdown" { + if language == "Markdown".into() { 1.0 } else { 0.5 @@ -2386,7 +2386,7 @@ impl Codegen { 1.0 }; - let language_name = language_name.as_deref(); + let language_name = language_name.as_ref(); let start = buffer.point_to_buffer_offset(edit_range.start); let end = buffer.point_to_buffer_offset(edit_range.end); let (buffer, range) = if let Some((start, end)) = start.zip(end) { diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 068bf7158d..83e894f797 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -4,7 +4,7 @@ use fs::Fs; use futures::StreamExt; use gpui::AssetSource; use handlebars::{Handlebars, RenderError}; -use language::BufferSnapshot; +use language::{BufferSnapshot, LanguageName}; use parking_lot::Mutex; use serde::Serialize; use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration}; @@ -204,11 +204,11 @@ impl PromptBuilder { pub fn generate_content_prompt( &self, user_prompt: String, - language_name: Option<&str>, + language_name: Option<&LanguageName>, buffer: BufferSnapshot, range: Range, ) -> Result { - let content_type = match language_name { + let content_type = match language_name.as_ref().map(|l| l.0.as_ref()) { None | Some("Markdown" | "Plain Text") => "text", Some(_) => "code", }; diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index e012fce8c2..b6d7aca2e0 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2328,11 +2328,11 @@ async fn test_propagate_saves_and_fs_changes( .unwrap(); buffer_b.read_with(cx_b, |buffer, _| { - assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); 
}); buffer_c.read_with(cx_c, |buffer, _| { - assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "i-am-b, ")], None, cx)); buffer_c.update(cx_c, |buf, cx| buf.edit([(0..0, "i-am-c, ")], None, cx)); @@ -2432,17 +2432,17 @@ async fn test_propagate_saves_and_fs_changes( buffer_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_b.read_with(cx_b, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_c.read_with(cx_c, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); let new_buffer_a = project_a diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 21e7f9dd9e..c4410fd776 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -100,7 +100,7 @@ async fn test_sharing_an_ssh_remote_project( let file = buffer_b.read(cx).file(); assert_eq!( all_language_settings(file, cx) - .language(Some("Rust")) + .language(Some(&("Rust".into()))) .language_servers, ["override-rust-analyzer".into()] ) diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 7fbb8f5f41..2f0f7aaee4 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -12,7 +12,7 @@ use crate::{element::register_action, Editor, 
SwitchSourceHeader}; static CLANGD_SERVER_NAME: &str = "clangd"; fn is_c_language(language: &Language) -> bool { - return language.name().as_ref() == "C++" || language.name().as_ref() == "C"; + return language.name() == "C++".into() || language.name() == "C".into(); } pub fn switch_source_header( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cb4ae63afc..3466888c94 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12465,7 +12465,7 @@ fn inlay_hint_settings( let language = snapshot.language_at(location); let settings = all_language_settings(file, cx); settings - .language(language.map(|l| l.name()).as_deref()) + .language(language.map(|l| l.name()).as_ref()) .inlay_hints } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index c8c509fd98..0b1e0385de 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -20,8 +20,8 @@ use language::{ }, BracketPairConfig, Capability::ReadWrite, - FakeLspAdapter, IndentGuide, LanguageConfig, LanguageConfigOverride, LanguageMatcher, Override, - ParsedMarkdown, Point, + FakeLspAdapter, IndentGuide, LanguageConfig, LanguageConfigOverride, LanguageMatcher, + LanguageName, Override, ParsedMarkdown, Point, }; use language_settings::{Formatter, FormatterList, IndentGuideSettings}; use multi_buffer::MultiBufferIndentGuide; @@ -9587,12 +9587,12 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test let server_restarts = Arc::new(AtomicUsize::new(0)); let closure_restarts = Arc::clone(&server_restarts); let language_server_name = "test language server"; - let language_name: Arc = "Rust".into(); + let language_name: LanguageName = "Rust".into(); let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( LanguageConfig { - name: Arc::clone(&language_name), + name: language_name.clone(), matcher: LanguageMatcher { 
path_suffixes: vec!["rs".to_string()], ..Default::default() @@ -9629,7 +9629,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test let _fake_server = fake_servers.next().await.unwrap(); update_test_language_settings(cx, |language_settings| { language_settings.languages.insert( - Arc::clone(&language_name), + language_name.clone(), LanguageSettingsContent { tab_size: NonZeroU32::new(8), ..Default::default() diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 23293469dd..1be2092d7d 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1705,8 +1705,8 @@ mod tests { let buffer = editor.buffer().read(cx).as_singleton().unwrap().read(cx); assert_eq!( - buffer.language().map(|lang| lang.name()).as_deref(), - Some("Rust") + buffer.language().map(|lang| lang.name()), + Some("Rust".into()) ); // Language should be set to Rust assert!(buffer.file().is_none()); // The buffer should not have an associated file }); diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index a152f3c453..db17eaab28 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -13,7 +13,7 @@ use crate::{ static RUST_ANALYZER_NAME: &str = "rust-analyzer"; fn is_rust_language(language: &Language) -> bool { - language.name().as_ref() == "Rust" + language.name() == "Rust".into() } pub fn apply_related_actions(editor: &View, cx: &mut WindowContext) { diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index ec1eccb864..16735760bf 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -58,7 +58,7 @@ impl EditorLspTestContext { let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let mut fake_servers = language_registry.register_fake_lsp_adapter( - language.name().as_ref(), + 
language.name(), FakeLspAdapter { capabilities, ..Default::default() diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index 41a35cb617..f82b6c9e0e 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -38,7 +38,6 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, _: Arc, delegate: Arc, _: futures::lock::MutexGuard<'a, Option>, diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index 9d8a841686..3dfd7e0d41 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Context, Result}; use collections::{BTreeMap, HashMap}; use fs::Fs; -use language::LanguageServerName; +use language::{LanguageName, LanguageServerName}; use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; use std::{ @@ -106,10 +106,10 @@ pub struct GrammarManifestEntry { pub struct LanguageServerManifestEntry { /// Deprecated in favor of `languages`. #[serde(default)] - language: Option>, + language: Option, /// The list of languages this language server should work with. #[serde(default)] - languages: Vec>, + languages: Vec, #[serde(default)] pub language_ids: HashMap, #[serde(default)] @@ -124,7 +124,7 @@ impl LanguageServerManifestEntry { /// /// We can replace this with just field access for the `languages` field once /// we have removed `language`. 
- pub fn languages(&self) -> impl IntoIterator> + '_ { + pub fn languages(&self) -> impl IntoIterator + '_ { let language = if self.languages.is_empty() { self.language.clone() } else { diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 2558dca93e..3ebc4f20d3 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -36,7 +36,8 @@ use gpui::{ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use indexed_docs::{IndexedDocsRegistry, ProviderId}; use language::{ - LanguageConfig, LanguageMatcher, LanguageQueries, LanguageRegistry, QUERY_FILENAME_PREFIXES, + LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry, + QUERY_FILENAME_PREFIXES, }; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -148,7 +149,7 @@ impl Global for GlobalExtensionStore {} pub struct ExtensionIndex { pub extensions: BTreeMap, ExtensionIndexEntry>, pub themes: BTreeMap, ExtensionIndexThemeEntry>, - pub languages: BTreeMap, ExtensionIndexLanguageEntry>, + pub languages: BTreeMap, } #[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 70ea7ac909..da530306d1 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -609,7 +609,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { .await .unwrap(); - let mut fake_servers = language_registry.fake_language_servers("Gleam"); + let mut fake_servers = language_registry.fake_language_servers("Gleam".into()); let buffer = project .update(cx, |project, cx| { diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 68550a44cf..337bb8afb0 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ 
b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -9,6 +9,7 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; +use language::LanguageName; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; @@ -399,8 +400,9 @@ impl ExtensionImports for WasmState { cx.update(|cx| match category.as_str() { "language" => { + let key = key.map(|k| LanguageName::new(&k)); let settings = - AllLanguageSettings::get(location, cx).language(key.as_deref()); + AllLanguageSettings::get(location, cx).language(key.as_ref()); Ok(serde_json::to_string(&settings::LanguageSettings { tab_size: settings.tab_size, })?) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 564b893489..ac7d5eb47b 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1504,3 +1504,9 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } + +impl Drop for AppContext { + fn drop(&mut self) { + println!("Dropping the App Context"); + } +} diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 8584eee4c7..77a1079d3a 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -72,7 +72,7 @@ fn test_select_language(cx: &mut AppContext) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); registry.add(Arc::new(Language::new( LanguageConfig { - name: "Rust".into(), + name: LanguageName::new("Rust"), matcher: LanguageMatcher { path_suffixes: vec!["rs".to_string()], ..Default::default() @@ -83,7 +83,7 @@ fn test_select_language(cx: &mut AppContext) { ))); registry.add(Arc::new(Language::new( LanguageConfig { - name: "Make".into(), + name: LanguageName::new("Make"), matcher: LanguageMatcher { path_suffixes: vec!["Makefile".to_string(), "mk".to_string()], 
..Default::default() @@ -97,15 +97,13 @@ fn test_select_language(cx: &mut AppContext) { assert_eq!( registry .language_for_file(&file("src/lib.rs"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), Some("Rust".into()) ); assert_eq!( registry .language_for_file(&file("src/lib.mk"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), Some("Make".into()) ); @@ -113,8 +111,7 @@ fn test_select_language(cx: &mut AppContext) { assert_eq!( registry .language_for_file(&file("src/Makefile"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), Some("Make".into()) ); @@ -122,22 +119,19 @@ fn test_select_language(cx: &mut AppContext) { assert_eq!( registry .language_for_file(&file("zed/cars"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), None ); assert_eq!( registry .language_for_file(&file("zed/a.cars"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), None ); assert_eq!( registry .language_for_file(&file("zed/sumk"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), None ); } @@ -158,23 +152,22 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) { ..Default::default() }); - cx.read(|cx| languages.language_for_file(&file("the/script"), None, cx)) - .await - .unwrap_err(); - cx.read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx)) - .await - .unwrap_err(); + assert!(cx + .read(|cx| languages.language_for_file(&file("the/script"), None, cx)) + .is_none()); + assert!(cx + .read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx)) + .is_none()); + assert_eq!( cx.read(|cx| languages.language_for_file( &file("the/script"), Some(&"#!/bin/env node".into()), cx )) - .await .unwrap() - .name() - .as_ref(), - "JavaScript" + .name(), + "JavaScript".into() ); } @@ -242,19 +235,16 @@ 
async fn test_language_for_file_with_custom_file_types(cx: &mut TestAppContext) let language = cx .read(|cx| languages.language_for_file(&file("foo.js"), None, cx)) - .await .unwrap(); - assert_eq!(language.name().as_ref(), "TypeScript"); + assert_eq!(language.name(), "TypeScript".into()); let language = cx .read(|cx| languages.language_for_file(&file("foo.c"), None, cx)) - .await .unwrap(); - assert_eq!(language.name().as_ref(), "C++"); + assert_eq!(language.name(), "C++".into()); let language = cx .read(|cx| languages.language_for_file(&file("Dockerfile.dev"), None, cx)) - .await .unwrap(); - assert_eq!(language.name().as_ref(), "Dockerfile"); + assert_eq!(language.name(), "Dockerfile".into()); } fn file(path: &str) -> Arc { @@ -2245,10 +2235,10 @@ fn test_language_at_with_hidden_languages(cx: &mut AppContext) { for point in [Point::new(0, 4), Point::new(0, 16)] { let config = snapshot.language_scope_at(point).unwrap(); - assert_eq!(config.language_name().as_ref(), "Markdown"); + assert_eq!(config.language_name(), "Markdown".into()); let language = snapshot.language_at(point).unwrap(); - assert_eq!(language.name().as_ref(), "Markdown"); + assert_eq!(language.name().0.as_ref(), "Markdown"); } buffer @@ -2757,7 +2747,7 @@ fn ruby_lang() -> Language { fn html_lang() -> Language { Language::new( LanguageConfig { - name: "HTML".into(), + name: LanguageName::new("HTML"), block_comment: Some(("".into())), ..Default::default() }, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7e8fcc655d..6424da8a54 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -28,6 +28,7 @@ use futures::Future; use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; +pub use language_registry::LanguageName; use lsp::{CodeActionKind, LanguageServerBinary}; use parking_lot::Mutex; use regex::Regex; @@ -67,8 +68,8 @@ pub use buffer::Operation; pub use 
buffer::*; pub use diagnostic_set::DiagnosticEntry; pub use language_registry::{ - LanguageNotFound, LanguageQueries, LanguageRegistry, LanguageServerBinaryStatus, - PendingLanguageServer, QUERY_FILENAME_PREFIXES, + AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry, + LanguageServerBinaryStatus, PendingLanguageServer, QUERY_FILENAME_PREFIXES, }; pub use lsp::LanguageServerId; pub use outline::*; @@ -140,6 +141,12 @@ pub trait ToLspPosition { #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] pub struct LanguageServerName(pub Arc); +impl LanguageServerName { + pub fn from_proto(s: String) -> Self { + Self(Arc::from(s)) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Location { pub buffer: Model, @@ -195,9 +202,12 @@ impl CachedLspAdapter { }) } + pub fn name(&self) -> Arc { + self.adapter.name().0.clone() + } + pub async fn get_language_server_command( self: Arc, - language: Arc, container_dir: Arc, delegate: Arc, cx: &mut AsyncAppContext, @@ -205,18 +215,10 @@ impl CachedLspAdapter { let cached_binary = self.cached_binary.lock().await; self.adapter .clone() - .get_language_server_command(language, container_dir, delegate, cached_binary, cx) + .get_language_server_command(container_dir, delegate, cached_binary, cx) .await } - pub fn will_start_server( - &self, - delegate: &Arc, - cx: &mut AsyncAppContext, - ) -> Option>> { - self.adapter.will_start_server(delegate, cx) - } - pub fn can_be_reinstalled(&self) -> bool { self.adapter.can_be_reinstalled() } @@ -262,11 +264,11 @@ impl CachedLspAdapter { .await } - pub fn language_id(&self, language: &Language) -> String { + pub fn language_id(&self, language_name: &LanguageName) -> String { self.language_ids - .get(language.name().as_ref()) + .get(language_name.0.as_ref()) .cloned() - .unwrap_or_else(|| language.lsp_id()) + .unwrap_or_else(|| language_name.lsp_id()) } #[cfg(any(test, feature = "test-support"))] @@ -296,7 +298,6 @@ pub trait 
LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, - language: Arc, container_dir: Arc, delegate: Arc, mut cached_binary: futures::lock::MutexGuard<'a, Option>, @@ -317,7 +318,7 @@ pub trait LspAdapter: 'static + Send + Sync { if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { log::info!( "found user-installed language server for {}. path: {:?}, arguments: {:?}", - language.name(), + self.name().0, binary.path, binary.arguments ); @@ -387,14 +388,6 @@ pub trait LspAdapter: 'static + Send + Sync { None } - fn will_start_server( - &self, - _: &Arc, - _: &mut AsyncAppContext, - ) -> Option>> { - None - } - async fn fetch_server_binary( &self, latest_version: Box, @@ -562,7 +555,7 @@ pub struct CodeLabel { #[derive(Clone, Deserialize, JsonSchema)] pub struct LanguageConfig { /// Human-readable name of the language. - pub name: Arc, + pub name: LanguageName, /// The name of this language for a Markdown code fence block pub code_fence_block_name: Option>, // The name of the grammar in a WASM bundle (experimental). @@ -699,7 +692,7 @@ impl Override { impl Default for LanguageConfig { fn default() -> Self { Self { - name: Arc::default(), + name: LanguageName::new(""), code_fence_block_name: None, grammar: None, matcher: LanguageMatcher::default(), @@ -1335,7 +1328,7 @@ impl Language { Arc::get_mut(self.grammar.as_mut()?) 
} - pub fn name(&self) -> Arc { + pub fn name(&self) -> LanguageName { self.config.name.clone() } @@ -1343,7 +1336,7 @@ impl Language { self.config .code_fence_block_name .clone() - .unwrap_or_else(|| self.config.name.to_lowercase().into()) + .unwrap_or_else(|| self.config.name.0.to_lowercase().into()) } pub fn context_provider(&self) -> Option> { @@ -1408,10 +1401,7 @@ impl Language { } pub fn lsp_id(&self) -> String { - match self.config.name.as_ref() { - "Plain Text" => "plaintext".to_string(), - language_name => language_name.to_lowercase(), - } + self.config.name.lsp_id() } pub fn prettier_parser_name(&self) -> Option<&str> { @@ -1420,7 +1410,7 @@ impl Language { } impl LanguageScope { - pub fn language_name(&self) -> Arc { + pub fn language_name(&self) -> LanguageName { self.language.config.name.clone() } @@ -1663,7 +1653,6 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, _: Arc, _: Arc, _: futures::lock::MutexGuard<'a, Option>, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index a558b942d6..a65d20019f 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -6,9 +6,9 @@ use crate::{ with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, LanguageServerName, LspAdapter, LspAdapterDelegate, PLAIN_TEXT, }; -use anyhow::{anyhow, Context as _, Result}; +use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; -use futures::TryFutureExt; + use futures::{ channel::{mpsc, oneshot}, future::Shared, @@ -19,8 +19,10 @@ use gpui::{AppContext, BackgroundExecutor, Task}; use lsp::LanguageServerId; use parking_lot::{Mutex, RwLock}; use postage::watch; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; use std::{ - borrow::Cow, + borrow::{Borrow, Cow}, ffi::OsStr, ops::Not, path::{Path, PathBuf}, @@ -32,6 +34,48 @@ use theme::Theme; use 
unicase::UniCase; use util::{maybe, paths::PathExt, post_inc, ResultExt}; +#[derive( + Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema, +)] +pub struct LanguageName(pub Arc); + +impl LanguageName { + pub fn new(s: &str) -> Self { + Self(Arc::from(s)) + } + + pub fn from_proto(s: String) -> Self { + Self(Arc::from(s)) + } + pub fn to_proto(self) -> String { + self.0.to_string() + } + pub fn lsp_id(&self) -> String { + match self.0.as_ref() { + "Plain Text" => "plaintext".to_string(), + language_name => language_name.to_lowercase(), + } + } +} + +impl Borrow for LanguageName { + fn borrow(&self) -> &str { + self.0.as_ref() + } +} + +impl std::fmt::Display for LanguageName { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl<'a> From<&'a str> for LanguageName { + fn from(str: &'a str) -> LanguageName { + LanguageName(str.into()) + } +} + pub struct LanguageRegistry { state: RwLock, language_server_download_dir: Option>, @@ -46,7 +90,7 @@ struct LanguageRegistryState { language_settings: AllLanguageSettingsContent, available_languages: Vec, grammars: HashMap, AvailableGrammar>, - lsp_adapters: HashMap, Vec>>, + lsp_adapters: HashMap>>, available_lsp_adapters: HashMap Arc + 'static + Send + Sync>>, loading_languages: HashMap>>>>, @@ -56,8 +100,10 @@ struct LanguageRegistryState { reload_count: usize, #[cfg(any(test, feature = "test-support"))] - fake_server_txs: - HashMap, Vec>>, + fake_server_txs: HashMap< + LanguageName, + Vec>, + >, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -75,9 +121,9 @@ pub struct PendingLanguageServer { } #[derive(Clone)] -struct AvailableLanguage { +pub struct AvailableLanguage { id: LanguageId, - name: Arc, + name: LanguageName, grammar: Option>, matcher: LanguageMatcher, load: Arc< @@ -93,6 +139,16 @@ struct AvailableLanguage { loaded: bool, } +impl AvailableLanguage { + pub fn name(&self) -> LanguageName { + self.name.clone() + } + + pub fn 
matcher(&self) -> &LanguageMatcher { + &self.matcher + } +} + enum AvailableGrammar { Native(tree_sitter::Language), Loaded(#[allow(unused)] PathBuf, tree_sitter::Language), @@ -196,7 +252,7 @@ impl LanguageRegistry { /// appended to the end. pub fn reorder_language_servers( &self, - language: &Arc, + language: &LanguageName, ordered_lsp_adapters: Vec>, ) { self.state @@ -207,7 +263,7 @@ impl LanguageRegistry { /// Removes the specified languages and grammars from the registry. pub fn remove_languages( &self, - languages_to_remove: &[Arc], + languages_to_remove: &[LanguageName], grammars_to_remove: &[Arc], ) { self.state @@ -215,7 +271,7 @@ impl LanguageRegistry { .remove_languages(languages_to_remove, grammars_to_remove) } - pub fn remove_lsp_adapter(&self, language_name: &str, name: &LanguageServerName) { + pub fn remove_lsp_adapter(&self, language_name: &LanguageName, name: &LanguageServerName) { let mut state = self.state.write(); if let Some(adapters) = state.lsp_adapters.get_mut(language_name) { adapters.retain(|adapter| &adapter.name != name) @@ -267,7 +323,7 @@ impl LanguageRegistry { Some(load_lsp_adapter()) } - pub fn register_lsp_adapter(&self, language_name: Arc, adapter: Arc) { + pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc) { self.state .write() .lsp_adapters @@ -279,13 +335,14 @@ impl LanguageRegistry { #[cfg(any(feature = "test-support", test))] pub fn register_fake_lsp_adapter( &self, - language_name: &str, + language_name: impl Into, adapter: crate::FakeLspAdapter, ) -> futures::channel::mpsc::UnboundedReceiver { + let language_name = language_name.into(); self.state .write() .lsp_adapters - .entry(language_name.into()) + .entry(language_name.clone()) .or_default() .push(CachedLspAdapter::new(Arc::new(adapter))); self.fake_language_servers(language_name) @@ -294,13 +351,13 @@ impl LanguageRegistry { #[cfg(any(feature = "test-support", test))] pub fn fake_language_servers( &self, - language_name: &str, + 
language_name: LanguageName, ) -> futures::channel::mpsc::UnboundedReceiver { let (servers_tx, servers_rx) = futures::channel::mpsc::unbounded(); self.state .write() .fake_server_txs - .entry(language_name.into()) + .entry(language_name) .or_default() .push(servers_tx); servers_rx @@ -309,7 +366,7 @@ impl LanguageRegistry { /// Adds a language to the registry, which can be loaded if needed. pub fn register_language( &self, - name: Arc, + name: LanguageName, grammar_name: Option>, matcher: LanguageMatcher, load: impl Fn() -> Result<( @@ -445,7 +502,7 @@ impl LanguageRegistry { ) -> impl Future>> { let name = UniCase::new(name); let rx = self.get_or_load_language(|language_name, _| { - if UniCase::new(language_name) == name { + if UniCase::new(&language_name.0) == name { 1 } else { 0 @@ -460,7 +517,7 @@ impl LanguageRegistry { ) -> impl Future>> { let string = UniCase::new(string); let rx = self.get_or_load_language(|name, config| { - if UniCase::new(name) == string + if UniCase::new(&name.0) == string || config .path_suffixes .iter() @@ -474,13 +531,26 @@ impl LanguageRegistry { async move { rx.await? 
} } + pub fn available_language_for_name( + self: &Arc, + name: &LanguageName, + ) -> Option { + let state = self.state.read(); + state + .available_languages + .iter() + .find(|l| &l.name == name) + .cloned() + } + pub fn language_for_file( self: &Arc, file: &Arc, content: Option<&Rope>, cx: &AppContext, - ) -> impl Future>> { + ) -> Option { let user_file_types = all_language_settings(Some(file), cx); + self.language_for_file_internal( &file.full_path(cx), content, @@ -492,8 +562,16 @@ impl LanguageRegistry { self: &Arc, path: &'a Path, ) -> impl Future>> + 'a { - self.language_for_file_internal(path, None, None) - .map_err(|error| error.context(format!("language for file path {}", path.display()))) + let available_language = self.language_for_file_internal(path, None, None); + + let this = self.clone(); + async move { + if let Some(language) = available_language { + this.load_language(&language).await? + } else { + Err(anyhow!(LanguageNotFound)) + } + } } fn language_for_file_internal( @@ -501,19 +579,19 @@ impl LanguageRegistry { path: &Path, content: Option<&Rope>, user_file_types: Option<&HashMap, GlobSet>>, - ) -> impl Future>> { + ) -> Option { let filename = path.file_name().and_then(|name| name.to_str()); let extension = path.extension_or_hidden_file_name(); let path_suffixes = [extension, filename, path.to_str()]; let empty = GlobSet::empty(); - let rx = self.get_or_load_language(move |language_name, config| { + self.find_matching_language(move |language_name, config| { let path_matches_default_suffix = config .path_suffixes .iter() .any(|suffix| path_suffixes.contains(&Some(suffix.as_str()))); let custom_suffixes = user_file_types - .and_then(|types| types.get(language_name)) + .and_then(|types| types.get(&language_name.0)) .unwrap_or(&empty); let path_matches_custom_suffix = path_suffixes .iter() @@ -535,18 +613,15 @@ impl LanguageRegistry { } else { 0 } - }); - async move { rx.await? 
} + }) } - fn get_or_load_language( + fn find_matching_language( self: &Arc, - callback: impl Fn(&str, &LanguageMatcher) -> usize, - ) -> oneshot::Receiver>> { - let (tx, rx) = oneshot::channel(); - - let mut state = self.state.write(); - let Some((language, _)) = state + callback: impl Fn(&LanguageName, &LanguageMatcher) -> usize, + ) -> Option { + let state = self.state.read(); + let available_language = state .available_languages .iter() .filter_map(|language| { @@ -559,15 +634,23 @@ impl LanguageRegistry { }) .max_by_key(|e| e.1) .clone() - else { - let _ = tx.send(Err(anyhow!(LanguageNotFound))); - return rx; - }; + .map(|(available_language, _)| available_language); + drop(state); + available_language + } + + pub fn load_language( + self: &Arc, + language: &AvailableLanguage, + ) -> oneshot::Receiver>> { + let (tx, rx) = oneshot::channel(); + + let mut state = self.state.write(); // If the language is already loaded, resolve with it immediately. for loaded_language in state.languages.iter() { if loaded_language.id == language.id { - let _ = tx.send(Ok(loaded_language.clone())); + tx.send(Ok(loaded_language.clone())).unwrap(); return rx; } } @@ -580,12 +663,15 @@ impl LanguageRegistry { // Otherwise, start loading the language. 
hash_map::Entry::Vacant(entry) => { let this = self.clone(); + + let id = language.id; + let name = language.name.clone(); + let language_load = language.load.clone(); + self.executor .spawn(async move { - let id = language.id; - let name = language.name.clone(); let language = async { - let (config, queries, provider) = (language.load)()?; + let (config, queries, provider) = (language_load)()?; if let Some(grammar) = config.grammar.clone() { let grammar = Some(this.get_or_load_grammar(grammar).await?); @@ -629,13 +715,28 @@ impl LanguageRegistry { }; }) .detach(); + entry.insert(vec![tx]); } } + drop(state); rx } + fn get_or_load_language( + self: &Arc, + callback: impl Fn(&LanguageName, &LanguageMatcher) -> usize, + ) -> oneshot::Receiver>> { + let Some(language) = self.find_matching_language(callback) else { + let (tx, rx) = oneshot::channel(); + let _ = tx.send(Err(anyhow!(LanguageNotFound))); + return rx; + }; + + self.load_language(&language) + } + fn get_or_load_grammar( self: &Arc, name: Arc, @@ -702,11 +803,11 @@ impl LanguageRegistry { self.state.read().languages.to_vec() } - pub fn lsp_adapters(&self, language: &Arc) -> Vec> { + pub fn lsp_adapters(&self, language_name: &LanguageName) -> Vec> { self.state .read() .lsp_adapters - .get(&language.config.name) + .get(language_name) .cloned() .unwrap_or_default() } @@ -723,7 +824,7 @@ impl LanguageRegistry { pub fn create_pending_language_server( self: &Arc, stderr_capture: Arc>>, - language: Arc, + _language_name_for_tests: LanguageName, adapter: Arc, root_path: Arc, delegate: Arc, @@ -741,7 +842,6 @@ impl LanguageRegistry { .clone() .ok_or_else(|| anyhow!("language server download directory has not been assigned before starting server")) .log_err()?; - let language = language.clone(); let container_dir: Arc = Arc::from(download_dir.join(adapter.name.0.as_ref())); let root_path = root_path.clone(); let login_shell_env_loaded = self.login_shell_env_loaded.clone(); @@ -756,12 +856,7 @@ impl LanguageRegistry { 
let binary_result = adapter .clone() - .get_language_server_command( - language.clone(), - container_dir, - delegate.clone(), - &mut cx, - ) + .get_language_server_command(container_dir, delegate.clone(), &mut cx) .await; delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); @@ -785,10 +880,6 @@ impl LanguageRegistry { .initialization_options(&delegate) .await?; - if let Some(task) = adapter.will_start_server(&delegate, &mut cx) { - task.await?; - } - #[cfg(any(test, feature = "test-support"))] if true { let capabilities = adapter @@ -825,7 +916,7 @@ impl LanguageRegistry { .state .write() .fake_server_txs - .get_mut(language.name().as_ref()) + .get_mut(&_language_name_for_tests) { for tx in txs { tx.unbounded_send(fake_server.clone()).ok(); @@ -935,10 +1026,10 @@ impl LanguageRegistryState { /// appended to the end. fn reorder_language_servers( &mut self, - language: &Arc, + language_name: &LanguageName, ordered_lsp_adapters: Vec>, ) { - let Some(lsp_adapters) = self.lsp_adapters.get_mut(&language.config.name) else { + let Some(lsp_adapters) = self.lsp_adapters.get_mut(language_name) else { return; }; @@ -959,7 +1050,7 @@ impl LanguageRegistryState { fn remove_languages( &mut self, - languages_to_remove: &[Arc], + languages_to_remove: &[LanguageName], grammars_to_remove: &[Arc], ) { if languages_to_remove.is_empty() && grammars_to_remove.is_empty() { diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index ac3c9eb6ca..e1fcaaba28 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1,6 +1,6 @@ //! Provides `language`-related settings. 
-use crate::{File, Language, LanguageServerName}; +use crate::{File, Language, LanguageName, LanguageServerName}; use anyhow::Result; use collections::{HashMap, HashSet}; use core::slice; @@ -32,7 +32,7 @@ pub fn language_settings<'a>( cx: &'a AppContext, ) -> &'a LanguageSettings { let language_name = language.map(|l| l.name()); - all_language_settings(file, cx).language(language_name.as_deref()) + all_language_settings(file, cx).language(language_name.as_ref()) } /// Returns the settings for all languages from the provided file. @@ -53,7 +53,7 @@ pub struct AllLanguageSettings { /// The inline completion settings. pub inline_completions: InlineCompletionSettings, defaults: LanguageSettings, - languages: HashMap, LanguageSettings>, + languages: HashMap, pub(crate) file_types: HashMap, GlobSet>, } @@ -204,7 +204,7 @@ pub struct AllLanguageSettingsContent { pub defaults: LanguageSettingsContent, /// The settings for individual languages. #[serde(default)] - pub languages: HashMap, LanguageSettingsContent>, + pub languages: HashMap, /// Settings for associating file extensions and filenames /// with languages. #[serde(default)] @@ -791,7 +791,7 @@ impl InlayHintSettings { impl AllLanguageSettings { /// Returns the [`LanguageSettings`] for the language with the specified name. 
- pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings { + pub fn language<'a>(&'a self, language_name: Option<&LanguageName>) -> &'a LanguageSettings { if let Some(name) = language_name { if let Some(overrides) = self.languages.get(name) { return overrides; @@ -821,7 +821,7 @@ impl AllLanguageSettings { } } - self.language(language.map(|l| l.name()).as_deref()) + self.language(language.map(|l| l.name()).as_ref()) .show_inline_completions } } diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index 647ff93b81..6aa31d7ff8 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -1,13 +1,13 @@ use editor::Editor; use gpui::{div, IntoElement, ParentElement, Render, Subscription, View, ViewContext, WeakView}; -use std::sync::Arc; +use language::LanguageName; use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; use workspace::{item::ItemHandle, StatusItemView, Workspace}; use crate::LanguageSelector; pub struct ActiveBufferLanguage { - active_language: Option>>, + active_language: Option>, workspace: WeakView, _observe_active_editor: Option, } diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 6bdf5a67d0..489f6fd141 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -217,7 +217,7 @@ impl PickerDelegate for LanguageSelectorDelegate { let mat = &self.matches[ix]; let buffer_language_name = self.buffer.read(cx).language().map(|l| l.name()); let mut label = mat.string.clone(); - if buffer_language_name.as_deref() == Some(mat.string.as_str()) { + if buffer_language_name.map(|n| n.0).as_deref() == Some(mat.string.as_str()) { label.push_str(" (current)"); } diff --git a/crates/language_tools/src/lsp_log.rs 
b/crates/language_tools/src/lsp_log.rs index 5cf800d306..53def5eb2a 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -683,7 +683,7 @@ impl LspLogView { self.project .read(cx) .supplementary_language_servers(cx) - .filter_map(|(&server_id, name)| { + .filter_map(|(server_id, name)| { let state = log_store.language_servers.get(&server_id)?; Some(LogMenuItem { server_id, diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index 1d98c3d0b0..e2c4903e19 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -471,7 +471,7 @@ impl SyntaxTreeToolbarItemView { fn render_header(active_layer: &OwnedSyntaxLayer) -> ButtonLike { ButtonLike::new("syntax tree header") - .child(Label::new(active_layer.language.name())) + .child(Label::new(active_layer.language.name().0)) .child(Label::new(format_node_range(active_layer.node()))) } } diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 6ed20abe17..46b6ce475d 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -451,7 +451,7 @@ impl ContextProvider for RustContextProvider { ) -> Option { const DEFAULT_RUN_NAME_STR: &str = "RUST_DEFAULT_PACKAGE_RUN"; let package_to_run = all_language_settings(file.as_ref(), cx) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .tasks .variables .get(DEFAULT_RUN_NAME_STR); diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 4f0270fb26..51a9913b24 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -141,7 +141,7 @@ impl LspAdapter for YamlLspAdapter { let tab_size = cx.update(|cx| { AllLanguageSettings::get(Some(location), cx) - .language(Some("YAML")) + .language(Some(&"YAML".into())) .tab_size })?; let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); diff --git a/crates/lsp/src/lsp.rs 
b/crates/lsp/src/lsp.rs index 30feffad97..0612917575 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -89,6 +89,16 @@ pub struct LanguageServer { #[repr(transparent)] pub struct LanguageServerId(pub usize); +impl LanguageServerId { + pub fn from_proto(id: u64) -> Self { + Self(id as usize) + } + + pub fn to_proto(self) -> u64 { + self.0 as u64 + } +} + /// Handle to a language server RPC activity subscription. pub enum Subscription { Notification { diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index d73e205483..1aa60e2a3b 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -282,7 +282,7 @@ impl MarkdownPreviewView { let buffer = editor.read(cx).buffer().read(cx); if let Some(buffer) = buffer.as_singleton() { if let Some(language) = buffer.read(cx).language() { - return language.name().as_ref() == "Markdown"; + return language.name() == "Markdown".into(); } } false diff --git a/crates/project/src/lsp_command/signature_help.rs b/crates/project/src/lsp_command/signature_help.rs index 163c6ae134..bf197a11ba 100644 --- a/crates/project/src/lsp_command/signature_help.rs +++ b/crates/project/src/lsp_command/signature_help.rs @@ -86,7 +86,7 @@ impl SignatureHelp { } else { let markdown = markdown.join(str_for_join); let language_name = language - .map(|n| n.name().to_lowercase()) + .map(|n| n.name().0.to_lowercase()) .unwrap_or_default(); let markdown = if function_options_count >= 2 { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 1d9ca98c06..b218ac5804 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -15,7 +15,7 @@ use async_trait::async_trait; use client::{proto, TypedEnvelope}; use collections::{btree_map, BTreeMap, HashMap, HashSet}; use futures::{ - future::{join_all, Shared}, + future::{join_all, BoxFuture, Shared}, select, 
stream::FuturesUnordered, Future, FutureExt, StreamExt, @@ -25,22 +25,26 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, }; -use http_client::HttpClient; +use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; use itertools::Itertools; use language::{ - language_settings::{language_settings, AllLanguageSettings, LanguageSettings}, + language_settings::{ + all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, + }, markdown, point_to_lsp, prepare_completion_documentation, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, - DiagnosticEntry, DiagnosticSet, Documentation, File as _, Language, LanguageRegistry, - LanguageServerName, LocalFile, LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, - TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, + DiagnosticEntry, DiagnosticSet, Documentation, File as _, Language, LanguageConfig, + LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, + LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{ - CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, - Edit, FileSystemWatcher, InsertTextFormat, LanguageServer, LanguageServerBinary, - LanguageServerId, LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, - ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, + CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, + DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, + LanguageServer, LanguageServerBinary, LanguageServerId, LspRequestFuture, MessageActionItem, + MessageType, OneOf, 
ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, + WorkDoneProgressCancelParams, WorkspaceFolder, }; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -54,6 +58,7 @@ use similar::{ChangeTag, TextDiff}; use smol::channel::Sender; use snippet::Snippet; use std::{ + any::Any, cmp::Ordering, convert::TryInto, ffi::OsStr, @@ -85,27 +90,86 @@ const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); -pub struct LspStore { - downstream_client: Option, - upstream_client: Option, - project_id: u64, +pub struct LocalLspStore { http_client: Option>, + environment: Model, fs: Arc, - nonce: u128, - buffer_store: Model, - worktree_store: Model, - buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots - environment: Option>, - supplementary_language_servers: - HashMap)>, - languages: Arc, - language_servers: HashMap, - language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, - language_server_statuses: BTreeMap, + yarn: Model, + pub language_servers: HashMap, last_workspace_edits_by_language_server: HashMap, language_server_watched_paths: HashMap>, language_server_watcher_registrations: HashMap>>, + supplementary_language_servers: + HashMap)>, + _subscription: gpui::Subscription, +} + +impl LocalLspStore { + fn shutdown_language_servers( + &mut self, + _cx: &mut ModelContext, + ) -> impl Future { + let shutdown_futures = self + .language_servers + .drain() + .map(|(_, server_state)| async { + use LanguageServerState::*; + match server_state { + Running { server, .. 
} => server.shutdown()?.await, + Starting(task) => task.await?.shutdown()?.await, + } + }) + .collect::>(); + + async move { + futures::future::join_all(shutdown_futures).await; + } + } +} + +pub struct RemoteLspStore { + upstream_client: AnyProtoClient, +} + +impl RemoteLspStore {} + +pub struct SshLspStore { + upstream_client: AnyProtoClient, +} + +#[allow(clippy::large_enum_variant)] +pub enum LspStoreMode { + Local(LocalLspStore), // ssh host and collab host + Remote(RemoteLspStore), // collab guest + Ssh(SshLspStore), // ssh client +} + +impl LspStoreMode { + fn is_local(&self) -> bool { + matches!(self, LspStoreMode::Local(_)) + } + + fn is_ssh(&self) -> bool { + matches!(self, LspStoreMode::Ssh(_)) + } + + fn is_remote(&self) -> bool { + matches!(self, LspStoreMode::Remote(_)) + } +} + +pub struct LspStore { + mode: LspStoreMode, + downstream_client: Option, + project_id: u64, + nonce: u128, + buffer_store: Model, + worktree_store: Model, + buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots + pub languages: Arc, + language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, + pub language_server_statuses: BTreeMap, active_entry: Option, _maintain_workspace_config: Task>, _maintain_buffer_languages: Task<()>, @@ -122,8 +186,6 @@ pub struct LspStore { )>, >, >, - yarn: Model, - _subscription: gpui::Subscription, } pub enum LspStoreEvent { @@ -209,17 +271,53 @@ impl LspStore { client.add_model_request_handler(Self::handle_lsp_command::); } - #[allow(clippy::too_many_arguments)] - pub fn new( + pub fn as_remote(&self) -> Option<&RemoteLspStore> { + match &self.mode { + LspStoreMode::Remote(remote_lsp_store) => Some(remote_lsp_store), + _ => None, + } + } + + pub fn as_ssh(&self) -> Option<&SshLspStore> { + match &self.mode { + LspStoreMode::Ssh(ssh_lsp_store) => Some(ssh_lsp_store), + _ => None, + } + } + + pub fn as_local(&self) -> Option<&LocalLspStore> { + match &self.mode { + LspStoreMode::Local(local_lsp_store) 
=> Some(local_lsp_store), + _ => None, + } + } + + pub fn as_local_mut(&mut self) -> Option<&mut LocalLspStore> { + match &mut self.mode { + LspStoreMode::Local(local_lsp_store) => Some(local_lsp_store), + _ => None, + } + } + + pub fn upstream_client(&self) -> Option { + match &self.mode { + LspStoreMode::Ssh(SshLspStore { + upstream_client, .. + }) + | LspStoreMode::Remote(RemoteLspStore { + upstream_client, .. + }) => Some(upstream_client.clone()), + LspStoreMode::Local(_) => None, + } + } + + pub fn new_local( buffer_store: Model, worktree_store: Model, - environment: Option>, + environment: Model, languages: Arc, http_client: Option>, fs: Arc, - downstream_client: Option, - upstream_client: Option, - remote_id: Option, cx: &mut ModelContext, ) -> Self { let yarn = YarnPathStore::new(fs.clone(), cx); @@ -229,32 +327,118 @@ impl LspStore { .detach(); Self { - downstream_client, - upstream_client, - http_client, - fs, - project_id: remote_id.unwrap_or(0), + mode: LspStoreMode::Local(LocalLspStore { + supplementary_language_servers: Default::default(), + language_servers: Default::default(), + last_workspace_edits_by_language_server: Default::default(), + language_server_watched_paths: Default::default(), + language_server_watcher_registrations: Default::default(), + environment, + http_client, + fs, + yarn, + _subscription: cx.on_app_quit(|this, cx| { + this.as_local_mut().unwrap().shutdown_language_servers(cx) + }), + }), + downstream_client: None, + project_id: 0, buffer_store, worktree_store, languages: languages.clone(), - environment, - nonce: StdRng::from_entropy().gen(), - buffer_snapshots: Default::default(), - supplementary_language_servers: Default::default(), - language_servers: Default::default(), language_server_ids: Default::default(), language_server_statuses: Default::default(), - last_workspace_edits_by_language_server: Default::default(), - language_server_watched_paths: Default::default(), - language_server_watcher_registrations: 
Default::default(), + nonce: StdRng::from_entropy().gen(), + buffer_snapshots: Default::default(), + next_diagnostic_group_id: Default::default(), + diagnostic_summaries: Default::default(), + diagnostics: Default::default(), + active_entry: None, + _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), + } + } + + fn send_lsp_proto_request( + &self, + buffer: Model, + client: AnyProtoClient, + request: R, + cx: &mut ModelContext<'_, LspStore>, + ) -> Task::Response>> { + let message = request.to_proto(self.project_id, buffer.read(cx)); + cx.spawn(move |this, cx| async move { + let response = client.request(message).await?; + let this = this.upgrade().context("project dropped")?; + request + .response_from_proto(response, this, buffer, cx) + .await + }) + } + + pub fn new_ssh( + buffer_store: Model, + worktree_store: Model, + languages: Arc, + upstream_client: AnyProtoClient, + project_id: u64, + cx: &mut ModelContext, + ) -> Self { + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + Self { + mode: LspStoreMode::Ssh(SshLspStore { upstream_client }), + downstream_client: None, + project_id, + buffer_store, + worktree_store, + languages: languages.clone(), + language_server_ids: Default::default(), + language_server_statuses: Default::default(), + nonce: StdRng::from_entropy().gen(), + buffer_snapshots: Default::default(), + next_diagnostic_group_id: Default::default(), + diagnostic_summaries: Default::default(), + diagnostics: Default::default(), + active_entry: None, + _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), + } + } + + pub fn new_remote( + buffer_store: Model, + worktree_store: Model, + languages: Arc, + upstream_client: AnyProtoClient, + project_id: u64, + cx: 
&mut ModelContext, + ) -> Self { + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + Self { + mode: LspStoreMode::Remote(RemoteLspStore { upstream_client }), + downstream_client: None, + project_id, + buffer_store, + worktree_store, + languages: languages.clone(), + language_server_ids: Default::default(), + language_server_statuses: Default::default(), + nonce: StdRng::from_entropy().gen(), + buffer_snapshots: Default::default(), next_diagnostic_group_id: Default::default(), diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, - yarn, _maintain_workspace_config: Self::maintain_workspace_config(cx), _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), - _subscription: cx.on_app_quit(Self::shutdown_language_servers), } } @@ -273,7 +457,6 @@ impl LspStore { self.unregister_buffer_from_language_servers(buffer, old_file, cx); } - self.detect_language_for_buffer(buffer, cx); self.register_buffer_with_language_servers(buffer, cx); } BufferStoreEvent::BufferDropped(_) => {} @@ -338,7 +521,6 @@ impl LspStore { }) .detach(); - self.detect_language_for_buffer(buffer, cx); self.register_buffer_with_language_servers(buffer, cx); cx.observe_release(buffer, |this, buffer, cx| { if let Some(file) = File::from_dyn(buffer.file()) { @@ -406,9 +588,7 @@ impl LspStore { buffers_with_unknown_injections.push(handle); } } - for buffer in plain_text_buffers { - this.detect_language_for_buffer(&buffer, cx); this.register_buffer_with_language_servers(&buffer, cx); } @@ -426,34 +606,29 @@ impl LspStore { &mut self, buffer_handle: &Model, cx: &mut ModelContext, - ) { + ) -> Option { // If the buffer has a language, set it and start the language server if we haven't already. 
let buffer = buffer_handle.read(cx); - let Some(file) = buffer.file() else { - return; - }; - let content = buffer.as_rope(); - let Some(new_language_result) = self - .languages - .language_for_file(file, Some(content), cx) - .now_or_never() - else { - return; - }; + let file = buffer.file()?; - match new_language_result { - Err(e) => { - if e.is::() { - cx.emit(LspStoreEvent::LanguageDetected { - buffer: buffer_handle.clone(), - new_language: None, - }); - } - } - Ok(new_language) => { + let content = buffer.as_rope(); + let available_language = self.languages.language_for_file(file, Some(content), cx); + if let Some(available_language) = &available_language { + if let Some(Ok(Ok(new_language))) = self + .languages + .load_language(available_language) + .now_or_never() + { self.set_language_for_buffer(buffer_handle, new_language, cx); } - }; + } else { + cx.emit(LspStoreEvent::LanguageDetected { + buffer: buffer_handle.clone(), + new_language: None, + }); + } + + available_language } pub fn set_language_for_buffer( @@ -475,9 +650,7 @@ impl LspStore { if let Some(file) = buffer_file { let worktree = file.worktree.clone(); - if worktree.read(cx).is_local() { - self.start_language_servers(&worktree, new_language.clone(), cx) - } + self.start_language_servers(&worktree, new_language.name(), cx) } cx.emit(LspStoreEvent::LanguageDetected { @@ -494,27 +667,6 @@ impl LspStore { self.active_entry = active_entry; } - fn shutdown_language_servers( - &mut self, - _cx: &mut ModelContext, - ) -> impl Future { - let shutdown_futures = self - .language_servers - .drain() - .map(|(_, server_state)| async { - use LanguageServerState::*; - match server_state { - Running { server, .. 
} => server.shutdown()?.await, - Starting(task) => task.await?.shutdown()?.await, - } - }) - .collect::>(); - - async move { - futures::future::join_all(shutdown_futures).await; - } - } - pub(crate) fn send_diagnostic_summaries( &self, worktree: &mut Worktree, @@ -547,9 +699,11 @@ impl LspStore { ::Params: Send, { let buffer = buffer_handle.read(cx); - if self.upstream_client.is_some() { - return self.send_lsp_proto_request(buffer_handle, self.project_id, request, cx); + + if let Some(upstream_client) = self.upstream_client() { + return self.send_lsp_proto_request(buffer_handle, upstream_client, request, cx); } + let language_server = match server { LanguageServerToQuery::Primary => { match self.primary_language_server_for_buffer(buffer, cx) { @@ -635,26 +789,6 @@ impl LspStore { Task::ready(Ok(Default::default())) } - fn send_lsp_proto_request( - &self, - buffer: Model, - project_id: u64, - request: R, - cx: &mut ModelContext<'_, Self>, - ) -> Task::Response>> { - let Some(upstream_client) = self.upstream_client.clone() else { - return Task::ready(Err(anyhow!("disconnected before completing request"))); - }; - let message = request.to_proto(project_id, buffer.read(cx)); - cx.spawn(move |this, cx| async move { - let response = upstream_client.request(message).await?; - let this = this.upgrade().context("project dropped")?; - request - .response_from_proto(response, this, buffer, cx) - .await - }) - } - pub async fn execute_code_actions_on_servers( this: &WeakModel, adapters_and_servers: &[(Arc, Arc)], @@ -702,8 +836,10 @@ impl LspStore { if let Some(command) = action.lsp_action.command { this.update(cx, |this, _| { - this.last_workspace_edits_by_language_server - .remove(&language_server.server_id()); + if let LspStoreMode::Local(mode) = &mut this.mode { + mode.last_workspace_edits_by_language_server + .remove(&language_server.server_id()); + } })?; language_server @@ -715,12 +851,14 @@ impl LspStore { .await?; this.update(cx, |this, _| { - 
project_transaction.0.extend( - this.last_workspace_edits_by_language_server - .remove(&language_server.server_id()) - .unwrap_or_default() - .0, - ) + if let LspStoreMode::Local(mode) = &mut this.mode { + project_transaction.0.extend( + mode.last_workspace_edits_by_language_server + .remove(&language_server.server_id()) + .unwrap_or_default() + .0, + ) + } })?; } } @@ -752,7 +890,7 @@ impl LspStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client.clone() { + if let Some(upstream_client) = self.upstream_client() { let request = proto::ApplyCodeAction { project_id: self.project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -801,7 +939,9 @@ impl LspStore { if let Some(command) = action.lsp_action.command { this.update(&mut cx, |this, _| { - this.last_workspace_edits_by_language_server + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server .remove(&lang_server.server_id()); })?; @@ -816,7 +956,9 @@ impl LspStore { result?; return this.update(&mut cx, |this, _| { - this.last_workspace_edits_by_language_server + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server .remove(&lang_server.server_id()) .unwrap_or_default() }); @@ -834,7 +976,7 @@ impl LspStore { server_id: LanguageServerId, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client.clone() { + if let Some(upstream_client) = self.upstream_client() { let request = proto::ResolveInlayHint { project_id: self.project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -912,7 +1054,7 @@ impl LspStore { .map(|(_, server)| LanguageServerToQuery::Other(server.server_id())) .next() .or_else(|| { - self.upstream_client + self.upstream_client() .is_some() .then_some(LanguageServerToQuery::Primary) }) @@ -945,7 +1087,7 @@ impl LspStore { trigger: String, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client.clone() { + if let 
Some(client) = self.upstream_client() { let request = proto::OnTypeFormatting { project_id: self.project_id, buffer_id: buffer.read(cx).remote_id().into(), @@ -1095,7 +1237,7 @@ impl LspStore { range: Range, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client.as_ref() { + if let Some(upstream_client) = self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), version: serialize_version(&buffer_handle.read(cx).version()), @@ -1175,10 +1317,10 @@ impl LspStore { ) -> Task>> { let language_registry = self.languages.clone(); - if let Some(_) = self.upstream_client.clone() { + if let Some(upstream_client) = self.upstream_client() { let task = self.send_lsp_proto_request( buffer.clone(), - self.project_id, + upstream_client, GetCompletions { position, context }, cx, ); @@ -1187,9 +1329,12 @@ impl LspStore { // In the future, we should provide project guests with the names of LSP adapters, // so that they can use the correct LSP adapter when computing labels. For now, // guests just use the first LSP adapter associated with the buffer's language. 
- let lsp_adapter = language - .as_ref() - .and_then(|language| language_registry.lsp_adapters(language).first().cloned()); + let lsp_adapter = language.as_ref().and_then(|language| { + language_registry + .lsp_adapters(&language.name()) + .first() + .cloned() + }); cx.foreground_executor().spawn(async move { let completions = task.await?; @@ -1269,7 +1414,7 @@ impl LspStore { completions: Arc>>, cx: &mut ModelContext, ) -> Task> { - let client = self.upstream_client.clone(); + let client = self.upstream_client(); let language_registry = self.languages.clone(); let project_id = self.project_id; @@ -1478,7 +1623,7 @@ impl LspStore { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let project_id = self.project_id; cx.spawn(move |_, mut cx| async move { let response = client @@ -1594,7 +1739,7 @@ impl LspStore { let buffer_id = buffer.remote_id().into(); let lsp_request = InlayHints { range }; - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request = proto::InlayHints { project_id: self.project_id, buffer_id, @@ -1644,7 +1789,7 @@ impl LspStore { ) -> Task> { let position = position.to_point_utf16(buffer.read(cx)); - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), @@ -1716,7 +1861,7 @@ impl LspStore { position: PointUtf16, cx: &mut ModelContext, ) -> Task> { - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), @@ -1790,7 +1935,7 @@ impl LspStore { pub fn 
symbols(&self, query: &str, cx: &mut ModelContext) -> Task>> { let language_registry = self.languages.clone(); - if let Some(upstream_client) = self.upstream_client.as_ref() { + if let Some(upstream_client) = self.upstream_client().as_ref() { let request = upstream_client.request(proto::GetProjectSymbols { project_id: self.project_id, query: query.to_string(), @@ -1816,7 +1961,7 @@ impl LspStore { } else { struct WorkspaceSymbolsResult { lsp_adapter: Arc, - language: Arc, + language: LanguageName, worktree: WeakModel, worktree_abs_path: Arc, lsp_symbols: Vec<(String, SymbolKind, lsp::Location)>, @@ -1837,16 +1982,17 @@ impl LspStore { } let worktree_abs_path = worktree.abs_path().clone(); - let (lsp_adapter, language, server) = match self.language_servers.get(server_id) { - Some(LanguageServerState::Running { - adapter, - language, - server, - .. - }) => (adapter.clone(), language.clone(), server), + let (lsp_adapter, language, server) = + match self.as_local().unwrap().language_servers.get(server_id) { + Some(LanguageServerState::Running { + adapter, + language, + server, + .. 
+ }) => (adapter.clone(), language.clone(), server), - _ => continue, - }; + _ => continue, + }; requests.push( server @@ -2105,7 +2251,7 @@ impl LspStore { uri: lsp::Url::from_file_path(abs_path).log_err()?, }; - for (_, _, server) in self.language_servers_for_worktree(worktree_id) { + for server in self.language_servers_for_worktree(worktree_id) { if let Some(include_text) = include_text(server.as_ref()) { let text = if include_text { Some(buffer.read(cx).text()) @@ -2148,8 +2294,9 @@ impl LspStore { .worktree_store .read(cx) .worktree_for_id(*worktree_id, cx)?; - let state = this.language_servers.get(server_id)?; - let delegate = ProjectLspAdapterDelegate::new(this, &worktree, cx); + let state = this.as_local()?.language_servers.get(server_id)?; + let delegate = + ProjectLspAdapterDelegate::for_local(this, &worktree, cx); match state { LanguageServerState::Starting(_) => None, LanguageServerState::Running { @@ -2204,19 +2351,15 @@ impl LspStore { fn language_servers_for_worktree( &self, worktree_id: WorktreeId, - ) -> impl Iterator, &Arc, &Arc)> { + ) -> impl Iterator> { self.language_server_ids .iter() .filter_map(move |((language_server_worktree_id, _), id)| { if *language_server_worktree_id == worktree_id { - if let Some(LanguageServerState::Running { - adapter, - language, - server, - .. - }) = self.language_servers.get(id) + if let Some(LanguageServerState::Running { server, .. 
}) = + self.as_local()?.language_servers.get(id) { - return Some((adapter, language, server)); + return Some(server); } } None @@ -2241,11 +2384,17 @@ impl LspStore { self.language_server_ids .remove(&(id_to_remove, server_name)); self.language_server_statuses.remove(&server_id_to_remove); - self.language_server_watched_paths - .remove(&server_id_to_remove); - self.last_workspace_edits_by_language_server - .remove(&server_id_to_remove); - self.language_servers.remove(&server_id_to_remove); + if let Some(local_lsp_store) = self.as_local_mut() { + local_lsp_store + .language_server_watched_paths + .remove(&server_id_to_remove); + local_lsp_store + .last_workspace_edits_by_language_server + .remove(&server_id_to_remove); + local_lsp_store + .language_servers + .remove(&server_id_to_remove); + } cx.emit(LspStoreEvent::LanguageServerRemoved(server_id_to_remove)); } } @@ -2306,11 +2455,14 @@ impl LspStore { .insert((worktree_id, language_server_name), language_server_id); } + #[track_caller] pub(crate) fn register_buffer_with_language_servers( &mut self, buffer_handle: &Model, cx: &mut ModelContext, ) { + let available_language = self.detect_language_for_buffer(buffer_handle, cx); + let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); @@ -2324,7 +2476,6 @@ impl LspStore { return; }; let initial_snapshot = buffer.text_snapshot(); - let language = buffer.language().cloned(); let worktree_id = file.worktree_id(cx); if let Some(diagnostics) = self.diagnostics.get(&worktree_id) { @@ -2336,12 +2487,12 @@ impl LspStore { } } - if let Some(language) = language { - for adapter in self.languages.lsp_adapters(&language) { + if let Some(language) = available_language { + for adapter in self.languages.lsp_adapters(&language.name()) { let server = self .language_server_ids .get(&(worktree_id, adapter.name.clone())) - .and_then(|id| self.language_servers.get(id)) + .and_then(|id| self.as_local()?.language_servers.get(id)) .and_then(|server_state| { if let 
LanguageServerState::Running { server, .. } = server_state { Some(server.clone()) @@ -2359,7 +2510,7 @@ impl LspStore { lsp::DidOpenTextDocumentParams { text_document: lsp::TextDocumentItem::new( uri.clone(), - adapter.language_id(&language), + adapter.language_id(&language.name()), 0, initial_snapshot.text(), ), @@ -2409,7 +2560,7 @@ impl LspStore { let ids = &self.language_server_ids; if let Some(language) = buffer.language().cloned() { - for adapter in self.languages.lsp_adapters(&language) { + for adapter in self.languages.lsp_adapters(&language.name()) { if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) { buffer.update_diagnostics(*server_id, Default::default(), cx); } @@ -2537,7 +2688,7 @@ impl LspStore { symbol: &Symbol, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request = client.request(proto::OpenBufferForSymbol { project_id: self.project_id, symbol: Some(Self::serialize_symbol(symbol)), @@ -2605,7 +2756,7 @@ impl LspStore { let p = abs_path.clone(); let yarn_worktree = this .update(&mut cx, move |this, cx| { - this.yarn.update(cx, |_, cx| { + this.as_local().unwrap().yarn.update(cx, |_, cx| { cx.spawn(|this, mut cx| async move { let t = this .update(&mut cx, |this, cx| { @@ -2755,7 +2906,7 @@ impl LspStore { ::Result: Send, ::Params: Send, { - debug_assert!(self.upstream_client.is_none()); + debug_assert!(self.upstream_client().is_none()); let snapshot = buffer.read(cx).snapshot(); let scope = position.and_then(|position| snapshot.language_scope_at(position)); @@ -2801,7 +2952,7 @@ impl LspStore { ::Params: Send, ::Result: Send, { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = T::buffer_id_from_proto(&envelope.payload)?; let buffer_handle = this.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) @@ -2839,7 +2990,7 @@ 
impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); let buffer = this.update(&mut cx, |this, cx| { @@ -2979,7 +3130,7 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let action = Self::deserialize_code_action( envelope .payload @@ -3184,7 +3335,9 @@ impl LspStore { simulate_disk_based_diagnostics_completion, adapter, .. - }) = self.language_servers.get_mut(&language_server_id) + }) = self + .as_local_mut() + .and_then(|local_store| local_store.language_servers.get_mut(&language_server_id)) else { return; }; @@ -3205,8 +3358,9 @@ impl LspStore { if let Some(LanguageServerState::Running { simulate_disk_based_diagnostics_completion, .. 
- }) = this.language_servers.get_mut(&language_server_id) - { + }) = this.as_local_mut().and_then(|local_store| { + local_store.language_servers.get_mut(&language_server_id) + }) { *simulate_disk_based_diagnostics_completion = None; } }) @@ -3264,7 +3418,20 @@ impl LspStore { language_server_id: LanguageServerId, cx: &mut ModelContext, ) { - let Some(watchers) = self + let worktrees = self + .worktree_store + .read(cx) + .worktrees() + .filter_map(|worktree| { + self.language_servers_for_worktree(worktree.read(cx).id()) + .find(|server| server.server_id() == language_server_id) + .map(|_| worktree) + }) + .collect::>(); + + let local_lsp_store = self.as_local_mut().unwrap(); + + let Some(watchers) = local_lsp_store .language_server_watcher_registrations .get(&language_server_id) else { @@ -3278,17 +3445,6 @@ impl LspStore { language_server_id ); - let worktrees = self - .worktree_store - .read(cx) - .worktrees() - .filter_map(|worktree| { - self.language_servers_for_worktree(worktree.read(cx).id()) - .find(|(_, _, server)| server.server_id() == language_server_id) - .map(|_| worktree) - }) - .collect::>(); - enum PathToWatch { Worktree { literal_prefix: Arc, @@ -3438,18 +3594,27 @@ impl LspStore { watch_builder.watch_abs_path(abs_path, globset); } } - let watcher = watch_builder.build(self.fs.clone(), language_server_id, cx); - self.language_server_watched_paths + let watcher = watch_builder.build(local_lsp_store.fs.clone(), language_server_id, cx); + local_lsp_store + .language_server_watched_paths .insert(language_server_id, watcher); cx.notify(); } pub fn language_server_for_id(&self, id: LanguageServerId) -> Option> { - if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) { - Some(server.clone()) - } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) { - Some(Arc::clone(server)) + if let Some(local_lsp_store) = self.as_local() { + if let Some(LanguageServerState::Running { server, .. 
}) = + local_lsp_store.language_servers.get(&id) + { + Some(server.clone()) + } else if let Some((_, server)) = + local_lsp_store.supplementary_language_servers.get(&id) + { + Some(Arc::clone(server)) + } else { + None + } } else { None } @@ -3480,7 +3645,9 @@ impl LspStore { .log_err(); this.update(&mut cx, |this, _| { if let Some(transaction) = transaction { - this.last_workspace_edits_by_language_server + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server .insert(server_id, transaction); } })?; @@ -3665,14 +3832,16 @@ impl LspStore { params: DidChangeWatchedFilesRegistrationOptions, cx: &mut ModelContext, ) { - let registrations = self - .language_server_watcher_registrations - .entry(language_server_id) - .or_default(); + if let Some(local) = self.as_local_mut() { + let registrations = local + .language_server_watcher_registrations + .entry(language_server_id) + .or_default(); - registrations.insert(registration_id.to_string(), params.watchers); + registrations.insert(registration_id.to_string(), params.watchers); - self.rebuild_watched_paths(language_server_id, cx); + self.rebuild_watched_paths(language_server_id, cx); + } } fn on_lsp_unregister_did_change_watched_files( @@ -3681,26 +3850,28 @@ impl LspStore { registration_id: &str, cx: &mut ModelContext, ) { - let registrations = self - .language_server_watcher_registrations - .entry(language_server_id) - .or_default(); + if let Some(local) = self.as_local_mut() { + let registrations = local + .language_server_watcher_registrations + .entry(language_server_id) + .or_default(); - if registrations.remove(registration_id).is_some() { - log::info!( + if registrations.remove(registration_id).is_some() { + log::info!( "language server {}: unregistered workspace/DidChangeWatchedFiles capability with id {}", language_server_id, registration_id ); - } else { - log::warn!( + } else { + log::warn!( "language server {}: failed to unregister workspace/DidChangeWatchedFiles capability with id {}. 
not registered.", language_server_id, registration_id ); - } + } - self.rebuild_watched_paths(language_server_id, cx); + self.rebuild_watched_paths(language_server_id, cx); + } } #[allow(clippy::type_complexity)] @@ -3915,7 +4086,7 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let buffer = this.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) @@ -3991,7 +4162,7 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let peer_id = envelope.original_sender_id()?; + let peer_id = envelope.original_sender_id().unwrap_or_default(); let symbol = envelope .payload .symbol @@ -4093,6 +4264,76 @@ impl LspStore { Ok(proto::Ack {}) } + pub async fn handle_create_language_server( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let name = LanguageServerName::from_proto(envelope.payload.name); + + let binary = envelope + .payload + .binary + .ok_or_else(|| anyhow!("missing binary"))?; + let binary = LanguageServerBinary { + path: PathBuf::from(binary.path), + env: None, + arguments: binary.arguments.into_iter().map(Into::into).collect(), + }; + let language = envelope + .payload + .language + .ok_or_else(|| anyhow!("missing language"))?; + let language_name = LanguageName::from_proto(language.name); + let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; + this.update(&mut cx, |this, cx| { + this.languages + .register_language(language_name.clone(), None, matcher.clone(), { + let language_name = language_name.clone(); + move || { + Ok(( + LanguageConfig { + name: language_name.clone(), + matcher: matcher.clone(), + ..Default::default() + }, + Default::default(), + 
Default::default(), + )) + } + }); + cx.background_executor() + .spawn(this.languages.language_for_name(language_name.0.as_ref())) + .detach(); + + let adapter = Arc::new(SshLspAdapter::new( + name, + binary, + envelope.payload.initialization_options, + envelope.payload.code_action_kinds, + )); + + this.languages + .register_lsp_adapter(language_name.clone(), adapter.clone()); + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return Err(anyhow!("worktree not found")); + }; + this.start_language_server( + &worktree, + CachedLspAdapter::new(adapter), + language_name, + cx, + ); + Ok(()) + })??; + Ok(proto::Ack {}) + } + async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -4139,16 +4380,24 @@ impl LspStore { }) } + fn language_settings<'a>( + &'a self, + worktree: &'a Model, + language: &LanguageName, + cx: &'a mut ModelContext, + ) -> &'a LanguageSettings { + let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx)); + all_language_settings(root_file.map(|f| f as _).as_ref(), cx).language(Some(language)) + } + pub fn start_language_servers( &mut self, worktree: &Model, - language: Arc, + language: LanguageName, cx: &mut ModelContext, ) { - let (root_file, is_local) = - worktree.update(cx, |tree, cx| (tree.root_file(cx), tree.is_local())); - let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx); - if !settings.enable_language_server || !is_local { + let settings = self.language_settings(worktree, &language, cx); + if !settings.enable_language_server || self.mode.is_remote() { return; } @@ -4176,7 +4425,7 @@ impl LspStore { .load_available_lsp_adapter(&desired_language_server) { self.languages - .register_lsp_adapter(language.name(), adapter.adapter.clone()); + .register_lsp_adapter(language.clone(), adapter.adapter.clone()); enabled_lsp_adapters.push(adapter); continue; } @@ -4189,7 +4438,6 @@ impl LspStore { log::info!( 
"starting language servers for {language}: {adapters}", - language = language.name(), adapters = enabled_lsp_adapters .iter() .map(|adapter| adapter.name.0.as_ref()) @@ -4210,14 +4458,108 @@ impl LspStore { .reorder_language_servers(&language, enabled_lsp_adapters); } + /* + ssh client owns the lifecycle of the language servers + ssh host actually runs the binaries + + in the future: ssh client will use the local extensions to get the downloads etc. + and send them up over the ssh connection (but today) we'll just the static config + + languages::() <-- registers lsp adapters + on the ssh host we won't have adapters for the LSPs + */ + + fn start_language_server_on_ssh_host( + &mut self, + worktree: &Model, + adapter: Arc, + language: LanguageName, + cx: &mut ModelContext, + ) { + let ssh = self.as_ssh().unwrap(); + + let configured_binary = ProjectSettings::get( + Some(worktree.update(cx, |worktree, cx| worktree.settings_location(cx))), + cx, + ) + .lsp + .get(&adapter.name()) + .and_then(|c| c.binary.as_ref()) + .and_then(|config| { + if let Some(path) = &config.path { + Some((path.clone(), config.arguments.clone().unwrap_or_default())) + } else { + None + } + }); + let delegate = + ProjectLspAdapterDelegate::for_ssh(self, worktree, cx) as Arc; + let project_id = self.project_id; + let worktree_id = worktree.read(cx).id().to_proto(); + let upstream_client = ssh.upstream_client.clone(); + let name = adapter.name().to_string(); + let Some((path, arguments)) = configured_binary else { + cx.emit(LspStoreEvent::Notification(format!( + "ssh-remoting currently requires manually configuring {} in your settings", + adapter.name() + ))); + return; + }; + let Some(available_language) = self.languages.available_language_for_name(&language) else { + log::error!("failed to find available language {language}"); + return; + }; + let task = cx.spawn(|_, _| async move { + let delegate = delegate; + let name = adapter.name().to_string(); + let code_action_kinds = adapter + .adapter 
+ .code_action_kinds() + .map(|kinds| serde_json::to_string(&kinds)) + .transpose()?; + let get_options = adapter.adapter.clone().initialization_options(&delegate); + let initialization_options = get_options + .await? + .map(|options| serde_json::to_string(&options)) + .transpose()?; + + upstream_client + .request(proto::CreateLanguageServer { + project_id, + worktree_id, + name, + binary: Some(proto::LanguageServerCommand { path, arguments }), + initialization_options, + code_action_kinds, + language: Some(proto::AvailableLanguage { + name: language.to_proto(), + matcher: serde_json::to_string(&available_language.matcher())?, + }), + }) + .await + }); + cx.spawn(|this, mut cx| async move { + if let Err(e) = task.await { + this.update(&mut cx, |_this, cx| { + cx.emit(LspStoreEvent::Notification(format!( + "failed to start {}: {}", + name, e + ))) + }) + .ok(); + } + }) + .detach(); + } + fn start_language_server( &mut self, worktree_handle: &Model, adapter: Arc, - language: Arc, + language: LanguageName, cx: &mut ModelContext, ) { - if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT { + if self.mode.is_remote() { return; } @@ -4229,12 +4571,24 @@ impl LspStore { return; } + if self.mode.is_ssh() { + self.start_language_server_on_ssh_host(worktree_handle, adapter, language, cx); + return; + } + + if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT { + return; + } + let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let lsp_adapter_delegate = ProjectLspAdapterDelegate::new(self, worktree_handle, cx); + let lsp_adapter_delegate = ProjectLspAdapterDelegate::for_local(self, worktree_handle, cx); let cli_environment = self + .as_local() + .unwrap() .environment - .as_ref() - .and_then(|environment| environment.read(cx).get_cli_environment()); + .read(cx) + .get_cli_environment(); + let pending_server = match self.languages.create_pending_language_server( stderr_capture.clone(), 
language.clone(), @@ -4255,6 +4609,8 @@ impl LspStore { }), cx, ); + + // We need some on the SSH client, and some on SSH host let lsp = project_settings.lsp.get(&adapter.name.0); let override_options = lsp.and_then(|s| s.initialization_options.clone()); @@ -4329,7 +4685,10 @@ impl LspStore { }) }); - self.language_servers.insert(server_id, state); + self.as_local_mut() + .unwrap() + .language_servers + .insert(server_id, state); self.language_server_ids.insert(key, server_id); } @@ -4340,7 +4699,7 @@ impl LspStore { override_initialization_options: Option, pending_server: PendingLanguageServer, adapter: Arc, - language: Arc, + language: LanguageName, server_id: LanguageServerId, key: (WorktreeId, LanguageServerName), cx: &mut AsyncAppContext, @@ -4377,51 +4736,63 @@ impl LspStore { fn reinstall_language_server( &mut self, - language: Arc, + language: LanguageName, adapter: Arc, server_id: LanguageServerId, cx: &mut ModelContext, ) -> Option> { log::info!("beginning to reinstall server"); - let existing_server = match self.language_servers.remove(&server_id) { - Some(LanguageServerState::Running { server, .. 
}) => Some(server), - _ => None, - }; - - self.worktree_store.update(cx, |store, cx| { - for worktree in store.worktrees() { - let key = (worktree.read(cx).id(), adapter.name.clone()); - self.language_server_ids.remove(&key); - } - }); - - Some(cx.spawn(move |this, mut cx| async move { - if let Some(task) = existing_server.and_then(|server| server.shutdown()) { - log::info!("shutting down existing server"); - task.await; - } - - // TODO: This is race-safe with regards to preventing new instances from - // starting while deleting, but existing instances in other projects are going - // to be very confused and messed up - let Some(task) = this - .update(&mut cx, |this, cx| { - this.languages.delete_server_container(adapter.clone(), cx) - }) - .log_err() - else { - return; + if let Some(local) = self.as_local_mut() { + let existing_server = match local.language_servers.remove(&server_id) { + Some(LanguageServerState::Running { server, .. }) => Some(server), + _ => None, }; - task.await; - this.update(&mut cx, |this, cx| { - for worktree in this.worktree_store.read(cx).worktrees().collect::>() { - this.start_language_server(&worktree, adapter.clone(), language.clone(), cx); + self.worktree_store.update(cx, |store, cx| { + for worktree in store.worktrees() { + let key = (worktree.read(cx).id(), adapter.name.clone()); + self.language_server_ids.remove(&key); } - }) - .ok(); - })) + }); + + Some(cx.spawn(move |this, mut cx| async move { + if let Some(task) = existing_server.and_then(|server| server.shutdown()) { + log::info!("shutting down existing server"); + task.await; + } + + // TODO: This is race-safe with regards to preventing new instances from + // starting while deleting, but existing instances in other projects are going + // to be very confused and messed up + let Some(task) = this + .update(&mut cx, |this, cx| { + this.languages.delete_server_container(adapter.clone(), cx) + }) + .log_err() + else { + return; + }; + task.await; + + this.update(&mut cx, |this, 
cx| { + for worktree in this.worktree_store.read(cx).worktrees().collect::>() { + this.start_language_server( + &worktree, + adapter.clone(), + language.clone(), + cx, + ); + } + }) + .ok(); + })) + } else if let Some(_ssh_store) = self.as_ssh() { + // TODO + None + } else { + None + } } async fn shutdown_language_server( @@ -4469,76 +4840,90 @@ impl LspStore { cx: &mut ModelContext, ) -> Task> { let key = (worktree_id, adapter_name); - if let Some(server_id) = self.language_server_ids.remove(&key) { - let name = key.1 .0; - log::info!("stopping language server {name}"); + if self.mode.is_local() { + if let Some(server_id) = self.language_server_ids.remove(&key) { + let name = key.1 .0; + log::info!("stopping language server {name}"); - // Remove other entries for this language server as well - let mut orphaned_worktrees = vec![worktree_id]; - let other_keys = self.language_server_ids.keys().cloned().collect::>(); - for other_key in other_keys { - if self.language_server_ids.get(&other_key) == Some(&server_id) { - self.language_server_ids.remove(&other_key); - orphaned_worktrees.push(other_key.0); + // Remove other entries for this language server as well + let mut orphaned_worktrees = vec![worktree_id]; + let other_keys = self.language_server_ids.keys().cloned().collect::>(); + for other_key in other_keys { + if self.language_server_ids.get(&other_key) == Some(&server_id) { + self.language_server_ids.remove(&other_key); + orphaned_worktrees.push(other_key.0); + } } - } - self.buffer_store.update(cx, |buffer_store, cx| { - for buffer in buffer_store.buffers() { - buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(server_id, Default::default(), cx); + self.buffer_store.update(cx, |buffer_store, cx| { + for buffer in buffer_store.buffers() { + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics(server_id, Default::default(), cx); + }); + } + }); + + let project_id = self.project_id; + for (worktree_id, summaries) in 
self.diagnostic_summaries.iter_mut() { + summaries.retain(|path, summaries_by_server_id| { + if summaries_by_server_id.remove(&server_id).is_some() { + if let Some(downstream_client) = self.downstream_client.clone() { + downstream_client + .send(proto::UpdateDiagnosticSummary { + project_id, + worktree_id: worktree_id.to_proto(), + summary: Some(proto::DiagnosticSummary { + path: path.to_string_lossy().to_string(), + language_server_id: server_id.0 as u64, + error_count: 0, + warning_count: 0, + }), + }) + .log_err(); + } + !summaries_by_server_id.is_empty() + } else { + true + } }); } - }); - let project_id = self.project_id; - for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { - summaries.retain(|path, summaries_by_server_id| { - if summaries_by_server_id.remove(&server_id).is_some() { - if let Some(downstream_client) = self.downstream_client.clone() { - downstream_client - .send(proto::UpdateDiagnosticSummary { - project_id, - worktree_id: worktree_id.to_proto(), - summary: Some(proto::DiagnosticSummary { - path: path.to_string_lossy().to_string(), - language_server_id: server_id.0 as u64, - error_count: 0, - warning_count: 0, - }), - }) - .log_err(); + for diagnostics in self.diagnostics.values_mut() { + diagnostics.retain(|_, diagnostics_by_server_id| { + if let Ok(ix) = + diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) + { + diagnostics_by_server_id.remove(ix); + !diagnostics_by_server_id.is_empty() + } else { + true } - !summaries_by_server_id.is_empty() - } else { - true - } - }); + }); + } + + self.as_local_mut() + .unwrap() + .language_server_watched_paths + .remove(&server_id); + self.language_server_statuses.remove(&server_id); + cx.notify(); + + let server_state = self + .as_local_mut() + .unwrap() + .language_servers + .remove(&server_id); + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); + cx.spawn(move |_, cx| async move { + Self::shutdown_language_server(server_state, name, cx).await; + 
orphaned_worktrees + }) + } else { + Task::ready(Vec::new()) } - - for diagnostics in self.diagnostics.values_mut() { - diagnostics.retain(|_, diagnostics_by_server_id| { - if let Ok(ix) = - diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) - { - diagnostics_by_server_id.remove(ix); - !diagnostics_by_server_id.is_empty() - } else { - true - } - }); - } - - self.language_server_watched_paths.remove(&server_id); - self.language_server_statuses.remove(&server_id); - cx.notify(); - - let server_state = self.language_servers.remove(&server_id); - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); - cx.spawn(move |_, cx| async move { - Self::shutdown_language_server(server_state, name, cx).await; - orphaned_worktrees - }) + } else if self.mode.is_ssh() { + // TODO ssh + Task::ready(Vec::new()) } else { Task::ready(Vec::new()) } @@ -4549,7 +4934,7 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request = client.request(proto::RestartLanguageServers { project_id: self.project_id, buffer_ids: buffers @@ -4562,18 +4947,17 @@ impl LspStore { .detach_and_log_err(cx); } else { #[allow(clippy::mutable_key_type)] - let language_server_lookup_info: HashSet<(Model, Arc)> = buffers + let language_server_lookup_info: HashSet<(Model, LanguageName)> = buffers .into_iter() .filter_map(|buffer| { let buffer = buffer.read(cx); let file = buffer.file()?; let worktree = File::from_dyn(Some(file))?.worktree.clone(); - let language = self - .languages - .language_for_file(file, Some(buffer.as_rope()), cx) - .now_or_never()? 
- .ok()?; - Some((worktree, language)) + let language = + self.languages + .language_for_file(file, Some(buffer.as_rope()), cx)?; + + Some((worktree, language.name())) }) .collect(); @@ -4586,7 +4970,7 @@ impl LspStore { pub fn restart_language_servers( &mut self, worktree: Model, - language: Arc, + language: LanguageName, cx: &mut ModelContext, ) { let worktree_id = worktree.read(cx).id(); @@ -4637,7 +5021,7 @@ impl LspStore { } fn check_errored_server( - language: Arc, + language: LanguageName, adapter: Arc, server_id: LanguageServerId, installation_test_binary: Option, @@ -4719,6 +5103,7 @@ impl LspStore { .clone() .workspace_configuration(&delegate, cx) .await?; + // This has to come from the server let (language_server, mut initialization_options) = pending_server.task.await?; let name = language_server.name(); @@ -4730,6 +5115,7 @@ impl LspStore { let adapter = adapter.clone(); if let Some(this) = this.upgrade() { adapter.process_diagnostics(&mut params); + // Everything else has to be on the server, Can we make it on the client? 
this.update(&mut cx, |this, cx| { this.update_diagnostics( server_id, @@ -5341,7 +5727,7 @@ impl LspStore { fn insert_newly_running_language_server( &mut self, - language: Arc, + language: LanguageName, adapter: Arc, language_server: Arc, server_id: LanguageServerId, @@ -5361,15 +5747,17 @@ impl LspStore { // Update language_servers collection with Running variant of LanguageServerState // indicating that the server is up and running and ready - self.language_servers.insert( - server_id, - LanguageServerState::Running { - adapter: adapter.clone(), - language: language.clone(), - server: language_server.clone(), - simulate_disk_based_diagnostics_completion: None, - }, - ); + if let Some(local) = self.as_local_mut() { + local.language_servers.insert( + server_id, + LanguageServerState::Running { + adapter: adapter.clone(), + language: language.clone(), + server: language_server.clone(), + simulate_disk_based_diagnostics_completion: None, + }, + ); + } self.language_server_statuses.insert( server_id, @@ -5409,7 +5797,7 @@ impl LspStore { if file.worktree.read(cx).id() != key.0 || !self .languages - .lsp_adapters(language) + .lsp_adapters(&language.name()) .iter() .any(|a| a.name == key.1) { @@ -5441,7 +5829,7 @@ impl LspStore { lsp::DidOpenTextDocumentParams { text_document: lsp::TextDocumentItem::new( uri, - adapter.language_id(language), + adapter.language_id(&language.name()), version, initial_snapshot.text(), ), @@ -5521,12 +5909,14 @@ impl LspStore { ) -> impl Iterator, &'a Arc)> { self.language_server_ids_for_buffer(buffer, cx) .into_iter() - .filter_map(|server_id| match self.language_servers.get(&server_id)? { - LanguageServerState::Running { - adapter, server, .. - } => Some((adapter, server)), - _ => None, - }) + .filter_map( + |server_id| match self.as_local()?.language_servers.get(&server_id)? { + LanguageServerState::Running { + adapter, server, .. 
+ } => Some((adapter, server)), + _ => None, + }, + ) } pub(crate) fn cancel_language_server_work_for_buffers( @@ -5564,9 +5954,12 @@ impl LspStore { server: Arc, cx: &mut ModelContext, ) { - self.supplementary_language_servers - .insert(id, (name, server)); - cx.emit(LspStoreEvent::LanguageServerAdded(id)); + if let Some(local) = self.as_local_mut() { + local + .supplementary_language_servers + .insert(id, (name, server)); + cx.emit(LspStoreEvent::LanguageServerAdded(id)); + } } pub fn unregister_supplementary_language_server( @@ -5574,27 +5967,33 @@ impl LspStore { id: LanguageServerId, cx: &mut ModelContext, ) { - self.supplementary_language_servers.remove(&id); - cx.emit(LspStoreEvent::LanguageServerRemoved(id)); + if let Some(local) = self.as_local_mut() { + local.supplementary_language_servers.remove(&id); + cx.emit(LspStoreEvent::LanguageServerRemoved(id)); + } } pub fn supplementary_language_servers( &self, - ) -> impl '_ + Iterator { - self.supplementary_language_servers - .iter() - .map(|(id, (name, _))| (id, name)) + ) -> impl '_ + Iterator { + self.as_local().into_iter().flat_map(|local| { + local + .supplementary_language_servers + .iter() + .map(|(id, (name, _))| (*id, name.clone())) + }) } pub fn language_server_adapter_for_id( &self, id: LanguageServerId, ) -> Option> { - if let Some(LanguageServerState::Running { adapter, .. }) = self.language_servers.get(&id) { - Some(adapter.clone()) - } else { - None - } + self.as_local() + .and_then(|local| local.language_servers.get(&id)) + .and_then(|language_server_state| match language_server_state { + LanguageServerState::Running { adapter, .. 
} => Some(adapter.clone()), + _ => None, + }) } pub(super) fn update_local_worktree_language_servers( @@ -5607,6 +6006,8 @@ impl LspStore { return; } + let Some(local) = self.as_local() else { return }; + let worktree_id = worktree_handle.read(cx).id(); let mut language_server_ids = self .language_server_ids @@ -5621,9 +6022,9 @@ impl LspStore { let abs_path = worktree_handle.read(cx).abs_path(); for server_id in &language_server_ids { if let Some(LanguageServerState::Running { server, .. }) = - self.language_servers.get(server_id) + local.language_servers.get(server_id) { - if let Some(watched_paths) = self + if let Some(watched_paths) = local .language_server_watched_paths .get(server_id) .and_then(|paths| paths.read(cx).worktree_paths.get(&worktree_id)) @@ -5665,8 +6066,11 @@ impl LspStore { token_to_cancel: Option, _cx: &mut ModelContext, ) { + let Some(local) = self.as_local() else { + return; + }; let status = self.language_server_statuses.get(&server_id); - let server = self.language_servers.get(&server_id); + let server = local.language_servers.get(&server_id); if let Some((LanguageServerState::Running { server, .. 
}, status)) = server.zip(status) { for (token, progress) in &status.pending_work { if let Some(token_to_cancel) = token_to_cancel.as_ref() { @@ -5715,7 +6119,7 @@ impl LspStore { if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { let worktree_id = file.worktree_id(cx); self.languages - .lsp_adapters(language) + .lsp_adapters(&language.name()) .iter() .flat_map(|adapter| { let key = (worktree_id, adapter.name.clone()); @@ -5777,7 +6181,8 @@ impl LspStore { language_server: Arc, cx: &mut AsyncAppContext, ) -> Result { - let fs = this.update(cx, |this, _| this.fs.clone())?; + let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone())?; + let mut operations = Vec::new(); if let Some(document_changes) = edit.document_changes { match document_changes { @@ -6207,7 +6612,10 @@ impl LanguageServerWatchedPathsBuilder { while let Some(update) = push_updates.0.next().await { let action = lsp_store .update(&mut cx, |this, cx| { - let Some(watcher) = this + let Some(local) = this.as_local() else { + return ControlFlow::Break(()); + }; + let Some(watcher) = local .language_server_watched_paths .get(&language_server_id) else { @@ -6297,13 +6705,27 @@ pub enum LanguageServerState { Starting(Task>>), Running { - language: Arc, + language: LanguageName, adapter: Arc, server: Arc, simulate_disk_based_diagnostics_completion: Option>, }, } +impl std::fmt::Debug for LanguageServerState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LanguageServerState::Starting(_) => { + f.debug_struct("LanguageServerState::Starting").finish() + } + LanguageServerState::Running { language, .. 
} => f + .debug_struct("LanguageServerState::Running") + .field("language", &language) + .finish(), + } + } +} + #[derive(Clone, Debug, Serialize)] pub struct LanguageServerProgress { pub is_disk_based_diagnostics_progress: bool, @@ -6378,24 +6800,136 @@ fn glob_literal_prefix(glob: &str) -> &str { &glob[..literal_end] } +pub struct SshLspAdapter { + name: LanguageServerName, + binary: LanguageServerBinary, + initialization_options: Option, + code_action_kinds: Option>, +} + +impl SshLspAdapter { + pub fn new( + name: LanguageServerName, + binary: LanguageServerBinary, + initialization_options: Option, + code_action_kinds: Option, + ) -> Self { + Self { + name, + binary, + initialization_options, + code_action_kinds: code_action_kinds + .as_ref() + .and_then(|c| serde_json::from_str(c).ok()), + } + } +} + +#[async_trait(?Send)] +impl LspAdapter for SshLspAdapter { + fn name(&self) -> LanguageServerName { + self.name.clone() + } + + async fn initialization_options( + self: Arc, + _: &Arc, + ) -> Result> { + let Some(options) = &self.initialization_options else { + return Ok(None); + }; + let result = serde_json::from_str(options)?; + Ok(result) + } + + fn code_action_kinds(&self) -> Option> { + self.code_action_kinds.clone() + } + + async fn check_if_user_installed( + &self, + _: &dyn LspAdapterDelegate, + _: &AsyncAppContext, + ) -> Option { + Some(self.binary.clone()) + } + + async fn cached_server_binary( + &self, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + None + } + + async fn fetch_latest_server_version( + &self, + _: &dyn LspAdapterDelegate, + ) -> Result> { + anyhow::bail!("SshLspAdapter does not support fetch_latest_server_version") + } + + async fn fetch_server_binary( + &self, + _: Box, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Result { + anyhow::bail!("SshLspAdapter does not support fetch_server_binary") + } + + async fn installation_test_binary(&self, _: PathBuf) -> Option { + None + } +} + pub struct ProjectLspAdapterDelegate 
{ lsp_store: WeakModel, worktree: worktree::Snapshot, - fs: Arc, + fs: Option>, http_client: Arc, language_registry: Arc, load_shell_env_task: Shared>>>, } impl ProjectLspAdapterDelegate { - pub fn new( + fn for_local( lsp_store: &LspStore, worktree: &Model, cx: &mut ModelContext, + ) -> Arc { + let local = lsp_store + .as_local() + .expect("ProjectLspAdapterDelegate cannot be constructed on a remote"); + + let http_client = local + .http_client + .clone() + .unwrap_or_else(|| Arc::new(BlockedHttpClient)); + + Self::new(lsp_store, worktree, http_client, Some(local.fs.clone()), cx) + } + + fn for_ssh( + lsp_store: &LspStore, + worktree: &Model, + cx: &mut ModelContext, + ) -> Arc { + Self::new(lsp_store, worktree, Arc::new(BlockedHttpClient), None, cx) + } + + pub fn new( + lsp_store: &LspStore, + worktree: &Model, + http_client: Arc, + fs: Option>, + cx: &mut ModelContext, ) -> Arc { let worktree_id = worktree.read(cx).id(); let worktree_abs_path = worktree.read(cx).abs_path(); - let load_shell_env_task = if let Some(environment) = &lsp_store.environment { + let load_shell_env_task = if let Some(environment) = + &lsp_store.as_local().map(|local| local.environment.clone()) + { environment.update(cx, |env, cx| { env.get_environment(Some(worktree_id), Some(worktree_abs_path), cx) }) @@ -6403,14 +6937,10 @@ impl ProjectLspAdapterDelegate { Task::ready(None).shared() }; - let Some(http_client) = lsp_store.http_client.clone() else { - panic!("ProjectLspAdapterDelegate cannot be constructedd on an ssh-remote yet") - }; - Arc::new(Self { lsp_store: cx.weak_model(), worktree: worktree.read(cx).snapshot(), - fs: lsp_store.fs.clone(), + fs, http_client, language_registry: lsp_store.languages.clone(), load_shell_env_task, @@ -6418,6 +6948,26 @@ impl ProjectLspAdapterDelegate { } } +struct BlockedHttpClient; + +impl HttpClient for BlockedHttpClient { + fn send( + &self, + _req: Request, + ) -> BoxFuture<'static, Result, Error>> { + Box::pin(async { + Err(std::io::Error::new( + 
std::io::ErrorKind::PermissionDenied, + "ssh host blocked http connection", + ) + .into()) + }) + } + + fn proxy(&self) -> Option<&Uri> { + None + } +} #[async_trait] impl LspAdapterDelegate for ProjectLspAdapterDelegate { fn show_notification(&self, message: &str, cx: &mut AppContext) { @@ -6447,6 +6997,7 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { + self.fs.as_ref()?; let worktree_abs_path = self.worktree.abs_path(); let shell_path = self.shell_env().await.get("PATH").cloned(); which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() @@ -6454,6 +7005,8 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { #[cfg(target_os = "windows")] async fn which(&self, command: &OsStr) -> Option { + self.fs.as_ref()?; + // todo(windows) Getting the shell env variables in a current directory on Windows is more complicated than other platforms // there isn't a 'default shell' necessarily. 
The closest would be the default profile on the windows terminal // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup @@ -6472,17 +7025,20 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { async fn read_text_file(&self, path: PathBuf) -> Result { if self.worktree.entry_for_path(&path).is_none() { return Err(anyhow!("no such path {path:?}")); + }; + if let Some(fs) = &self.fs { + let content = fs.load(&path).await?; + Ok(content) + } else { + return Err(anyhow!("cannot open {path:?} on ssh host (yet!)")); } - let path = self.worktree.absolutize(path.as_ref())?; - let content = self.fs.load(&path).await?; - Ok(content) } } async fn populate_labels_for_symbols( symbols: Vec, language_registry: &Arc, - default_language: Option>, + default_language: Option, lsp_adapter: Option>, output: &mut Vec, ) { @@ -6497,7 +7053,12 @@ async fn populate_labels_for_symbols( .ok() .or_else(|| { unknown_path.get_or_insert(symbol.path.path.clone()); - default_language.clone() + default_language.as_ref().and_then(|name| { + language_registry + .language_for_name(&name.0) + .now_or_never()? 
+ .ok() + }) }); symbols_by_language .entry(language) @@ -6523,9 +7084,12 @@ async fn populate_labels_for_symbols( let mut labels = Vec::new(); if let Some(language) = language { - let lsp_adapter = lsp_adapter - .clone() - .or_else(|| language_registry.lsp_adapters(&language).first().cloned()); + let lsp_adapter = lsp_adapter.clone().or_else(|| { + language_registry + .lsp_adapters(&language.name()) + .first() + .cloned() + }); if let Some(lsp_adapter) = lsp_adapter { labels = lsp_adapter .labels_for_symbols(&label_params, &language) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ed489af687..f67423b073 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -107,7 +107,7 @@ pub use buffer_store::ProjectTransaction; pub use lsp_store::{ DiagnosticSummary, LanguageServerLogType, LanguageServerProgress, LanguageServerPromptRequest, LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, - ProjectLspAdapterDelegate, SERVER_PROGRESS_THROTTLE_TIMEOUT, + SERVER_PROGRESS_THROTTLE_TIMEOUT, }; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; @@ -643,16 +643,13 @@ impl Project { let environment = ProjectEnvironment::new(&worktree_store, env, cx); let lsp_store = cx.new_model(|cx| { - LspStore::new( + LspStore::new_local( buffer_store.clone(), worktree_store.clone(), - Some(environment.clone()), + environment.clone(), languages.clone(), Some(client.http_client()), fs.clone(), - None, - None, - None, cx, ) }); @@ -712,17 +709,90 @@ impl Project { fs: Arc, cx: &mut AppContext, ) -> Model { - let this = Self::local(client, node, user_store, languages, fs, None, cx); - this.update(cx, |this, cx| { - let client: AnyProtoClient = ssh.clone().into(); + cx.new_model(|cx: &mut ModelContext| { + let (tx, rx) = mpsc::unbounded(); + cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) + .detach(); + let tasks = Inventory::new(cx); + let global_snippets_dir = 
paths::config_dir().join("snippets"); + let snippets = + SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - this.worktree_store.update(cx, |store, _cx| { - store.set_upstream_client(client.clone()); + let worktree_store = cx.new_model(|_| { + let mut worktree_store = WorktreeStore::new(false, fs.clone()); + worktree_store.set_upstream_client(ssh.clone().into()); + worktree_store }); - this.settings_observer = cx.new_model(|cx| { - SettingsObserver::new_ssh(ssh.clone().into(), this.worktree_store.clone(), cx) + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + let buffer_store = + cx.new_model(|cx| BufferStore::new(worktree_store.clone(), None, cx)); + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); + + let settings_observer = cx.new_model(|cx| { + SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) }); + let environment = ProjectEnvironment::new(&worktree_store, None, cx); + let lsp_store = cx.new_model(|cx| { + LspStore::new_ssh( + buffer_store.clone(), + worktree_store.clone(), + languages.clone(), + ssh.clone().into(), + 0, + cx, + ) + }); + cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); + + let this = Self { + buffer_ordered_messages_tx: tx, + collaborators: Default::default(), + worktree_store, + buffer_store, + lsp_store, + current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), + join_project_response_message_id: 0, + client_state: ProjectClientState::Local, + client_subscriptions: Vec::new(), + _subscriptions: vec![ + cx.observe_global::(Self::on_settings_changed), + cx.on_release(Self::release), + ], + active_entry: None, + snippets, + languages, + client, + user_store, + settings_observer, + fs, + ssh_session: Some(ssh.clone()), + buffers_needing_diff: Default::default(), + git_diff_debouncer: DebouncedDelay::new(), + terminals: Terminals { + local_handles: Vec::new(), + }, + node: Some(node), + default_prettier: 
DefaultPrettier::default(), + prettiers_per_worktree: HashMap::default(), + prettier_instances: HashMap::default(), + tasks, + hosted_project_id: None, + dev_server_project_id: None, + search_history: Self::new_search_history(), + environment, + remotely_created_buffers: Default::default(), + last_formatting_failure: None, + buffers_being_formatted: Default::default(), + search_included_history: Self::new_search_history(), + search_excluded_history: Self::new_search_history(), + }; + + let client: AnyProtoClient = ssh.clone().into(); + ssh.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle()); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.buffer_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.worktree_store); @@ -735,9 +805,8 @@ impl Project { LspStore::init(&client); SettingsObserver::init(&client); - this.ssh_session = Some(ssh); - }); - this + this + }) } pub async fn remote( @@ -820,16 +889,12 @@ impl Project { cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; let lsp_store = cx.new_model(|cx| { - let mut lsp_store = LspStore::new( + let mut lsp_store = LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), - None, languages.clone(), - Some(client.http_client()), - fs.clone(), - None, - Some(client.clone().into()), - Some(remote_id), + client.clone().into(), + remote_id, cx, ); lsp_store.set_language_server_statuses_from_proto(response.payload.language_servers); @@ -1125,8 +1190,7 @@ impl Project { if let Some(language) = buffer_language { if settings.enable_language_server { if let Some(file) = buffer_file { - language_servers_to_start - .push((file.worktree.clone(), Arc::clone(language))); + language_servers_to_start.push((file.worktree.clone(), language.name())); } } language_formatters_to_check @@ -1144,7 +1208,7 @@ impl Project { let language = languages.iter().find_map(|l| { let adapter = self .languages - .lsp_adapters(l) + .lsp_adapters(&l.name()) .iter() .find(|adapter| adapter.name == started_lsp_name)? 
.clone(); @@ -1165,11 +1229,11 @@ impl Project { ) { (None, None) => {} (Some(_), None) | (None, Some(_)) => { - language_servers_to_restart.push((worktree, Arc::clone(language))); + language_servers_to_restart.push((worktree, language.name())); } (Some(current_lsp_settings), Some(new_lsp_settings)) => { if current_lsp_settings != new_lsp_settings { - language_servers_to_restart.push((worktree, Arc::clone(language))); + language_servers_to_restart.push((worktree, language.name())); } } } @@ -4777,7 +4841,7 @@ impl Project { pub fn supplementary_language_servers<'a>( &'a self, cx: &'a AppContext, - ) -> impl '_ + Iterator { + ) -> impl '_ + Iterator { self.lsp_store.read(cx).supplementary_language_servers() } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index c2af1c3597..70b2eccf23 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -19,7 +19,7 @@ use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId}; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct ProjectSettings { /// Configuration for language servers. 
/// diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index ffa206684f..4662c75477 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -6,7 +6,7 @@ use http_client::Url; use language::{ language_settings::{AllLanguageSettings, LanguageSettingsContent}, tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter, - LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint, + LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, }; use lsp::{DiagnosticSeverity, NumberOrString}; use parking_lot::Mutex; @@ -1559,7 +1559,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { settings.languages.insert( - Arc::from("Rust"), + "Rust".into(), LanguageSettingsContent { enable_language_server: Some(false), ..Default::default() @@ -1578,14 +1578,14 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { settings.languages.insert( - Arc::from("Rust"), + LanguageName::new("Rust"), LanguageSettingsContent { enable_language_server: Some(true), ..Default::default() }, ); settings.languages.insert( - Arc::from("JavaScript"), + LanguageName::new("JavaScript"), LanguageSettingsContent { enable_language_server: Some(false), ..Default::default() @@ -2983,7 +2983,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { buffer.edit([(0..0, "abc")], None, cx); assert!(buffer.is_dirty()); assert!(!buffer.has_conflict()); - assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text"); + assert_eq!(buffer.language().unwrap().name(), "Plain Text".into()); }); project .update(cx, |project, cx| { @@ -3006,7 +3006,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { ); 
assert!(!buffer.is_dirty()); assert!(!buffer.has_conflict()); - assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); let opened_buffer = project @@ -5308,7 +5308,7 @@ fn json_lang() -> Arc { fn js_lang() -> Arc { Arc::new(Language::new( LanguageConfig { - name: Arc::from("JavaScript"), + name: "JavaScript".into(), matcher: LanguageMatcher { path_suffixes: vec!["js".to_string()], ..Default::default() diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 95ae6aee13..314903ec5d 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -161,7 +161,7 @@ impl Inventory { cx: &AppContext, ) -> Vec<(TaskSourceKind, TaskTemplate)> { let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { - name: language.name(), + name: language.name().0, }); let language_tasks = language .and_then(|language| language.context_provider()?.associated_tasks(file, cx)) @@ -207,7 +207,7 @@ impl Inventory { .as_ref() .and_then(|location| location.buffer.read(cx).language_at(location.range.start)); let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { - name: language.name(), + name: language.name().0, }); let file = location .as_ref() diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 3d464904b8..b24d939965 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -281,7 +281,9 @@ message Envelope { FindSearchCandidatesResponse find_search_candidates_response = 244; CloseBuffer close_buffer = 245; - UpdateUserSettings update_user_settings = 246; // current max + UpdateUserSettings update_user_settings = 246; + + CreateLanguageServer create_language_server = 247; // current max } reserved 158 to 161; @@ -2497,3 +2499,36 @@ message UpdateUserSettings { uint64 project_id = 1; string content = 2; } + +message LanguageServerCommand { + string 
path = 1; + repeated string arguments = 2; +} + +message AvailableLanguage { + string name = 7; + string matcher = 8; +} + +message CreateLanguageServer { + uint64 project_id = 1; + uint64 worktree_id = 2; + string name = 3; + + LanguageServerCommand binary = 4; + optional string initialization_options = 5; + optional string code_action_kinds = 6; + + AvailableLanguage language = 7; +} + +// message RestartLanguageServer { + +// } +// message DestroyLanguageServer { + +// } + +// message LspWorkspaceConfiguration { + +// } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index d8ebf66588..44cb91db10 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -366,7 +366,8 @@ messages!( (FindSearchCandidates, Background), (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), - (UpdateUserSettings, Foreground) + (UpdateUserSettings, Foreground), + (CreateLanguageServer, Foreground) ); request_messages!( @@ -490,6 +491,7 @@ request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), + (CreateLanguageServer, Ack) ); entity_messages!( @@ -562,7 +564,8 @@ entity_messages!( UpdateContext, SynchronizeContexts, LspExtSwitchSourceHeader, - UpdateUserSettings + UpdateUserSettings, + CreateLanguageServer ); entity_messages!( diff --git a/crates/quick_action_bar/src/repl_menu.rs b/crates/quick_action_bar/src/repl_menu.rs index fbf2ac17e5..f4e4cd2d1a 100644 --- a/crates/quick_action_bar/src/repl_menu.rs +++ b/crates/quick_action_bar/src/repl_menu.rs @@ -62,7 +62,7 @@ impl QuickActionBar { return self.render_repl_launch_menu(spec, cx); } SessionSupport::RequiresSetup(language) => { - return self.render_repl_setup(&language, cx); + return self.render_repl_setup(&language.0, cx); } SessionSupport::Unsupported => return None, }; diff --git a/crates/recent_projects/src/ssh_connections.rs 
b/crates/recent_projects/src/ssh_connections.rs index 4bee1c5a9f..8da4284b7f 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -291,11 +291,24 @@ impl SshClientDelegate { self.update_status(Some("building remote server binary from source"), cx); log::info!("building remote server binary from source"); - run_cmd(Command::new("cargo").args(["build", "--package", "remote_server"])).await?; - run_cmd(Command::new("strip").args(["target/debug/remote_server"])).await?; - run_cmd(Command::new("gzip").args(["-9", "-f", "target/debug/remote_server"])).await?; + run_cmd(Command::new("cargo").args([ + "build", + "--package", + "remote_server", + "--target-dir", + "target/remote_server", + ])) + .await?; + // run_cmd(Command::new("strip").args(["target/remote_server/debug/remote_server"])) + // .await?; + run_cmd(Command::new("gzip").args([ + "-9", + "-f", + "target/remote_server/debug/remote_server", + ])) + .await?; - let path = std::env::current_dir()?.join("target/debug/remote_server.gz"); + let path = std::env::current_dir()?.join("target/remote_server/debug/remote_server.gz"); return Ok((path, version)); async fn run_cmd(command: &mut Command) -> Result<()> { diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 91f7b330e4..5ff11fe099 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -41,11 +41,11 @@ pub struct SshSocket { pub struct SshSession { next_message_id: AtomicU32, - response_channels: ResponseChannels, + response_channels: ResponseChannels, // Lock outgoing_tx: mpsc::UnboundedSender, spawn_process_tx: mpsc::UnboundedSender, client_socket: Option, - state: Mutex, + state: Mutex, // Lock } struct SshClientState { @@ -392,9 +392,9 @@ impl SshSession { ) -> impl 'static + Future> { envelope.id = self.next_message_id.fetch_add(1, SeqCst); let (tx, rx) = oneshot::channel(); - self.response_channels - .lock() - 
.insert(MessageId(envelope.id), tx); + let mut response_channels_lock = self.response_channels.lock(); + response_channels_lock.insert(MessageId(envelope.id), tx); + drop(response_channels_lock); self.outgoing_tx.unbounded_send(envelope).ok(); async move { Ok(rx.await.context("connection lost")?.0) } } diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 60f29bb573..ca5fe06e13 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -4,14 +4,13 @@ use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, Task}; use language::LanguageRegistry; use project::{ buffer_store::BufferStore, project_settings::SettingsObserver, search::SearchQuery, - worktree_store::WorktreeStore, LspStore, ProjectPath, WorktreeId, WorktreeSettings, + worktree_store::WorktreeStore, LspStore, ProjectPath, WorktreeId, }; use remote::SshSession; use rpc::{ proto::{self, AnyProtoClient, SSH_PEER_ID, SSH_PROJECT_ID}, TypedEnvelope, }; -use settings::Settings as _; use smol::stream::StreamExt; use std::{ path::{Path, PathBuf}, @@ -33,15 +32,17 @@ impl HeadlessProject { pub fn init(cx: &mut AppContext) { settings::init(cx); language::init(cx); - WorktreeSettings::register(cx); + project::Project::init_settings(cx); } pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { // TODO: we should load the env correctly (as we do in login_shell_env_loaded when stdout is not a pty). Can we re-use the ProjectEnvironment for that? 
- let languages = Arc::new(LanguageRegistry::new( - Task::ready(()), - cx.background_executor().clone(), - )); + let mut languages = + LanguageRegistry::new(Task::ready(()), cx.background_executor().clone()); + languages + .set_language_server_download_dir(PathBuf::from("/Users/conrad/what-could-go-wrong")); + + let languages = Arc::new(languages); let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); let buffer_store = cx.new_model(|cx| { @@ -57,18 +58,17 @@ impl HeadlessProject { }); let environment = project::ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { - LspStore::new( + let mut lsp_store = LspStore::new_local( buffer_store.clone(), worktree_store.clone(), - Some(environment), + environment, languages, None, fs.clone(), - Some(session.clone().into()), - None, - Some(0), cx, - ) + ); + lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx); + lsp_store }); let client: AnyProtoClient = session.clone().into(); @@ -88,9 +88,12 @@ impl HeadlessProject { client.add_model_request_handler(BufferStore::handle_update_buffer); client.add_model_message_handler(BufferStore::handle_close_buffer); + client.add_model_request_handler(LspStore::handle_create_language_server); + BufferStore::init(&client); WorktreeStore::init(&client); SettingsObserver::init(&client); + LspStore::init(&client); HeadlessProject { session: client, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 0aea585538..67a2f0b57d 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -6,7 +6,7 @@ use gpui::{Context, Model, TestAppContext}; use http_client::FakeHttpClient; use language::{ language_settings::{all_language_settings, AllLanguageSettings}, - Buffer, LanguageRegistry, + Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, }; use node_runtime::FakeNodeRuntime; use project::{ 
@@ -202,15 +202,29 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo server_cx.read(|cx| { assert_eq!( AllLanguageSettings::get_global(cx) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .language_servers, ["custom-rust-analyzer".into()] ) }); - fs.insert_tree("/code/project1/.zed", json!({ - "settings.json": r#"{"languages":{"Rust":{"language_servers":["override-rust-analyzer"]}}}"# - })).await; + fs.insert_tree( + "/code/project1/.zed", + json!({ + "settings.json": r#" + { + "languages": {"Rust":{"language_servers":["override-rust-analyzer"]}}, + "lsp": { + "override-rust-analyzer": { + "binary": { + "path": "~/.cargo/bin/rust-analyzer" + } + } + } + }"# + }), + ) + .await; let worktree_id = project .update(cx, |project, cx| { @@ -247,7 +261,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo }), cx ) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .language_servers, ["override-rust-analyzer".into()] ) @@ -257,13 +271,107 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo let file = buffer.read(cx).file(); assert_eq!( all_language_settings(file, cx) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .language_servers, ["override-rust-analyzer".into()] ) }); } +#[gpui::test] +async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, headless, fs) = init_test(cx, server_cx).await; + + fs.insert_tree( + "/code/project1/.zed", + json!({ + "settings.json": r#" + { + "languages": {"Rust":{"language_servers":["rust-analyzer"]}}, + "lsp": { + "rust-analyzer": { + "binary": { + "path": "~/.cargo/bin/rust-analyzer" + } + } + } + }"# + }), + ) + .await; + + cx.update_model(&project, |project, _| { + project.languages().register_test_language(LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".into()], + ..Default::default() + }, + ..Default::default() + }); 
+ project.languages().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + ..Default::default() + }, + ) + }); + cx.run_until_parked(); + + let worktree_id = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap() + .0 + .read_with(cx, |worktree, _| worktree.id()); + + // Wait for the settings to synchronize + cx.run_until_parked(); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + cx.read(|cx| { + let file = buffer.read(cx).file(); + assert_eq!( + all_language_settings(file, cx) + .language(Some(&"Rust".into())) + .language_servers, + ["rust-analyzer".into()] + ) + }); + + let buffer_id = cx.read(|cx| { + let buffer = buffer.read(cx); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); + buffer.remote_id() + }); + + server_cx.read(|cx| { + let buffer = headless + .read(cx) + .buffer_store + .read(cx) + .get(buffer_id) + .unwrap(); + + assert_eq!(buffer.read(cx).language().unwrap().name(), "Rust".into()); + }); + + server_cx.read(|cx| { + let lsp_store = headless.read(cx).lsp_store.read(cx); + assert_eq!(lsp_store.as_local().unwrap().language_servers.len(), 1); + }); +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 112cf591e9..868594aaf1 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -6,7 +6,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; use editor::Editor; use gpui::{prelude::*, AppContext, Entity, View, WeakView, WindowContext}; -use language::{BufferSnapshot, Language, Point}; +use language::{BufferSnapshot, Language, LanguageName, Point}; use crate::repl_store::ReplStore; use crate::session::SessionEvent; @@ -99,7 +99,7 @@ pub fn run(editor: WeakView, 
move_down: bool, cx: &mut WindowContext) -> pub enum SessionSupport { ActiveSession(View), Inactive(Box), - RequiresSetup(Arc), + RequiresSetup(LanguageName), Unsupported, } @@ -268,7 +268,7 @@ fn runnable_ranges( range: Range, ) -> (Vec>, Option) { if let Some(language) = buffer.language() { - if language.name().as_ref() == "Markdown" { + if language.name() == "Markdown".into() { return (markdown_code_blocks(buffer, range.clone()), None); } } @@ -305,7 +305,7 @@ fn markdown_code_blocks(buffer: &BufferSnapshot, range: Range) -> Vec) -> bool { - match language.name().as_ref() { + match language.name().0.as_ref() { "TypeScript" | "Python" => true, _ => false, } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 82aad401a4..c6e64deb59 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -564,6 +564,13 @@ impl Worktree { !self.is_local() } + pub fn settings_location(&self, _: &ModelContext) -> SettingsLocation<'static> { + SettingsLocation { + worktree_id: self.id(), + path: Path::new(EMPTY_PATH), + } + } + pub fn snapshot(&self) -> Snapshot { match self { Worktree::Local(worktree) => worktree.snapshot.snapshot.clone(), diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 9ec43d607a..93fee57ecd 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2251,14 +2251,8 @@ mod tests { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "the-new-name.rs"); assert_eq!( - editor - .buffer() - .read(cx) - .language_at(0, cx) - .unwrap() - .name() - .as_ref(), - "Rust" + editor.buffer().read(cx).language_at(0, cx).unwrap().name(), + "Rust".into() ); }); }) @@ -2374,14 +2368,8 @@ mod tests { editor.update(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!( - editor - .buffer() - .read(cx) - .language_at(0, cx) - .unwrap() - .name() - .as_ref(), - "Rust" + editor.buffer().read(cx).language_at(0, cx).unwrap().name(), + "Rust".into() ) }); }) From 
19463b59e2fb9ea2c14bf5f75c16891ca9e204f5 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 16:12:45 -0400 Subject: [PATCH 036/270] Add docs for search settings (#17662) --- docs/src/configuring-zed.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index b935a8b824..82f5a24484 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1134,6 +1134,21 @@ These values take in the same options as the root-level settings with the same n `boolean` values +## Search + +- Description: Search options to enable by default when opening new project and buffer searches. +- Setting: `search` +- Default: + +``` +"search": { + "whole_word": false, + "case_sensitive": false, + "include_ignored": false, + "regex": false +}, +``` + ## Show Call Status Icon - Description: Whether or not to show the call status icon in the status bar. From 48a16f9e70715316c0d189d9053963742dd79140 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 10 Sep 2024 16:41:29 -0400 Subject: [PATCH 037/270] ssh: Lookup language servers in env on SSH host (#17658) Release Notes: - ssh remoting: Lookup language server binaries in environment on SSH host --------- Co-authored-by: Bennet --- crates/assistant/src/assistant_panel.rs | 2 +- crates/language/src/language.rs | 8 + crates/project/src/lsp_store.rs | 159 +++++++++++++++---- crates/proto/proto/zed.proto | 28 +++- crates/proto/src/proto.rs | 14 +- crates/remote_server/src/headless_project.rs | 2 + 6 files changed, 181 insertions(+), 32 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 7eebc97b1d..a0d4cbcf81 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -5349,7 +5349,7 @@ fn make_lsp_adapter_delegate( let http_client = project.client().http_client().clone(); project.lsp_store().update(cx, |lsp_store, cx| { Ok( - 
ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, cx) + ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, None, cx) as Arc, ) }) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 6424da8a54..cd39490d0b 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1651,6 +1651,14 @@ impl LspAdapter for FakeLspAdapter { LanguageServerName(self.name.into()) } + async fn check_if_user_installed( + &self, + _: &dyn LspAdapterDelegate, + _: &AsyncAppContext, + ) -> Option { + Some(self.language_server_binary.clone()) + } + fn get_language_server_command<'a>( self: Arc, _: Arc, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index b218ac5804..3b6b9ebb0a 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -442,6 +442,17 @@ impl LspStore { } } + fn worktree_for_id( + &self, + worktree_id: WorktreeId, + cx: &ModelContext, + ) -> Result> { + self.worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .ok_or_else(|| anyhow!("worktree not found")) + } + fn on_buffer_store_event( &mut self, _: Model, @@ -4287,6 +4298,7 @@ impl LspStore { .ok_or_else(|| anyhow!("missing language"))?; let language_name = LanguageName::from_proto(language.name); let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; + this.update(&mut cx, |this, cx| { this.languages .register_language(language_name.clone(), None, matcher.clone(), { @@ -4334,6 +4346,47 @@ impl LspStore { Ok(proto::Ack {}) } + pub async fn handle_which_command( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let command = PathBuf::from(envelope.payload.command); + let response = this + .update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = ProjectLspAdapterDelegate::for_local(this, 
&worktree, cx); + anyhow::Ok( + cx.spawn(|_, _| async move { delegate.which(command.as_os_str()).await }), + ) + })?? + .await; + + Ok(proto::WhichCommandResponse { + path: response.map(|path| path.to_string_lossy().to_string()), + }) + } + + pub async fn handle_shell_env( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let response = this + .update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok(cx.spawn(|_, _| async move { delegate.shell_env().await })) + })?? + .await; + + Ok(proto::ShellEnvResponse { + env: response.into_iter().collect(), + }) + } + async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -4478,39 +4531,34 @@ impl LspStore { ) { let ssh = self.as_ssh().unwrap(); - let configured_binary = ProjectSettings::get( - Some(worktree.update(cx, |worktree, cx| worktree.settings_location(cx))), - cx, - ) - .lsp - .get(&adapter.name()) - .and_then(|c| c.binary.as_ref()) - .and_then(|config| { - if let Some(path) = &config.path { - Some((path.clone(), config.arguments.clone().unwrap_or_default())) - } else { - None - } - }); let delegate = - ProjectLspAdapterDelegate::for_ssh(self, worktree, cx) as Arc; + ProjectLspAdapterDelegate::for_ssh(self, worktree, ssh.upstream_client.clone(), cx) + as Arc; + + // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. 
+ let lsp_adapter = adapter.adapter.clone(); + let project_id = self.project_id; let worktree_id = worktree.read(cx).id().to_proto(); let upstream_client = ssh.upstream_client.clone(); let name = adapter.name().to_string(); - let Some((path, arguments)) = configured_binary else { - cx.emit(LspStoreEvent::Notification(format!( - "ssh-remoting currently requires manually configuring {} in your settings", - adapter.name() - ))); - return; - }; + let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); return; }; - let task = cx.spawn(|_, _| async move { - let delegate = delegate; + + let task = cx.spawn(|_, cx| async move { + let user_binary_task = lsp_adapter.check_if_user_installed(delegate.as_ref(), &cx); + let binary = match user_binary_task.await { + Some(binary) => binary, + None => { + return Err(anyhow!( + "Downloading language server for ssh host is not supported yet" + )) + } + }; + let name = adapter.name().to_string(); let code_action_kinds = adapter .adapter @@ -4523,12 +4571,22 @@ impl LspStore { .map(|options| serde_json::to_string(&options)) .transpose()?; + let language_server_command = proto::LanguageServerCommand { + path: binary.path.to_string_lossy().to_string(), + arguments: binary + .arguments + .iter() + .map(|args| args.to_string_lossy().to_string()) + .collect(), + env: binary.env.unwrap_or_default().into_iter().collect(), + }; + upstream_client .request(proto::CreateLanguageServer { project_id, worktree_id, name, - binary: Some(proto::LanguageServerCommand { path, arguments }), + binary: Some(language_server_command), initialization_options, code_action_kinds, language: Some(proto::AvailableLanguage { @@ -6890,6 +6948,7 @@ pub struct ProjectLspAdapterDelegate { http_client: Arc, language_registry: Arc, load_shell_env_task: Shared>>>, + upstream_client: Option, } impl ProjectLspAdapterDelegate { @@ -6907,15 +6966,30 @@ impl 
ProjectLspAdapterDelegate { .clone() .unwrap_or_else(|| Arc::new(BlockedHttpClient)); - Self::new(lsp_store, worktree, http_client, Some(local.fs.clone()), cx) + Self::new( + lsp_store, + worktree, + http_client, + Some(local.fs.clone()), + None, + cx, + ) } fn for_ssh( lsp_store: &LspStore, worktree: &Model, + upstream_client: AnyProtoClient, cx: &mut ModelContext, ) -> Arc { - Self::new(lsp_store, worktree, Arc::new(BlockedHttpClient), None, cx) + Self::new( + lsp_store, + worktree, + Arc::new(BlockedHttpClient), + None, + Some(upstream_client), + cx, + ) } pub fn new( @@ -6923,6 +6997,7 @@ impl ProjectLspAdapterDelegate { worktree: &Model, http_client: Arc, fs: Option>, + upstream_client: Option, cx: &mut ModelContext, ) -> Arc { let worktree_id = worktree.read(cx).id(); @@ -6942,6 +7017,7 @@ impl ProjectLspAdapterDelegate { worktree: worktree.read(cx).snapshot(), fs, http_client, + upstream_client, language_registry: lsp_store.languages.clone(), load_shell_env_task, }) @@ -6991,13 +7067,42 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { } async fn shell_env(&self) -> HashMap { + if let Some(upstream_client) = &self.upstream_client { + use rpc::proto::SSH_PROJECT_ID; + + return upstream_client + .request(proto::ShellEnv { + project_id: SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + }) + .await + .map(|response| response.env.into_iter().collect()) + .unwrap_or_default(); + } + let task = self.load_shell_env_task.clone(); task.await.unwrap_or_default() } #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { + if let Some(upstream_client) = &self.upstream_client { + use rpc::proto::SSH_PROJECT_ID; + + return upstream_client + .request(proto::WhichCommand { + project_id: SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + command: command.to_string_lossy().to_string(), + }) + .await + .log_err() + .and_then(|response| response.path) + .map(PathBuf::from); + } + self.fs.as_ref()?; + let 
worktree_abs_path = self.worktree.abs_path(); let shell_path = self.shell_env().await.get("PATH").cloned(); which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index b24d939965..e5d767fffb 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -283,7 +283,13 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CreateLanguageServer create_language_server = 247; // current max + CreateLanguageServer create_language_server = 247; + + WhichCommand which_command = 248; + WhichCommandResponse which_command_response = 249; + + ShellEnv shell_env = 250; + ShellEnvResponse shell_env_response = 251; // current max } reserved 158 to 161; @@ -2503,6 +2509,7 @@ message UpdateUserSettings { message LanguageServerCommand { string path = 1; repeated string arguments = 2; + map env = 3; } message AvailableLanguage { @@ -2522,6 +2529,25 @@ message CreateLanguageServer { AvailableLanguage language = 7; } +message WhichCommand { + uint64 project_id = 1; + uint64 worktree_id = 2; + string command = 3; +} + +message WhichCommandResponse { + optional string path = 1; +} + +message ShellEnv { + uint64 project_id = 1; + uint64 worktree_id = 2; +} + +message ShellEnvResponse { + map env = 1; +} + // message RestartLanguageServer { // } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 44cb91db10..7af66a6a6b 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -367,7 +367,11 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CreateLanguageServer, Foreground) + (CreateLanguageServer, Foreground), + (WhichCommand, Foreground), + (WhichCommandResponse, Foreground), + (ShellEnv, Foreground), + (ShellEnvResponse, Foreground), ); request_messages!( @@ -491,7 +495,9 @@ request_messages!( (SynchronizeContexts, 
SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CreateLanguageServer, Ack) + (CreateLanguageServer, Ack), + (WhichCommand, WhichCommandResponse), + (ShellEnv, ShellEnvResponse) ); entity_messages!( @@ -565,7 +571,9 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CreateLanguageServer + CreateLanguageServer, + WhichCommand, + ShellEnv ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ca5fe06e13..e654e2a190 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -89,6 +89,8 @@ impl HeadlessProject { client.add_model_message_handler(BufferStore::handle_close_buffer); client.add_model_request_handler(LspStore::handle_create_language_server); + client.add_model_request_handler(LspStore::handle_which_command); + client.add_model_request_handler(LspStore::handle_shell_env); BufferStore::init(&client); WorktreeStore::init(&client); From 3cea7ccbff438c331ce6e11cd9de78a7c3b1e1b0 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 17:26:06 -0400 Subject: [PATCH 038/270] pane: Fix pinned tabs being persisted after closing (#17666) Release Notes: - Fixed tabs staying pinned after closing unrelated tabs --- crates/workspace/src/pane.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index e2a77402de..23148d6376 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1364,6 +1364,9 @@ impl Pane { self.activation_history .retain(|entry| entry.entity_id != self.items[item_index].item_id()); + if self.is_tab_pinned(item_index) { + self.pinned_tab_count -= 1; + } if item_index == self.active_item_index { let index_to_activate = self .activation_history From 
3ff81c2e864d3bdbc576a099c055478b5ad7f0e6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 17:37:26 -0400 Subject: [PATCH 039/270] assistant: Simplify image insertion (#17668) This PR simplifies how images are inserted into the context editor. We don't need to hold the `images` in a `HashMap` on the `Context`, as we were only inserting them to pull them out again. Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 19 ++++++++++-- crates/assistant/src/context.rs | 39 ++----------------------- 2 files changed, 19 insertions(+), 39 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index a0d4cbcf81..634f2231cd 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -34,6 +34,7 @@ use editor::{ }; use editor::{display_map::CreaseId, FoldPlaceholder}; use fs::Fs; +use futures::FutureExt; use gpui::{ canvas, div, img, percentage, point, pulsating_between, size, Action, Animation, AnimationExt, AnyElement, AnyView, AppContext, AsyncWindowContext, ClipboardEntry, ClipboardItem, @@ -46,11 +47,11 @@ use indexed_docs::IndexedDocsStore; use language::{ language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, }; -use language_model::LanguageModelToolUse; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, Role, }; +use language_model::{LanguageModelImage, LanguageModelToolUse}; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::lsp_store::ProjectLspAdapterDelegate; @@ -3551,10 +3552,22 @@ impl ContextEditor { self.context.update(cx, |context, cx| { for image in images { + let Some(render_image) = image.to_image_data(cx).log_err() else { + continue; + }; let image_id = image.id(); - context.insert_image(image, cx); + let image_task = LanguageModelImage::from_image(image, cx).shared(); + for 
image_position in image_positions.iter() { - context.insert_image_content(image_id, image_position.text_anchor, cx); + context.insert_content( + Content::Image { + anchor: image_position.text_anchor, + image_id, + image: image_task.clone(), + render_image: render_image.clone(), + }, + cx, + ); } } }); diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index e43ec203e9..7702207d8d 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -20,8 +20,8 @@ use futures::{ FutureExt, StreamExt, }; use gpui::{ - AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext, - RenderImage, SharedString, Subscription, Task, + AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage, + SharedString, Subscription, Task, }; use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset}; @@ -38,7 +38,6 @@ use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::{ cmp::{self, max, Ordering}, - collections::hash_map, fmt::Debug, iter, mem, ops::Range, @@ -49,7 +48,7 @@ use std::{ }; use telemetry_events::AssistantKind; use text::BufferSnapshot; -use util::{post_inc, ResultExt, TryFutureExt}; +use util::{post_inc, TryFutureExt}; use uuid::Uuid; #[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)] @@ -468,7 +467,6 @@ pub struct Context { slash_command_output_sections: Vec>, pending_tool_uses_by_id: HashMap, PendingToolUse>, message_anchors: Vec, - images: HashMap, Shared>>)>, contents: Vec, messages_metadata: HashMap, summary: Option, @@ -564,7 +562,6 @@ impl Context { operations: Vec::new(), message_anchors: Default::default(), contents: Default::default(), - images: Default::default(), messages_metadata: Default::default(), pending_slash_commands: Vec::new(), finished_slash_commands: HashSet::default(), @@ -2374,36 +2371,6 @@ impl Context { } } - pub fn insert_image(&mut self, image: Image, cx: &mut 
ModelContext) -> Option<()> { - if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) { - entry.insert(( - image.to_image_data(cx).log_err()?, - LanguageModelImage::from_image(image, cx).shared(), - )); - } - - Some(()) - } - - pub fn insert_image_content( - &mut self, - image_id: u64, - anchor: language::Anchor, - cx: &mut ModelContext, - ) { - if let Some((render_image, image)) = self.images.get(&image_id) { - self.insert_content( - Content::Image { - anchor, - image_id, - image: image.clone(), - render_image: render_image.clone(), - }, - cx, - ); - } - } - pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext) { let buffer = self.buffer.read(cx); let insertion_ix = match self From b16af138e20d054d7bc4a17be51420dac2bb26f7 Mon Sep 17 00:00:00 2001 From: Fernando Tagawa Date: Tue, 10 Sep 2024 18:41:37 -0300 Subject: [PATCH 040/270] php: Add auto-indent (#17545) Release Notes: - N/A --- extensions/php/languages/php/indents.scm | 1 + 1 file changed, 1 insertion(+) create mode 100644 extensions/php/languages/php/indents.scm diff --git a/extensions/php/languages/php/indents.scm b/extensions/php/languages/php/indents.scm new file mode 100644 index 0000000000..e975469092 --- /dev/null +++ b/extensions/php/languages/php/indents.scm @@ -0,0 +1 @@ +(_ "{" "}" @end) @indent From d6663fcb29fcc477e4e0a05115561690ad1b7854 Mon Sep 17 00:00:00 2001 From: maan2003 <49202620+maan2003@users.noreply.github.com> Date: Wed, 11 Sep 2024 03:39:00 +0530 Subject: [PATCH 041/270] Pass temperature to Anthropic (#17509) Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/assistant/src/inline_assistant.rs | 15 +-------------- crates/language_model/src/request.rs | 2 +- 2 files changed, 2 insertions(+), 15 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 051db0f247..246a408477 100644 --- a/crates/assistant/src/inline_assistant.rs +++ 
b/crates/assistant/src/inline_assistant.rs @@ -2373,19 +2373,6 @@ impl Codegen { None }; - // Higher Temperature increases the randomness of model outputs. - // If Markdown or No Language is Known, increase the randomness for more creative output - // If Code, decrease temperature to get more deterministic outputs - let temperature = if let Some(language) = language_name.clone() { - if language == "Markdown".into() { - 1.0 - } else { - 0.5 - } - } else { - 1.0 - }; - let language_name = language_name.as_ref(); let start = buffer.point_to_buffer_offset(edit_range.start); let end = buffer.point_to_buffer_offset(edit_range.end); @@ -2421,7 +2408,7 @@ impl Codegen { messages, tools: Vec::new(), stop: vec!["|END|>".to_string()], - temperature, + temperature: 1., }) } diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 7549785b3b..4162e9df87 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -399,7 +399,7 @@ impl LanguageModelRequest { tool_choice: None, metadata: None, stop_sequences: Vec::new(), - temperature: None, + temperature: Some(self.temperature), top_k: None, top_p: None, } From d1a47faeb7295a4ec450ad26191e8a65047805c9 Mon Sep 17 00:00:00 2001 From: Bedis Nbiba Date: Tue, 10 Sep 2024 23:26:11 +0100 Subject: [PATCH 042/270] docs: Update Deno docs (#17579) add the minimal configuration needed to make deno lsp work Release Notes: - N/A --- docs/src/languages/deno.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/src/languages/deno.md b/docs/src/languages/deno.md index 3b5e4a1ff9..5b92acfeeb 100644 --- a/docs/src/languages/deno.md +++ b/docs/src/languages/deno.md @@ -10,6 +10,15 @@ To use the Deno Language Server with TypeScript and TSX files, you will likely w ```json { + "lsp": { + "deno": { + "settings": { + "deno": { + "enable": true + } + } + } + }, "languages": { "TypeScript": { "language_servers": [ From f374038da0e9dbe81e29b42df29f6c42ab39c378 Mon Sep 17 
00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 19:19:21 -0400 Subject: [PATCH 043/270] pane: Serialize pinned tab state (#17670) Release Notes: - Tab pin state is now persisted across Zed runs. --- crates/workspace/src/pane.rs | 8 ++++++ crates/workspace/src/persistence.rs | 34 ++++++++++++++++++----- crates/workspace/src/persistence/model.rs | 13 +++++++-- crates/workspace/src/workspace.rs | 5 ++-- 4 files changed, 49 insertions(+), 11 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 23148d6376..cc752f7aec 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -715,6 +715,14 @@ impl Pane { } } + pub(crate) fn set_pinned_count(&mut self, count: usize) { + self.pinned_tab_count = count; + } + + pub(crate) fn pinned_count(&self) -> usize { + self.pinned_tab_count + } + pub fn handle_item_edit(&mut self, item_id: EntityId, cx: &AppContext) { if let Some(preview_item) = self.preview_item() { if preview_item.item_id() == item_id && !preview_item.preserve_preview(cx) { diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d035b35c10..88ede4228d 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -13,7 +13,7 @@ use sqlez::{ }; use ui::px; -use util::ResultExt; +use util::{maybe, ResultExt}; use uuid::Uuid; use crate::WorkspaceId; @@ -352,6 +352,9 @@ define_connection! 
{ sql!( ALTER TABLE workspaces ADD COLUMN window_id INTEGER DEFAULT NULL; ), + sql!( + ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; + ) ]; } @@ -846,6 +849,7 @@ impl WorkspaceDb { SerializedPaneGroup::Pane(SerializedPane { active: true, children: vec![], + pinned_count: 0, }) })) } @@ -861,15 +865,17 @@ impl WorkspaceDb { Option, Option, Option, + Option, Option, ); self.select_bound::(sql!( - SELECT group_id, axis, pane_id, active, flexes + SELECT group_id, axis, pane_id, active, pinned_count, flexes FROM (SELECT group_id, axis, NULL as pane_id, NULL as active, + NULL as pinned_count, position, parent_group_id, workspace_id, @@ -881,6 +887,7 @@ impl WorkspaceDb { NULL, center_panes.pane_id, panes.active as active, + pinned_count, position, parent_group_id, panes.workspace_id as workspace_id, @@ -891,7 +898,8 @@ impl WorkspaceDb { ORDER BY position ))?((group_id, workspace_id))? .into_iter() - .map(|(group_id, axis, pane_id, active, flexes)| { + .map(|(group_id, axis, pane_id, active, pinned_count, flexes)| { + let maybe_pane = maybe!({ Some((pane_id?, active?, pinned_count?)) }); if let Some((group_id, axis)) = group_id.zip(axis) { let flexes = flexes .map(|flexes: String| serde_json::from_str::>(&flexes)) @@ -902,10 +910,11 @@ impl WorkspaceDb { children: self.get_pane_group(workspace_id, Some(group_id))?, flexes, }) - } else if let Some((pane_id, active)) = pane_id.zip(active) { + } else if let Some((pane_id, active, pinned_count)) = maybe_pane { Ok(SerializedPaneGroup::Pane(SerializedPane::new( self.get_items(pane_id)?, active, + pinned_count, ))) } else { bail!("Pane Group Child was neither a pane group or a pane"); @@ -977,10 +986,10 @@ impl WorkspaceDb { parent: Option<(GroupId, usize)>, ) -> Result { let pane_id = conn.select_row_bound::<_, i64>(sql!( - INSERT INTO panes(workspace_id, active) - VALUES (?, ?) + INSERT INTO panes(workspace_id, active, pinned_count) + VALUES (?, ?, ?) RETURNING pane_id - ))?((workspace_id, pane.active))? 
+ ))?((workspace_id, pane.active, pane.pinned_count))? .ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?; let (parent_id, order) = parent.unzip(); @@ -1219,6 +1228,7 @@ mod tests { SerializedItem::new("Terminal", 6, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1226,6 +1236,7 @@ mod tests { SerializedItem::new("Terminal", 8, false, false), ], false, + 0, )), ], ), @@ -1235,6 +1246,7 @@ mod tests { SerializedItem::new("Terminal", 10, true, false), ], false, + 0, )), ], ); @@ -1523,6 +1535,7 @@ mod tests { SerializedItem::new("Terminal", 2, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1530,6 +1543,7 @@ mod tests { SerializedItem::new("Terminal", 3, true, false), ], true, + 0, )), ], ), @@ -1539,6 +1553,7 @@ mod tests { SerializedItem::new("Terminal", 6, false, false), ], false, + 0, )), ], ); @@ -1570,6 +1585,7 @@ mod tests { SerializedItem::new("Terminal", 2, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1577,6 +1593,7 @@ mod tests { SerializedItem::new("Terminal", 3, true, false), ], true, + 0, )), ], ), @@ -1586,6 +1603,7 @@ mod tests { SerializedItem::new("Terminal", 6, true, false), ], false, + 0, )), ], ); @@ -1605,6 +1623,7 @@ mod tests { SerializedItem::new("Terminal", 2, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1612,6 +1631,7 @@ mod tests { SerializedItem::new("Terminal", 3, false, false), ], true, + 0, )), ], ); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 8b6d66f3cb..d6f8001f25 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -297,6 +297,7 @@ impl Default for SerializedPaneGroup { Self::Pane(SerializedPane { children: vec![SerializedItem::default()], active: false, + pinned_count: 0, }) } } @@ -379,11 +380,16 @@ impl SerializedPaneGroup { pub struct 
SerializedPane { pub(crate) active: bool, pub(crate) children: Vec, + pub(crate) pinned_count: usize, } impl SerializedPane { - pub fn new(children: Vec, active: bool) -> Self { - SerializedPane { children, active } + pub fn new(children: Vec, active: bool, pinned_count: usize) -> Self { + SerializedPane { + children, + active, + pinned_count, + } } pub async fn deserialize_to( @@ -442,6 +448,9 @@ impl SerializedPane { } })?; } + pane.update(cx, |pane, _| { + pane.set_pinned_count(self.pinned_count); + })?; anyhow::Ok(items) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 219f75624c..7371e56cee 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4025,7 +4025,7 @@ impl Workspace { }; fn serialize_pane_handle(pane_handle: &View, cx: &WindowContext) -> SerializedPane { - let (items, active) = { + let (items, active, pinned_count) = { let pane = pane_handle.read(cx); let active_item_id = pane.active_item().map(|item| item.item_id()); ( @@ -4042,10 +4042,11 @@ impl Workspace { }) .collect::>(), pane.has_focus(cx), + pane.pinned_count(), ) }; - SerializedPane::new(items, active) + SerializedPane::new(items, active, pinned_count) } fn build_serialized_pane_group( From 8f284456126d53b69d639b159a33effa57bfa8cd Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 22:57:44 -0400 Subject: [PATCH 044/270] php: Bump to v0.2.0 (#17674) This PR bumps the PHP extension to v0.2.0. 
Changes: - https://github.com/zed-industries/zed/pull/16720 - https://github.com/zed-industries/zed/pull/16955 - https://github.com/zed-industries/zed/pull/17243 - https://github.com/zed-industries/zed/pull/17545 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/php/Cargo.toml | 2 +- extensions/php/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f9d3240d68..07b97e030d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14441,7 +14441,7 @@ dependencies = [ [[package]] name = "zed_php" -version = "0.1.3" +version = "0.2.0" dependencies = [ "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/extensions/php/Cargo.toml b/extensions/php/Cargo.toml index 91f309d9de..d4a8f36cbd 100644 --- a/extensions/php/Cargo.toml +++ b/extensions/php/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_php" -version = "0.1.3" +version = "0.2.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/php/extension.toml b/extensions/php/extension.toml index cb30eb5f0c..92bd7ed85b 100644 --- a/extensions/php/extension.toml +++ b/extensions/php/extension.toml @@ -1,7 +1,7 @@ id = "php" name = "PHP" description = "PHP support." 
-version = "0.1.3" +version = "0.2.0" schema_version = 1 authors = ["Piotr Osiewicz "] repository = "https://github.com/zed-industries/zed" From ccf6f27b8f1bdfb803b9cc0da0b0cf5c9e136dd9 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 22:59:10 -0400 Subject: [PATCH 045/270] settings: Remove auxiliary Content types where possible (#16744) Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 23 +- crates/call/src/call_settings.rs | 22 +- crates/client/src/client.rs | 50 ++- crates/collab/src/tests/editor_tests.rs | 8 +- crates/collab/src/tests/following_tests.rs | 2 +- crates/collab_ui/src/chat_panel.rs | 2 +- .../src/chat_panel/message_editor.rs | 8 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/collab_ui/src/notification_panel.rs | 2 +- crates/collab_ui/src/panel_settings.rs | 102 +++--- .../src/project_diagnostics_settings.rs | 20 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/editor_settings.rs | 318 +++++++----------- crates/editor/src/editor_settings_controls.rs | 22 +- crates/editor/src/editor_tests.rs | 18 +- crates/editor/src/element.rs | 17 +- crates/extension/src/extension_settings.rs | 13 +- crates/extensions_ui/src/extensions_ui.rs | 2 +- crates/go_to_line/src/cursor_position.rs | 16 +- crates/gpui/src/geometry.rs | 2 + crates/language/src/language_settings.rs | 8 +- crates/languages/src/json.rs | 29 +- crates/outline_panel/src/outline_panel.rs | 16 +- .../src/outline_panel_settings.rs | 72 ++-- crates/performance/src/performance.rs | 184 ++++++++++ crates/project/src/project_settings.rs | 41 +-- crates/project_panel/src/project_panel.rs | 28 +- .../src/project_panel_settings.rs | 92 ++--- crates/recent_projects/src/dev_servers.rs | 3 +- crates/recent_projects/src/ssh_connections.rs | 25 +- crates/repl/src/jupyter_settings.rs | 28 +- crates/tasks_ui/src/settings.rs | 18 +- crates/vim/src/digraph.rs | 2 +- crates/vim/src/normal.rs | 6 +- 
crates/vim/src/normal/paste.rs | 12 +- crates/vim/src/normal/scroll.rs | 2 +- crates/vim/src/normal/search.rs | 4 +- crates/vim/src/test.rs | 2 +- crates/vim/src/test/vim_test_context.rs | 6 +- crates/vim/src/vim.rs | 32 +- crates/welcome/src/base_keymap_picker.rs | 2 +- crates/welcome/src/base_keymap_setting.rs | 6 +- crates/welcome/src/welcome.rs | 2 +- crates/workspace/src/item.rs | 70 ++-- crates/workspace/src/workspace.rs | 8 +- crates/workspace/src/workspace_settings.rs | 132 ++++---- crates/worktree/src/worktree_settings.rs | 43 ++- crates/worktree/src/worktree_tests.rs | 11 +- crates/zed/src/zed.rs | 2 +- 49 files changed, 843 insertions(+), 696 deletions(-) create mode 100644 crates/performance/src/performance.rs diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 8063ff4c40..499df7fc29 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -116,27 +116,30 @@ impl Drop for MacOsUnmounter { } } +/// Whether or not to automatically check for updates. +#[derive(Clone, Copy, JsonSchema, Deserialize, Serialize)] +#[serde(default)] +#[serde(transparent)] struct AutoUpdateSetting(bool); -/// Whether or not to automatically check for updates. 
-/// -/// Default: true -#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] -#[serde(transparent)] -struct AutoUpdateSettingContent(bool); +impl Default for AutoUpdateSetting { + fn default() -> Self { + Self(true) + } +} impl Settings for AutoUpdateSetting { const KEY: Option<&'static str> = Some("auto_update"); - type FileContent = Option; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let auto_update = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied().flatten()) - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); + .find_map(|value| value.copied()) + .unwrap_or(*sources.default); - Ok(Self(auto_update.0)) + Ok(auto_update) } } diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs index 446178ffb9..e10b711734 100644 --- a/crates/call/src/call_settings.rs +++ b/crates/call/src/call_settings.rs @@ -4,30 +4,20 @@ use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize, Debug)] -pub struct CallSettings { - pub mute_on_join: bool, - pub share_on_join: bool, -} - /// Configuration of voice calls in Zed. -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct CallSettingsContent { +#[derive(Clone, Debug, Default, Deserialize, Serialize, JsonSchema)] +#[serde(default)] +pub struct CallSettings { /// Whether the microphone should be muted when joining a channel or a call. - /// - /// Default: false - pub mute_on_join: Option, - + pub mute_on_join: bool, /// Whether your current project should be shared when joining an empty channel. 
- /// - /// Default: true - pub share_on_join: Option, + pub share_on_join: bool, } impl Settings for CallSettings { const KEY: Option<&'static str> = Some("calls"); - type FileContent = CallSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 8787e2ed96..83eef45be8 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -99,20 +99,26 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20); actions!(client, [SignIn, SignOut, Reconnect]); -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct ClientSettingsContent { - server_url: Option, +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] +pub struct ClientSettings { + /// The server to connect to. If the environment variable + /// ZED_SERVER_URL is set, it will override this setting. + pub server_url: String, } -#[derive(Deserialize)] -pub struct ClientSettings { - pub server_url: String, +impl Default for ClientSettings { + fn default() -> Self { + Self { + server_url: "https://zed.dev".to_owned(), + } + } } impl Settings for ClientSettings { const KEY: Option<&'static str> = None; - type FileContent = ClientSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let mut result = sources.json_merge::()?; @@ -124,19 +130,37 @@ impl Settings for ClientSettings { } #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] -pub struct ProxySettingsContent { - proxy: Option, +#[serde(default)] +pub struct ProxySettings { + /// Set a proxy to use. The proxy protocol is specified by the URI scheme. + /// + /// Supported URI scheme: `http`, `https`, `socks4`, `socks4a`, `socks5`, + /// `socks5h`. `http` will be used when no scheme is specified. 
+ /// + /// By default no proxy will be used, or Zed will try get proxy settings from + /// environment variables. + /// + /// Examples: + /// - "proxy": "socks5://localhost:10808" + /// - "proxy": "http://127.0.0.1:10809" + #[schemars(example = "Self::example_1")] + #[schemars(example = "Self::example_2")] + pub proxy: Option, } -#[derive(Deserialize, Default)] -pub struct ProxySettings { - pub proxy: Option, +impl ProxySettings { + fn example_1() -> String { + "http://127.0.0.1:10809".to_owned() + } + fn example_2() -> String { + "socks5://localhost:10808".to_owned() + } } impl Settings for ProxySettings { const KEY: Option<&'static str> = None; - type FileContent = ProxySettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { Ok(Self { diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 3f205b7f93..a214291752 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -2261,11 +2261,11 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA cx_a.update(editor::init); cx_b.update(editor::init); // Turn inline-blame-off by default so no state is transferred without us explicitly doing so - let inline_blame_off_settings = Some(InlineBlameSettings { + let inline_blame_off_settings = InlineBlameSettings { enabled: false, - delay_ms: None, - min_column: None, - }); + delay_ms: 0, + min_column: 0, + }; cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings::(cx, |settings| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e66b66a1b4..1bc3cd6917 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1649,7 +1649,7 @@ async fn test_following_into_excluded_file( cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |settings| 
{ - settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]); + settings.file_scan_exclusions = vec!["**/.git".to_string()]; }); }); }); diff --git a/crates/collab_ui/src/chat_panel.rs b/crates/collab_ui/src/chat_panel.rs index 5a79f364ff..f6e6c7321f 100644 --- a/crates/collab_ui/src/chat_panel.rs +++ b/crates/collab_ui/src/chat_panel.rs @@ -1108,7 +1108,7 @@ impl Panel for ChatPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = Some(position), + move |settings, _| settings.dock = position, ); } diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 028e148cba..0b1a2dbe69 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -113,9 +113,7 @@ impl MessageEditor { editor.set_show_indent_guides(false, cx); editor.set_completion_provider(Box::new(MessageEditorCompletionProvider(this))); editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx) - .auto_replace_emoji_shortcode - .unwrap_or_default(), + MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, ); }); @@ -130,9 +128,7 @@ impl MessageEditor { cx.observe_global::(|view, cx| { view.editor.update(cx, |editor, cx| { editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx) - .auto_replace_emoji_shortcode - .unwrap_or_default(), + MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, ) }) }) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 7270110181..3e6483c42d 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2813,7 +2813,7 @@ impl Panel for CollabPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = Some(position), + move |settings, _| settings.dock = position, ); } diff --git 
a/crates/collab_ui/src/notification_panel.rs b/crates/collab_ui/src/notification_panel.rs index 33ca5a2952..326e1f0f5b 100644 --- a/crates/collab_ui/src/notification_panel.rs +++ b/crates/collab_ui/src/notification_panel.rs @@ -672,7 +672,7 @@ impl Panel for NotificationPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = Some(position), + move |settings, _| settings.dock = position, ); } diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index f9851d5797..a594f023bb 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -2,58 +2,84 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; +use ui::px; use workspace::dock::DockPosition; -#[derive(Deserialize, Debug)] +#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] +#[serde(default)] pub struct CollaborationPanelSettings { - pub button: bool, - pub dock: DockPosition, - pub default_width: Pixels, -} - -#[derive(Deserialize, Debug)] -pub struct ChatPanelSettings { - pub button: bool, - pub dock: DockPosition, - pub default_width: Pixels, -} - -#[derive(Deserialize, Debug)] -pub struct NotificationPanelSettings { - pub button: bool, - pub dock: DockPosition, - pub default_width: Pixels, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct PanelSettingsContent { /// Whether to show the panel button in the status bar. - /// - /// Default: true - pub button: Option, + pub button: bool, /// Where to dock the panel. - /// - /// Default: left - pub dock: Option, + pub dock: DockPosition, /// Default width of the panel in pixels. 
- /// - /// Default: 240 - pub default_width: Option, + pub default_width: Pixels, +} + +impl Default for CollaborationPanelSettings { + fn default() -> Self { + Self { + button: true, + dock: DockPosition::Left, + default_width: px(240.), + } + } +} + +#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] +#[serde(default)] +pub struct ChatPanelSettings { + /// Whether to show the panel button in the status bar. + pub button: bool, + /// Where to dock the panel. + pub dock: DockPosition, + /// Default width of the panel in pixels. + pub default_width: Pixels, +} + +impl Default for ChatPanelSettings { + fn default() -> Self { + Self { + button: true, + dock: DockPosition::Right, + default_width: px(240.), + } + } +} + +#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] +#[serde(default)] +pub struct NotificationPanelSettings { + /// Whether to show the panel button in the status bar. + pub button: bool, + /// Where to dock the panel. + pub dock: DockPosition, + /// Default width of the panel in pixels. + pub default_width: Pixels, +} + +impl Default for NotificationPanelSettings { + fn default() -> Self { + Self { + button: true, + dock: DockPosition::Right, + default_width: px(380.), + } + } } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] pub struct MessageEditorSettings { /// Whether to automatically replace emoji shortcodes with emoji characters. /// For example: typing `:wave:` gets replaced with `👋`. 
- /// - /// Default: false - pub auto_replace_emoji_shortcode: Option, + pub auto_replace_emoji_shortcode: bool, } impl Settings for CollaborationPanelSettings { const KEY: Option<&'static str> = Some("collaboration_panel"); - type FileContent = PanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -66,7 +92,7 @@ impl Settings for CollaborationPanelSettings { impl Settings for ChatPanelSettings { const KEY: Option<&'static str> = Some("chat_panel"); - type FileContent = PanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -79,7 +105,7 @@ impl Settings for ChatPanelSettings { impl Settings for NotificationPanelSettings { const KEY: Option<&'static str> = Some("notification_panel"); - type FileContent = PanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -92,7 +118,7 @@ impl Settings for NotificationPanelSettings { impl Settings for MessageEditorSettings { const KEY: Option<&'static str> = Some("message_editor"); - type FileContent = MessageEditorSettings; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/diagnostics/src/project_diagnostics_settings.rs b/crates/diagnostics/src/project_diagnostics_settings.rs index 55879d0c42..34739bcd17 100644 --- a/crates/diagnostics/src/project_diagnostics_settings.rs +++ b/crates/diagnostics/src/project_diagnostics_settings.rs @@ -4,23 +4,25 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] +/// Diagnostics configuration. pub struct ProjectDiagnosticsSettings { + /// Whether to show warnings or not by default. pub include_warnings: bool, } -/// Diagnostics configuration. 
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct ProjectDiagnosticsSettingsContent { - /// Whether to show warnings or not by default. - /// - /// Default: true - include_warnings: Option, +impl Default for ProjectDiagnosticsSettings { + fn default() -> Self { + Self { + include_warnings: true, + } + } } impl Settings for ProjectDiagnosticsSettings { const KEY: Option<&'static str> = Some("diagnostics"); - type FileContent = ProjectDiagnosticsSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3466888c94..f750abd95c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10639,7 +10639,7 @@ impl Editor { let fs = workspace.read(cx).app_state().fs.clone(); let current_show = TabBarSettings::get_global(cx).show; update_settings_file::(fs, cx, move |setting, _| { - setting.show = Some(!current_show); + setting.show = !current_show; }); } @@ -12562,7 +12562,7 @@ impl EditorSnapshot { let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - Some(GitGutterSetting::TrackedFiles) + GitGutterSetting::TrackedFiles ) }); let gutter_settings = EditorSettings::get_global(cx).gutter; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 2614e4ea30..0532fd7bdf 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -3,38 +3,105 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize, Clone)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct EditorSettings { + /// Whether the cursor blinks in the editor. pub cursor_blink: bool, + /// How to highlight the current line in the editor. 
pub current_line_highlight: CurrentLineHighlight, + /// Whether to show the informational hover box when moving the mouse + /// over symbols in the editor. pub hover_popover_enabled: bool, + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. pub show_completions_on_input: bool, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. pub show_completion_documentation: bool, + /// The debounce delay before re-querying the language server for completion + /// documentation when not included in original completion list. pub completion_documentation_secondary_query_debounce: u64, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, + /// Toolbar related settings pub toolbar: Toolbar, + /// Scrollbar related settings pub scrollbar: Scrollbar, + /// Gutter related settings pub gutter: Gutter, + /// Whether the editor will scroll beyond the last line. pub scroll_beyond_last_line: ScrollBeyondLastLine, + /// The number of lines to keep above/below the cursor when auto-scrolling. pub vertical_scroll_margin: f32, + /// Scroll sensitivity multiplier. This multiplier is applied + /// to both the horizontal and vertical delta values while scrolling. pub scroll_sensitivity: f32, + /// Whether the line numbers on editors gutter are relative or not. pub relative_line_numbers: bool, + /// When to populate a new search's query based on the text under the cursor. pub seed_search_query_from_cursor: SeedQuerySetting, pub use_smartcase_search: bool, + /// The key to use for adding multiple cursors pub multi_cursor_modifier: MultiCursorModifier, + /// Hide the values of variables in `private` files, as defined by the + /// private_files setting. 
This only changes the visual representation, + /// the values are still present in the file and can be selected / copied / pasted pub redact_private_values: bool, + + /// How many lines to expand the multibuffer excerpts by default pub expand_excerpt_lines: u32, pub middle_click_paste: bool, + /// What to do when multibuffer is double clicked in some of its excerpts + /// (parts of singleton buffers). #[serde(default)] pub double_click_in_multibuffer: DoubleClickInMultibuffer, + /// Whether the editor search results will loop pub search_wrap: bool, #[serde(default)] pub search: SearchSettings, + /// Show method signatures in the editor, when inside parentheses. pub auto_signature_help: bool, + /// Whether to show the signature help after completion or a bracket pair inserted. + /// If `auto_signature_help` is enabled, this setting will be treated as enabled also. pub show_signature_help_after_edits: bool, + /// Jupyter REPL settings. pub jupyter: Jupyter, } +impl Default for EditorSettings { + fn default() -> Self { + Self { + cursor_blink: true, + current_line_highlight: CurrentLineHighlight::All, + hover_popover_enabled: true, + show_completions_on_input: true, + show_completion_documentation: true, + completion_documentation_secondary_query_debounce: 300, + use_on_type_format: true, + toolbar: Default::default(), + scrollbar: Default::default(), + gutter: Default::default(), + scroll_beyond_last_line: ScrollBeyondLastLine::OnePage, + vertical_scroll_margin: 3., + scroll_sensitivity: 1.0, + relative_line_numbers: false, + seed_search_query_from_cursor: SeedQuerySetting::Always, + multi_cursor_modifier: MultiCursorModifier::Alt, + redact_private_values: false, + expand_excerpt_lines: 3, + double_click_in_multibuffer: DoubleClickInMultibuffer::Select, + search_wrap: true, + auto_signature_help: false, + show_signature_help_after_edits: true, + jupyter: Default::default(), + use_smartcase_search: false, + middle_click_paste: true, + search: 
SearchSettings::default(), + } + } +} #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CurrentLineHighlight { @@ -72,48 +139,93 @@ pub enum DoubleClickInMultibuffer { Open, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] pub struct Jupyter { /// Whether the Jupyter feature is enabled. - /// - /// Default: true pub enabled: bool, } -#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub struct JupyterContent { - /// Whether the Jupyter feature is enabled. - /// - /// Default: true - pub enabled: Option, +impl Default for Jupyter { + fn default() -> Self { + Self { enabled: true } + } } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(default)] pub struct Toolbar { + /// Whether to display breadcrumbs in the editor toolbar. pub breadcrumbs: bool, + /// Whether to display quick action buttons in the editor toolbar. pub quick_actions: bool, + /// Whether to show the selections menu in the editor toolbar pub selections_menu: bool, } +impl Default for Toolbar { + fn default() -> Self { + Self { + breadcrumbs: true, + quick_actions: true, + selections_menu: true, + } + } +} + #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct Scrollbar { + /// When to show the scrollbar in the editor. pub show: ShowScrollbar, + /// Whether to show git diff indicators in the scrollbar. pub git_diff: bool, + /// Whether to show buffer search result indicators in the scrollbar. pub selected_symbol: bool, + /// Whether to show selected symbol occurrences in the scrollbar. pub search_results: bool, + /// Whether to show diagnostic indicators in the scrollbar. pub diagnostics: bool, + /// Whether to show cursor positions in the scrollbar. 
pub cursors: bool, } +impl Default for Scrollbar { + fn default() -> Self { + Self { + show: ShowScrollbar::Auto, + git_diff: true, + selected_symbol: true, + search_results: true, + diagnostics: true, + cursors: true, + } + } +} + +/// Gutter-related settings. #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(default)] pub struct Gutter { + /// Whether to show line numbers in the gutter. pub line_numbers: bool, + /// Whether to show code action buttons in the gutter. pub code_actions: bool, + /// Whether to show runnable buttons in the gutter. pub runnables: bool, + /// Whether to show fold buttons in the gutter. pub folds: bool, } +impl Default for Gutter { + fn default() -> Self { + Self { + line_numbers: true, + code_actions: true, + runnables: true, + folds: true, + } + } +} + /// When to show the scrollbar in the editor. /// /// Default: auto @@ -171,188 +283,6 @@ pub struct SearchSettings { pub regex: bool, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct EditorSettingsContent { - /// Whether the cursor blinks in the editor. - /// - /// Default: true - pub cursor_blink: Option, - /// How to highlight the current line in the editor. - /// - /// Default: all - pub current_line_highlight: Option, - /// Whether to show the informational hover box when moving the mouse - /// over symbols in the editor. - /// - /// Default: true - pub hover_popover_enabled: Option, - - /// Whether to pop the completions menu while typing in an editor without - /// explicitly requesting it. - /// - /// Default: true - pub show_completions_on_input: Option, - /// Whether to display inline and alongside documentation for items in the - /// completions menu. - /// - /// Default: true - pub show_completion_documentation: Option, - /// The debounce delay before re-querying the language server for completion - /// documentation when not included in original completion list. 
- /// - /// Default: 300 ms - pub completion_documentation_secondary_query_debounce: Option, - /// Whether to use additional LSP queries to format (and amend) the code after - /// every "trigger" symbol input, defined by LSP server capabilities. - /// - /// Default: true - pub use_on_type_format: Option, - /// Toolbar related settings - pub toolbar: Option, - /// Scrollbar related settings - pub scrollbar: Option, - /// Gutter related settings - pub gutter: Option, - /// Whether the editor will scroll beyond the last line. - /// - /// Default: one_page - pub scroll_beyond_last_line: Option, - /// The number of lines to keep above/below the cursor when auto-scrolling. - /// - /// Default: 3. - pub vertical_scroll_margin: Option, - /// Scroll sensitivity multiplier. This multiplier is applied - /// to both the horizontal and vertical delta values while scrolling. - /// - /// Default: 1.0 - pub scroll_sensitivity: Option, - /// Whether the line numbers on editors gutter are relative or not. - /// - /// Default: false - pub relative_line_numbers: Option, - /// When to populate a new search's query based on the text under the cursor. - /// - /// Default: always - pub seed_search_query_from_cursor: Option, - pub use_smartcase_search: Option, - /// The key to use for adding multiple cursors - /// - /// Default: alt - pub multi_cursor_modifier: Option, - /// Hide the values of variables in `private` files, as defined by the - /// private_files setting. 
This only changes the visual representation, - /// the values are still present in the file and can be selected / copied / pasted - /// - /// Default: false - pub redact_private_values: Option, - - /// How many lines to expand the multibuffer excerpts by default - /// - /// Default: 3 - pub expand_excerpt_lines: Option, - - /// Whether to enable middle-click paste on Linux - /// - /// Default: true - pub middle_click_paste: Option, - - /// What to do when multibuffer is double clicked in some of its excerpts - /// (parts of singleton buffers). - /// - /// Default: select - pub double_click_in_multibuffer: Option, - /// Whether the editor search results will loop - /// - /// Default: true - pub search_wrap: Option, - - /// Defaults to use when opening a new buffer and project search items. - /// - /// Default: nothing is enabled - pub search: Option, - - /// Whether to automatically show a signature help pop-up or not. - /// - /// Default: false - pub auto_signature_help: Option, - - /// Whether to show the signature help pop-up after completions or bracket pairs inserted. - /// - /// Default: true - pub show_signature_help_after_edits: Option, - - /// Jupyter REPL settings. - pub jupyter: Option, -} - -// Toolbar related settings -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -pub struct ToolbarContent { - /// Whether to display breadcrumbs in the editor toolbar. - /// - /// Default: true - pub breadcrumbs: Option, - /// Whether to display quick action buttons in the editor toolbar. - /// - /// Default: true - pub quick_actions: Option, - - /// Whether to show the selections menu in the editor toolbar - /// - /// Default: true - pub selections_menu: Option, -} - -/// Scrollbar related settings -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] -pub struct ScrollbarContent { - /// When to show the scrollbar in the editor. 
- /// - /// Default: auto - pub show: Option, - /// Whether to show git diff indicators in the scrollbar. - /// - /// Default: true - pub git_diff: Option, - /// Whether to show buffer search result indicators in the scrollbar. - /// - /// Default: true - pub search_results: Option, - /// Whether to show selected symbol occurrences in the scrollbar. - /// - /// Default: true - pub selected_symbol: Option, - /// Whether to show diagnostic indicators in the scrollbar. - /// - /// Default: true - pub diagnostics: Option, - /// Whether to show cursor positions in the scrollbar. - /// - /// Default: true - pub cursors: Option, -} - -/// Gutter related settings -#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -pub struct GutterContent { - /// Whether to show line numbers in the gutter. - /// - /// Default: true - pub line_numbers: Option, - /// Whether to show code action buttons in the gutter. - /// - /// Default: true - pub code_actions: Option, - /// Whether to show runnable buttons in the gutter. - /// - /// Default: true - pub runnables: Option, - /// Whether to show fold buttons in the gutter. 
- /// - /// Default: true - pub folds: Option, -} - impl EditorSettings { pub fn jupyter_enabled(cx: &AppContext) -> bool { EditorSettings::get_global(cx).jupyter.enabled @@ -362,7 +292,7 @@ impl EditorSettings { impl Settings for EditorSettings { const KEY: Option<&'static str> = None; - type FileContent = EditorSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/editor/src/editor_settings_controls.rs b/crates/editor/src/editor_settings_controls.rs index bbe1b00324..36d471dfa2 100644 --- a/crates/editor/src/editor_settings_controls.rs +++ b/crates/editor/src/editor_settings_controls.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use gpui::{AppContext, FontFeatures, FontWeight}; -use project::project_settings::{InlineBlameSettings, ProjectSettings}; +use project::project_settings::ProjectSettings; use settings::{EditableSettingControl, Settings}; use theme::{FontFamilyCache, ThemeSettings}; use ui::{ @@ -296,14 +296,7 @@ impl EditableSettingControl for InlineGitBlameControl { value: Self::Value, _cx: &AppContext, ) { - if let Some(inline_blame) = settings.git.inline_blame.as_mut() { - inline_blame.enabled = value; - } else { - settings.git.inline_blame = Some(InlineBlameSettings { - enabled: false, - ..Default::default() - }); - } + settings.git.inline_blame.enabled = value; } } @@ -349,14 +342,7 @@ impl EditableSettingControl for LineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - if let Some(gutter) = settings.gutter.as_mut() { - gutter.line_numbers = Some(value); - } else { - settings.gutter = Some(crate::editor_settings::GutterContent { - line_numbers: Some(value), - ..Default::default() - }); - } + settings.gutter.line_numbers = value; } } @@ -402,7 +388,7 @@ impl EditableSettingControl for RelativeLineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - settings.relative_line_numbers = Some(value); + settings.relative_line_numbers = value; } } diff --git a/crates/editor/src/editor_tests.rs 
b/crates/editor/src/editor_tests.rs index 0b1e0385de..7d42dc7a85 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6964,7 +6964,7 @@ async fn test_handle_input_for_show_signature_help_auto_signature_help_true( cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(true); + settings.auto_signature_help = true; }); }); }); @@ -7105,8 +7105,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(false); - settings.show_signature_help_after_edits = Some(false); + settings.auto_signature_help = false; + settings.show_signature_help_after_edits = false; }); }); }); @@ -7232,8 +7232,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(false); - settings.show_signature_help_after_edits = Some(true); + settings.auto_signature_help = false; + settings.show_signature_help_after_edits = true; }); }); }); @@ -7274,8 +7274,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(true); - settings.show_signature_help_after_edits = Some(false); + settings.auto_signature_help = true; + settings.show_signature_help_after_edits = false; }); }); }); @@ -7318,7 +7318,7 @@ async fn test_signature_help(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(true); + settings.auto_signature_help = true; }); }); }); @@ -7759,7 +7759,7 @@ async fn 
test_completion(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.show_completions_on_input = Some(false); + settings.show_completions_on_input = false; }); }) }); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index d4f5c565c2..1c0a325b76 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1283,10 +1283,7 @@ impl EditorElement { .row, ); - let git_gutter_setting = ProjectSettings::get_global(cx) - .git - .git_gutter - .unwrap_or_default(); + let git_gutter_setting = ProjectSettings::get_global(cx).git.git_gutter; let display_hunks = buffer_snapshot .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) @@ -1366,12 +1363,10 @@ impl EditorElement { }; let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS; - let min_column_in_pixels = ProjectSettings::get_global(cx) - .git - .inline_blame - .and_then(|settings| settings.min_column) - .map(|col| self.column_pixels(col as usize, cx)) - .unwrap_or(px(0.)); + let min_column_in_pixels = self.column_pixels( + ProjectSettings::get_global(cx).git.inline_blame.min_column as usize, + cx, + ); let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels; cmp::max(padded_line_end, min_start) @@ -3331,7 +3326,7 @@ impl EditorElement { .unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - Some(GitGutterSetting::TrackedFiles) + GitGutterSetting::TrackedFiles ) }); if show_git_gutter { diff --git a/crates/extension/src/extension_settings.rs b/crates/extension/src/extension_settings.rs index a2ab7ac9cc..715dc3ca82 100644 --- a/crates/extension/src/extension_settings.rs +++ b/crates/extension/src/extension_settings.rs @@ -6,18 +6,25 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use std::sync::Arc; -#[derive(Deserialize, 
Serialize, Debug, Default, Clone, JsonSchema)] +#[derive(Deserialize, Serialize, Debug, Clone, JsonSchema)] +#[serde(default)] pub struct ExtensionSettings { /// The extensions that should be automatically installed by Zed. /// /// This is used to make functionality provided by extensions (e.g., language support) /// available out-of-the-box. - #[serde(default)] pub auto_install_extensions: HashMap, bool>, - #[serde(default)] pub auto_update_extensions: HashMap, bool>, } +impl Default for ExtensionSettings { + fn default() -> Self { + Self { + auto_install_extensions: HashMap::from_iter([("html".into(), true)]), + auto_update_extensions: Default::default(), + } + } +} impl ExtensionSettings { /// Returns whether the given extension should be auto-installed. pub fn should_auto_install(&self, extension_id: &str) -> bool { diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index f246e3cf4f..b2d6d7f283 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -1000,7 +1000,7 @@ impl ExtensionsPage { this.update_settings::( selection, cx, - |setting, value| *setting = Some(value), + |setting, value| *setting = VimModeSetting(value), ); }), )), diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 63e0f2b079..de3d1dc74d 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -180,18 +180,10 @@ pub(crate) enum LineIndicatorFormat { Long, } -/// Whether or not to automatically check for updates. 
-/// -/// Values: short, long -/// Default: short -#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] -#[serde(transparent)] -pub(crate) struct LineIndicatorFormatContent(LineIndicatorFormat); - impl Settings for LineIndicatorFormat { const KEY: Option<&'static str> = Some("line_indicator_format"); - type FileContent = Option; + type FileContent = Self; fn load( sources: SettingsSources, @@ -199,9 +191,9 @@ impl Settings for LineIndicatorFormat { ) -> anyhow::Result { let format = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied().flatten()) - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); + .find_map(|value| value.copied()) + .unwrap_or(*sources.default); - Ok(format.0) + Ok(format) } } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 8de9e6f009..b203592360 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -5,6 +5,7 @@ use core::fmt::Debug; use derive_more::{Add, AddAssign, Div, DivAssign, Mul, Neg, Sub, SubAssign}; use refineable::Refineable; +use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use std::{ cmp::{self, PartialOrd}, @@ -2201,6 +2202,7 @@ impl From for Radians { PartialEq, Serialize, Deserialize, + JsonSchema, )] #[repr(transparent)] pub struct Pixels(pub f32); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index e1fcaaba28..7a6b758a25 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -70,10 +70,10 @@ pub struct LanguageSettings { /// The column at which to soft-wrap lines, for buffers where soft-wrap /// is enabled. pub preferred_line_length: u32, - // Whether to show wrap guides (vertical rulers) in the editor. 
- // Setting this to true will show a guide at the 'preferred_line_length' value - // if softwrap is set to 'preferred_line_length', and will show any - // additional guides as specified by the 'wrap_guides' setting. + /// Whether to show wrap guides (vertical rulers) in the editor. + /// Setting this to true will show a guide at the 'preferred_line_length' value + /// if softwrap is set to 'preferred_line_length', and will show any + /// additional guides as specified by the 'wrap_guides' setting. pub show_wrap_guides: bool, /// Character counts at which to show wrap guides (vertical rulers) in the editor. pub wrap_guides: Vec, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 6b5f74c263..102eb1ef2f 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,10 +7,13 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{ + CodeLabel, Language, LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate, +}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; +use rope::Rope; use serde_json::{json, Value}; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; use smol::{ @@ -202,6 +205,30 @@ impl LspAdapter for JsonLspAdapter { }))) } + async fn label_for_completion( + &self, + item: &lsp::CompletionItem, + language: &Arc, + ) -> Option { + let text = if let Some(description) = item + .label_details + .as_ref() + .and_then(|label_details| label_details.description.as_ref()) + { + format!("{} {}", item.label, description) + } else if let Some(detail) = &item.detail { + format!("{} {}", item.label, detail) + } else { + item.label.clone() + }; + let rope = Rope::from(item.label.as_str()); + let runs = 
language.highlight_text(&rope, 0..item.label.len()); + Some(language::CodeLabel { + text, + runs, + filter_range: 0..item.label.len(), + }) + } async fn workspace_configuration( self: Arc, _: &Arc, diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index c5f0187c22..361607533b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -24,12 +24,12 @@ use editor::{ use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, - AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, - EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, - KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, - SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, - VisualContext, WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, uniform_list, Action, AnyElement, AppContext, + AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, EventEmitter, + FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, KeyContext, Model, + MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, SharedString, Stateful, + Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext, + WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; @@ -1938,7 +1938,7 @@ impl OutlinePanel { .child( ListItem::new(item_id) .indent_level(depth) - .indent_step_size(px(settings.indent_size)) + .indent_step_size(settings.indent_size) .selected(is_active) .when_some(icon_element, |list_item, icon_element| { list_item.child(h_flex().child(icon_element)) @@ -3801,7 +3801,7 @@ impl Panel 
for OutlinePanel { DockPosition::Left | DockPosition::Bottom => OutlinePanelDockPosition::Left, DockPosition::Right => OutlinePanelDockPosition::Right, }; - settings.dock = Some(dock); + settings.dock = dock; }, ); } diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index e19fc3c008..a8e51b96c5 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -1,4 +1,5 @@ -use gpui::Pixels; +use anyhow; +use gpui::{px, Pixels}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -10,66 +11,51 @@ pub enum OutlinePanelDockPosition { Right, } -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] pub struct OutlinePanelSettings { - pub button: bool, - pub default_width: Pixels, - pub dock: OutlinePanelDockPosition, - pub file_icons: bool, - pub folder_icons: bool, - pub git_status: bool, - pub indent_size: f32, - pub auto_reveal_entries: bool, - pub auto_fold_dirs: bool, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct OutlinePanelSettingsContent { /// Whether to show the outline panel button in the status bar. - /// - /// Default: true - pub button: Option, + pub button: bool, /// Customize default width (in pixels) taken by outline panel - /// - /// Default: 240 - pub default_width: Option, + pub default_width: Pixels, /// The position of outline panel - /// - /// Default: left - pub dock: Option, + pub dock: OutlinePanelDockPosition, /// Whether to show file icons in the outline panel. - /// - /// Default: true - pub file_icons: Option, + pub file_icons: bool, /// Whether to show folder icons or chevrons for directories in the outline panel. 
- /// - /// Default: true - pub folder_icons: Option, + pub folder_icons: bool, /// Whether to show the git status in the outline panel. - /// - /// Default: true - pub git_status: Option, + pub git_status: bool, /// Amount of indentation (in pixels) for nested items. - /// - /// Default: 20 - pub indent_size: Option, + pub indent_size: Pixels, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. - /// - /// Default: true - pub auto_reveal_entries: Option, + pub auto_reveal_entries: bool, /// Whether to fold directories automatically /// when directory has only one directory inside. - /// - /// Default: true - pub auto_fold_dirs: Option, + pub auto_fold_dirs: bool, +} + +impl Default for OutlinePanelSettings { + fn default() -> Self { + Self { + button: true, + default_width: px(240.), + dock: OutlinePanelDockPosition::Left, + file_icons: true, + folder_icons: true, + auto_fold_dirs: true, + auto_reveal_entries: true, + indent_size: px(20.), + git_status: true, + } + } } impl Settings for OutlinePanelSettings { const KEY: Option<&'static str> = Some("outline_panel"); - type FileContent = OutlinePanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/performance/src/performance.rs b/crates/performance/src/performance.rs new file mode 100644 index 0000000000..db2388c59a --- /dev/null +++ b/crates/performance/src/performance.rs @@ -0,0 +1,184 @@ +use std::time::Instant; + +use anyhow::Result; +use gpui::{ + div, AppContext, InteractiveElement as _, Render, StatefulInteractiveElement as _, + Subscription, ViewContext, VisualContext, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources, SettingsStore}; +use workspace::{ + ui::{Label, LabelCommon, LabelSize, Tooltip}, + ItemHandle, StatusItemView, Workspace, +}; + +const SHOW_STARTUP_TIME_DURATION: 
std::time::Duration = std::time::Duration::from_secs(5); + +pub fn init(cx: &mut AppContext) { + PerformanceSettings::register(cx); + + let mut enabled = PerformanceSettings::get_global(cx).show_in_status_bar; + let start_time = Instant::now(); + let mut _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); + + cx.observe_global::(move |cx| { + let new_value = PerformanceSettings::get_global(cx).show_in_status_bar; + if new_value != enabled { + enabled = new_value; + _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); + } + }) + .detach(); +} + +fn toggle_status_bar_items( + enabled: bool, + start_time: Instant, + cx: &mut AppContext, +) -> Option { + for window in cx.windows() { + if let Some(workspace) = window.downcast::() { + workspace + .update(cx, |workspace, cx| { + toggle_status_bar_item(workspace, enabled, start_time, cx); + }) + .ok(); + } + } + + if enabled { + log::info!("performance metrics display enabled"); + Some(cx.observe_new_views::(move |workspace, cx| { + toggle_status_bar_item(workspace, true, start_time, cx); + })) + } else { + log::info!("performance metrics display disabled"); + None + } +} + +struct PerformanceStatusBarItem { + display_mode: DisplayMode, +} + +#[derive(Copy, Clone, Debug)] +enum DisplayMode { + StartupTime, + Fps, +} + +impl PerformanceStatusBarItem { + fn new(start_time: Instant, cx: &mut ViewContext) -> Self { + let now = Instant::now(); + let display_mode = if now < start_time + SHOW_STARTUP_TIME_DURATION { + DisplayMode::StartupTime + } else { + DisplayMode::Fps + }; + + let this = Self { display_mode }; + + if let DisplayMode::StartupTime = display_mode { + cx.spawn(|this, mut cx| async move { + let now = Instant::now(); + let remaining_duration = + (start_time + SHOW_STARTUP_TIME_DURATION).saturating_duration_since(now); + cx.background_executor().timer(remaining_duration).await; + this.update(&mut cx, |this, cx| { + this.display_mode = DisplayMode::Fps; + cx.notify(); + }) + 
.ok(); + }) + .detach(); + } + + this + } +} + +impl Render for PerformanceStatusBarItem { + fn render(&mut self, cx: &mut gpui::ViewContext) -> impl gpui::IntoElement { + let text = match self.display_mode { + DisplayMode::StartupTime => cx + .time_to_first_window_draw() + .map_or("Pending".to_string(), |duration| { + format!("{}ms", duration.as_millis()) + }), + DisplayMode::Fps => cx.fps().map_or("".to_string(), |fps| { + format!("{:3} FPS", fps.round() as u32) + }), + }; + + use gpui::ParentElement; + let display_mode = self.display_mode; + div() + .id("performance status") + .child(Label::new(text).size(LabelSize::Small)) + .tooltip(move |cx| match display_mode { + DisplayMode::StartupTime => Tooltip::text("Time to first window draw", cx), + DisplayMode::Fps => cx + .new_view(|cx| { + let tooltip = Tooltip::new("Current FPS"); + if let Some(time_to_first) = cx.time_to_first_window_draw() { + tooltip.meta(format!( + "Time to first window draw: {}ms", + time_to_first.as_millis() + )) + } else { + tooltip + } + }) + .into(), + }) + } +} + +impl StatusItemView for PerformanceStatusBarItem { + fn set_active_pane_item( + &mut self, + _active_pane_item: Option<&dyn ItemHandle>, + _cx: &mut gpui::ViewContext, + ) { + // This is not currently used. + } +} + +fn toggle_status_bar_item( + workspace: &mut Workspace, + enabled: bool, + start_time: Instant, + cx: &mut ViewContext, +) { + if enabled { + workspace.status_bar().update(cx, |bar, cx| { + bar.add_right_item( + cx.new_view(|cx| PerformanceStatusBarItem::new(start_time, cx)), + cx, + ) + }); + } else { + workspace.status_bar().update(cx, |bar, cx| { + bar.remove_items_of_type::(cx); + }); + } +} + +/// Configuration of the display of performance details. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] +pub struct PerformanceSettings { + /// Display the time to first window draw and frame rate in the status bar. 
+ pub show_in_status_bar: bool, +} + +impl Settings for PerformanceSettings { + const KEY: Option<&'static str> = Some("performance"); + + type FileContent = Self; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + sources.json_merge() + } +} diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 70b2eccf23..3c21b1c5e8 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -20,6 +20,7 @@ use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId}; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; #[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct ProjectSettings { /// Configuration for language servers. /// @@ -41,7 +42,6 @@ pub struct ProjectSettings { pub load_direnv: DirenvSettings, /// Configuration for session-related features - #[serde(default)] pub session: SessionSettings, } @@ -59,36 +59,31 @@ pub enum DirenvSettings { } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct GitSettings { /// Whether or not to show the git gutter. /// /// Default: tracked_files - pub git_gutter: Option, + pub git_gutter: GitGutterSetting, pub gutter_debounce: Option, /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: on - pub inline_blame: Option, + pub inline_blame: InlineBlameSettings, } impl GitSettings { pub fn inline_blame_enabled(&self) -> bool { #[allow(unknown_lints, clippy::manual_unwrap_or_default)] - match self.inline_blame { - Some(InlineBlameSettings { enabled, .. }) => enabled, - _ => false, - } + self.inline_blame.enabled } pub fn inline_blame_delay(&self) -> Option { - match self.inline_blame { - Some(InlineBlameSettings { - delay_ms: Some(delay_ms), - .. 
- }) if delay_ms > 0 => Some(Duration::from_millis(delay_ms)), - _ => None, - } + self.inline_blame + .delay_ms + .gt(&0) + .then(|| Duration::from_millis(self.inline_blame.delay_ms)) } } @@ -102,28 +97,34 @@ pub enum GitGutterSetting { Hide, } -#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] +#[serde(default)] pub struct InlineBlameSettings { /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: true - #[serde(default = "true_value")] pub enabled: bool, /// Whether to only show the inline blame information /// after a delay once the cursor stops moving. /// /// Default: 0 - pub delay_ms: Option, + pub delay_ms: u64, /// The minimum column number to show the inline blame information at /// /// Default: 0 - pub min_column: Option, + pub min_column: u32, } -const fn true_value() -> bool { - true +impl Default for InlineBlameSettings { + fn default() -> Self { + Self { + enabled: true, + delay_ms: 0, + min_column: 0, + } + } } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c77a2170dd..6ca843875b 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2289,7 +2289,7 @@ impl ProjectPanel { .child( ListItem::new(entry_id.to_proto() as usize) .indent_level(depth) - .indent_step_size(px(settings.indent_size)) + .indent_step_size(settings.indent_size) .selected(is_marked || is_active) .when_some(canonical_path, |this, path| { this.end_slot::( @@ -2817,7 +2817,7 @@ impl Render for DraggedProjectEntryView { this.bg(cx.theme().colors().background).w(self.width).child( ListItem::new(self.selection.entry_id.to_proto() as usize) .indent_level(self.details.depth) - .indent_step_size(px(settings.indent_size)) + 
.indent_step_size(settings.indent_size) .child(if let Some(icon) = &self.details.icon { div().child(Icon::from_path(icon.clone())) } else { @@ -2855,7 +2855,7 @@ impl Panel for ProjectPanel { DockPosition::Left | DockPosition::Bottom => ProjectPanelDockPosition::Left, DockPosition::Right => ProjectPanelDockPosition::Right, }; - settings.dock = Some(dock); + settings.dock = dock; }, ); } @@ -3029,7 +3029,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { worktree_settings.file_scan_exclusions = - Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); + vec!["**/.git".to_string(), "**/4/**".to_string()]; }); }); }); @@ -4818,10 +4818,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = Some(false) + project_panel_settings.auto_reveal_entries = false }); }) }); @@ -4940,7 +4940,7 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = Some(true) + project_panel_settings.auto_reveal_entries = true }); }) }); @@ -5054,10 +5054,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = Some(false) + project_panel_settings.auto_reveal_entries = false }); }) }); @@ -5256,7 +5256,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - 
Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); + vec!["excluded_dir".to_string(), "**/.git".to_string()]; }); }); }); @@ -5569,10 +5569,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = Some(false); + project_panel_settings.auto_fold_dirs = false; }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); }); }); @@ -5591,10 +5591,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = Some(false); + project_panel_settings.auto_fold_dirs = false; }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); }); }); diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 4d73ae9245..6910b4627a 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,6 +2,7 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; +use ui::px; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -10,20 +11,50 @@ pub enum ProjectPanelDockPosition { Right, } -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] +#[serde(default)] pub struct ProjectPanelSettings { + /// Whether to show the project panel button in the status bar. 
pub button: bool, + /// Customize default width (in pixels) taken by project panel pub default_width: Pixels, + /// The position of project panel pub dock: ProjectPanelDockPosition, + /// Whether to show file icons in the project panel. pub file_icons: bool, + /// Whether to show folder icons or chevrons for directories in the project panel. pub folder_icons: bool, + /// Whether to show the git status in the project panel. pub git_status: bool, - pub indent_size: f32, + /// Amount of indentation (in pixels) for nested items. + pub indent_size: Pixels, + /// Whether to reveal it in the project panel automatically, + /// when a corresponding project entry becomes active. + /// Gitignored entries are never auto revealed. pub auto_reveal_entries: bool, + /// Whether to fold directories automatically + /// when directory has only one directory inside. pub auto_fold_dirs: bool, + /// Scrollbar-related settings pub scrollbar: ScrollbarSettings, } +impl Default for ProjectPanelSettings { + fn default() -> Self { + Self { + button: true, + default_width: px(240.), + dock: ProjectPanelDockPosition::Left, + file_icons: true, + folder_icons: true, + git_status: true, + indent_size: px(20.), + auto_reveal_entries: true, + auto_fold_dirs: true, + scrollbar: Default::default(), + } + } +} /// When to show the scrollbar in the project panel. /// /// Default: always @@ -37,7 +68,7 @@ pub enum ShowScrollbar { Never, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. /// @@ -45,63 +76,10 @@ pub struct ScrollbarSettings { pub show: ShowScrollbar, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -pub struct ScrollbarSettingsContent { - /// When to show the scrollbar in the project panel. 
- /// - /// Default: always - pub show: Option, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct ProjectPanelSettingsContent { - /// Whether to show the project panel button in the status bar. - /// - /// Default: true - pub button: Option, - /// Customize default width (in pixels) taken by project panel - /// - /// Default: 240 - pub default_width: Option, - /// The position of project panel - /// - /// Default: left - pub dock: Option, - /// Whether to show file icons in the project panel. - /// - /// Default: true - pub file_icons: Option, - /// Whether to show folder icons or chevrons for directories in the project panel. - /// - /// Default: true - pub folder_icons: Option, - /// Whether to show the git status in the project panel. - /// - /// Default: true - pub git_status: Option, - /// Amount of indentation (in pixels) for nested items. - /// - /// Default: 20 - pub indent_size: Option, - /// Whether to reveal it in the project panel automatically, - /// when a corresponding project entry becomes active. - /// Gitignored entries are never auto revealed. - /// - /// Default: true - pub auto_reveal_entries: Option, - /// Whether to fold directories automatically - /// when directory has only one directory inside. 
- /// - /// Default: false - pub auto_fold_dirs: Option, - /// Scrollbar-related settings - pub scrollbar: Option, -} - impl Settings for ProjectPanelSettings { const KEY: Option<&'static str> = Some("project_panel"); - type FileContent = ProjectPanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index d8b10f31f9..b7fa635945 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -48,7 +48,6 @@ use workspace::{notifications::DetachAndPromptErr, AppState, ModalView, Workspac use crate::open_dev_server_project; use crate::ssh_connections::connect_over_ssh; use crate::ssh_connections::open_ssh_project; -use crate::ssh_connections::RemoteSettingsContent; use crate::ssh_connections::SshConnection; use crate::ssh_connections::SshConnectionModal; use crate::ssh_connections::SshProject; @@ -1024,7 +1023,7 @@ impl DevServerProjects { fn update_settings_file( &mut self, cx: &mut ViewContext, - f: impl FnOnce(&mut RemoteSettingsContent) + Send + Sync + 'static, + f: impl FnOnce(&mut SshSettings) + Send + Sync + 'static, ) { let Some(fs) = self .workspace diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 8da4284b7f..b54196022d 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -22,8 +22,24 @@ use ui::{ use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; -#[derive(Deserialize)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct SshSettings { + /// ssh_connections is an array of ssh connections. + /// By default this setting is null, which disables the direct ssh connection support. + /// You can configure these from `project: Open Remote` in the command palette. 
+ /// Zed's ssh support will pull configuration from your ~/.ssh too. + /// Examples: + /// [ + /// { + /// "host": "example-box", + /// "projects": [ + /// { + /// "paths": ["/home/user/code/zed"] + /// } + /// ] + /// } + /// ] pub ssh_connections: Option>, } @@ -62,15 +78,10 @@ pub struct SshProject { pub paths: Vec, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct RemoteSettingsContent { - pub ssh_connections: Option>, -} - impl Settings for SshSettings { const KEY: Option<&'static str> = None; - type FileContent = RemoteSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/repl/src/jupyter_settings.rs b/crates/repl/src/jupyter_settings.rs index aefef6cec5..f441da4790 100644 --- a/crates/repl/src/jupyter_settings.rs +++ b/crates/repl/src/jupyter_settings.rs @@ -6,8 +6,10 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct JupyterSettings { + /// Default kernels to select for each language. pub kernel_selections: HashMap, } @@ -20,26 +22,10 @@ impl JupyterSettings { } } -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -pub struct JupyterSettingsContent { - /// Default kernels to select for each language. 
- /// - /// Default: `{}` - pub kernel_selections: Option>, -} - -impl Default for JupyterSettingsContent { - fn default() -> Self { - JupyterSettingsContent { - kernel_selections: Some(HashMap::new()), - } - } -} - impl Settings for JupyterSettings { const KEY: Option<&'static str> = Some("jupyter"); - type FileContent = JupyterSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -51,10 +37,8 @@ impl Settings for JupyterSettings { let mut settings = JupyterSettings::default(); for value in sources.defaults_and_customizations() { - if let Some(source) = &value.kernel_selections { - for (k, v) in source { - settings.kernel_selections.insert(k.clone(), v.clone()); - } + for (k, v) in &value.kernel_selections { + settings.kernel_selections.insert(k.clone(), v.clone()); } } diff --git a/crates/tasks_ui/src/settings.rs b/crates/tasks_ui/src/settings.rs index 1bcd496264..4ad6f607b7 100644 --- a/crates/tasks_ui/src/settings.rs +++ b/crates/tasks_ui/src/settings.rs @@ -2,22 +2,26 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Serialize, Deserialize, PartialEq, Default)] +#[derive(Clone, Serialize, Deserialize, PartialEq, JsonSchema)] +#[serde(default)] +/// Task-related settings. pub(crate) struct TaskSettings { + /// Whether to show task status indicator in the status bar. Default: true pub(crate) show_status_indicator: bool, } -/// Task-related settings. -#[derive(Serialize, Deserialize, PartialEq, Default, Clone, JsonSchema)] -pub(crate) struct TaskSettingsContent { - /// Whether to show task status indicator in the status bar. 
Default: true - show_status_indicator: Option, +impl Default for TaskSettings { + fn default() -> Self { + Self { + show_status_indicator: true, + } + } } impl Settings for TaskSettings { const KEY: Option<&'static str> = Some("task"); - type FileContent = TaskSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/vim/src/digraph.rs b/crates/vim/src/digraph.rs index 443b7ff378..282016cfda 100644 --- a/crates/vim/src/digraph.rs +++ b/crates/vim/src/digraph.rs @@ -132,7 +132,7 @@ mod test { let mut custom_digraphs = HashMap::default(); custom_digraphs.insert("|-".into(), "⊢".into()); custom_digraphs.insert(":)".into(), "👨‍💻".into()); - s.custom_digraphs = Some(custom_digraphs); + s.custom_digraphs = custom_digraphs; }); }); diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 8198c0da53..815086d0be 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -1184,7 +1184,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = Some(true); + s.use_multiline_find = true; }); }); @@ -1226,7 +1226,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = Some(true); + s.use_multiline_find = true; }); }); @@ -1268,7 +1268,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_smartcase_find = Some(true); + s.use_smartcase_find = true; }); }); diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 05469dbf9f..6465e33e0f 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -291,7 +291,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - 
s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -327,7 +327,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::OnYank) + s.use_system_clipboard = UseSystemClipboard::OnYank }); }); @@ -584,7 +584,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -630,7 +630,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -659,7 +659,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -707,7 +707,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index f89faa3748..6a20ea4eb3 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -294,7 +294,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.scroll_beyond_last_line = Some(ScrollBeyondLastLine::Off) + s.scroll_beyond_last_line = ScrollBeyondLastLine::Off }); }); diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 28f33d49d8..6418475ad2 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -542,7 +542,7 @@ mod test { let mut cx = 
VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); + store.update_user_settings::(cx, |s| s.search_wrap = false); }); cx.set_state("ˇhi\nhigh\nhi\n", Mode::Normal); @@ -655,7 +655,7 @@ mod test { // check that searching with unable search wrap cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); + store.update_user_settings::(cx, |s| s.search_wrap = false); }); cx.set_state("aa\nbˇb\ncc\ncc\ncc\n", Mode::Normal); cx.simulate_keystrokes("/ c c enter"); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 9c61e9cd93..be7db47315 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -1300,7 +1300,7 @@ async fn test_command_alias(cx: &mut gpui::TestAppContext) { store.update_user_settings::(cx, |s| { let mut aliases = HashMap::default(); aliases.insert("Q".to_string(), "upper".to_string()); - s.command_aliases = Some(aliases) + s.command_aliases = aliases }); }); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index c985f68e70..b68d2ede8b 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -57,7 +57,7 @@ impl VimTestContext { pub fn new_with_lsp(mut cx: EditorLspTestContext, enabled: bool) -> VimTestContext { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(enabled)); + store.update_user_settings::(cx, |s| *s = VimModeSetting(enabled)); }); settings::KeymapFile::load_asset("keymaps/default-macos.json", cx).unwrap(); if enabled { @@ -105,7 +105,7 @@ impl VimTestContext { pub fn enable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(true)); + store.update_user_settings::(cx, |s| *s = VimModeSetting(true)); }); }) } @@ -113,7 +113,7 
@@ impl VimTestContext { pub fn disable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(false)); + store.update_user_settings::(cx, |s| *s = VimModeSetting(false)); }); }) } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6e03374c22..6baca17948 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -46,6 +46,8 @@ use crate::state::ReplayableAction; /// Whether or not to enable Vim mode. /// /// Default: false +#[derive(Copy, Clone, Default, Deserialize, Serialize, JsonSchema)] +#[serde(default, transparent)] pub struct VimModeSetting(pub bool); /// An Action to Switch between modes @@ -99,7 +101,7 @@ pub fn init(cx: &mut AppContext) { let fs = workspace.app_state().fs.clone(); let currently_enabled = Vim::enabled(cx); update_settings_file::(fs, cx, move |setting, _| { - *setting = Some(!currently_enabled) + *setting = VimModeSetting(!currently_enabled); }) }); @@ -1068,12 +1070,10 @@ impl Vim { impl Settings for VimModeSetting { const KEY: Option<&'static str> = Some("vim_mode"); - type FileContent = Option; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - Ok(Self(sources.user.copied().flatten().unwrap_or( - sources.default.ok_or_else(Self::missing_default)?, - ))) + Ok(sources.user.copied().unwrap_or(*sources.default)) } } @@ -1089,7 +1089,8 @@ pub enum UseSystemClipboard { OnYank, } -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] struct VimSettings { pub toggle_relative_line_numbers: bool, pub use_system_clipboard: UseSystemClipboard, @@ -1098,19 +1099,22 @@ struct VimSettings { pub custom_digraphs: HashMap>, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -struct VimSettingsContent { - pub toggle_relative_line_numbers: Option, - pub use_system_clipboard: Option, - pub use_multiline_find: Option, - pub use_smartcase_find: Option, - pub 
custom_digraphs: Option>>, +impl Default for VimSettings { + fn default() -> Self { + Self { + toggle_relative_line_numbers: false, + use_system_clipboard: UseSystemClipboard::Always, + use_multiline_find: false, + use_smartcase_find: false, + custom_digraphs: Default::default(), + } + } } impl Settings for VimSettings { const KEY: Option<&'static str> = Some("vim"); - type FileContent = VimSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/welcome/src/base_keymap_picker.rs b/crates/welcome/src/base_keymap_picker.rs index 96a9df9c3c..fd7361f9b3 100644 --- a/crates/welcome/src/base_keymap_picker.rs +++ b/crates/welcome/src/base_keymap_picker.rs @@ -177,7 +177,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { .report_setting_event("keymap", base_keymap.to_string()); update_settings_file::(self.fs.clone(), cx, move |setting, _| { - *setting = Some(base_keymap) + *setting = base_keymap; }); } diff --git a/crates/welcome/src/base_keymap_setting.rs b/crates/welcome/src/base_keymap_setting.rs index 1b52bbc9f9..0c1724627c 100644 --- a/crates/welcome/src/base_keymap_setting.rs +++ b/crates/welcome/src/base_keymap_setting.rs @@ -87,15 +87,15 @@ impl BaseKeymap { impl Settings for BaseKeymap { const KEY: Option<&'static str> = Some("base_keymap"); - type FileContent = Option; + type FileContent = Self; fn load( sources: SettingsSources, _: &mut gpui::AppContext, ) -> anyhow::Result { - if let Some(Some(user_value)) = sources.user.copied() { + if let Some(user_value) = sources.user.copied() { return Ok(user_value); } - sources.default.ok_or_else(Self::missing_default) + Ok(*sources.default) } } diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index fc837c6867..787c2e589b 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -188,7 +188,7 @@ impl Render for WelcomePage { this.update_settings::( selection, cx, - 
|setting, value| *setting = Some(value), + |setting, value| *setting = VimModeSetting(value), ); }), )) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 935f0268b6..46b8f3bf7f 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -36,20 +36,49 @@ use util::ResultExt; pub const LEADER_UPDATE_THROTTLE: Duration = Duration::from_millis(200); -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct ItemSettings { + /// Whether to show the Git file status on a tab item. pub git_status: bool, + /// Position of the close button in a tab. pub close_position: ClosePosition, + /// Whether to show the file icon for a tab. pub file_icons: bool, } -#[derive(Deserialize)] +impl Default for ItemSettings { + fn default() -> Self { + Self { + git_status: false, + close_position: ClosePosition::Right, + file_icons: false, + } + } +} + +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct PreviewTabsSettings { + /// Whether to show opened editors as preview tabs. + /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. pub enabled: bool, + /// Whether to open tabs in preview mode when selected from the file finder. pub enable_preview_from_file_finder: bool, + /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. 
pub enable_preview_from_code_navigation: bool, } +impl Default for PreviewTabsSettings { + fn default() -> Self { + Self { + enabled: true, + enable_preview_from_file_finder: false, + enable_preview_from_code_navigation: false, + } + } +} + #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "lowercase")] pub enum ClosePosition { @@ -67,43 +96,10 @@ impl ClosePosition { } } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct ItemSettingsContent { - /// Whether to show the Git file status on a tab item. - /// - /// Default: false - git_status: Option, - /// Position of the close button in a tab. - /// - /// Default: right - close_position: Option, - /// Whether to show the file icon for a tab. - /// - /// Default: false - file_icons: Option, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct PreviewTabsSettingsContent { - /// Whether to show opened editors as preview tabs. - /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. - /// - /// Default: true - enabled: Option, - /// Whether to open tabs in preview mode when selected from the file finder. - /// - /// Default: false - enable_preview_from_file_finder: Option, - /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. 
- /// - /// Default: false - enable_preview_from_code_navigation: Option, -} - impl Settings for ItemSettings { const KEY: Option<&'static str> = Some("tabs"); - type FileContent = ItemSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -113,7 +109,7 @@ impl Settings for ItemSettings { impl Settings for PreviewTabsSettings { const KEY: Option<&'static str> = Some("preview_tabs"); - type FileContent = PreviewTabsSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 7371e56cee..50b92326b2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -6418,7 +6418,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::OnWindowChange); + settings.autosave = AutosaveSetting::OnWindowChange; }) }); item.is_dirty = true; @@ -6438,7 +6438,7 @@ mod tests { cx.focus_self(); SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::OnFocusChange); + settings.autosave = AutosaveSetting::OnFocusChange; }) }); item.is_dirty = true; @@ -6461,7 +6461,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::AfterDelay { milliseconds: 500 }); + settings.autosave = AutosaveSetting::AfterDelay { milliseconds: 500 }; }) }); item.is_dirty = true; @@ -6480,7 +6480,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::OnFocusChange); + 
settings.autosave = AutosaveSetting::OnFocusChange; }) }); item.is_dirty = true; diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 52827c6941..f87840eb30 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -5,22 +5,58 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct WorkspaceSettings { + /// Scale by which to zoom the active pane. + /// When set to 1.0, the active pane has the same size as others, + /// but when set to a larger value, the active pane takes up more space. pub active_pane_magnification: f32, + /// Direction to split horizontally. pub pane_split_direction_horizontal: PaneSplitDirectionHorizontal, + /// Direction to split vertically. pub pane_split_direction_vertical: PaneSplitDirectionVertical, + /// Centered layout related settings. pub centered_layout: CenteredLayoutSettings, + /// Whether or not to prompt the user to confirm before closing the application. pub confirm_quit: bool, + /// Whether or not to show the call status icon in the status bar. pub show_call_status_icon: bool, + /// When to automatically save edited buffers. pub autosave: AutosaveSetting, + /// Controls previous session restoration in freshly launched Zed instance. pub restore_on_startup: RestoreOnStartupBehavior, + /// The size of the workspace split drop targets on the outer edges. + /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. pub drop_target_size: f32, + /// Whether to close the window when using 'close active item' on a workspace with no tabs pub when_closing_with_no_tabs: CloseWindowWhenNoItems, + /// Whether to use the system provided dialogs for Open and Save As. + /// When set to false, Zed will use the built-in keyboard-first pickers. 
pub use_system_path_prompts: bool, + /// Aliases for the command palette. When you type a key in this map, + /// it will be assumed to equal the value. pub command_aliases: HashMap, } +impl Default for WorkspaceSettings { + fn default() -> Self { + Self { + active_pane_magnification: 1.0, + pane_split_direction_horizontal: PaneSplitDirectionHorizontal::Up, + pane_split_direction_vertical: PaneSplitDirectionVertical::Left, + centered_layout: CenteredLayoutSettings::default(), + confirm_quit: false, + show_call_status_icon: true, + autosave: AutosaveSetting::Off, + restore_on_startup: RestoreOnStartupBehavior::default(), + drop_target_size: 0.2, + when_closing_with_no_tabs: CloseWindowWhenNoItems::default(), + use_system_path_prompts: true, + command_aliases: HashMap::default(), + } + } +} #[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CloseWindowWhenNoItems { @@ -55,77 +91,22 @@ pub enum RestoreOnStartupBehavior { LastSession, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct WorkspaceSettingsContent { - /// Scale by which to zoom the active pane. - /// When set to 1.0, the active pane has the same size as others, - /// but when set to a larger value, the active pane takes up more space. - /// - /// Default: `1.0` - pub active_pane_magnification: Option, - // Direction to split horizontally. - // - // Default: "up" - pub pane_split_direction_horizontal: Option, - // Direction to split vertically. - // - // Default: "left" - pub pane_split_direction_vertical: Option, - // Centered layout related settings. - pub centered_layout: Option, - /// Whether or not to prompt the user to confirm before closing the application. - /// - /// Default: false - pub confirm_quit: Option, - /// Whether or not to show the call status icon in the status bar. - /// - /// Default: true - pub show_call_status_icon: Option, - /// When to automatically save edited buffers. 
- /// - /// Default: off - pub autosave: Option, - /// Controls previous session restoration in freshly launched Zed instance. - /// Values: none, last_workspace, last_session - /// Default: last_session - pub restore_on_startup: Option, - /// The size of the workspace split drop targets on the outer edges. - /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. - /// - /// Default: `0.2` (20% of the smaller dimension of the workspace) - pub drop_target_size: Option, - /// Whether to close the window when using 'close active item' on a workspace with no tabs - /// - /// Default: auto ("on" on macOS, "off" otherwise) - pub when_closing_with_no_tabs: Option, - /// Whether to use the system provided dialogs for Open and Save As. - /// When set to false, Zed will use the built-in keyboard-first pickers. - /// - /// Default: true - pub use_system_path_prompts: Option, - /// Aliases for the command palette. When you type a key in this map, - /// it will be assumed to equal the value. - /// - /// Default: true - pub command_aliases: Option>, -} - -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct TabBarSettings { + /// Whether or not to show the tab bar in the editor. pub show: bool, + /// Whether or not to show the navigation history buttons in the tab bar. pub show_nav_history_buttons: bool, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct TabBarSettingsContent { - /// Whether or not to show the tab bar in the editor. - /// - /// Default: true - pub show: Option, - /// Whether or not to show the navigation history buttons in the tab bar. 
- /// - /// Default: true - pub show_nav_history_buttons: Option, +impl Default for TabBarSettings { + fn default() -> Self { + Self { + show_nav_history_buttons: true, + show: true, + } + } } #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] @@ -163,17 +144,26 @@ pub struct CenteredLayoutSettings { /// /// Default: 0.2 pub left_padding: Option, - // The relative width of the right padding of the central pane from the - // workspace when the centered layout is used. + /// The relative width of the right padding of the central pane from the + /// workspace when the centered layout is used. /// /// Default: 0.2 pub right_padding: Option, } +impl Default for CenteredLayoutSettings { + fn default() -> Self { + Self { + left_padding: Some(0.2), + right_padding: Some(0.2), + } + } +} + impl Settings for WorkspaceSettings { const KEY: Option<&'static str> = None; - type FileContent = WorkspaceSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -183,7 +173,7 @@ impl Settings for WorkspaceSettings { impl Settings for TabBarSettings { const KEY: Option<&'static str> = Some("tab_bar"); - type FileContent = TabBarSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 32851d963a..82be3a8028 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -25,7 +25,8 @@ impl WorktreeSettings { } } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct WorktreeSettingsContent { /// Completely ignore files matching globs from `file_scan_exclusions` /// @@ -39,12 +40,42 @@ pub struct WorktreeSettingsContent { /// "**/.classpath", /// "**/.settings" /// ] - 
#[serde(default)] - pub file_scan_exclusions: Option>, + pub file_scan_exclusions: Vec, /// Treat the files matching these globs as `.env` files. /// Default: [ "**/.env*" ] - pub private_files: Option>, + pub private_files: Vec, +} + +impl Default for WorktreeSettingsContent { + fn default() -> Self { + Self { + private_files: [ + "**/.env*", + "**/*.pem", + "**/*.key", + "**/*.cert", + "**/*.crt", + "**/secrets.yml", + ] + .into_iter() + .map(str::to_owned) + .collect(), + file_scan_exclusions: [ + "**/.git", + "**/.svn", + "**/.hg", + "**/CVS", + "**/.DS_Store", + "**/Thumbs.db", + "**/.classpath", + "**/.settings", + ] + .into_iter() + .map(str::to_owned) + .collect(), + } + } } impl Settings for WorktreeSettings { @@ -57,8 +88,8 @@ impl Settings for WorktreeSettings { _: &mut AppContext, ) -> anyhow::Result { let result: WorktreeSettingsContent = sources.json_merge()?; - let mut file_scan_exclusions = result.file_scan_exclusions.unwrap_or_default(); - let mut private_files = result.private_files.unwrap_or_default(); + let mut file_scan_exclusions = result.file_scan_exclusions; + let mut private_files = result.private_files; file_scan_exclusions.sort(); private_files.sort(); Ok(Self { diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 929dc01c6d..455bc62a79 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -673,7 +673,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Some(Vec::new()); + project_settings.file_scan_exclusions = Vec::new(); }); }); }); @@ -910,7 +910,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - Some(vec!["**/foo/**".to_string(), 
"**/.DS_Store".to_string()]); + vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; }); }); }); @@ -945,8 +945,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = - Some(vec!["**/node_modules/**".to_string()]); + project_settings.file_scan_exclusions = vec!["**/node_modules/**".to_string()]; }); }); }); @@ -1009,11 +1008,11 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Some(vec![ + project_settings.file_scan_exclusions = vec![ "**/.git".to_string(), "node_modules/".to_string(), "build_output".to_string(), - ]); + ]; }); }); }); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 93fee57ecd..9f670efcd7 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1996,7 +1996,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); + vec!["excluded_dir".to_string(), "**/.git".to_string()]; }); }); }); From ef5a7e1642ecf7c2c4bb123e66e1bb477c55b255 Mon Sep 17 00:00:00 2001 From: Amin Ahmed Khan Date: Wed, 11 Sep 2024 06:14:43 +0300 Subject: [PATCH 046/270] Fix OpenAI key URL (#17675) Update the create Open AI Key URL Release Notes: - Fixed a link in the Assistant panel to the OpenAI console. 
--- crates/language_model/src/provider/open_ai.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index a7a962e925..3a371499eb 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -491,7 +491,7 @@ impl ConfigurationView { impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - const OPENAI_CONSOLE_URL: &str = "https://console.anthropic.com/settings/keys"; + const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys"; const INSTRUCTIONS: [&str; 6] = [ "To use the assistant panel or inline assistant, you need to add your OpenAI API key.", " - You can create an API key at: ", From 8088d1a9b7377cc9ae2a8f94ea9a5ce868ffc1d6 Mon Sep 17 00:00:00 2001 From: "Dairon M." Date: Tue, 10 Sep 2024 23:21:24 -0400 Subject: [PATCH 047/270] erlang: Bump to v0.1.0 (#17679) Changes: - https://github.com/zed-industries/zed/pull/14914 - https://github.com/zed-industries/zed/pull/14879 - https://github.com/zed-industries/zed/pull/14923 - https://github.com/zed-industries/zed/pull/15973 - https://github.com/zed-industries/zed/pull/16955 Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 2 +- extensions/erlang/Cargo.toml | 2 +- extensions/erlang/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07b97e030d..2027c5421f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14371,7 +14371,7 @@ dependencies = [ [[package]] name = "zed_erlang" -version = "0.0.1" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/extensions/erlang/Cargo.toml b/extensions/erlang/Cargo.toml index 1ac6b4b1b6..5067344896 100644 --- a/extensions/erlang/Cargo.toml +++ b/extensions/erlang/Cargo.toml @@ -1,6 +1,6 @@ [package] name 
= "zed_erlang" -version = "0.0.1" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/erlang/extension.toml b/extensions/erlang/extension.toml index 863da32dfa..23c7cf6d4a 100644 --- a/extensions/erlang/extension.toml +++ b/extensions/erlang/extension.toml @@ -1,7 +1,7 @@ id = "erlang" name = "Erlang" description = "Erlang support." -version = "0.0.1" +version = "0.1.0" schema_version = 1 authors = ["Dairon M ", "Fabian Bergström "] repository = "https://github.com/zed-industries/zed" From 331d28d47982d36c317c80c12851a49422b6468e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 23:37:37 -0400 Subject: [PATCH 048/270] php: Remove invalid node types from highlights queries (#17680) This PR removes some invalid node types from the PHP highlights queries. Release Notes: - N/A --- extensions/php/languages/php/highlights.scm | 3 --- 1 file changed, 3 deletions(-) diff --git a/extensions/php/languages/php/highlights.scm b/extensions/php/languages/php/highlights.scm index ab1fdf662c..6afeb1090b 100644 --- a/extensions/php/languages/php/highlights.scm +++ b/extensions/php/languages/php/highlights.scm @@ -133,8 +133,5 @@ "trait" @keyword "try" @keyword "use" @keyword -"var" @keyword "while" @keyword "xor" @keyword -"yield" @keyword -"yield from" @keyword From ec189fe884f48109d17ca3f2984c49c933b46253 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 10 Sep 2024 23:58:57 -0400 Subject: [PATCH 049/270] Add a way to join all panes into one (#17673) Closes https://github.com/zed-industries/zed/issues/17536 Closes https://github.com/zed-industries/zed/pull/17548 Release Notes: - Added a way to join all panes into one with `pane::JoinAll` action ([#17536](https://github.com/zed-industries/zed/issues/17536)) --------- Co-authored-by: Yogesh Dhamija --- crates/workspace/src/pane.rs | 19 +-- crates/workspace/src/workspace.rs | 201 ++++++++++++++++++++++++------ 2 files changed, 173 insertions(+), 47 
deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index cc752f7aec..d0fa411381 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -3,6 +3,7 @@ use crate::{ ClosePosition, Item, ItemHandle, ItemSettings, PreviewTabsSettings, TabContentParams, WeakItemHandle, }, + move_item, notifications::NotifyResultExt, toolbar::Toolbar, workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings}, @@ -149,6 +150,7 @@ actions!( GoBack, GoForward, JoinIntoNext, + JoinAll, ReopenClosedItem, SplitLeft, SplitUp, @@ -188,6 +190,7 @@ pub enum Event { item_id: EntityId, }, Split(SplitDirection), + JoinAll, JoinIntoNext, ChangeItemTitle, Focus, @@ -220,6 +223,7 @@ impl fmt::Debug for Event { .debug_struct("Split") .field("direction", direction) .finish(), + Event::JoinAll => f.write_str("JoinAll"), Event::JoinIntoNext => f.write_str("JoinIntoNext"), Event::ChangeItemTitle => f.write_str("ChangeItemTitle"), Event::Focus => f.write_str("Focus"), @@ -679,6 +683,10 @@ impl Pane { cx.emit(Event::JoinIntoNext); } + fn join_all(&mut self, cx: &mut ViewContext) { + cx.emit(Event::JoinAll); + } + fn history_updated(&mut self, cx: &mut ViewContext) { self.toolbar.update(cx, |_, cx| cx.notify()); } @@ -1757,9 +1765,7 @@ impl Pane { self.workspace .update(cx, |_, cx| { - cx.defer(move |this, cx| { - this.move_item(pane.clone(), pane, id, destination_index, cx) - }); + cx.defer(move |_, cx| move_item(&pane, &pane, id, destination_index, cx)); }) .ok()?; @@ -1777,9 +1783,7 @@ impl Pane { self.workspace .update(cx, |_, cx| { - cx.defer(move |this, cx| { - this.move_item(pane.clone(), pane, id, destination_index, cx) - }); + cx.defer(move |_, cx| move_item(&pane, &pane, id, destination_index, cx)); }) .ok()?; @@ -2349,7 +2353,7 @@ impl Pane { } }) } - workspace.move_item(from_pane.clone(), to_pane.clone(), item_id, ix, cx); + move_item(&from_pane, &to_pane, item_id, ix, cx); }); }) .log_err(); @@ -2556,6 +2560,7 @@ impl Render 
for Pane { .on_action(cx.listener(|pane, _: &GoBack, cx| pane.navigate_backward(cx))) .on_action(cx.listener(|pane, _: &GoForward, cx| pane.navigate_forward(cx))) .on_action(cx.listener(|pane, _: &JoinIntoNext, cx| pane.join_into_next(cx))) + .on_action(cx.listener(|pane, _: &JoinAll, cx| pane.join_all(cx))) .on_action(cx.listener(Pane::toggle_zoom)) .on_action(cx.listener(|pane: &mut Pane, action: &ActivateItem, cx| { pane.activate_item(action.0, true, true, cx); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 50b92326b2..e6358cfdb9 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -2965,6 +2965,7 @@ impl Workspace { self.split_and_clone(pane, *direction, cx); } pane::Event::JoinIntoNext => self.join_pane_into_next(pane, cx), + pane::Event::JoinAll => self.join_all_panes(cx), pane::Event::Remove { focus_on_pane } => { self.remove_pane(pane, focus_on_pane.clone(), cx) } @@ -3094,7 +3095,7 @@ impl Workspace { }; let new_pane = self.add_pane(cx); - self.move_item(from.clone(), new_pane.clone(), item_id_to_move, 0, cx); + move_item(&from, &new_pane, item_id_to_move, 0, cx); self.center .split(&pane_to_split, &new_pane, split_direction) .unwrap(); @@ -3122,6 +3123,17 @@ impl Workspace { })) } + pub fn join_all_panes(&mut self, cx: &mut ViewContext) { + let active_item = self.active_pane.read(cx).active_item(); + for pane in &self.panes { + join_pane_into_active(&self.active_pane, pane, cx); + } + if let Some(active_item) = active_item { + self.activate_item(active_item.as_ref(), true, true, cx); + } + cx.notify(); + } + pub fn join_pane_into_next(&mut self, pane: View, cx: &mut ViewContext) { let next_pane = self .find_pane_in_direction(SplitDirection::Right, cx) @@ -3131,48 +3143,10 @@ impl Workspace { let Some(next_pane) = next_pane else { return; }; - - let item_ids: Vec = pane.read(cx).items().map(|item| item.item_id()).collect(); - for item_id in item_ids { - 
self.move_item(pane.clone(), next_pane.clone(), item_id, 0, cx); - } + move_all_items(&pane, &next_pane, cx); cx.notify(); } - pub fn move_item( - &mut self, - source: View, - destination: View, - item_id_to_move: EntityId, - destination_index: usize, - cx: &mut ViewContext, - ) { - let Some((item_ix, item_handle)) = source - .read(cx) - .items() - .enumerate() - .find(|(_, item_handle)| item_handle.item_id() == item_id_to_move) - else { - // Tab was closed during drag - return; - }; - - let item_handle = item_handle.clone(); - - if source != destination { - // Close item from previous pane - source.update(cx, |source, cx| { - source.remove_item_and_focus_on_pane(item_ix, false, destination.clone(), cx); - }); - } - - // This automatically removes duplicate items in the pane - destination.update(cx, |destination, cx| { - destination.add_item(item_handle, true, true, Some(destination_index), cx); - destination.focus(cx) - }); - } - fn remove_pane( &mut self, pane: View, @@ -5944,6 +5918,79 @@ fn resize_edge( } } +fn join_pane_into_active(active_pane: &View, pane: &View, cx: &mut WindowContext<'_>) { + if pane == active_pane { + return; + } else if pane.read(cx).items_len() == 0 { + pane.update(cx, |_, cx| { + cx.emit(pane::Event::Remove { + focus_on_pane: None, + }); + }) + } else { + move_all_items(pane, active_pane, cx); + } +} + +fn move_all_items(from_pane: &View, to_pane: &View, cx: &mut WindowContext<'_>) { + let destination_is_different = from_pane != to_pane; + let mut moved_items = 0; + for (item_ix, item_handle) in from_pane + .read(cx) + .items() + .enumerate() + .map(|(ix, item)| (ix, item.clone())) + .collect::>() + { + let ix = item_ix - moved_items; + if destination_is_different { + // Close item from previous pane + from_pane.update(cx, |source, cx| { + source.remove_item_and_focus_on_pane(ix, false, to_pane.clone(), cx); + }); + moved_items += 1; + } + + // This automatically removes duplicate items in the pane + to_pane.update(cx, |destination, cx| 
{ + destination.add_item(item_handle, true, true, None, cx); + destination.focus(cx) + }); + } +} + +pub fn move_item( + source: &View, + destination: &View, + item_id_to_move: EntityId, + destination_index: usize, + cx: &mut WindowContext<'_>, +) { + let Some((item_ix, item_handle)) = source + .read(cx) + .items() + .enumerate() + .find(|(_, item_handle)| item_handle.item_id() == item_id_to_move) + .map(|(ix, item)| (ix, item.clone())) + else { + // Tab was closed during drag + return; + }; + + if source != destination { + // Close item from previous pane + source.update(cx, |source, cx| { + source.remove_item_and_focus_on_pane(item_ix, false, destination.clone(), cx); + }); + } + + // This automatically removes duplicate items in the pane + destination.update(cx, |destination, cx| { + destination.add_item(item_handle, true, true, Some(destination_index), cx); + destination.focus(cx) + }); +} + #[cfg(test)] mod tests { use std::{cell::RefCell, rc::Rc}; @@ -6855,6 +6902,80 @@ mod tests { }); } + fn add_an_item_to_active_pane( + cx: &mut VisualTestContext, + workspace: &View, + item_id: u64, + ) -> View { + let item = cx.new_view(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new( + item_id, + "item{item_id}.txt", + cx, + )]) + }); + workspace.update(cx, |workspace, cx| { + workspace.add_item_to_active_pane(Box::new(item.clone()), None, false, cx); + }); + return item; + } + + fn split_pane(cx: &mut VisualTestContext, workspace: &View) -> View { + return workspace.update(cx, |workspace, cx| { + let new_pane = + workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); + new_pane + }); + } + + #[gpui::test] + async fn test_join_all_panes(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx)); + + add_an_item_to_active_pane(cx, &workspace, 1); + split_pane(cx, 
&workspace); + add_an_item_to_active_pane(cx, &workspace, 2); + split_pane(cx, &workspace); // empty pane + split_pane(cx, &workspace); + let last_item = add_an_item_to_active_pane(cx, &workspace, 3); + + cx.executor().run_until_parked(); + + workspace.update(cx, |workspace, cx| { + let num_panes = workspace.panes().len(); + let num_items_in_current_pane = workspace.active_pane().read(cx).items().count(); + let active_item = workspace + .active_pane() + .read(cx) + .active_item() + .expect("item is in focus"); + + assert_eq!(num_panes, 4); + assert_eq!(num_items_in_current_pane, 1); + assert_eq!(active_item.item_id(), last_item.item_id()); + }); + + workspace.update(cx, |workspace, cx| { + workspace.join_all_panes(cx); + }); + + workspace.update(cx, |workspace, cx| { + let num_panes = workspace.panes().len(); + let num_items_in_current_pane = workspace.active_pane().read(cx).items().count(); + let active_item = workspace + .active_pane() + .read(cx) + .active_item() + .expect("item is in focus"); + + assert_eq!(num_panes, 1); + assert_eq!(num_items_in_current_pane, 3); + assert_eq!(active_item.item_id(), last_item.item_id()); + }); + } struct TestModal(FocusHandle); impl TestModal { From 13c553c50f38af721cb51d775b898d4fda20ccb0 Mon Sep 17 00:00:00 2001 From: Nero Song Date: Wed, 11 Sep 2024 20:49:12 +0800 Subject: [PATCH 050/270] Doc Fix: Shortcut of "Go forward" in key-bindings.md (#17689) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit "Control + _" => "Control + Shift + _" Doc was edited so quickly using zed that caused a typo 😄 ### shot image ### doc url https://zed.dev/docs/key-bindings Release Notes: - N/A --- docs/src/key-bindings.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 627e3fc6c4..2a97bc62a8 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -426,7 +426,7 @@ TBD: Add Column with Linux shortcuts | Close clean 
items | Pane | `⌘ + K, U` | | Close inactive items | Pane | `Alt + ⌘ + T` | | Go back | Pane | `Control + -` | -| Go forward | Pane | `Control + _` | +| Go forward | Pane | `Control + Shift + _` | | Reopen closed item | Pane | `⌘ + Shift + T` | | Split down | Pane | `⌘ + K, Down` | | Split left | Pane | `⌘ + K, Left` | From b5c42edf1e0f0a42b6f14d24e8b065ef0959b3dd Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 11 Sep 2024 12:56:39 -0400 Subject: [PATCH 051/270] lsp: Fix noisy logs when starting language servers (#17713) We would log every time we'd lookup a language server for a file and we'd also log "starting language server" even though we were about to only download it and not start it. Release Notes: - N/A --- crates/language/src/language_registry.rs | 2 +- crates/lsp/src/lsp.rs | 2 +- crates/project/src/lsp_store.rs | 9 --------- 3 files changed, 2 insertions(+), 11 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index a65d20019f..dcce78d6ce 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -833,7 +833,7 @@ impl LanguageRegistry { ) -> Option { let server_id = self.state.write().next_language_server_id(); log::info!( - "starting language server {:?}, path: {root_path:?}, id: {server_id}", + "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", adapter.name.0 ); diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 0612917575..21671cd0b1 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -272,7 +272,7 @@ impl LanguageServer { }; log::info!( - "starting language server. binary path: {:?}, working directory: {:?}, args: {:?}", + "starting language server process. 
binary path: {:?}, working directory: {:?}, args: {:?}", binary.path, working_dir, &binary.arguments diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 3b6b9ebb0a..dbc7538969 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -26,7 +26,6 @@ use gpui::{ Task, WeakModel, }; use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; -use itertools::Itertools; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, @@ -4489,14 +4488,6 @@ impl LspStore { ); } - log::info!( - "starting language servers for {language}: {adapters}", - adapters = enabled_lsp_adapters - .iter() - .map(|adapter| adapter.name.0.as_ref()) - .join(", ") - ); - for adapter in &enabled_lsp_adapters { self.start_language_server(worktree, adapter.clone(), language.clone(), cx); } From 9407d86ce615fd13589f264e4c595a328effa0f1 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 11 Sep 2024 13:33:42 -0400 Subject: [PATCH 052/270] project: Use login shell to get environment per project (#17717) This is a follow-up to #17075 to spawn a login shell when getting the environment for projects. The reason why we didn't do it before is that we only used the environment for certain language servers and not a lot of other things, like tasks. But with #17075 we now use the project more often and use it as the _base_ environment for tasks/terminals. Before the change, terminals and tasks would inherit the Zed process' environment, including PATH and so on. After the change, we would set the environment, overwriting the PATH instead of merging. But the non-login shell environment is a subset of the login-shell environment. Release Notes: - Fixed environment variables used per project in terminals/tasks overwriting the base environment and not making use of a login-shell environment. 
--- crates/project/src/environment.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index b74b577b39..9742b8b6d5 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -219,7 +219,7 @@ async fn load_shell_environment( ); let output = smol::process::Command::new(&shell) - .args(["-i", "-c", &command]) + .args(["-l", "-i", "-c", &command]) .envs(direnv_environment) .output() .await From 3a6a29f1173e3cc1059d2220985a53581864388c Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 11 Sep 2024 16:13:17 -0400 Subject: [PATCH 053/270] vim: Fix inline completions showing up in normal mode (#17727) Booleans are hard. Release Notes: - Fixed inline completions showing up in Vim normal mode. --- crates/editor/src/editor.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f750abd95c..e50bf67ab0 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4975,9 +4975,10 @@ impl Editor { let cursor = self.selections.newest_anchor().head(); let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + if !user_requested - && self.enable_inline_completions - && !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx) + && (!self.enable_inline_completions + || !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx)) { self.discard_inline_completion(false, cx); return None; From 25b6e43b0f8faf3b24b4ab4ee02456d3933539ce Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 11 Sep 2024 16:22:10 -0400 Subject: [PATCH 054/270] bump eslint memory usage (#17724) Release Notes: - Increased memory limit for eslint to reduce crashes --- crates/languages/src/typescript.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/typescript.rs 
b/crates/languages/src/typescript.rs index 185c10be44..9b6d41451f 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -58,7 +58,11 @@ fn typescript_server_binary_arguments(server_path: &Path) -> Vec { } fn eslint_server_binary_arguments(server_path: &Path) -> Vec { - vec![server_path.into(), "--stdio".into()] + vec![ + "--max-old-space-size=8192".into(), + server_path.into(), + "--stdio".into(), + ] } pub struct TypeScriptLspAdapter { From 092f29d3944f969eea6175f6b654c7832ecdeceb Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Sep 2024 09:11:19 -0400 Subject: [PATCH 055/270] Use a bigger prefix for numeric sorting (#17752) Release Notes: - Fixed sorting of files with YYYYmmddHHMMSS prefix --- crates/util/src/util.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 40a5cf6212..a161b8bac2 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -644,7 +644,7 @@ impl RangeExt for RangeInclusive { /// This is useful for turning regular alphanumerically sorted sequences as `1-abc, 10, 11-def, .., 2, 21-abc` /// into `1-abc, 2, 10, 11-def, .., 21-abc` #[derive(Debug, PartialEq, Eq)] -pub struct NumericPrefixWithSuffix<'a>(Option, &'a str); +pub struct NumericPrefixWithSuffix<'a>(Option, &'a str); impl<'a> NumericPrefixWithSuffix<'a> { pub fn from_numeric_prefixed_str(str: &'a str) -> Self { From 9db68ee6ae0a5520ec44b9281a1eb0bd48eab9c9 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 12 Sep 2024 09:47:25 -0400 Subject: [PATCH 056/270] lsp: Use project-local settings if available (#17753) Release Notes: - Changed built-in language support (Rust, Go, C, YAML, ...) to lookup language-server specific settings locally in project directory first before falling back to global value. 
--------- Co-authored-by: Bennet --- crates/languages/src/c.rs | 8 ++------ crates/languages/src/go.rs | 8 ++------ crates/languages/src/python.rs | 10 ++++------ crates/languages/src/rust.rs | 8 ++------ crates/languages/src/tailwind.rs | 15 +++++---------- crates/languages/src/typescript.rs | 13 ++++--------- crates/languages/src/vtsls.rs | 24 ++++++------------------ crates/languages/src/yaml.rs | 12 ++++-------- crates/project/src/lsp_store.rs | 19 ++++++++++++++++++- 9 files changed, 47 insertions(+), 70 deletions(-) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index ea11b4e0d0..243f61b084 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -5,8 +5,7 @@ use gpui::AsyncAppContext; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use lsp::LanguageServerBinary; -use project::project_settings::{BinarySettings, ProjectSettings}; -use settings::Settings; +use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use smol::fs::{self, File}; use std::{any::Any, env::consts, path::PathBuf, sync::Arc}; use util::{fs::remove_matching, maybe, ResultExt}; @@ -29,10 +28,7 @@ impl super::LspAdapter for CLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a528f4f70c..55850411ca 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -5,10 +5,9 @@ use gpui::{AppContext, AsyncAppContext, Task}; use http_client::github::latest_github_release; pub use language::*; use lsp::LanguageServerBinary; -use project::project_settings::{BinarySettings, ProjectSettings}; +use project::{lsp_store::language_server_settings, 
project_settings::BinarySettings}; use regex::Regex; use serde_json::json; -use settings::Settings; use smol::{fs, process}; use std::{ any::Any, @@ -71,10 +70,7 @@ impl super::LspAdapter for GoLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index f9baed63fb..a0005e6f97 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -5,9 +5,9 @@ use gpui::AsyncAppContext; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use serde_json::Value; -use settings::Settings; + use std::{ any::Any, borrow::Cow, @@ -177,13 +177,11 @@ impl LspAdapter for PythonLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + adapter: &Arc, cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(adapter.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() }) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 46b6ce475d..a32b4f55da 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -7,9 +7,8 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use language_settings::all_language_settings; use lsp::LanguageServerBinary; -use project::project_settings::{BinarySettings, ProjectSettings}; +use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use regex::Regex; -use 
settings::Settings; use smol::fs::{self, File}; use std::{ any::Any, @@ -40,10 +39,7 @@ impl LspAdapter for RustLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 39ccc8afa1..524e4ce846 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -6,9 +6,8 @@ use gpui::AsyncAppContext; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use serde_json::{json, Value}; -use settings::Settings; use smol::fs; use std::{ any::Any, @@ -53,14 +52,12 @@ impl LspAdapter for TailwindLspAdapter { async fn check_if_user_installed( &self, - _delegate: &dyn LspAdapterDelegate, + delegate: &dyn LspAdapterDelegate, cx: &AsyncAppContext, ) -> Option { let configured_binary = cx .update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate, Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -171,13 +168,11 @@ impl LspAdapter for TailwindLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + delegate: &Arc, cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 9b6d41451f..14b6303f5c 100644 --- a/crates/languages/src/typescript.rs +++ 
b/crates/languages/src/typescript.rs @@ -8,10 +8,9 @@ use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion}; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use project::ContextProviderWithTasks; use serde_json::{json, Value}; -use settings::Settings; use smol::{fs, io::BufReader, stream::StreamExt}; use std::{ any::Any, @@ -236,13 +235,11 @@ impl LspAdapter for TypeScriptLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + delegate: &Arc, cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.initialization_options.clone()) })?; if let Some(options) = override_options { @@ -334,9 +331,7 @@ impl LspAdapter for EsLintLspAdapter { let workspace_root = delegate.worktree_root_path(); let eslint_user_settings = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 3dca82688c..744405642d 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -5,9 +5,8 @@ use gpui::AsyncAppContext; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; -use project::project_settings::{BinarySettings, ProjectSettings}; +use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use serde_json::{json, Value}; -use settings::{Settings, SettingsLocation}; use std::{ any::Any, ffi::OsString, @@ -75,10 
+74,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { @@ -270,26 +266,18 @@ impl LspAdapter for VtslsLspAdapter { async fn workspace_configuration( self: Arc, - adapter: &Arc, + delegate: &Arc, cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - ProjectSettings::get( - Some(SettingsLocation { - worktree_id: adapter.worktree_id(), - path: adapter.worktree_root_path(), - }), - cx, - ) - .lsp - .get(SERVER_NAME) - .and_then(|s| s.initialization_options.clone()) + language_server_settings(delegate.as_ref(), SERVER_NAME, cx) + .and_then(|s| s.initialization_options.clone()) })?; if let Some(options) = override_options { return Ok(options); } let mut initialization_options = self - .initialization_options(adapter) + .initialization_options(delegate) .await .map(|o| o.unwrap())?; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 51a9913b24..b75b3c7226 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -7,7 +7,7 @@ use language::{ }; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use serde_json::Value; use settings::{Settings, SettingsLocation}; use smol::fs; @@ -44,14 +44,12 @@ impl LspAdapter for YamlLspAdapter { async fn check_if_user_installed( &self, - _delegate: &dyn LspAdapterDelegate, + delegate: &dyn LspAdapterDelegate, cx: &AsyncAppContext, ) -> Option { let configured_binary = cx .update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate, Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -147,9 +145,7 @@ impl 
LspAdapter for YamlLspAdapter { let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); let project_options = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.initialization_options.clone()) })?; if let Some(override_options) = project_options { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index dbc7538969..307e86de45 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3,7 +3,7 @@ use crate::{ environment::ProjectEnvironment, lsp_command::{self, *}, lsp_ext_command, - project_settings::ProjectSettings, + project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, @@ -7035,6 +7035,23 @@ impl HttpClient for BlockedHttpClient { None } } + +pub fn language_server_settings<'a, 'b: 'a>( + delegate: &'a dyn LspAdapterDelegate, + language: &str, + cx: &'b AppContext, +) -> Option<&'a LspSettings> { + ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: delegate.worktree_root_path(), + }), + cx, + ) + .lsp + .get(language) +} + #[async_trait] impl LspAdapterDelegate for ProjectLspAdapterDelegate { fn show_notification(&self, message: &str, cx: &mut AppContext) { From 02d5f320ad621e92cafaa7075f61e6496e12cd98 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 12 Sep 2024 10:02:45 -0400 Subject: [PATCH 057/270] lsp: Fix initialization_options being used as workspace configuration (#17757) Release Notes: - Fixed user-configured `initialization_options` being passed as `workspace/Configuration` for the vtsls, TypeScript, and YAML language servers. 
Co-authored-by: Bennet --- crates/languages/src/typescript.rs | 2 +- crates/languages/src/vtsls.rs | 17 +++-------------- crates/languages/src/yaml.rs | 2 +- 3 files changed, 5 insertions(+), 16 deletions(-) diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 14b6303f5c..cc52df2922 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -240,7 +240,7 @@ impl LspAdapter for TypeScriptLspAdapter { ) -> Result { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) - .and_then(|s| s.initialization_options.clone()) + .and_then(|s| s.settings.clone()) })?; if let Some(options) = override_options { return Ok(options); diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 744405642d..f3b46d2685 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -13,7 +13,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::{maybe, merge_json_value_into, ResultExt}; +use util::{maybe, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -271,20 +271,9 @@ impl LspAdapter for VtslsLspAdapter { ) -> Result { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), SERVER_NAME, cx) - .and_then(|s| s.initialization_options.clone()) + .and_then(|s| s.settings.clone()) })?; - if let Some(options) = override_options { - return Ok(options); - } - let mut initialization_options = self - .initialization_options(delegate) - .await - .map(|o| o.unwrap())?; - - if let Some(override_options) = override_options { - merge_json_value_into(override_options, &mut initialization_options) - } - Ok(initialization_options) + Ok(override_options.unwrap_or_default()) } fn language_ids(&self) -> HashMap { diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index b75b3c7226..06360847ac 100644 --- 
a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -146,7 +146,7 @@ impl LspAdapter for YamlLspAdapter { let project_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) - .and_then(|s| s.initialization_options.clone()) + .and_then(|s| s.settings.clone()) })?; if let Some(override_options) = project_options { merge_json_value_into(override_options, &mut options); From b341079d8a0bd8293f0fb0c479a2af5f47476f6e Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Sep 2024 11:23:37 -0400 Subject: [PATCH 058/270] Fix maxTsServerMemory (#17758) Release Notes: - N/A --------- Co-authored-by: Thorsten Ball --- crates/languages/src/vtsls.rs | 19 +++++++++++++++---- docs/src/languages/typescript.md | 2 +- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index f3b46d2685..9499b5c54f 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -222,9 +222,6 @@ impl LspAdapter for VtslsLspAdapter { "suggest": { "completeFunctionCalls": true }, - "tsserver": { - "maxTsServerMemory": 8092 - }, "inlayHints": { "parameterNames": { "enabled": "all", @@ -273,7 +270,21 @@ impl LspAdapter for VtslsLspAdapter { language_server_settings(delegate.as_ref(), SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; - Ok(override_options.unwrap_or_default()) + + if let Some(options) = override_options { + return Ok(options); + } + + let config = serde_json::json!({ + "tsserver": { + "maxTsServerMemory": 8092 + }, + }); + + Ok(serde_json::json!({ + "typescript": config, + "javascript": config + })) } fn language_ids(&self) -> HashMap { diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index 12529500d8..bfe63c5b2c 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -52,7 +52,7 @@ Prettier will also be used for TypeScript files by default. 
To disable this: { "lsp": { "vtsls": { - "initialization_options": { + "settings": { // For TypeScript: "typescript": { "tsserver": { "maxTsServerMemory": 16184 } }, // For JavaScript: From 0043b0d9579fbc17e1362964a4f3922ebe1605eb Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 12 Sep 2024 11:32:24 -0400 Subject: [PATCH 059/270] editor: Render documentation popovers using UI font (#17761) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the documentation popovers to render non-code using the UI font: Screenshot 2024-09-12 at 11 10 46 AM Screenshot 2024-09-12 at 11 21 12 AM Requested by @davidbarsky. Release Notes: - Changed documentation popovers to render Markdown prose using the UI font instead of the buffer font. Code blocks still using the buffer font. --- crates/editor/src/hover_popover.rs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 0dc2f09839..adbb5899ff 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -518,19 +518,22 @@ async fn parse_blocks( let rendered_block = cx .new_view(|cx| { let settings = ThemeSettings::get_global(cx); + let ui_font_family = settings.ui_font.family.clone(); let buffer_font_family = settings.buffer_font.family.clone(); - let mut base_style = cx.text_style(); - base_style.refine(&TextStyleRefinement { - font_family: Some(buffer_font_family.clone()), + + let mut base_text_style = cx.text_style(); + base_text_style.refine(&TextStyleRefinement { + font_family: Some(ui_font_family.clone()), color: Some(cx.theme().colors().editor_foreground), ..Default::default() }); let markdown_style = MarkdownStyle { - base_text_style: base_style, - code_block: StyleRefinement::default().mt(rems(1.)).mb(rems(1.)), + base_text_style, + code_block: StyleRefinement::default().my(rems(1.)).font_buffer(cx), inline_code: 
TextStyleRefinement { background_color: Some(cx.theme().colors().background), + font_family: Some(buffer_font_family), ..Default::default() }, rule_color: Color::Muted.color(cx), From f39c175bd3a45a508b370e80492b8539abfa41e2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:52:59 -0400 Subject: [PATCH 060/270] Update Rust crate serde_json_lenient to 0.2 (#17732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [serde_json_lenient](https://redirect.github.com/google/serde_json_lenient) | workspace.dependencies | minor | `0.1` -> `0.2` | --- ### Release Notes
google/serde_json_lenient (serde_json_lenient) ### [`v0.2.1`](https://redirect.github.com/google/serde_json_lenient/releases/tag/v0.2.1) [Compare Source](https://redirect.github.com/google/serde_json_lenient/compare/v0.2.0...v0.2.1) - Fixed newline handling ([#​20](https://redirect.github.com/google/serde_json_lenient/issues/20)) ### [`v0.2.0`](https://redirect.github.com/google/serde_json_lenient/releases/tag/v0.2.0) [Compare Source](https://redirect.github.com/google/serde_json_lenient/compare/v0.1.8...v0.2.0) - Merged from upstream `serde_json` - Introduce cfg!(parse_negative_zero_as_int) to create a build-time option to parse -0 the same as other JSON parsers. ([#​16](https://redirect.github.com/google/serde_json_lenient/issues/16)) - Add an option to control escaped newlines separately from other control ([#​18](https://redirect.github.com/google/serde_json_lenient/issues/18))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2027c5421f..36691e72ab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9928,9 +9928,9 @@ dependencies = [ [[package]] name = "serde_json_lenient" -version = "0.1.8" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc61c66b53a4035fcce237ef38043f4b2f0ebf918fd0e69541a5166104065581" +checksum = "a5d0bae483150302560d7cb52e7932f39b69a6fbdd099e48d33ef060a8c9c078" dependencies = [ "indexmap 2.4.0", "itoa", diff --git a/Cargo.toml b/Cargo.toml index ea8284ccb1..23b17fd291 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -393,7 +393,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } serde_derive = { version = "1.0", features = ["deserialize_in_place"] } serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] } -serde_json_lenient = { version = "0.1", features = [ +serde_json_lenient = { version = "0.2", features = [ "preserve_order", "raw_value", ] } From 6841f7b9d792f6f89ca1b1bcbfca9dc763b66531 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:53:31 -0400 Subject: [PATCH 061/270] Update Python to v3.12.6 (#17728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains 
the following updates: | Package | Type | Update | Change | |---|---|---|---| | [python](https://redirect.github.com/containerbase/python-prebuild) | dependencies | patch | `3.12.5` -> `3.12.6` | --- ### Release Notes
containerbase/python-prebuild (python) ### [`v3.12.6`](https://redirect.github.com/containerbase/python-prebuild/releases/tag/3.12.6) [Compare Source](https://redirect.github.com/containerbase/python-prebuild/compare/3.12.5...3.12.6) ##### Bug Fixes - **deps:** update dependency python to v3.12.6
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- script/update_top_ranking_issues/poetry.lock | 4 ++-- script/update_top_ranking_issues/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/script/update_top_ranking_issues/poetry.lock b/script/update_top_ranking_issues/poetry.lock index ae2120c741..a85844e645 100644 --- a/script/update_top_ranking_issues/poetry.lock +++ b/script/update_top_ranking_issues/poetry.lock @@ -529,5 +529,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "3.12.5" -content-hash = "3e6aa4dc758eb933f7e2d1a305d1e397b13a960ac4846ef54c5a11b906b77015" +python-versions = "3.12.6" +content-hash = "7827704e06a8c195297507e0d05e7a7c3843ed299bd353f31570ee4c435c6896" diff --git a/script/update_top_ranking_issues/pyproject.toml b/script/update_top_ranking_issues/pyproject.toml index cb92ce9c53..15d8346bb9 100644 --- a/script/update_top_ranking_issues/pyproject.toml +++ b/script/update_top_ranking_issues/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] mypy = "1.6.0" PyGithub = "1.55" -python = "3.12.5" +python = "3.12.6" pytz = "2022.1" typer = "0.9.0" types-pytz = "2023.3.1.1" From bba380e41ac9d09b36f63abdaa13477e727c1d3a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:06:38 -0400 Subject: [PATCH 062/270] docs: Add copywriting tweaks to the Vim page (#17766) Quick writing refinements as 
we displayed this docs over at RustConf. Namely: - Removal of "here" links - Making link anchors generally bigger - Adding commas where suitable - Capitalizing "Vim" (although "vim mode" is still lowercased) --- Release Notes: - N/A --- docs/src/vim.md | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/docs/src/vim.md b/docs/src/vim.md index d4e41b5819..777534813f 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -12,7 +12,7 @@ So, Zed's vim mode does not replicate Vim one-to-one, but it meshes Vim's modal ## Enabling and disabling vim mode -When you first open Zed, a checkbox will appear on the welcome screen, allowing you to enable vim mode. +When you first open Zed, you'll see a checkbox on the welcome screen that allows you to enable vim mode. If you missed this, you can toggle vim mode on or off anytime by opening the command palette and using the workspace command `toggle vim mode`. @@ -83,7 +83,7 @@ ctrl-x ctrl-z Hides all suggestions :Ext[ensions] Open the extensions window ``` -Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like Javascript. +Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like JavaScript. Vim mode emulates visual block mode using Zed's multiple cursor support. This again leads to some differences, but is much more powerful. @@ -129,11 +129,13 @@ For vim-specific shortcuts, you may find the following template a good place to If you would like to emulate vim's `map` (`nmap` etc.) 
commands you can bind to the [`workspace::SendKeystrokes`](./key-bindings.md#remapping-keys) action in the correct context. -You can see the bindings that are enabled by default in vim mode [here](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). +Check out the [bindings that are enabled by default in vim mode](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). ### Contexts -Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing you're in the `"Workspace"` location is at the top, containing a `"Pane"` which contains an `"Editor"`. Contexts are matched only on one level at a time. So it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. +Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing, you're in the `"Workspace"` location, which is at the top, containing a `"Pane"` that contains an `"Editor"`. + +Contexts are matched only on one level at a time. So, it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. Vim mode adds several contexts to the `Editor`: @@ -164,13 +166,13 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. -Additionally, vim mode contains a number of aliases for popular vim commands to ensure that muscle memory works. For example `:w` will save the file. +Additionally, vim mode contains a number of aliases for popular Vim commands to ensure that muscle memory works. For example, `:w` will save the file. 
-We do not (yet) emulate the full power of vim’s command line, in particular, we do not support arguments to commands yet. Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. +We do not (yet) emulate the full power of Vim’s command line, in particular, we do not support arguments to commands yet. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. -Currently supported vim-specific commands: +Currently supported Vim-specific commands: ``` # window management @@ -296,7 +298,7 @@ There are also a few Zed settings that you may also enjoy if you use vim mode: } ``` -If you want to navigate between the editor and docks (terminal, project panel, AI assistant, ...) just like you navigate between splits you can use the following key bindings: +If you want to navigate between the editor and docks (terminal, project panel, AI assistant panel, etc...), just like you navigate between splits, you can use the following key bindings: ```json { @@ -366,4 +368,4 @@ Notably: To help with the transition, the command palette will fix parentheses and replace groups for you when you run `:%s//`. So `%s:/\(a\)(b)/\1/` will be converted into a search for "(a)\(b\)" and a replacement of "$1". -For the full syntax supported by Zed's regex engine see the [regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). +For the full syntax supported by Zed's regex engine [see the regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). 
From 3b37db4140349d4120a4724afa9c41c429efd49b Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:17:59 -0400 Subject: [PATCH 063/270] Improve button copy on database load error toast (#17767) Minimal copywriting improvement as that just happened to me while working on Zed during ReactConf. Release Notes: - N/A --- crates/workspace/src/workspace.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index e6358cfdb9..0d77427794 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4644,7 +4644,7 @@ fn notify_if_database_failed(workspace: WindowHandle, cx: &mut AsyncA |cx| { cx.new_view(|_| { MessageNotification::new("Failed to load the database file.") - .with_click_message("Click to let us know about this error") + .with_click_message("File an issue") .on_click(|cx| cx.open_url(REPORT_ISSUE_URL)) }) }, From 4d26f83d23165d841ebe95f3c1123616b30709ce Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Sep 2024 14:46:08 -0400 Subject: [PATCH 064/270] Revert "settings: Remove auxiliary Content types where possible (#16744)" (#17768) This breaks setting `{"scrollbar": {"show":"never"}}` Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 23 +- crates/call/src/call_settings.rs | 22 +- crates/client/src/client.rs | 50 +-- crates/collab/src/tests/editor_tests.rs | 8 +- crates/collab/src/tests/following_tests.rs | 2 +- crates/collab_ui/src/chat_panel.rs | 2 +- .../src/chat_panel/message_editor.rs | 8 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/collab_ui/src/notification_panel.rs | 2 +- crates/collab_ui/src/panel_settings.rs | 78 ++--- .../src/project_diagnostics_settings.rs | 20 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/editor_settings.rs | 318 +++++++++++------- crates/editor/src/editor_settings_controls.rs | 22 +- crates/editor/src/editor_tests.rs 
| 18 +- crates/editor/src/element.rs | 17 +- crates/extension/src/extension_settings.rs | 13 +- crates/extensions_ui/src/extensions_ui.rs | 2 +- crates/go_to_line/src/cursor_position.rs | 16 +- crates/gpui/src/geometry.rs | 2 - crates/language/src/language_settings.rs | 8 +- crates/languages/src/json.rs | 29 +- crates/outline_panel/src/outline_panel.rs | 16 +- .../src/outline_panel_settings.rs | 76 +++-- crates/performance/src/performance.rs | 184 ---------- crates/project/src/project_settings.rs | 41 ++- crates/project_panel/src/project_panel.rs | 28 +- .../src/project_panel_settings.rs | 92 +++-- crates/recent_projects/src/dev_servers.rs | 3 +- crates/recent_projects/src/ssh_connections.rs | 25 +- crates/repl/src/jupyter_settings.rs | 28 +- crates/tasks_ui/src/settings.rs | 18 +- crates/vim/src/digraph.rs | 2 +- crates/vim/src/normal.rs | 6 +- crates/vim/src/normal/paste.rs | 12 +- crates/vim/src/normal/scroll.rs | 2 +- crates/vim/src/normal/search.rs | 4 +- crates/vim/src/test.rs | 2 +- crates/vim/src/test/vim_test_context.rs | 6 +- crates/vim/src/vim.rs | 32 +- crates/welcome/src/base_keymap_picker.rs | 2 +- crates/welcome/src/base_keymap_setting.rs | 6 +- crates/welcome/src/welcome.rs | 2 +- crates/workspace/src/item.rs | 70 ++-- crates/workspace/src/workspace.rs | 8 +- crates/workspace/src/workspace_settings.rs | 132 ++++---- crates/worktree/src/worktree_settings.rs | 43 +-- crates/worktree/src/worktree_tests.rs | 11 +- crates/zed/src/zed.rs | 2 +- 49 files changed, 686 insertions(+), 833 deletions(-) delete mode 100644 crates/performance/src/performance.rs diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 499df7fc29..8063ff4c40 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -116,30 +116,27 @@ impl Drop for MacOsUnmounter { } } -/// Whether or not to automatically check for updates. 
-#[derive(Clone, Copy, JsonSchema, Deserialize, Serialize)] -#[serde(default)] -#[serde(transparent)] struct AutoUpdateSetting(bool); -impl Default for AutoUpdateSetting { - fn default() -> Self { - Self(true) - } -} +/// Whether or not to automatically check for updates. +/// +/// Default: true +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] +#[serde(transparent)] +struct AutoUpdateSettingContent(bool); impl Settings for AutoUpdateSetting { const KEY: Option<&'static str> = Some("auto_update"); - type FileContent = Self; + type FileContent = Option; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let auto_update = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied()) - .unwrap_or(*sources.default); + .find_map(|value| value.copied().flatten()) + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); - Ok(auto_update) + Ok(Self(auto_update.0)) } } diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs index e10b711734..446178ffb9 100644 --- a/crates/call/src/call_settings.rs +++ b/crates/call/src/call_settings.rs @@ -4,20 +4,30 @@ use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -/// Configuration of voice calls in Zed. -#[derive(Clone, Debug, Default, Deserialize, Serialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct CallSettings { - /// Whether the microphone should be muted when joining a channel or a call. pub mute_on_join: bool, - /// Whether your current project should be shared when joining an empty channel. pub share_on_join: bool, } +/// Configuration of voice calls in Zed. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct CallSettingsContent { + /// Whether the microphone should be muted when joining a channel or a call. 
+ /// + /// Default: false + pub mute_on_join: Option, + + /// Whether your current project should be shared when joining an empty channel. + /// + /// Default: true + pub share_on_join: Option, +} + impl Settings for CallSettings { const KEY: Option<&'static str> = Some("calls"); - type FileContent = Self; + type FileContent = CallSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 83eef45be8..8787e2ed96 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -99,26 +99,20 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20); actions!(client, [SignIn, SignOut, Reconnect]); -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] -pub struct ClientSettings { - /// The server to connect to. If the environment variable - /// ZED_SERVER_URL is set, it will override this setting. - pub server_url: String, +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct ClientSettingsContent { + server_url: Option, } -impl Default for ClientSettings { - fn default() -> Self { - Self { - server_url: "https://zed.dev".to_owned(), - } - } +#[derive(Deserialize)] +pub struct ClientSettings { + pub server_url: String, } impl Settings for ClientSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = ClientSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let mut result = sources.json_merge::()?; @@ -130,37 +124,19 @@ impl Settings for ClientSettings { } #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] -pub struct ProxySettings { - /// Set a proxy to use. The proxy protocol is specified by the URI scheme. - /// - /// Supported URI scheme: `http`, `https`, `socks4`, `socks4a`, `socks5`, - /// `socks5h`. `http` will be used when no scheme is specified. 
- /// - /// By default no proxy will be used, or Zed will try get proxy settings from - /// environment variables. - /// - /// Examples: - /// - "proxy": "socks5://localhost:10808" - /// - "proxy": "http://127.0.0.1:10809" - #[schemars(example = "Self::example_1")] - #[schemars(example = "Self::example_2")] - pub proxy: Option, +pub struct ProxySettingsContent { + proxy: Option, } -impl ProxySettings { - fn example_1() -> String { - "http://127.0.0.1:10809".to_owned() - } - fn example_2() -> String { - "socks5://localhost:10808".to_owned() - } +#[derive(Deserialize, Default)] +pub struct ProxySettings { + pub proxy: Option, } impl Settings for ProxySettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = ProxySettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { Ok(Self { diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index a214291752..3f205b7f93 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -2261,11 +2261,11 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA cx_a.update(editor::init); cx_b.update(editor::init); // Turn inline-blame-off by default so no state is transferred without us explicitly doing so - let inline_blame_off_settings = InlineBlameSettings { + let inline_blame_off_settings = Some(InlineBlameSettings { enabled: false, - delay_ms: 0, - min_column: 0, - }; + delay_ms: None, + min_column: None, + }); cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings::(cx, |settings| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 1bc3cd6917..e66b66a1b4 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1649,7 +1649,7 @@ async fn test_following_into_excluded_file( cx.update(|cx| { 
cx.update_global::(|store, cx| { store.update_user_settings::(cx, |settings| { - settings.file_scan_exclusions = vec!["**/.git".to_string()]; + settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]); }); }); }); diff --git a/crates/collab_ui/src/chat_panel.rs b/crates/collab_ui/src/chat_panel.rs index f6e6c7321f..5a79f364ff 100644 --- a/crates/collab_ui/src/chat_panel.rs +++ b/crates/collab_ui/src/chat_panel.rs @@ -1108,7 +1108,7 @@ impl Panel for ChatPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = position, + move |settings, _| settings.dock = Some(position), ); } diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 0b1a2dbe69..028e148cba 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -113,7 +113,9 @@ impl MessageEditor { editor.set_show_indent_guides(false, cx); editor.set_completion_provider(Box::new(MessageEditorCompletionProvider(this))); editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, + MessageEditorSettings::get_global(cx) + .auto_replace_emoji_shortcode + .unwrap_or_default(), ); }); @@ -128,7 +130,9 @@ impl MessageEditor { cx.observe_global::(|view, cx| { view.editor.update(cx, |editor, cx| { editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, + MessageEditorSettings::get_global(cx) + .auto_replace_emoji_shortcode + .unwrap_or_default(), ) }) }) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 3e6483c42d..7270110181 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2813,7 +2813,7 @@ impl Panel for CollabPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = position, + move |settings, _| 
settings.dock = Some(position), ); } diff --git a/crates/collab_ui/src/notification_panel.rs b/crates/collab_ui/src/notification_panel.rs index 326e1f0f5b..33ca5a2952 100644 --- a/crates/collab_ui/src/notification_panel.rs +++ b/crates/collab_ui/src/notification_panel.rs @@ -672,7 +672,7 @@ impl Panel for NotificationPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = position, + move |settings, _| settings.dock = Some(position), ); } diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index a594f023bb..f9851d5797 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -2,84 +2,58 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -use ui::px; use workspace::dock::DockPosition; -#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct CollaborationPanelSettings { - /// Whether to show the panel button in the status bar. pub button: bool, - /// Where to dock the panel. pub dock: DockPosition, - /// Default width of the panel in pixels. pub default_width: Pixels, } -impl Default for CollaborationPanelSettings { - fn default() -> Self { - Self { - button: true, - dock: DockPosition::Left, - default_width: px(240.), - } - } -} - -#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct ChatPanelSettings { - /// Whether to show the panel button in the status bar. pub button: bool, - /// Where to dock the panel. pub dock: DockPosition, - /// Default width of the panel in pixels. 
pub default_width: Pixels, } -impl Default for ChatPanelSettings { - fn default() -> Self { - Self { - button: true, - dock: DockPosition::Right, - default_width: px(240.), - } - } -} - -#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct NotificationPanelSettings { - /// Whether to show the panel button in the status bar. pub button: bool, - /// Where to dock the panel. pub dock: DockPosition, - /// Default width of the panel in pixels. pub default_width: Pixels, } -impl Default for NotificationPanelSettings { - fn default() -> Self { - Self { - button: true, - dock: DockPosition::Right, - default_width: px(380.), - } - } -} - #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(default)] +pub struct PanelSettingsContent { + /// Whether to show the panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Where to dock the panel. + /// + /// Default: left + pub dock: Option, + /// Default width of the panel in pixels. + /// + /// Default: 240 + pub default_width: Option, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] pub struct MessageEditorSettings { /// Whether to automatically replace emoji shortcodes with emoji characters. /// For example: typing `:wave:` gets replaced with `👋`. 
- pub auto_replace_emoji_shortcode: bool, + /// + /// Default: false + pub auto_replace_emoji_shortcode: Option, } impl Settings for CollaborationPanelSettings { const KEY: Option<&'static str> = Some("collaboration_panel"); - type FileContent = Self; + type FileContent = PanelSettingsContent; fn load( sources: SettingsSources, @@ -92,7 +66,7 @@ impl Settings for CollaborationPanelSettings { impl Settings for ChatPanelSettings { const KEY: Option<&'static str> = Some("chat_panel"); - type FileContent = Self; + type FileContent = PanelSettingsContent; fn load( sources: SettingsSources, @@ -105,7 +79,7 @@ impl Settings for ChatPanelSettings { impl Settings for NotificationPanelSettings { const KEY: Option<&'static str> = Some("notification_panel"); - type FileContent = Self; + type FileContent = PanelSettingsContent; fn load( sources: SettingsSources, @@ -118,7 +92,7 @@ impl Settings for NotificationPanelSettings { impl Settings for MessageEditorSettings { const KEY: Option<&'static str> = Some("message_editor"); - type FileContent = Self; + type FileContent = MessageEditorSettings; fn load( sources: SettingsSources, diff --git a/crates/diagnostics/src/project_diagnostics_settings.rs b/crates/diagnostics/src/project_diagnostics_settings.rs index 34739bcd17..55879d0c42 100644 --- a/crates/diagnostics/src/project_diagnostics_settings.rs +++ b/crates/diagnostics/src/project_diagnostics_settings.rs @@ -4,25 +4,23 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(default)] -/// Diagnostics configuration. +#[derive(Deserialize, Debug)] pub struct ProjectDiagnosticsSettings { - /// Whether to show warnings or not by default. pub include_warnings: bool, } -impl Default for ProjectDiagnosticsSettings { - fn default() -> Self { - Self { - include_warnings: true, - } - } +/// Diagnostics configuration. 
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct ProjectDiagnosticsSettingsContent { + /// Whether to show warnings or not by default. + /// + /// Default: true + include_warnings: Option, } impl Settings for ProjectDiagnosticsSettings { const KEY: Option<&'static str> = Some("diagnostics"); - type FileContent = Self; + type FileContent = ProjectDiagnosticsSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e50bf67ab0..4792c6b2cb 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10640,7 +10640,7 @@ impl Editor { let fs = workspace.read(cx).app_state().fs.clone(); let current_show = TabBarSettings::get_global(cx).show; update_settings_file::(fs, cx, move |setting, _| { - setting.show = !current_show; + setting.show = Some(!current_show); }); } @@ -12563,7 +12563,7 @@ impl EditorSnapshot { let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - GitGutterSetting::TrackedFiles + Some(GitGutterSetting::TrackedFiles) ) }); let gutter_settings = EditorSettings::get_global(cx).gutter; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 0532fd7bdf..2614e4ea30 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -3,105 +3,38 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Clone)] pub struct EditorSettings { - /// Whether the cursor blinks in the editor. pub cursor_blink: bool, - /// How to highlight the current line in the editor. 
pub current_line_highlight: CurrentLineHighlight, - /// Whether to show the informational hover box when moving the mouse - /// over symbols in the editor. pub hover_popover_enabled: bool, - /// Whether to pop the completions menu while typing in an editor without - /// explicitly requesting it. pub show_completions_on_input: bool, - /// Whether to display inline and alongside documentation for items in the - /// completions menu. pub show_completion_documentation: bool, - /// The debounce delay before re-querying the language server for completion - /// documentation when not included in original completion list. pub completion_documentation_secondary_query_debounce: u64, - /// Whether to use additional LSP queries to format (and amend) the code after - /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, - /// Toolbar related settings pub toolbar: Toolbar, - /// Scrollbar related settings pub scrollbar: Scrollbar, - /// Gutter related settings pub gutter: Gutter, - /// Whether the editor will scroll beyond the last line. pub scroll_beyond_last_line: ScrollBeyondLastLine, - /// The number of lines to keep above/below the cursor when auto-scrolling. pub vertical_scroll_margin: f32, - /// Scroll sensitivity multiplier. This multiplier is applied - /// to both the horizontal and vertical delta values while scrolling. pub scroll_sensitivity: f32, - /// Whether the line numbers on editors gutter are relative or not. pub relative_line_numbers: bool, - /// When to populate a new search's query based on the text under the cursor. pub seed_search_query_from_cursor: SeedQuerySetting, pub use_smartcase_search: bool, - /// The key to use for adding multiple cursors pub multi_cursor_modifier: MultiCursorModifier, - /// Hide the values of variables in `private` files, as defined by the - /// private_files setting. 
This only changes the visual representation, - /// the values are still present in the file and can be selected / copied / pasted pub redact_private_values: bool, - - /// How many lines to expand the multibuffer excerpts by default pub expand_excerpt_lines: u32, pub middle_click_paste: bool, - /// What to do when multibuffer is double clicked in some of its excerpts - /// (parts of singleton buffers). #[serde(default)] pub double_click_in_multibuffer: DoubleClickInMultibuffer, - /// Whether the editor search results will loop pub search_wrap: bool, #[serde(default)] pub search: SearchSettings, - /// Show method signatures in the editor, when inside parentheses. pub auto_signature_help: bool, - /// Whether to show the signature help after completion or a bracket pair inserted. - /// If `auto_signature_help` is enabled, this setting will be treated as enabled also. pub show_signature_help_after_edits: bool, - /// Jupyter REPL settings. pub jupyter: Jupyter, } -impl Default for EditorSettings { - fn default() -> Self { - Self { - cursor_blink: true, - current_line_highlight: CurrentLineHighlight::All, - hover_popover_enabled: true, - show_completions_on_input: true, - show_completion_documentation: true, - completion_documentation_secondary_query_debounce: 300, - use_on_type_format: true, - toolbar: Default::default(), - scrollbar: Default::default(), - gutter: Default::default(), - scroll_beyond_last_line: ScrollBeyondLastLine::OnePage, - vertical_scroll_margin: 3., - scroll_sensitivity: 1.0, - relative_line_numbers: false, - seed_search_query_from_cursor: SeedQuerySetting::Always, - multi_cursor_modifier: MultiCursorModifier::Alt, - redact_private_values: false, - expand_excerpt_lines: 3, - double_click_in_multibuffer: DoubleClickInMultibuffer::Select, - search_wrap: true, - auto_signature_help: false, - show_signature_help_after_edits: true, - jupyter: Default::default(), - use_smartcase_search: false, - middle_click_paste: true, - search: 
SearchSettings::default(), - } - } -} #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CurrentLineHighlight { @@ -139,93 +72,48 @@ pub enum DoubleClickInMultibuffer { Open, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[derive(Debug, Clone, Deserialize)] pub struct Jupyter { /// Whether the Jupyter feature is enabled. + /// + /// Default: true pub enabled: bool, } -impl Default for Jupyter { - fn default() -> Self { - Self { enabled: true } - } +#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct JupyterContent { + /// Whether the Jupyter feature is enabled. + /// + /// Default: true + pub enabled: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -#[serde(default)] pub struct Toolbar { - /// Whether to display breadcrumbs in the editor toolbar. pub breadcrumbs: bool, - /// Whether to display quick action buttons in the editor toolbar. pub quick_actions: bool, - /// Whether to show the selections menu in the editor toolbar pub selections_menu: bool, } -impl Default for Toolbar { - fn default() -> Self { - Self { - breadcrumbs: true, - quick_actions: true, - selections_menu: true, - } - } -} - #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct Scrollbar { - /// When to show the scrollbar in the editor. pub show: ShowScrollbar, - /// Whether to show git diff indicators in the scrollbar. pub git_diff: bool, - /// Whether to show buffer search result indicators in the scrollbar. pub selected_symbol: bool, - /// Whether to show selected symbol occurrences in the scrollbar. pub search_results: bool, - /// Whether to show diagnostic indicators in the scrollbar. pub diagnostics: bool, - /// Whether to show cursor positions in the scrollbar. 
pub cursors: bool, } -impl Default for Scrollbar { - fn default() -> Self { - Self { - show: ShowScrollbar::Auto, - git_diff: true, - selected_symbol: true, - search_results: true, - diagnostics: true, - cursors: true, - } - } -} - -/// Gutter-related settings. #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -#[serde(default)] pub struct Gutter { - /// Whether to show line numbers in the gutter. pub line_numbers: bool, - /// Whether to show code action buttons in the gutter. pub code_actions: bool, - /// Whether to show runnable buttons in the gutter. pub runnables: bool, - /// Whether to show fold buttons in the gutter. pub folds: bool, } -impl Default for Gutter { - fn default() -> Self { - Self { - line_numbers: true, - code_actions: true, - runnables: true, - folds: true, - } - } -} - /// When to show the scrollbar in the editor. /// /// Default: auto @@ -283,6 +171,188 @@ pub struct SearchSettings { pub regex: bool, } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct EditorSettingsContent { + /// Whether the cursor blinks in the editor. + /// + /// Default: true + pub cursor_blink: Option, + /// How to highlight the current line in the editor. + /// + /// Default: all + pub current_line_highlight: Option, + /// Whether to show the informational hover box when moving the mouse + /// over symbols in the editor. + /// + /// Default: true + pub hover_popover_enabled: Option, + + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. + /// + /// Default: true + pub show_completions_on_input: Option, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. + /// + /// Default: true + pub show_completion_documentation: Option, + /// The debounce delay before re-querying the language server for completion + /// documentation when not included in original completion list. 
+ /// + /// Default: 300 ms + pub completion_documentation_secondary_query_debounce: Option, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + /// + /// Default: true + pub use_on_type_format: Option, + /// Toolbar related settings + pub toolbar: Option, + /// Scrollbar related settings + pub scrollbar: Option, + /// Gutter related settings + pub gutter: Option, + /// Whether the editor will scroll beyond the last line. + /// + /// Default: one_page + pub scroll_beyond_last_line: Option, + /// The number of lines to keep above/below the cursor when auto-scrolling. + /// + /// Default: 3. + pub vertical_scroll_margin: Option, + /// Scroll sensitivity multiplier. This multiplier is applied + /// to both the horizontal and vertical delta values while scrolling. + /// + /// Default: 1.0 + pub scroll_sensitivity: Option, + /// Whether the line numbers on editors gutter are relative or not. + /// + /// Default: false + pub relative_line_numbers: Option, + /// When to populate a new search's query based on the text under the cursor. + /// + /// Default: always + pub seed_search_query_from_cursor: Option, + pub use_smartcase_search: Option, + /// The key to use for adding multiple cursors + /// + /// Default: alt + pub multi_cursor_modifier: Option, + /// Hide the values of variables in `private` files, as defined by the + /// private_files setting. 
This only changes the visual representation, + /// the values are still present in the file and can be selected / copied / pasted + /// + /// Default: false + pub redact_private_values: Option, + + /// How many lines to expand the multibuffer excerpts by default + /// + /// Default: 3 + pub expand_excerpt_lines: Option, + + /// Whether to enable middle-click paste on Linux + /// + /// Default: true + pub middle_click_paste: Option, + + /// What to do when multibuffer is double clicked in some of its excerpts + /// (parts of singleton buffers). + /// + /// Default: select + pub double_click_in_multibuffer: Option, + /// Whether the editor search results will loop + /// + /// Default: true + pub search_wrap: Option, + + /// Defaults to use when opening a new buffer and project search items. + /// + /// Default: nothing is enabled + pub search: Option, + + /// Whether to automatically show a signature help pop-up or not. + /// + /// Default: false + pub auto_signature_help: Option, + + /// Whether to show the signature help pop-up after completions or bracket pairs inserted. + /// + /// Default: true + pub show_signature_help_after_edits: Option, + + /// Jupyter REPL settings. + pub jupyter: Option, +} + +// Toolbar related settings +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ToolbarContent { + /// Whether to display breadcrumbs in the editor toolbar. + /// + /// Default: true + pub breadcrumbs: Option, + /// Whether to display quick action buttons in the editor toolbar. + /// + /// Default: true + pub quick_actions: Option, + + /// Whether to show the selections menu in the editor toolbar + /// + /// Default: true + pub selections_menu: Option, +} + +/// Scrollbar related settings +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct ScrollbarContent { + /// When to show the scrollbar in the editor. 
+ /// + /// Default: auto + pub show: Option, + /// Whether to show git diff indicators in the scrollbar. + /// + /// Default: true + pub git_diff: Option, + /// Whether to show buffer search result indicators in the scrollbar. + /// + /// Default: true + pub search_results: Option, + /// Whether to show selected symbol occurrences in the scrollbar. + /// + /// Default: true + pub selected_symbol: Option, + /// Whether to show diagnostic indicators in the scrollbar. + /// + /// Default: true + pub diagnostics: Option, + /// Whether to show cursor positions in the scrollbar. + /// + /// Default: true + pub cursors: Option, +} + +/// Gutter related settings +#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct GutterContent { + /// Whether to show line numbers in the gutter. + /// + /// Default: true + pub line_numbers: Option, + /// Whether to show code action buttons in the gutter. + /// + /// Default: true + pub code_actions: Option, + /// Whether to show runnable buttons in the gutter. + /// + /// Default: true + pub runnables: Option, + /// Whether to show fold buttons in the gutter. 
+ /// + /// Default: true + pub folds: Option, +} + impl EditorSettings { pub fn jupyter_enabled(cx: &AppContext) -> bool { EditorSettings::get_global(cx).jupyter.enabled @@ -292,7 +362,7 @@ impl EditorSettings { impl Settings for EditorSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = EditorSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/editor/src/editor_settings_controls.rs b/crates/editor/src/editor_settings_controls.rs index 36d471dfa2..bbe1b00324 100644 --- a/crates/editor/src/editor_settings_controls.rs +++ b/crates/editor/src/editor_settings_controls.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use gpui::{AppContext, FontFeatures, FontWeight}; -use project::project_settings::ProjectSettings; +use project::project_settings::{InlineBlameSettings, ProjectSettings}; use settings::{EditableSettingControl, Settings}; use theme::{FontFamilyCache, ThemeSettings}; use ui::{ @@ -296,7 +296,14 @@ impl EditableSettingControl for InlineGitBlameControl { value: Self::Value, _cx: &AppContext, ) { - settings.git.inline_blame.enabled = value; + if let Some(inline_blame) = settings.git.inline_blame.as_mut() { + inline_blame.enabled = value; + } else { + settings.git.inline_blame = Some(InlineBlameSettings { + enabled: false, + ..Default::default() + }); + } } } @@ -342,7 +349,14 @@ impl EditableSettingControl for LineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - settings.gutter.line_numbers = value; + if let Some(gutter) = settings.gutter.as_mut() { + gutter.line_numbers = Some(value); + } else { + settings.gutter = Some(crate::editor_settings::GutterContent { + line_numbers: Some(value), + ..Default::default() + }); + } } } @@ -388,7 +402,7 @@ impl EditableSettingControl for RelativeLineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - settings.relative_line_numbers = value; + settings.relative_line_numbers = Some(value); } } diff --git a/crates/editor/src/editor_tests.rs 
b/crates/editor/src/editor_tests.rs index 7d42dc7a85..0b1e0385de 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6964,7 +6964,7 @@ async fn test_handle_input_for_show_signature_help_auto_signature_help_true( cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = true; + settings.auto_signature_help = Some(true); }); }); }); @@ -7105,8 +7105,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = false; - settings.show_signature_help_after_edits = false; + settings.auto_signature_help = Some(false); + settings.show_signature_help_after_edits = Some(false); }); }); }); @@ -7232,8 +7232,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = false; - settings.show_signature_help_after_edits = true; + settings.auto_signature_help = Some(false); + settings.show_signature_help_after_edits = Some(true); }); }); }); @@ -7274,8 +7274,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = true; - settings.show_signature_help_after_edits = false; + settings.auto_signature_help = Some(true); + settings.show_signature_help_after_edits = Some(false); }); }); }); @@ -7318,7 +7318,7 @@ async fn test_signature_help(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = true; + settings.auto_signature_help = Some(true); }); }); }); @@ -7759,7 +7759,7 @@ async fn 
test_completion(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.show_completions_on_input = false; + settings.show_completions_on_input = Some(false); }); }) }); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 1c0a325b76..d4f5c565c2 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1283,7 +1283,10 @@ impl EditorElement { .row, ); - let git_gutter_setting = ProjectSettings::get_global(cx).git.git_gutter; + let git_gutter_setting = ProjectSettings::get_global(cx) + .git + .git_gutter + .unwrap_or_default(); let display_hunks = buffer_snapshot .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) @@ -1363,10 +1366,12 @@ impl EditorElement { }; let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS; - let min_column_in_pixels = self.column_pixels( - ProjectSettings::get_global(cx).git.inline_blame.min_column as usize, - cx, - ); + let min_column_in_pixels = ProjectSettings::get_global(cx) + .git + .inline_blame + .and_then(|settings| settings.min_column) + .map(|col| self.column_pixels(col as usize, cx)) + .unwrap_or(px(0.)); let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels; cmp::max(padded_line_end, min_start) @@ -3326,7 +3331,7 @@ impl EditorElement { .unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - GitGutterSetting::TrackedFiles + Some(GitGutterSetting::TrackedFiles) ) }); if show_git_gutter { diff --git a/crates/extension/src/extension_settings.rs b/crates/extension/src/extension_settings.rs index 715dc3ca82..a2ab7ac9cc 100644 --- a/crates/extension/src/extension_settings.rs +++ b/crates/extension/src/extension_settings.rs @@ -6,25 +6,18 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use std::sync::Arc; -#[derive(Deserialize, 
Serialize, Debug, Clone, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] pub struct ExtensionSettings { /// The extensions that should be automatically installed by Zed. /// /// This is used to make functionality provided by extensions (e.g., language support) /// available out-of-the-box. + #[serde(default)] pub auto_install_extensions: HashMap, bool>, + #[serde(default)] pub auto_update_extensions: HashMap, bool>, } -impl Default for ExtensionSettings { - fn default() -> Self { - Self { - auto_install_extensions: HashMap::from_iter([("html".into(), true)]), - auto_update_extensions: Default::default(), - } - } -} impl ExtensionSettings { /// Returns whether the given extension should be auto-installed. pub fn should_auto_install(&self, extension_id: &str) -> bool { diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index b2d6d7f283..f246e3cf4f 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -1000,7 +1000,7 @@ impl ExtensionsPage { this.update_settings::( selection, cx, - |setting, value| *setting = VimModeSetting(value), + |setting, value| *setting = Some(value), ); }), )), diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index de3d1dc74d..63e0f2b079 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -180,10 +180,18 @@ pub(crate) enum LineIndicatorFormat { Long, } +/// Whether or not to automatically check for updates. 
+/// +/// Values: short, long +/// Default: short +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] +#[serde(transparent)] +pub(crate) struct LineIndicatorFormatContent(LineIndicatorFormat); + impl Settings for LineIndicatorFormat { const KEY: Option<&'static str> = Some("line_indicator_format"); - type FileContent = Self; + type FileContent = Option; fn load( sources: SettingsSources, @@ -191,9 +199,9 @@ impl Settings for LineIndicatorFormat { ) -> anyhow::Result { let format = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied()) - .unwrap_or(*sources.default); + .find_map(|value| value.copied().flatten()) + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); - Ok(format) + Ok(format.0) } } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index b203592360..8de9e6f009 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -5,7 +5,6 @@ use core::fmt::Debug; use derive_more::{Add, AddAssign, Div, DivAssign, Mul, Neg, Sub, SubAssign}; use refineable::Refineable; -use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use std::{ cmp::{self, PartialOrd}, @@ -2202,7 +2201,6 @@ impl From for Radians { PartialEq, Serialize, Deserialize, - JsonSchema, )] #[repr(transparent)] pub struct Pixels(pub f32); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 7a6b758a25..e1fcaaba28 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -70,10 +70,10 @@ pub struct LanguageSettings { /// The column at which to soft-wrap lines, for buffers where soft-wrap /// is enabled. pub preferred_line_length: u32, - /// Whether to show wrap guides (vertical rulers) in the editor. 
- /// Setting this to true will show a guide at the 'preferred_line_length' value - /// if softwrap is set to 'preferred_line_length', and will show any - /// additional guides as specified by the 'wrap_guides' setting. + // Whether to show wrap guides (vertical rulers) in the editor. + // Setting this to true will show a guide at the 'preferred_line_length' value + // if softwrap is set to 'preferred_line_length', and will show any + // additional guides as specified by the 'wrap_guides' setting. pub show_wrap_guides: bool, /// Character counts at which to show wrap guides (vertical rulers) in the editor. pub wrap_guides: Vec, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 102eb1ef2f..6b5f74c263 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,13 +7,10 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{ - CodeLabel, Language, LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate, -}; +use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; -use rope::Rope; use serde_json::{json, Value}; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; use smol::{ @@ -205,30 +202,6 @@ impl LspAdapter for JsonLspAdapter { }))) } - async fn label_for_completion( - &self, - item: &lsp::CompletionItem, - language: &Arc, - ) -> Option { - let text = if let Some(description) = item - .label_details - .as_ref() - .and_then(|label_details| label_details.description.as_ref()) - { - format!("{} {}", item.label, description) - } else if let Some(detail) = &item.detail { - format!("{} {}", item.label, detail) - } else { - item.label.clone() - }; - let rope = Rope::from(item.label.as_str()); - let runs = 
language.highlight_text(&rope, 0..item.label.len()); - Some(language::CodeLabel { - text, - runs, - filter_range: 0..item.label.len(), - }) - } async fn workspace_configuration( self: Arc, _: &Arc, diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 361607533b..c5f0187c22 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -24,12 +24,12 @@ use editor::{ use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, uniform_list, Action, AnyElement, AppContext, - AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, EventEmitter, - FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, KeyContext, Model, - MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, SharedString, Stateful, - Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext, - WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, + AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, + EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, + KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, + SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, + VisualContext, WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; @@ -1938,7 +1938,7 @@ impl OutlinePanel { .child( ListItem::new(item_id) .indent_level(depth) - .indent_step_size(settings.indent_size) + .indent_step_size(px(settings.indent_size)) .selected(is_active) .when_some(icon_element, |list_item, icon_element| { list_item.child(h_flex().child(icon_element)) @@ -3801,7 +3801,7 @@ impl Panel 
for OutlinePanel { DockPosition::Left | DockPosition::Bottom => OutlinePanelDockPosition::Left, DockPosition::Right => OutlinePanelDockPosition::Right, }; - settings.dock = dock; + settings.dock = Some(dock); }, ); } diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index a8e51b96c5..e19fc3c008 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -1,5 +1,4 @@ -use anyhow; -use gpui::{px, Pixels}; +use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -11,51 +10,66 @@ pub enum OutlinePanelDockPosition { Right, } -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct OutlinePanelSettings { - /// Whether to show the outline panel button in the status bar. pub button: bool, - /// Customize default width (in pixels) taken by outline panel pub default_width: Pixels, - /// The position of outline panel pub dock: OutlinePanelDockPosition, - /// Whether to show file icons in the outline panel. pub file_icons: bool, - /// Whether to show folder icons or chevrons for directories in the outline panel. pub folder_icons: bool, - /// Whether to show the git status in the outline panel. pub git_status: bool, - /// Amount of indentation (in pixels) for nested items. - pub indent_size: Pixels, - /// Whether to reveal it in the outline panel automatically, - /// when a corresponding project entry becomes active. - /// Gitignored entries are never auto revealed. + pub indent_size: f32, pub auto_reveal_entries: bool, - /// Whether to fold directories automatically - /// when directory has only one directory inside. 
pub auto_fold_dirs: bool, } -impl Default for OutlinePanelSettings { - fn default() -> Self { - Self { - button: true, - default_width: px(240.), - dock: OutlinePanelDockPosition::Left, - file_icons: true, - folder_icons: true, - auto_fold_dirs: true, - auto_reveal_entries: true, - indent_size: px(20.), - git_status: true, - } - } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct OutlinePanelSettingsContent { + /// Whether to show the outline panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Customize default width (in pixels) taken by outline panel + /// + /// Default: 240 + pub default_width: Option, + /// The position of outline panel + /// + /// Default: left + pub dock: Option, + /// Whether to show file icons in the outline panel. + /// + /// Default: true + pub file_icons: Option, + /// Whether to show folder icons or chevrons for directories in the outline panel. + /// + /// Default: true + pub folder_icons: Option, + /// Whether to show the git status in the outline panel. + /// + /// Default: true + pub git_status: Option, + /// Amount of indentation (in pixels) for nested items. + /// + /// Default: 20 + pub indent_size: Option, + /// Whether to reveal it in the outline panel automatically, + /// when a corresponding project entry becomes active. + /// Gitignored entries are never auto revealed. + /// + /// Default: true + pub auto_reveal_entries: Option, + /// Whether to fold directories automatically + /// when directory has only one directory inside. 
+ /// + /// Default: true + pub auto_fold_dirs: Option, } impl Settings for OutlinePanelSettings { const KEY: Option<&'static str> = Some("outline_panel"); - type FileContent = Self; + type FileContent = OutlinePanelSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/performance/src/performance.rs b/crates/performance/src/performance.rs deleted file mode 100644 index db2388c59a..0000000000 --- a/crates/performance/src/performance.rs +++ /dev/null @@ -1,184 +0,0 @@ -use std::time::Instant; - -use anyhow::Result; -use gpui::{ - div, AppContext, InteractiveElement as _, Render, StatefulInteractiveElement as _, - Subscription, ViewContext, VisualContext, -}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore}; -use workspace::{ - ui::{Label, LabelCommon, LabelSize, Tooltip}, - ItemHandle, StatusItemView, Workspace, -}; - -const SHOW_STARTUP_TIME_DURATION: std::time::Duration = std::time::Duration::from_secs(5); - -pub fn init(cx: &mut AppContext) { - PerformanceSettings::register(cx); - - let mut enabled = PerformanceSettings::get_global(cx).show_in_status_bar; - let start_time = Instant::now(); - let mut _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); - - cx.observe_global::(move |cx| { - let new_value = PerformanceSettings::get_global(cx).show_in_status_bar; - if new_value != enabled { - enabled = new_value; - _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); - } - }) - .detach(); -} - -fn toggle_status_bar_items( - enabled: bool, - start_time: Instant, - cx: &mut AppContext, -) -> Option { - for window in cx.windows() { - if let Some(workspace) = window.downcast::() { - workspace - .update(cx, |workspace, cx| { - toggle_status_bar_item(workspace, enabled, start_time, cx); - }) - .ok(); - } - } - - if enabled { - log::info!("performance metrics display enabled"); - Some(cx.observe_new_views::(move |workspace, cx| { - 
toggle_status_bar_item(workspace, true, start_time, cx); - })) - } else { - log::info!("performance metrics display disabled"); - None - } -} - -struct PerformanceStatusBarItem { - display_mode: DisplayMode, -} - -#[derive(Copy, Clone, Debug)] -enum DisplayMode { - StartupTime, - Fps, -} - -impl PerformanceStatusBarItem { - fn new(start_time: Instant, cx: &mut ViewContext) -> Self { - let now = Instant::now(); - let display_mode = if now < start_time + SHOW_STARTUP_TIME_DURATION { - DisplayMode::StartupTime - } else { - DisplayMode::Fps - }; - - let this = Self { display_mode }; - - if let DisplayMode::StartupTime = display_mode { - cx.spawn(|this, mut cx| async move { - let now = Instant::now(); - let remaining_duration = - (start_time + SHOW_STARTUP_TIME_DURATION).saturating_duration_since(now); - cx.background_executor().timer(remaining_duration).await; - this.update(&mut cx, |this, cx| { - this.display_mode = DisplayMode::Fps; - cx.notify(); - }) - .ok(); - }) - .detach(); - } - - this - } -} - -impl Render for PerformanceStatusBarItem { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl gpui::IntoElement { - let text = match self.display_mode { - DisplayMode::StartupTime => cx - .time_to_first_window_draw() - .map_or("Pending".to_string(), |duration| { - format!("{}ms", duration.as_millis()) - }), - DisplayMode::Fps => cx.fps().map_or("".to_string(), |fps| { - format!("{:3} FPS", fps.round() as u32) - }), - }; - - use gpui::ParentElement; - let display_mode = self.display_mode; - div() - .id("performance status") - .child(Label::new(text).size(LabelSize::Small)) - .tooltip(move |cx| match display_mode { - DisplayMode::StartupTime => Tooltip::text("Time to first window draw", cx), - DisplayMode::Fps => cx - .new_view(|cx| { - let tooltip = Tooltip::new("Current FPS"); - if let Some(time_to_first) = cx.time_to_first_window_draw() { - tooltip.meta(format!( - "Time to first window draw: {}ms", - time_to_first.as_millis() - )) - } else { - tooltip - } - }) 
- .into(), - }) - } -} - -impl StatusItemView for PerformanceStatusBarItem { - fn set_active_pane_item( - &mut self, - _active_pane_item: Option<&dyn ItemHandle>, - _cx: &mut gpui::ViewContext, - ) { - // This is not currently used. - } -} - -fn toggle_status_bar_item( - workspace: &mut Workspace, - enabled: bool, - start_time: Instant, - cx: &mut ViewContext, -) { - if enabled { - workspace.status_bar().update(cx, |bar, cx| { - bar.add_right_item( - cx.new_view(|cx| PerformanceStatusBarItem::new(start_time, cx)), - cx, - ) - }); - } else { - workspace.status_bar().update(cx, |bar, cx| { - bar.remove_items_of_type::(cx); - }); - } -} - -/// Configuration of the display of performance details. -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(default)] -pub struct PerformanceSettings { - /// Display the time to first window draw and frame rate in the status bar. - pub show_in_status_bar: bool, -} - -impl Settings for PerformanceSettings { - const KEY: Option<&'static str> = Some("performance"); - - type FileContent = Self; - - fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - sources.json_merge() - } -} diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 3c21b1c5e8..70b2eccf23 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -20,7 +20,6 @@ use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId}; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; #[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] pub struct ProjectSettings { /// Configuration for language servers. 
/// @@ -42,6 +41,7 @@ pub struct ProjectSettings { pub load_direnv: DirenvSettings, /// Configuration for session-related features + #[serde(default)] pub session: SessionSettings, } @@ -59,31 +59,36 @@ pub enum DirenvSettings { } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] pub struct GitSettings { /// Whether or not to show the git gutter. /// /// Default: tracked_files - pub git_gutter: GitGutterSetting, + pub git_gutter: Option, pub gutter_debounce: Option, /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: on - pub inline_blame: InlineBlameSettings, + pub inline_blame: Option, } impl GitSettings { pub fn inline_blame_enabled(&self) -> bool { #[allow(unknown_lints, clippy::manual_unwrap_or_default)] - self.inline_blame.enabled + match self.inline_blame { + Some(InlineBlameSettings { enabled, .. }) => enabled, + _ => false, + } } pub fn inline_blame_delay(&self) -> Option { - self.inline_blame - .delay_ms - .gt(&0) - .then(|| Duration::from_millis(self.inline_blame.delay_ms)) + match self.inline_blame { + Some(InlineBlameSettings { + delay_ms: Some(delay_ms), + .. + }) if delay_ms > 0 => Some(Duration::from_millis(delay_ms)), + _ => None, + } } } @@ -97,34 +102,28 @@ pub enum GitGutterSetting { Hide, } -#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] -#[serde(default)] pub struct InlineBlameSettings { /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: true + #[serde(default = "true_value")] pub enabled: bool, /// Whether to only show the inline blame information /// after a delay once the cursor stops moving. 
/// /// Default: 0 - pub delay_ms: u64, + pub delay_ms: Option, /// The minimum column number to show the inline blame information at /// /// Default: 0 - pub min_column: u32, + pub min_column: Option, } -impl Default for InlineBlameSettings { - fn default() -> Self { - Self { - enabled: true, - delay_ms: 0, - min_column: 0, - } - } +const fn true_value() -> bool { + true } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 6ca843875b..c77a2170dd 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2289,7 +2289,7 @@ impl ProjectPanel { .child( ListItem::new(entry_id.to_proto() as usize) .indent_level(depth) - .indent_step_size(settings.indent_size) + .indent_step_size(px(settings.indent_size)) .selected(is_marked || is_active) .when_some(canonical_path, |this, path| { this.end_slot::( @@ -2817,7 +2817,7 @@ impl Render for DraggedProjectEntryView { this.bg(cx.theme().colors().background).w(self.width).child( ListItem::new(self.selection.entry_id.to_proto() as usize) .indent_level(self.details.depth) - .indent_step_size(settings.indent_size) + .indent_step_size(px(settings.indent_size)) .child(if let Some(icon) = &self.details.icon { div().child(Icon::from_path(icon.clone())) } else { @@ -2855,7 +2855,7 @@ impl Panel for ProjectPanel { DockPosition::Left | DockPosition::Bottom => ProjectPanelDockPosition::Left, DockPosition::Right => ProjectPanelDockPosition::Right, }; - settings.dock = dock; + settings.dock = Some(dock); }, ); } @@ -3029,7 +3029,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { worktree_settings.file_scan_exclusions = - vec!["**/.git".to_string(), "**/4/**".to_string()]; + Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); }); }); }); @@ -4818,10 +4818,10 @@ mod tests { cx.update(|cx| { 
cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = false + project_panel_settings.auto_reveal_entries = Some(false) }); }) }); @@ -4940,7 +4940,7 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = true + project_panel_settings.auto_reveal_entries = Some(true) }); }) }); @@ -5054,10 +5054,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = false + project_panel_settings.auto_reveal_entries = Some(false) }); }) }); @@ -5256,7 +5256,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - vec!["excluded_dir".to_string(), "**/.git".to_string()]; + Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); }); }); }); @@ -5569,10 +5569,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = false; + project_panel_settings.auto_fold_dirs = Some(false); }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); }); }); @@ -5591,10 +5591,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = false; + 
project_panel_settings.auto_fold_dirs = Some(false); }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); }); }); diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 6910b4627a..4d73ae9245 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,7 +2,6 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -use ui::px; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -11,50 +10,20 @@ pub enum ProjectPanelDockPosition { Right, } -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct ProjectPanelSettings { - /// Whether to show the project panel button in the status bar. pub button: bool, - /// Customize default width (in pixels) taken by project panel pub default_width: Pixels, - /// The position of project panel pub dock: ProjectPanelDockPosition, - /// Whether to show file icons in the project panel. pub file_icons: bool, - /// Whether to show folder icons or chevrons for directories in the project panel. pub folder_icons: bool, - /// Whether to show the git status in the project panel. pub git_status: bool, - /// Amount of indentation (in pixels) for nested items. - pub indent_size: Pixels, - /// Whether to reveal it in the project panel automatically, - /// when a corresponding project entry becomes active. - /// Gitignored entries are never auto revealed. + pub indent_size: f32, pub auto_reveal_entries: bool, - /// Whether to fold directories automatically - /// when directory has only one directory inside. 
pub auto_fold_dirs: bool, - /// Scrollbar-related settings pub scrollbar: ScrollbarSettings, } -impl Default for ProjectPanelSettings { - fn default() -> Self { - Self { - button: true, - default_width: px(240.), - dock: ProjectPanelDockPosition::Left, - file_icons: true, - folder_icons: true, - git_status: true, - indent_size: px(20.), - auto_reveal_entries: true, - auto_fold_dirs: true, - scrollbar: Default::default(), - } - } -} /// When to show the scrollbar in the project panel. /// /// Default: always @@ -68,7 +37,7 @@ pub enum ShowScrollbar { Never, } -#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. /// @@ -76,10 +45,63 @@ pub struct ScrollbarSettings { pub show: ShowScrollbar, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ScrollbarSettingsContent { + /// When to show the scrollbar in the project panel. + /// + /// Default: always + pub show: Option, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct ProjectPanelSettingsContent { + /// Whether to show the project panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Customize default width (in pixels) taken by project panel + /// + /// Default: 240 + pub default_width: Option, + /// The position of project panel + /// + /// Default: left + pub dock: Option, + /// Whether to show file icons in the project panel. + /// + /// Default: true + pub file_icons: Option, + /// Whether to show folder icons or chevrons for directories in the project panel. + /// + /// Default: true + pub folder_icons: Option, + /// Whether to show the git status in the project panel. + /// + /// Default: true + pub git_status: Option, + /// Amount of indentation (in pixels) for nested items. 
+ /// + /// Default: 20 + pub indent_size: Option, + /// Whether to reveal it in the project panel automatically, + /// when a corresponding project entry becomes active. + /// Gitignored entries are never auto revealed. + /// + /// Default: true + pub auto_reveal_entries: Option, + /// Whether to fold directories automatically + /// when directory has only one directory inside. + /// + /// Default: false + pub auto_fold_dirs: Option, + /// Scrollbar-related settings + pub scrollbar: Option, +} + impl Settings for ProjectPanelSettings { const KEY: Option<&'static str> = Some("project_panel"); - type FileContent = Self; + type FileContent = ProjectPanelSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index b7fa635945..d8b10f31f9 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -48,6 +48,7 @@ use workspace::{notifications::DetachAndPromptErr, AppState, ModalView, Workspac use crate::open_dev_server_project; use crate::ssh_connections::connect_over_ssh; use crate::ssh_connections::open_ssh_project; +use crate::ssh_connections::RemoteSettingsContent; use crate::ssh_connections::SshConnection; use crate::ssh_connections::SshConnectionModal; use crate::ssh_connections::SshProject; @@ -1023,7 +1024,7 @@ impl DevServerProjects { fn update_settings_file( &mut self, cx: &mut ViewContext, - f: impl FnOnce(&mut SshSettings) + Send + Sync + 'static, + f: impl FnOnce(&mut RemoteSettingsContent) + Send + Sync + 'static, ) { let Some(fs) = self .workspace diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index b54196022d..8da4284b7f 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -22,24 +22,8 @@ use ui::{ use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; -#[derive(Clone, 
Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct SshSettings { - /// ssh_connections is an array of ssh connections. - /// By default this setting is null, which disables the direct ssh connection support. - /// You can configure these from `project: Open Remote` in the command palette. - /// Zed's ssh support will pull configuration from your ~/.ssh too. - /// Examples: - /// [ - /// { - /// "host": "example-box", - /// "projects": [ - /// { - /// "paths": ["/home/user/code/zed"] - /// } - /// ] - /// } - /// ] pub ssh_connections: Option>, } @@ -78,10 +62,15 @@ pub struct SshProject { pub paths: Vec, } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct RemoteSettingsContent { + pub ssh_connections: Option>, +} + impl Settings for SshSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = RemoteSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/repl/src/jupyter_settings.rs b/crates/repl/src/jupyter_settings.rs index f441da4790..aefef6cec5 100644 --- a/crates/repl/src/jupyter_settings.rs +++ b/crates/repl/src/jupyter_settings.rs @@ -6,10 +6,8 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Debug, Default)] pub struct JupyterSettings { - /// Default kernels to select for each language. pub kernel_selections: HashMap, } @@ -22,10 +20,26 @@ impl JupyterSettings { } } +#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] +pub struct JupyterSettingsContent { + /// Default kernels to select for each language. 
+ /// + /// Default: `{}` + pub kernel_selections: Option>, +} + +impl Default for JupyterSettingsContent { + fn default() -> Self { + JupyterSettingsContent { + kernel_selections: Some(HashMap::new()), + } + } +} + impl Settings for JupyterSettings { const KEY: Option<&'static str> = Some("jupyter"); - type FileContent = Self; + type FileContent = JupyterSettingsContent; fn load( sources: SettingsSources, @@ -37,8 +51,10 @@ impl Settings for JupyterSettings { let mut settings = JupyterSettings::default(); for value in sources.defaults_and_customizations() { - for (k, v) in &value.kernel_selections { - settings.kernel_selections.insert(k.clone(), v.clone()); + if let Some(source) = &value.kernel_selections { + for (k, v) in source { + settings.kernel_selections.insert(k.clone(), v.clone()); + } } } diff --git a/crates/tasks_ui/src/settings.rs b/crates/tasks_ui/src/settings.rs index 4ad6f607b7..1bcd496264 100644 --- a/crates/tasks_ui/src/settings.rs +++ b/crates/tasks_ui/src/settings.rs @@ -2,26 +2,22 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, PartialEq, JsonSchema)] -#[serde(default)] -/// Task-related settings. +#[derive(Serialize, Deserialize, PartialEq, Default)] pub(crate) struct TaskSettings { - /// Whether to show task status indicator in the status bar. Default: true pub(crate) show_status_indicator: bool, } -impl Default for TaskSettings { - fn default() -> Self { - Self { - show_status_indicator: true, - } - } +/// Task-related settings. +#[derive(Serialize, Deserialize, PartialEq, Default, Clone, JsonSchema)] +pub(crate) struct TaskSettingsContent { + /// Whether to show task status indicator in the status bar. 
Default: true + show_status_indicator: Option, } impl Settings for TaskSettings { const KEY: Option<&'static str> = Some("task"); - type FileContent = Self; + type FileContent = TaskSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/vim/src/digraph.rs b/crates/vim/src/digraph.rs index 282016cfda..443b7ff378 100644 --- a/crates/vim/src/digraph.rs +++ b/crates/vim/src/digraph.rs @@ -132,7 +132,7 @@ mod test { let mut custom_digraphs = HashMap::default(); custom_digraphs.insert("|-".into(), "⊢".into()); custom_digraphs.insert(":)".into(), "👨‍💻".into()); - s.custom_digraphs = custom_digraphs; + s.custom_digraphs = Some(custom_digraphs); }); }); diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 815086d0be..8198c0da53 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -1184,7 +1184,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = true; + s.use_multiline_find = Some(true); }); }); @@ -1226,7 +1226,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = true; + s.use_multiline_find = Some(true); }); }); @@ -1268,7 +1268,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_smartcase_find = true; + s.use_smartcase_find = Some(true); }); }); diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 6465e33e0f..05469dbf9f 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -291,7 +291,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) 
}); }); @@ -327,7 +327,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::OnYank + s.use_system_clipboard = Some(UseSystemClipboard::OnYank) }); }); @@ -584,7 +584,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); @@ -630,7 +630,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); @@ -659,7 +659,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); @@ -707,7 +707,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 6a20ea4eb3..f89faa3748 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -294,7 +294,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.scroll_beyond_last_line = ScrollBeyondLastLine::Off + s.scroll_beyond_last_line = Some(ScrollBeyondLastLine::Off) }); }); diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 6418475ad2..28f33d49d8 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -542,7 +542,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { - 
store.update_user_settings::(cx, |s| s.search_wrap = false); + store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); }); cx.set_state("ˇhi\nhigh\nhi\n", Mode::Normal); @@ -655,7 +655,7 @@ mod test { // check that searching with unable search wrap cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = false); + store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); }); cx.set_state("aa\nbˇb\ncc\ncc\ncc\n", Mode::Normal); cx.simulate_keystrokes("/ c c enter"); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index be7db47315..9c61e9cd93 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -1300,7 +1300,7 @@ async fn test_command_alias(cx: &mut gpui::TestAppContext) { store.update_user_settings::(cx, |s| { let mut aliases = HashMap::default(); aliases.insert("Q".to_string(), "upper".to_string()); - s.command_aliases = aliases + s.command_aliases = Some(aliases) }); }); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index b68d2ede8b..c985f68e70 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -57,7 +57,7 @@ impl VimTestContext { pub fn new_with_lsp(mut cx: EditorLspTestContext, enabled: bool) -> VimTestContext { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = VimModeSetting(enabled)); + store.update_user_settings::(cx, |s| *s = Some(enabled)); }); settings::KeymapFile::load_asset("keymaps/default-macos.json", cx).unwrap(); if enabled { @@ -105,7 +105,7 @@ impl VimTestContext { pub fn enable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = VimModeSetting(true)); + store.update_user_settings::(cx, |s| *s = Some(true)); }); }) } @@ -113,7 +113,7 @@ impl VimTestContext { pub fn disable_vim(&mut self) { self.cx.update(|cx| { 
SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = VimModeSetting(false)); + store.update_user_settings::(cx, |s| *s = Some(false)); }); }) } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6baca17948..6e03374c22 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -46,8 +46,6 @@ use crate::state::ReplayableAction; /// Whether or not to enable Vim mode. /// /// Default: false -#[derive(Copy, Clone, Default, Deserialize, Serialize, JsonSchema)] -#[serde(default, transparent)] pub struct VimModeSetting(pub bool); /// An Action to Switch between modes @@ -101,7 +99,7 @@ pub fn init(cx: &mut AppContext) { let fs = workspace.app_state().fs.clone(); let currently_enabled = Vim::enabled(cx); update_settings_file::(fs, cx, move |setting, _| { - *setting = VimModeSetting(!currently_enabled); + *setting = Some(!currently_enabled) }) }); @@ -1070,10 +1068,12 @@ impl Vim { impl Settings for VimModeSetting { const KEY: Option<&'static str> = Some("vim_mode"); - type FileContent = Self; + type FileContent = Option; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - Ok(sources.user.copied().unwrap_or(*sources.default)) + Ok(Self(sources.user.copied().flatten().unwrap_or( + sources.default.ok_or_else(Self::missing_default)?, + ))) } } @@ -1089,8 +1089,7 @@ pub enum UseSystemClipboard { OnYank, } -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] struct VimSettings { pub toggle_relative_line_numbers: bool, pub use_system_clipboard: UseSystemClipboard, @@ -1099,22 +1098,19 @@ struct VimSettings { pub custom_digraphs: HashMap>, } -impl Default for VimSettings { - fn default() -> Self { - Self { - toggle_relative_line_numbers: false, - use_system_clipboard: UseSystemClipboard::Always, - use_multiline_find: false, - use_smartcase_find: false, - custom_digraphs: Default::default(), - } - } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] 
+struct VimSettingsContent { + pub toggle_relative_line_numbers: Option, + pub use_system_clipboard: Option, + pub use_multiline_find: Option, + pub use_smartcase_find: Option, + pub custom_digraphs: Option>>, } impl Settings for VimSettings { const KEY: Option<&'static str> = Some("vim"); - type FileContent = Self; + type FileContent = VimSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/welcome/src/base_keymap_picker.rs b/crates/welcome/src/base_keymap_picker.rs index fd7361f9b3..96a9df9c3c 100644 --- a/crates/welcome/src/base_keymap_picker.rs +++ b/crates/welcome/src/base_keymap_picker.rs @@ -177,7 +177,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { .report_setting_event("keymap", base_keymap.to_string()); update_settings_file::(self.fs.clone(), cx, move |setting, _| { - *setting = base_keymap; + *setting = Some(base_keymap) }); } diff --git a/crates/welcome/src/base_keymap_setting.rs b/crates/welcome/src/base_keymap_setting.rs index 0c1724627c..1b52bbc9f9 100644 --- a/crates/welcome/src/base_keymap_setting.rs +++ b/crates/welcome/src/base_keymap_setting.rs @@ -87,15 +87,15 @@ impl BaseKeymap { impl Settings for BaseKeymap { const KEY: Option<&'static str> = Some("base_keymap"); - type FileContent = Self; + type FileContent = Option; fn load( sources: SettingsSources, _: &mut gpui::AppContext, ) -> anyhow::Result { - if let Some(user_value) = sources.user.copied() { + if let Some(Some(user_value)) = sources.user.copied() { return Ok(user_value); } - Ok(*sources.default) + sources.default.ok_or_else(Self::missing_default) } } diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index 787c2e589b..fc837c6867 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -188,7 +188,7 @@ impl Render for WelcomePage { this.update_settings::( selection, cx, - |setting, value| *setting = VimModeSetting(value), + |setting, value| *setting = 
Some(value), ); }), )) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 46b8f3bf7f..935f0268b6 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -36,49 +36,20 @@ use util::ResultExt; pub const LEADER_UPDATE_THROTTLE: Duration = Duration::from_millis(200); -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct ItemSettings { - /// Whether to show the Git file status on a tab item. pub git_status: bool, - /// Position of the close button in a tab. pub close_position: ClosePosition, - /// Whether to show the file icon for a tab. pub file_icons: bool, } -impl Default for ItemSettings { - fn default() -> Self { - Self { - git_status: false, - close_position: ClosePosition::Right, - file_icons: false, - } - } -} - -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct PreviewTabsSettings { - /// Whether to show opened editors as preview tabs. - /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. pub enabled: bool, - /// Whether to open tabs in preview mode when selected from the file finder. pub enable_preview_from_file_finder: bool, - /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. pub enable_preview_from_code_navigation: bool, } -impl Default for PreviewTabsSettings { - fn default() -> Self { - Self { - enabled: true, - enable_preview_from_file_finder: false, - enable_preview_from_code_navigation: false, - } - } -} - #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "lowercase")] pub enum ClosePosition { @@ -96,10 +67,43 @@ impl ClosePosition { } } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct ItemSettingsContent { + /// Whether to show the Git file status on a tab item. 
+ /// + /// Default: false + git_status: Option, + /// Position of the close button in a tab. + /// + /// Default: right + close_position: Option, + /// Whether to show the file icon for a tab. + /// + /// Default: false + file_icons: Option, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct PreviewTabsSettingsContent { + /// Whether to show opened editors as preview tabs. + /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. + /// + /// Default: true + enabled: Option, + /// Whether to open tabs in preview mode when selected from the file finder. + /// + /// Default: false + enable_preview_from_file_finder: Option, + /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. + /// + /// Default: false + enable_preview_from_code_navigation: Option, +} + impl Settings for ItemSettings { const KEY: Option<&'static str> = Some("tabs"); - type FileContent = Self; + type FileContent = ItemSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -109,7 +113,7 @@ impl Settings for ItemSettings { impl Settings for PreviewTabsSettings { const KEY: Option<&'static str> = Some("preview_tabs"); - type FileContent = Self; + type FileContent = PreviewTabsSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0d77427794..a7c63c57f6 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -6465,7 +6465,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::OnWindowChange; + settings.autosave = Some(AutosaveSetting::OnWindowChange); }) }); item.is_dirty = true; @@ -6485,7 
+6485,7 @@ mod tests { cx.focus_self(); SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::OnFocusChange; + settings.autosave = Some(AutosaveSetting::OnFocusChange); }) }); item.is_dirty = true; @@ -6508,7 +6508,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::AfterDelay { milliseconds: 500 }; + settings.autosave = Some(AutosaveSetting::AfterDelay { milliseconds: 500 }); }) }); item.is_dirty = true; @@ -6527,7 +6527,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::OnFocusChange; + settings.autosave = Some(AutosaveSetting::OnFocusChange); }) }); item.is_dirty = true; diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index f87840eb30..52827c6941 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -5,58 +5,22 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct WorkspaceSettings { - /// Scale by which to zoom the active pane. - /// When set to 1.0, the active pane has the same size as others, - /// but when set to a larger value, the active pane takes up more space. pub active_pane_magnification: f32, - /// Direction to split horizontally. pub pane_split_direction_horizontal: PaneSplitDirectionHorizontal, - /// Direction to split vertically. pub pane_split_direction_vertical: PaneSplitDirectionVertical, - /// Centered layout related settings. 
pub centered_layout: CenteredLayoutSettings, - /// Whether or not to prompt the user to confirm before closing the application. pub confirm_quit: bool, - /// Whether or not to show the call status icon in the status bar. pub show_call_status_icon: bool, - /// When to automatically save edited buffers. pub autosave: AutosaveSetting, - /// Controls previous session restoration in freshly launched Zed instance. pub restore_on_startup: RestoreOnStartupBehavior, - /// The size of the workspace split drop targets on the outer edges. - /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. pub drop_target_size: f32, - /// Whether to close the window when using 'close active item' on a workspace with no tabs pub when_closing_with_no_tabs: CloseWindowWhenNoItems, - /// Whether to use the system provided dialogs for Open and Save As. - /// When set to false, Zed will use the built-in keyboard-first pickers. pub use_system_path_prompts: bool, - /// Aliases for the command palette. When you type a key in this map, - /// it will be assumed to equal the value. 
pub command_aliases: HashMap, } -impl Default for WorkspaceSettings { - fn default() -> Self { - Self { - active_pane_magnification: 1.0, - pane_split_direction_horizontal: PaneSplitDirectionHorizontal::Up, - pane_split_direction_vertical: PaneSplitDirectionVertical::Left, - centered_layout: CenteredLayoutSettings::default(), - confirm_quit: false, - show_call_status_icon: true, - autosave: AutosaveSetting::Off, - restore_on_startup: RestoreOnStartupBehavior::default(), - drop_target_size: 0.2, - when_closing_with_no_tabs: CloseWindowWhenNoItems::default(), - use_system_path_prompts: true, - command_aliases: HashMap::default(), - } - } -} #[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CloseWindowWhenNoItems { @@ -91,22 +55,77 @@ pub enum RestoreOnStartupBehavior { LastSession, } -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct WorkspaceSettingsContent { + /// Scale by which to zoom the active pane. + /// When set to 1.0, the active pane has the same size as others, + /// but when set to a larger value, the active pane takes up more space. + /// + /// Default: `1.0` + pub active_pane_magnification: Option, + // Direction to split horizontally. + // + // Default: "up" + pub pane_split_direction_horizontal: Option, + // Direction to split vertically. + // + // Default: "left" + pub pane_split_direction_vertical: Option, + // Centered layout related settings. + pub centered_layout: Option, + /// Whether or not to prompt the user to confirm before closing the application. + /// + /// Default: false + pub confirm_quit: Option, + /// Whether or not to show the call status icon in the status bar. + /// + /// Default: true + pub show_call_status_icon: Option, + /// When to automatically save edited buffers. 
+ /// + /// Default: off + pub autosave: Option, + /// Controls previous session restoration in freshly launched Zed instance. + /// Values: none, last_workspace, last_session + /// Default: last_session + pub restore_on_startup: Option, + /// The size of the workspace split drop targets on the outer edges. + /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. + /// + /// Default: `0.2` (20% of the smaller dimension of the workspace) + pub drop_target_size: Option, + /// Whether to close the window when using 'close active item' on a workspace with no tabs + /// + /// Default: auto ("on" on macOS, "off" otherwise) + pub when_closing_with_no_tabs: Option, + /// Whether to use the system provided dialogs for Open and Save As. + /// When set to false, Zed will use the built-in keyboard-first pickers. + /// + /// Default: true + pub use_system_path_prompts: Option, + /// Aliases for the command palette. When you type a key in this map, + /// it will be assumed to equal the value. + /// + /// Default: true + pub command_aliases: Option>, +} + +#[derive(Deserialize)] pub struct TabBarSettings { - /// Whether or not to show the tab bar in the editor. pub show: bool, - /// Whether or not to show the navigation history buttons in the tab bar. pub show_nav_history_buttons: bool, } -impl Default for TabBarSettings { - fn default() -> Self { - Self { - show_nav_history_buttons: true, - show: true, - } - } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct TabBarSettingsContent { + /// Whether or not to show the tab bar in the editor. + /// + /// Default: true + pub show: Option, + /// Whether or not to show the navigation history buttons in the tab bar. 
+ /// + /// Default: true + pub show_nav_history_buttons: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] @@ -144,26 +163,17 @@ pub struct CenteredLayoutSettings { /// /// Default: 0.2 pub left_padding: Option, - /// The relative width of the right padding of the central pane from the - /// workspace when the centered layout is used. + // The relative width of the right padding of the central pane from the + // workspace when the centered layout is used. /// /// Default: 0.2 pub right_padding: Option, } -impl Default for CenteredLayoutSettings { - fn default() -> Self { - Self { - left_padding: Some(0.2), - right_padding: Some(0.2), - } - } -} - impl Settings for WorkspaceSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = WorkspaceSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -173,7 +183,7 @@ impl Settings for WorkspaceSettings { impl Settings for TabBarSettings { const KEY: Option<&'static str> = Some("tab_bar"); - type FileContent = Self; + type FileContent = TabBarSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 82be3a8028..32851d963a 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -25,8 +25,7 @@ impl WorktreeSettings { } } -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct WorktreeSettingsContent { /// Completely ignore files matching globs from `file_scan_exclusions` /// @@ -40,42 +39,12 @@ pub struct WorktreeSettingsContent { /// "**/.classpath", /// "**/.settings" /// ] - pub file_scan_exclusions: Vec, + #[serde(default)] + pub file_scan_exclusions: Option>, /// Treat the files matching these globs as 
`.env` files. /// Default: [ "**/.env*" ] - pub private_files: Vec, -} - -impl Default for WorktreeSettingsContent { - fn default() -> Self { - Self { - private_files: [ - "**/.env*", - "**/*.pem", - "**/*.key", - "**/*.cert", - "**/*.crt", - "**/secrets.yml", - ] - .into_iter() - .map(str::to_owned) - .collect(), - file_scan_exclusions: [ - "**/.git", - "**/.svn", - "**/.hg", - "**/CVS", - "**/.DS_Store", - "**/Thumbs.db", - "**/.classpath", - "**/.settings", - ] - .into_iter() - .map(str::to_owned) - .collect(), - } - } + pub private_files: Option>, } impl Settings for WorktreeSettings { @@ -88,8 +57,8 @@ impl Settings for WorktreeSettings { _: &mut AppContext, ) -> anyhow::Result { let result: WorktreeSettingsContent = sources.json_merge()?; - let mut file_scan_exclusions = result.file_scan_exclusions; - let mut private_files = result.private_files; + let mut file_scan_exclusions = result.file_scan_exclusions.unwrap_or_default(); + let mut private_files = result.private_files.unwrap_or_default(); file_scan_exclusions.sort(); private_files.sort(); Ok(Self { diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 455bc62a79..929dc01c6d 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -673,7 +673,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Vec::new(); + project_settings.file_scan_exclusions = Some(Vec::new()); }); }); }); @@ -910,7 +910,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; + Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); }); }); }); @@ -945,7 +945,8 @@ async fn 
test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = vec!["**/node_modules/**".to_string()]; + project_settings.file_scan_exclusions = + Some(vec!["**/node_modules/**".to_string()]); }); }); }); @@ -1008,11 +1009,11 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = vec![ + project_settings.file_scan_exclusions = Some(vec![ "**/.git".to_string(), "node_modules/".to_string(), "build_output".to_string(), - ]; + ]); }); }); }); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 9f670efcd7..93fee57ecd 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1996,7 +1996,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - vec!["excluded_dir".to_string(), "**/.git".to_string()]; + Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); }); }); }); From b9b62842f8e9ccd70dba5fd3fa2f3e4b0f2aaab3 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 12 Sep 2024 15:23:27 -0400 Subject: [PATCH 065/270] lsp: Treat unrooted paths as relative to the worktree root (#17769) gopls would send us watch patterns like `**/*.mod` and we'd fall back to watching `/`. Release Notes: - Fix file watching for go projects resorting to watching the fs root. 
Co-authored-by: Thorsten --- crates/project/src/lsp_store.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 307e86de45..cdf1fa4be4 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3499,7 +3499,7 @@ impl LspStore { .to_owned(); let path = if Path::new(path).components().next().is_none() { - Arc::from(Path::new("/")) + Arc::from(Path::new(worktree_root_path)) } else { PathBuf::from(path).into() }; From ee96d69e37790a37603ec0d558f0d3f08b9a6661 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Fri, 13 Sep 2024 03:55:03 +0800 Subject: [PATCH 066/270] gpui: Fix CJK line wrap for GPUI text render (#17737) Release Notes: - N/A This changes is going to let GPUI render correct text wrapping for CJK characters. We was done this in PR #11296 for Editor, but this is also need support for other text renders. | Before | After | | --- | --- | | SCR-20240912-jtvo | image | --- crates/gpui/examples/text_wrapper.rs | 3 ++- crates/gpui/src/text_system/line_layout.rs | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/gpui/examples/text_wrapper.rs b/crates/gpui/examples/text_wrapper.rs index 063d60d198..cb06425928 100644 --- a/crates/gpui/examples/text_wrapper.rs +++ b/crates/gpui/examples/text_wrapper.rs @@ -4,7 +4,7 @@ struct HelloWorld {} impl Render for HelloWorld { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - let text = "The longest word in any of the major English language 以及中文的测试 dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that refers to a lung disease contracted from the inhalation of very fine silica particles, specifically from a volcano; medically, it is the same as silicosis."; + let text = "The longest word 你好世界这段是中文,こんにちはこの段落は日本語です in any of the major English language dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that refers to a lung 
disease contracted from the inhalation of very fine silica particles, specifically from a volcano; medically, it is the same as silicosis."; div() .id("page") .size_full() @@ -40,6 +40,7 @@ impl Render for HelloWorld { .border_1() .border_color(gpui::red()) .text_ellipsis() + .w_full() .child("A short text in normal div"), ), ) diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index af01eb70ed..7e5a43dee8 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -9,6 +9,8 @@ use std::{ sync::Arc, }; +use super::LineWrapper; + /// A laid out and styled line of text #[derive(Default, Debug)] pub struct LineLayout { @@ -152,9 +154,18 @@ impl LineLayout { continue; } - if prev_ch == ' ' && ch != ' ' && first_non_whitespace_ix.is_some() { - last_candidate_ix = Some(boundary); - last_candidate_x = x; + // Here is very similar to `LineWrapper::wrap_line` to determine text wrapping, + // but there are some differences, so we have to duplicate the code here. 
+ if LineWrapper::is_word_char(ch) { + if prev_ch == ' ' && ch != ' ' && first_non_whitespace_ix.is_some() { + last_candidate_ix = Some(boundary); + last_candidate_x = x; + } + } else { + if ch != ' ' && first_non_whitespace_ix.is_some() { + last_candidate_ix = Some(boundary); + last_candidate_x = x; + } } if ch != ' ' && first_non_whitespace_ix.is_none() { From af819bf661242a3e6c6f54782f36b7ac68ab0294 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 13 Sep 2024 04:14:53 +0800 Subject: [PATCH 067/270] windows: Implement `fs::trash_file` and `fs::trash_dir` (#17711) https://github.com/user-attachments/assets/43370cee-26a5-4d27-b86f-656127e03b4a Release Notes: - N/A --- Cargo.toml | 1 + crates/fs/src/fs.rs | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/Cargo.toml b/Cargo.toml index 23b17fd291..79f5ce2dcf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -483,6 +483,7 @@ version = "0.58" features = [ "implement", "Foundation_Numerics", + "Storage", "System", "System_Threading", "UI_ViewManagement", diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index a463773e7e..0ec5a4c601 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -342,6 +342,24 @@ impl Fs for RealFs { } } + #[cfg(target_os = "windows")] + async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { + use windows::{ + core::HSTRING, + Storage::{StorageDeleteOption, StorageFile}, + }; + // todo(windows) + // When new version of `windows-rs` release, make this operation `async` + let path = path.canonicalize()?.to_string_lossy().to_string(); + let path_str = path.trim_start_matches("\\\\?\\"); + if path_str.is_empty() { + anyhow::bail!("File path is empty!"); + } + let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_str))?.get()?; + file.DeleteAsync(StorageDeleteOption::Default)?.get()?; + Ok(()) + } + #[cfg(target_os = "macos")] async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> 
Result<()> { self.trash_file(path, options).await @@ -352,6 +370,25 @@ impl Fs for RealFs { self.trash_file(path, options).await } + #[cfg(target_os = "windows")] + async fn trash_dir(&self, path: &Path, _options: RemoveOptions) -> Result<()> { + use windows::{ + core::HSTRING, + Storage::{StorageDeleteOption, StorageFolder}, + }; + + let path = path.canonicalize()?.to_string_lossy().to_string(); + let path_str = path.trim_start_matches("\\\\?\\"); + if path_str.is_empty() { + anyhow::bail!("Folder path is empty!"); + } + // todo(windows) + // When new version of `windows-rs` release, make this operation `async` + let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_str))?.get()?; + folder.DeleteAsync(StorageDeleteOption::Default)?.get()?; + Ok(()) + } + async fn open_sync(&self, path: &Path) -> Result> { Ok(Box::new(std::fs::File::open(path)?)) } From 461812d7b6de0064e2bffd224f82a95351d0f1cb Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 13 Sep 2024 04:15:20 +0800 Subject: [PATCH 068/270] windows: Use the existing `open_target` function for `platform::open_with_system` (#17705) Release Notes: - N/A --- crates/gpui/src/platform/windows/platform.rs | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index f8b3924e62..934d9336d2 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -401,14 +401,19 @@ impl Platform for WindowsPlatform { } fn open_with_system(&self, path: &Path) { - let executor = self.background_executor().clone(); - let path = path.to_owned(); - executor + let Ok(full_path) = path.canonicalize() else { + log::error!("unable to parse file full path: {}", path.display()); + return; + }; + self.background_executor() .spawn(async move { - let _ = std::process::Command::new("cmd") - .args(&["/c", "start", "", 
path.to_str().expect("path to string")]) - .spawn() - .expect("Failed to open file"); + let Some(full_path_str) = full_path.to_str() else { + return; + }; + if full_path_str.is_empty() { + return; + }; + open_target(full_path_str); }) .detach(); } From 3613ebd93c137578d947b482fe108e81313a8b47 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Fri, 13 Sep 2024 04:55:59 +0800 Subject: [PATCH 069/270] editor: Fix an error when cut with vim visual line select (#17591) Becuause in vim visual mode, we will always select next char, hit [here](https://github.com/zed-industries/zed/blob/66ef31882341852229c74996867916fbd4a2fe2a/crates/vim/src/visual.rs#L174), when using editor method for `cut` this selection, will hit this error. Closes #17585 Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- crates/editor/src/editor.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4792c6b2cb..515cde1908 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6671,7 +6671,11 @@ impl Editor { let is_entire_line = selection.is_empty() || self.selections.line_mode; if is_entire_line { selection.start = Point::new(selection.start.row, 0); - selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); + if !selection.is_empty() && selection.end.column == 0 { + selection.end = cmp::min(max_point, selection.end); + } else { + selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); + } selection.goal = SelectionGoal::None; } if is_first { From 3aeea93847acd4c28f97d2c4de35364a98cb98e4 Mon Sep 17 00:00:00 2001 From: Albert Marashi Date: Fri, 13 Sep 2024 21:41:27 +0930 Subject: [PATCH 070/270] typescript: Highlight `is` predicate keyword & `...` spread pattern (#17787) Release Notes: - Fixed the `is` and `...` highlights for TypeScript --- crates/languages/src/tsx/highlights.scm | 1 + crates/languages/src/typescript/highlights.scm | 2 ++ 2 
files changed, 3 insertions(+) diff --git a/crates/languages/src/tsx/highlights.scm b/crates/languages/src/tsx/highlights.scm index 296a66c10a..bbdd83bb4d 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/languages/src/tsx/highlights.scm @@ -181,6 +181,7 @@ "import" "in" "instanceof" + "is" "let" "new" "of" diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index 19def8d93d..eedcf79aed 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -100,6 +100,7 @@ ] @punctuation.delimiter [ + "..." "-" "--" "-=" @@ -181,6 +182,7 @@ "import" "in" "instanceof" + "is" "let" "new" "of" From 93a3e8bc9478572cc07404d9ed3556b6842f00f2 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 13 Sep 2024 10:54:23 -0400 Subject: [PATCH 071/270] zed_extension_api: Fork new version of extension API (#17795) This PR forks a new version of the `zed_extension_api` in preparation for new changes. 
Release Notes: - N/A --------- Co-authored-by: Max --- Cargo.lock | 78 +-- crates/extension/build.rs | 9 +- crates/extension/src/wasm_host/wit.rs | 85 ++- .../src/wasm_host/wit/since_v0_1_0.rs | 229 +++----- .../src/wasm_host/wit/since_v0_2_0.rs | 551 ++++++++++++++++++ crates/extension_api/Cargo.toml | 5 +- .../extension_api/wit/since_v0.2.0/common.wit | 9 + .../wit/since_v0.2.0/extension.wit | 147 +++++ .../extension_api/wit/since_v0.2.0/github.wit | 33 ++ .../wit/since_v0.2.0/http-client.wit | 67 +++ crates/extension_api/wit/since_v0.2.0/lsp.wit | 83 +++ .../extension_api/wit/since_v0.2.0/nodejs.wit | 13 + .../wit/since_v0.2.0/platform.wit | 24 + .../wit/since_v0.2.0/settings.rs | 29 + .../wit/since_v0.2.0/slash-command.wit | 41 ++ extensions/test-extension/Cargo.toml | 2 +- 16 files changed, 1222 insertions(+), 183 deletions(-) create mode 100644 crates/extension/src/wasm_host/wit/since_v0_2_0.rs create mode 100644 crates/extension_api/wit/since_v0.2.0/common.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/extension.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/github.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/http-client.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/lsp.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/nodejs.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/platform.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/settings.rs create mode 100644 crates/extension_api/wit/since_v0.2.0/slash-command.wit diff --git a/Cargo.lock b/Cargo.lock index 36691e72ab..5eaf3ddde1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7873,7 +7873,7 @@ name = "perplexity" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0", + "zed_extension_api 0.2.0", ] [[package]] @@ -10254,7 +10254,7 @@ dependencies = [ name = "slash_commands_example" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", 
+ "zed_extension_api 0.1.0", ] [[package]] @@ -14317,72 +14317,63 @@ name = "zed_astro" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_clojure" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_csharp" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_dart" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_deno" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_elixir" version = "0.0.9" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_elm" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_emmet" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_erlang" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "zed_extension_api" -version = "0.1.0" -dependencies = [ - "serde", - "serde_json", - "wit-bindgen", + "zed_extension_api 0.1.0", ] [[package]] @@ -14396,82 +14387,91 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "zed_extension_api" +version = "0.2.0" +dependencies = [ + "serde", + 
"serde_json", + "wit-bindgen", +] + [[package]] name = "zed_gleam" version = "0.2.0" dependencies = [ "html_to_markdown 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_glsl" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_haskell" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_html" version = "0.1.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_lua" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ocaml" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_php" version = "0.2.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_prisma" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_purescript" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ruby" version = "0.2.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ruff" version = "0.0.2" dependencies 
= [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14479,42 +14479,42 @@ name = "zed_snippets" version = "0.0.5" dependencies = [ "serde_json", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_svelte" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_terraform" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_test_extension" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.2.0", ] [[package]] name = "zed_toml" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_uiua" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14522,14 +14522,14 @@ name = "zed_vue" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_zig" version = "0.3.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] diff --git a/crates/extension/build.rs b/crates/extension/build.rs index c5f94abaa8..f2c2b19998 100644 --- a/crates/extension/build.rs +++ b/crates/extension/build.rs @@ -6,17 +6,21 @@ fn main() -> Result<(), Box> { copy_extension_api_rust_files() } -// rust-analyzer doesn't support include! 
for files from outside the crate. -// Copy them to the OUT_DIR, so we can include them from there, which is supported. +/// rust-analyzer doesn't support include! for files from outside the crate. +/// Copy them to the OUT_DIR, so we can include them from there, which is supported. fn copy_extension_api_rust_files() -> Result<(), Box> { let out_dir = env::var("OUT_DIR")?; let input_dir = PathBuf::from("../extension_api/wit"); let output_dir = PathBuf::from(out_dir); + println!("cargo:rerun-if-changed={}", input_dir.display()); + for entry in fs::read_dir(&input_dir)? { let entry = entry?; let path = entry.path(); if path.is_dir() { + println!("cargo:rerun-if-changed={}", path.display()); + for subentry in fs::read_dir(&path)? { let subentry = subentry?; let subpath = subentry.path(); @@ -26,7 +30,6 @@ fn copy_extension_api_rust_files() -> Result<(), Box> { fs::create_dir_all(destination.parent().unwrap())?; fs::copy(&subpath, &destination)?; - println!("cargo:rerun-if-changed={}", subpath.display()); } } } else if path.extension() == Some(std::ffi::OsStr::new("rs")) { diff --git a/crates/extension/src/wasm_host/wit.rs b/crates/extension/src/wasm_host/wit.rs index 7c7d71be3a..1c3cdd77f6 100644 --- a/crates/extension/src/wasm_host/wit.rs +++ b/crates/extension/src/wasm_host/wit.rs @@ -2,9 +2,10 @@ mod since_v0_0_1; mod since_v0_0_4; mod since_v0_0_6; mod since_v0_1_0; +mod since_v0_2_0; use indexed_docs::IndexedDocsDatabase; use release_channel::ReleaseChannel; -use since_v0_1_0 as latest; +use since_v0_2_0 as latest; use super::{wasm_engine, WasmState}; use anyhow::{anyhow, Context, Result}; @@ -52,10 +53,16 @@ pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive // Note: The release channel can be used to stage a new version of the extension API. 
let _ = release_channel; - since_v0_0_1::MIN_VERSION..=latest::MAX_VERSION + let max_version = match release_channel { + ReleaseChannel::Dev | ReleaseChannel::Nightly => latest::MAX_VERSION, + ReleaseChannel::Stable | ReleaseChannel::Preview => since_v0_1_0::MAX_VERSION, + }; + + since_v0_0_1::MIN_VERSION..=max_version } pub enum Extension { + V020(since_v0_2_0::Extension), V010(since_v0_1_0::Extension), V006(since_v0_0_6::Extension), V004(since_v0_0_4::Extension), @@ -72,11 +79,25 @@ impl Extension { // Note: The release channel can be used to stage a new version of the extension API. let _ = release_channel; - if version >= latest::MIN_VERSION { + let allow_latest_version = match release_channel { + ReleaseChannel::Dev | ReleaseChannel::Nightly => true, + ReleaseChannel::Stable | ReleaseChannel::Preview => false, + }; + + if allow_latest_version && version >= latest::MIN_VERSION { let (extension, instance) = latest::Extension::instantiate_async(store, component, latest::linker()) .await .context("failed to instantiate wasm extension")?; + Ok((Self::V020(extension), instance)) + } else if version >= since_v0_1_0::MIN_VERSION { + let (extension, instance) = since_v0_1_0::Extension::instantiate_async( + store, + component, + since_v0_1_0::linker(), + ) + .await + .context("failed to instantiate wasm extension")?; Ok((Self::V010(extension), instance)) } else if version >= since_v0_0_6::MIN_VERSION { let (extension, instance) = since_v0_0_6::Extension::instantiate_async( @@ -110,6 +131,7 @@ impl Extension { pub async fn call_init_extension(&self, store: &mut Store) -> Result<()> { match self { + Extension::V020(ext) => ext.call_init_extension(store).await, Extension::V010(ext) => ext.call_init_extension(store).await, Extension::V006(ext) => ext.call_init_extension(store).await, Extension::V004(ext) => ext.call_init_extension(store).await, @@ -125,10 +147,14 @@ impl Extension { resource: Resource>, ) -> Result> { match self { - Extension::V010(ext) => { + 
Extension::V020(ext) => { ext.call_language_server_command(store, &language_server_id.0, resource) .await } + Extension::V010(ext) => Ok(ext + .call_language_server_command(store, &language_server_id.0, resource) + .await? + .map(|command| command.into())), Extension::V006(ext) => Ok(ext .call_language_server_command(store, &language_server_id.0, resource) .await? @@ -152,6 +178,14 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V020(ext) => { + ext.call_language_server_initialization_options( + store, + &language_server_id.0, + resource, + ) + .await + } Extension::V010(ext) => { ext.call_language_server_initialization_options( store, @@ -190,6 +224,14 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V020(ext) => { + ext.call_language_server_workspace_configuration( + store, + &language_server_id.0, + resource, + ) + .await + } Extension::V010(ext) => { ext.call_language_server_workspace_configuration( store, @@ -217,10 +259,19 @@ impl Extension { completions: Vec, ) -> Result>, String>> { match self { - Extension::V010(ext) => { + Extension::V020(ext) => { ext.call_labels_for_completions(store, &language_server_id.0, &completions) .await } + Extension::V010(ext) => Ok(ext + .call_labels_for_completions(store, &language_server_id.0, &completions) + .await? + .map(|labels| { + labels + .into_iter() + .map(|label| label.map(Into::into)) + .collect() + })), Extension::V006(ext) => Ok(ext .call_labels_for_completions(store, &language_server_id.0, &completions) .await? @@ -241,10 +292,19 @@ impl Extension { symbols: Vec, ) -> Result>, String>> { match self { - Extension::V010(ext) => { + Extension::V020(ext) => { ext.call_labels_for_symbols(store, &language_server_id.0, &symbols) .await } + Extension::V010(ext) => Ok(ext + .call_labels_for_symbols(store, &language_server_id.0, &symbols) + .await? 
+ .map(|labels| { + labels + .into_iter() + .map(|label| label.map(Into::into)) + .collect() + })), Extension::V006(ext) => Ok(ext .call_labels_for_symbols(store, &language_server_id.0, &symbols) .await? @@ -265,6 +325,10 @@ impl Extension { arguments: &[String], ) -> Result, String>> { match self { + Extension::V020(ext) => { + ext.call_complete_slash_command_argument(store, command, arguments) + .await + } Extension::V010(ext) => { ext.call_complete_slash_command_argument(store, command, arguments) .await @@ -281,6 +345,10 @@ impl Extension { resource: Option>>, ) -> Result> { match self { + Extension::V020(ext) => { + ext.call_run_slash_command(store, command, arguments, resource) + .await + } Extension::V010(ext) => { ext.call_run_slash_command(store, command, arguments, resource) .await @@ -297,6 +365,7 @@ impl Extension { provider: &str, ) -> Result, String>> { match self { + Extension::V020(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V010(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => Err(anyhow!( "`suggest_docs_packages` not available prior to v0.1.0" @@ -312,6 +381,10 @@ impl Extension { database: Resource>, ) -> Result> { match self { + Extension::V020(ext) => { + ext.call_index_docs(store, provider, package_name, database) + .await + } Extension::V010(ext) => { ext.call_index_docs(store, provider, package_name, database) .await diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 337bb8afb0..88d860391a 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -16,13 +16,14 @@ use language::{ use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ - env, path::{Path, PathBuf}, sync::{Arc, OnceLock}, }; use util::maybe; use wasmtime::component::{Linker, Resource}; +use 
super::latest; + pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0); pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0); @@ -33,7 +34,12 @@ wasmtime::component::bindgen!({ with: { "worktree": ExtensionWorktree, "key-value-store": ExtensionKeyValueStore, - "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream + "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream, + "zed:extension/github": latest::zed::extension::github, + "zed:extension/lsp": latest::zed::extension::lsp, + "zed:extension/nodejs": latest::zed::extension::nodejs, + "zed:extension/platform": latest::zed::extension::platform, + "zed:extension/slash-command": latest::zed::extension::slash_command, }, }); @@ -49,7 +55,94 @@ pub type ExtensionHttpResponseStream = Arc &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(|linker, f| { + Extension::add_to_linker(linker, f)?; + latest::zed::extension::github::add_to_linker(linker, f)?; + latest::zed::extension::nodejs::add_to_linker(linker, f)?; + latest::zed::extension::platform::add_to_linker(linker, f)?; + latest::zed::extension::slash_command::add_to_linker(linker, f)?; + Ok(()) + }) + }) +} + +impl From for latest::Command { + fn from(value: Command) -> Self { + Self { + command: value.command, + args: value.args, + env: value.env, + } + } +} + +impl From for latest::SettingsLocation { + fn from(value: SettingsLocation) -> Self { + Self { + worktree_id: value.worktree_id, + path: value.path, + } + } +} + +impl From for latest::LanguageServerInstallationStatus { + fn from(value: LanguageServerInstallationStatus) -> Self { + match value { + LanguageServerInstallationStatus::None => Self::None, + LanguageServerInstallationStatus::Downloading => Self::Downloading, + LanguageServerInstallationStatus::CheckingForUpdate => Self::CheckingForUpdate, + 
LanguageServerInstallationStatus::Failed(message) => Self::Failed(message), + } + } +} + +impl From for latest::DownloadedFileType { + fn from(value: DownloadedFileType) -> Self { + match value { + DownloadedFileType::Gzip => Self::Gzip, + DownloadedFileType::GzipTar => Self::GzipTar, + DownloadedFileType::Zip => Self::Zip, + DownloadedFileType::Uncompressed => Self::Uncompressed, + } + } +} + +impl From for latest::Range { + fn from(value: Range) -> Self { + Self { + start: value.start, + end: value.end, + } + } +} + +impl From for latest::CodeLabelSpan { + fn from(value: CodeLabelSpan) -> Self { + match value { + CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()), + CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()), + } + } +} + +impl From for latest::CodeLabelSpanLiteral { + fn from(value: CodeLabelSpanLiteral) -> Self { + Self { + text: value.text, + highlight_name: value.highlight_name, + } + } +} + +impl From for latest::CodeLabel { + fn from(value: CodeLabel) -> Self { + Self { + code: value.code, + spans: value.spans.into_iter().map(Into::into).collect(), + filter_range: value.filter_range.into(), + } + } } #[async_trait] @@ -251,136 +344,6 @@ async fn convert_response( Ok(extension_response) } -#[async_trait] -impl nodejs::Host for WasmState { - async fn node_binary_path(&mut self) -> wasmtime::Result> { - self.host - .node_runtime - .binary_path() - .await - .map(|path| path.to_string_lossy().to_string()) - .to_wasmtime_result() - } - - async fn npm_package_latest_version( - &mut self, - package_name: String, - ) -> wasmtime::Result> { - self.host - .node_runtime - .npm_package_latest_version(&package_name) - .await - .to_wasmtime_result() - } - - async fn npm_package_installed_version( - &mut self, - package_name: String, - ) -> wasmtime::Result, String>> { - self.host - .node_runtime - .npm_package_installed_version(&self.work_dir(), &package_name) - .await - .to_wasmtime_result() - } - - async fn npm_install_package( - &mut 
self, - package_name: String, - version: String, - ) -> wasmtime::Result> { - self.host - .node_runtime - .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) - .await - .to_wasmtime_result() - } -} - -#[async_trait] -impl lsp::Host for WasmState {} - -impl From<::http_client::github::GithubRelease> for github::GithubRelease { - fn from(value: ::http_client::github::GithubRelease) -> Self { - Self { - version: value.tag_name, - assets: value.assets.into_iter().map(Into::into).collect(), - } - } -} - -impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset { - fn from(value: ::http_client::github::GithubReleaseAsset) -> Self { - Self { - name: value.name, - download_url: value.browser_download_url, - } - } -} - -#[async_trait] -impl github::Host for WasmState { - async fn latest_github_release( - &mut self, - repo: String, - options: github::GithubReleaseOptions, - ) -> wasmtime::Result> { - maybe!(async { - let release = ::http_client::github::latest_github_release( - &repo, - options.require_assets, - options.pre_release, - self.host.http_client.clone(), - ) - .await?; - Ok(release.into()) - }) - .await - .to_wasmtime_result() - } - - async fn github_release_by_tag_name( - &mut self, - repo: String, - tag: String, - ) -> wasmtime::Result> { - maybe!(async { - let release = ::http_client::github::get_release_by_tag_name( - &repo, - &tag, - self.host.http_client.clone(), - ) - .await?; - Ok(release.into()) - }) - .await - .to_wasmtime_result() - } -} - -#[async_trait] -impl platform::Host for WasmState { - async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { - Ok(( - match env::consts::OS { - "macos" => platform::Os::Mac, - "linux" => platform::Os::Linux, - "windows" => platform::Os::Windows, - _ => panic!("unsupported os"), - }, - match env::consts::ARCH { - "aarch64" => platform::Architecture::Aarch64, - "x86" => platform::Architecture::X86, - "x86_64" => 
platform::Architecture::X8664, - _ => panic!("unsupported architecture"), - }, - )) - } -} - -#[async_trait] -impl slash_command::Host for WasmState {} - #[async_trait] impl ExtensionImports for WasmState { async fn get_settings( diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs new file mode 100644 index 0000000000..7fa79c2544 --- /dev/null +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -0,0 +1,551 @@ +use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; +use ::http_client::AsyncBody; +use ::settings::{Settings, WorktreeId}; +use anyhow::{anyhow, bail, Context, Result}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use async_trait::async_trait; +use futures::{io::BufReader, FutureExt as _}; +use futures::{lock::Mutex, AsyncReadExt}; +use indexed_docs::IndexedDocsDatabase; +use isahc::config::{Configurable, RedirectPolicy}; +use language::LanguageName; +use language::{ + language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, +}; +use project::project_settings::ProjectSettings; +use semantic_version::SemanticVersion; +use std::{ + env, + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use util::maybe; +use wasmtime::component::{Linker, Resource}; + +pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0); +pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0); + +wasmtime::component::bindgen!({ + async: true, + trappable_imports: true, + path: "../extension_api/wit/since_v0.2.0", + with: { + "worktree": ExtensionWorktree, + "key-value-store": ExtensionKeyValueStore, + "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream + }, +}); + +pub use self::zed::extension::*; + +mod settings { + include!(concat!(env!("OUT_DIR"), "/since_v0.2.0/settings.rs")); +} + +pub type ExtensionWorktree = Arc; +pub type ExtensionKeyValueStore = Arc; +pub type 
ExtensionHttpResponseStream = Arc>>; + +pub fn linker() -> &'static Linker { + static LINKER: OnceLock> = OnceLock::new(); + LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) +} + +#[async_trait] +impl HostKeyValueStore for WasmState { + async fn insert( + &mut self, + kv_store: Resource, + key: String, + value: String, + ) -> wasmtime::Result> { + let kv_store = self.table.get(&kv_store)?; + kv_store.insert(key, value).await.to_wasmtime_result() + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of key-value stores. + Ok(()) + } +} + +#[async_trait] +impl HostWorktree for WasmState { + async fn id( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.worktree_id().to_proto()) + } + + async fn root_path( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.worktree_root_path().to_string_lossy().to_string()) + } + + async fn read_text_file( + &mut self, + delegate: Resource>, + path: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .read_text_file(path.into()) + .await + .map_err(|error| error.to_string())) + } + + async fn shell_env( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.shell_env().await.into_iter().collect()) + } + + async fn which( + &mut self, + delegate: Resource>, + binary_name: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .which(binary_name.as_ref()) + .await + .map(|path| path.to_string_lossy().to_string())) + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of worktrees. 
+ Ok(()) + } +} + +#[async_trait] +impl common::Host for WasmState {} + +#[async_trait] +impl http_client::Host for WasmState { + async fn fetch( + &mut self, + request: http_client::HttpRequest, + ) -> wasmtime::Result> { + maybe!(async { + let url = &request.url; + let request = convert_request(&request)?; + let mut response = self.host.http_client.send(request).await?; + + if response.status().is_client_error() || response.status().is_server_error() { + bail!("failed to fetch '{url}': status code {}", response.status()) + } + convert_response(&mut response).await + }) + .await + .to_wasmtime_result() + } + + async fn fetch_stream( + &mut self, + request: http_client::HttpRequest, + ) -> wasmtime::Result, String>> { + let request = convert_request(&request)?; + let response = self.host.http_client.send(request); + maybe!(async { + let response = response.await?; + let stream = Arc::new(Mutex::new(response)); + let resource = self.table.push(stream)?; + Ok(resource) + }) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl http_client::HostHttpResponseStream for WasmState { + async fn next_chunk( + &mut self, + resource: Resource, + ) -> wasmtime::Result>, String>> { + let stream = self.table.get(&resource)?.clone(); + maybe!(async move { + let mut response = stream.lock().await; + let mut buffer = vec![0; 8192]; // 8KB buffer + let bytes_read = response.body_mut().read(&mut buffer).await?; + if bytes_read == 0 { + Ok(None) + } else { + buffer.truncate(bytes_read); + Ok(Some(buffer)) + } + }) + .await + .to_wasmtime_result() + } + + fn drop(&mut self, _resource: Resource) -> Result<()> { + Ok(()) + } +} + +impl From for ::http_client::Method { + fn from(value: http_client::HttpMethod) -> Self { + match value { + http_client::HttpMethod::Get => Self::GET, + http_client::HttpMethod::Post => Self::POST, + http_client::HttpMethod::Put => Self::PUT, + http_client::HttpMethod::Delete => Self::DELETE, + http_client::HttpMethod::Head => Self::HEAD, + 
http_client::HttpMethod::Options => Self::OPTIONS, + http_client::HttpMethod::Patch => Self::PATCH, + } + } +} + +fn convert_request( + extension_request: &http_client::HttpRequest, +) -> Result<::http_client::Request, anyhow::Error> { + let mut request = ::http_client::Request::builder() + .method(::http_client::Method::from(extension_request.method)) + .uri(&extension_request.url) + .redirect_policy(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => RedirectPolicy::None, + http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit), + http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow, + }); + for (key, value) in &extension_request.headers { + request = request.header(key, value); + } + let body = extension_request + .body + .clone() + .map(AsyncBody::from) + .unwrap_or_default(); + request.body(body).map_err(anyhow::Error::from) +} + +async fn convert_response( + response: &mut ::http_client::Response, +) -> Result { + let mut extension_response = http_client::HttpResponse { + body: Vec::new(), + headers: Vec::new(), + }; + + for (key, value) in response.headers() { + extension_response + .headers + .push((key.to_string(), value.to_str().unwrap_or("").to_string())); + } + + response + .body_mut() + .read_to_end(&mut extension_response.body) + .await?; + + Ok(extension_response) +} + +#[async_trait] +impl nodejs::Host for WasmState { + async fn node_binary_path(&mut self) -> wasmtime::Result> { + self.host + .node_runtime + .binary_path() + .await + .map(|path| path.to_string_lossy().to_string()) + .to_wasmtime_result() + } + + async fn npm_package_latest_version( + &mut self, + package_name: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_package_latest_version(&package_name) + .await + .to_wasmtime_result() + } + + async fn npm_package_installed_version( + &mut self, + package_name: String, + ) -> wasmtime::Result, String>> { + self.host + .node_runtime + 
.npm_package_installed_version(&self.work_dir(), &package_name) + .await + .to_wasmtime_result() + } + + async fn npm_install_package( + &mut self, + package_name: String, + version: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl lsp::Host for WasmState {} + +impl From<::http_client::github::GithubRelease> for github::GithubRelease { + fn from(value: ::http_client::github::GithubRelease) -> Self { + Self { + version: value.tag_name, + assets: value.assets.into_iter().map(Into::into).collect(), + } + } +} + +impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset { + fn from(value: ::http_client::github::GithubReleaseAsset) -> Self { + Self { + name: value.name, + download_url: value.browser_download_url, + } + } +} + +#[async_trait] +impl github::Host for WasmState { + async fn latest_github_release( + &mut self, + repo: String, + options: github::GithubReleaseOptions, + ) -> wasmtime::Result> { + maybe!(async { + let release = ::http_client::github::latest_github_release( + &repo, + options.require_assets, + options.pre_release, + self.host.http_client.clone(), + ) + .await?; + Ok(release.into()) + }) + .await + .to_wasmtime_result() + } + + async fn github_release_by_tag_name( + &mut self, + repo: String, + tag: String, + ) -> wasmtime::Result> { + maybe!(async { + let release = ::http_client::github::get_release_by_tag_name( + &repo, + &tag, + self.host.http_client.clone(), + ) + .await?; + Ok(release.into()) + }) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl platform::Host for WasmState { + async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { + Ok(( + match env::consts::OS { + "macos" => platform::Os::Mac, + "linux" => platform::Os::Linux, + "windows" => platform::Os::Windows, + _ => panic!("unsupported os"), + }, + match 
env::consts::ARCH { + "aarch64" => platform::Architecture::Aarch64, + "x86" => platform::Architecture::X86, + "x86_64" => platform::Architecture::X8664, + _ => panic!("unsupported architecture"), + }, + )) + } +} + +#[async_trait] +impl slash_command::Host for WasmState {} + +#[async_trait] +impl ExtensionImports for WasmState { + async fn get_settings( + &mut self, + location: Option, + category: String, + key: Option, + ) -> wasmtime::Result> { + self.on_main_thread(|cx| { + async move { + let location = location + .as_ref() + .map(|location| ::settings::SettingsLocation { + worktree_id: WorktreeId::from_proto(location.worktree_id), + path: Path::new(&location.path), + }); + + cx.update(|cx| match category.as_str() { + "language" => { + let key = key.map(|k| LanguageName::new(&k)); + let settings = + AllLanguageSettings::get(location, cx).language(key.as_ref()); + Ok(serde_json::to_string(&settings::LanguageSettings { + tab_size: settings.tab_size, + })?) + } + "lsp" => { + let settings = key + .and_then(|key| { + ProjectSettings::get(location, cx) + .lsp + .get(&Arc::::from(key)) + }) + .cloned() + .unwrap_or_default(); + Ok(serde_json::to_string(&settings::LspSettings { + binary: settings.binary.map(|binary| settings::BinarySettings { + path: binary.path, + arguments: binary.arguments, + }), + settings: settings.settings, + initialization_options: settings.initialization_options, + })?) + } + _ => { + bail!("Unknown settings category: {}", category); + } + }) + } + .boxed_local() + }) + .await? 
+ .to_wasmtime_result() + } + + async fn set_language_server_installation_status( + &mut self, + server_name: String, + status: LanguageServerInstallationStatus, + ) -> wasmtime::Result<()> { + let status = match status { + LanguageServerInstallationStatus::CheckingForUpdate => { + LanguageServerBinaryStatus::CheckingForUpdate + } + LanguageServerInstallationStatus::Downloading => { + LanguageServerBinaryStatus::Downloading + } + LanguageServerInstallationStatus::None => LanguageServerBinaryStatus::None, + LanguageServerInstallationStatus::Failed(error) => { + LanguageServerBinaryStatus::Failed { error } + } + }; + + self.host + .language_registry + .update_lsp_status(language::LanguageServerName(server_name.into()), status); + Ok(()) + } + + async fn download_file( + &mut self, + url: String, + path: String, + file_type: DownloadedFileType, + ) -> wasmtime::Result> { + maybe!(async { + let path = PathBuf::from(path); + let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref()); + + self.host.fs.create_dir(&extension_work_dir).await?; + + let destination_path = self + .host + .writeable_path_from_extension(&self.manifest.id, &path)?; + + let mut response = self + .host + .http_client + .get(&url, Default::default(), true) + .await + .map_err(|err| anyhow!("error downloading release: {}", err))?; + + if !response.status().is_success() { + Err(anyhow!( + "download failed with status {}", + response.status().to_string() + ))?; + } + let body = BufReader::new(response.body_mut()); + + match file_type { + DownloadedFileType::Uncompressed => { + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::Gzip => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::GzipTar => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + 
.extract_tar_file(&destination_path, Archive::new(body)) + .await?; + } + DownloadedFileType::Zip => { + futures::pin_mut!(body); + node_runtime::extract_zip(&destination_path, body) + .await + .with_context(|| format!("failed to unzip {} archive", path.display()))?; + } + } + + Ok(()) + }) + .await + .to_wasmtime_result() + } + + async fn make_file_executable(&mut self, path: String) -> wasmtime::Result> { + #[allow(unused)] + let path = self + .host + .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?; + + #[cfg(unix)] + { + use std::fs::{self, Permissions}; + use std::os::unix::fs::PermissionsExt; + + return fs::set_permissions(&path, Permissions::from_mode(0o755)) + .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}")) + .to_wasmtime_result(); + } + + #[cfg(not(unix))] + Ok(Ok(())) + } +} diff --git a/crates/extension_api/Cargo.toml b/crates/extension_api/Cargo.toml index 89d7ed947b..1a2b25b0f6 100644 --- a/crates/extension_api/Cargo.toml +++ b/crates/extension_api/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_extension_api" -version = "0.1.0" +version = "0.2.0" description = "APIs for creating Zed extensions in Rust" repository = "https://github.com/zed-industries/zed" documentation = "https://docs.rs/zed_extension_api" @@ -8,6 +8,9 @@ keywords = ["zed", "extension"] edition = "2021" license = "Apache-2.0" +# Remove when we're ready to publish v0.2.0. +publish = false + [lints] workspace = true diff --git a/crates/extension_api/wit/since_v0.2.0/common.wit b/crates/extension_api/wit/since_v0.2.0/common.wit new file mode 100644 index 0000000000..c4f321f4c7 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/common.wit @@ -0,0 +1,9 @@ +interface common { + /// A (half-open) range (`[start, end)`). + record range { + /// The start of the range (inclusive). + start: u32, + /// The end of the range (exclusive). 
+ end: u32, + } +} diff --git a/crates/extension_api/wit/since_v0.2.0/extension.wit b/crates/extension_api/wit/since_v0.2.0/extension.wit new file mode 100644 index 0000000000..c7599f93ff --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/extension.wit @@ -0,0 +1,147 @@ +package zed:extension; + +world extension { + import github; + import http-client; + import platform; + import nodejs; + + use common.{range}; + use lsp.{completion, symbol}; + use slash-command.{slash-command, slash-command-argument-completion, slash-command-output}; + + /// Initializes the extension. + export init-extension: func(); + + /// The type of a downloaded file. + enum downloaded-file-type { + /// A gzipped file (`.gz`). + gzip, + /// A gzipped tar archive (`.tar.gz`). + gzip-tar, + /// A ZIP file (`.zip`). + zip, + /// An uncompressed file. + uncompressed, + } + + /// The installation status for a language server. + variant language-server-installation-status { + /// The language server has no installation status. + none, + /// The language server is being downloaded. + downloading, + /// The language server is checking for updates. + checking-for-update, + /// The language server installation failed for specified reason. + failed(string), + } + + record settings-location { + worktree-id: u64, + path: string, + } + + import get-settings: func(path: option, category: string, key: option) -> result; + + /// Downloads a file from the given URL and saves it to the given path within the extension's + /// working directory. + /// + /// The file will be extracted according to the given file type. + import download-file: func(url: string, file-path: string, file-type: downloaded-file-type) -> result<_, string>; + + /// Makes the file at the given path executable. + import make-file-executable: func(filepath: string) -> result<_, string>; + + /// Updates the installation status for the given language server. 
+ import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status); + + /// A list of environment variables. + type env-vars = list>; + + /// A command. + record command { + /// The command to execute. + command: string, + /// The arguments to pass to the command. + args: list, + /// The environment variables to set for the command. + env: env-vars, + } + + /// A Zed worktree. + resource worktree { + /// Returns the ID of the worktree. + id: func() -> u64; + /// Returns the root path of the worktree. + root-path: func() -> string; + /// Returns the textual contents of the specified file in the worktree. + read-text-file: func(path: string) -> result; + /// Returns the path to the given binary name, if one is present on the `$PATH`. + which: func(binary-name: string) -> option; + /// Returns the current shell environment. + shell-env: func() -> env-vars; + } + + /// A key-value store. + resource key-value-store { + /// Inserts an entry under the specified key. + insert: func(key: string, value: string) -> result<_, string>; + } + + /// Returns the command used to start up the language server. + export language-server-command: func(language-server-id: string, worktree: borrow) -> result; + + /// Returns the initialization options to pass to the language server on startup. + /// + /// The initialization options are represented as a JSON string. + export language-server-initialization-options: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// Returns the workspace configuration options to pass to the language server. + export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// A label containing some code. + record code-label { + /// The source code to parse with Tree-sitter. + code: string, + /// The spans to display in the label. + spans: list, + /// The range of the displayed label to include when filtering. 
+ filter-range: range, + } + + /// A span within a code label. + variant code-label-span { + /// A range into the parsed code. + code-range(range), + /// A span containing a code literal. + literal(code-label-span-literal), + } + + /// A span containing a code literal. + record code-label-span-literal { + /// The literal text. + text: string, + /// The name of the highlight to use for this literal. + highlight-name: option, + } + + export labels-for-completions: func(language-server-id: string, completions: list) -> result>, string>; + export labels-for-symbols: func(language-server-id: string, symbols: list) -> result>, string>; + + /// Returns the completions that should be shown when completing the provided slash command with the given query. + export complete-slash-command-argument: func(command: slash-command, args: list) -> result, string>; + + /// Returns the output from running the provided slash command. + export run-slash-command: func(command: slash-command, args: list, worktree: option>) -> result; + + /// Returns a list of packages as suggestions to be included in the `/docs` + /// search results. + /// + /// This can be used to provide completions for known packages (e.g., from the + /// local project or a registry) before a package has been indexed. + export suggest-docs-packages: func(provider-name: string) -> result, string>; + + /// Indexes the docs for the specified package. + export index-docs: func(provider-name: string, package-name: string, database: borrow) -> result<_, string>; +} diff --git a/crates/extension_api/wit/since_v0.2.0/github.wit b/crates/extension_api/wit/since_v0.2.0/github.wit new file mode 100644 index 0000000000..bb138f5d31 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/github.wit @@ -0,0 +1,33 @@ +interface github { + /// A GitHub release. + record github-release { + /// The version of the release. + version: string, + /// The list of assets attached to the release. 
+ assets: list, + } + + /// An asset from a GitHub release. + record github-release-asset { + /// The name of the asset. + name: string, + /// The download URL for the asset. + download-url: string, + } + + /// The options used to filter down GitHub releases. + record github-release-options { + /// Whether releases without assets should be included. + require-assets: bool, + /// Whether pre-releases should be included. + pre-release: bool, + } + + /// Returns the latest release for the given GitHub repository. + latest-github-release: func(repo: string, options: github-release-options) -> result; + + /// Returns the GitHub release with the specified tag name for the given GitHub repository. + /// + /// Returns an error if a release with the given tag name does not exist. + github-release-by-tag-name: func(repo: string, tag: string) -> result; +} diff --git a/crates/extension_api/wit/since_v0.2.0/http-client.wit b/crates/extension_api/wit/since_v0.2.0/http-client.wit new file mode 100644 index 0000000000..bb0206c17a --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/http-client.wit @@ -0,0 +1,67 @@ +interface http-client { + /// An HTTP request. + record http-request { + /// The HTTP method for the request. + method: http-method, + /// The URL to which the request should be made. + url: string, + /// The headers for the request. + headers: list>, + /// The request body. + body: option>, + /// The policy to use for redirects. + redirect-policy: redirect-policy, + } + + /// HTTP methods. + enum http-method { + /// `GET` + get, + /// `HEAD` + head, + /// `POST` + post, + /// `PUT` + put, + /// `DELETE` + delete, + /// `OPTIONS` + options, + /// `PATCH` + patch, + } + + /// The policy for dealing with redirects received from the server. + variant redirect-policy { + /// Redirects from the server will not be followed. + /// + /// This is the default behavior. + no-follow, + /// Redirects from the server will be followed up to the specified limit. 
+ follow-limit(u32), + /// All redirects from the server will be followed. + follow-all, + } + + /// An HTTP response. + record http-response { + /// The response headers. + headers: list>, + /// The response body. + body: list, + } + + /// Performs an HTTP request and returns the response. + fetch: func(req: http-request) -> result; + + /// An HTTP response stream. + resource http-response-stream { + /// Retrieves the next chunk of data from the response stream. + /// + /// Returns `Ok(None)` if the stream has ended. + next-chunk: func() -> result>, string>; + } + + /// Performs an HTTP request and returns a response stream. + fetch-stream: func(req: http-request) -> result; +} diff --git a/crates/extension_api/wit/since_v0.2.0/lsp.wit b/crates/extension_api/wit/since_v0.2.0/lsp.wit new file mode 100644 index 0000000000..19e81b6b14 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/lsp.wit @@ -0,0 +1,83 @@ +interface lsp { + /// An LSP completion. + record completion { + label: string, + detail: option, + kind: option, + insert-text-format: option, + } + + /// The kind of an LSP completion. + variant completion-kind { + text, + method, + function, + %constructor, + field, + variable, + class, + %interface, + module, + property, + unit, + value, + %enum, + keyword, + snippet, + color, + file, + reference, + folder, + enum-member, + constant, + struct, + event, + operator, + type-parameter, + other(s32), + } + + /// Defines how to interpret the insert text in a completion item. + variant insert-text-format { + plain-text, + snippet, + other(s32), + } + + /// An LSP symbol. + record symbol { + kind: symbol-kind, + name: string, + } + + /// The kind of an LSP symbol. 
+ variant symbol-kind { + file, + module, + namespace, + %package, + class, + method, + property, + field, + %constructor, + %enum, + %interface, + function, + variable, + constant, + %string, + number, + boolean, + array, + object, + key, + null, + enum-member, + struct, + event, + operator, + type-parameter, + other(s32), + } +} diff --git a/crates/extension_api/wit/since_v0.2.0/nodejs.wit b/crates/extension_api/wit/since_v0.2.0/nodejs.wit new file mode 100644 index 0000000000..c814548314 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/nodejs.wit @@ -0,0 +1,13 @@ +interface nodejs { + /// Returns the path to the Node binary used by Zed. + node-binary-path: func() -> result; + + /// Returns the latest version of the given NPM package. + npm-package-latest-version: func(package-name: string) -> result; + + /// Returns the installed version of the given NPM package, if it exists. + npm-package-installed-version: func(package-name: string) -> result, string>; + + /// Installs the specified NPM package. + npm-install-package: func(package-name: string, version: string) -> result<_, string>; +} diff --git a/crates/extension_api/wit/since_v0.2.0/platform.wit b/crates/extension_api/wit/since_v0.2.0/platform.wit new file mode 100644 index 0000000000..48472a99bc --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/platform.wit @@ -0,0 +1,24 @@ +interface platform { + /// An operating system. + enum os { + /// macOS. + mac, + /// Linux. + linux, + /// Windows. + windows, + } + + /// A platform architecture. + enum architecture { + /// AArch64 (e.g., Apple Silicon). + aarch64, + /// x86. + x86, + /// x86-64. + x8664, + } + + /// Gets the current operating system and architecture. 
+ current-platform: func() -> tuple; +} diff --git a/crates/extension_api/wit/since_v0.2.0/settings.rs b/crates/extension_api/wit/since_v0.2.0/settings.rs new file mode 100644 index 0000000000..5c6cae7064 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/settings.rs @@ -0,0 +1,29 @@ +use serde::{Deserialize, Serialize}; +use std::num::NonZeroU32; + +/// The settings for a particular language. +#[derive(Debug, Serialize, Deserialize)] +pub struct LanguageSettings { + /// How many columns a tab should occupy. + pub tab_size: NonZeroU32, +} + +/// The settings for a particular language server. +#[derive(Default, Debug, Serialize, Deserialize)] +pub struct LspSettings { + /// The settings for the language server binary. + pub binary: Option, + /// The initialization options to pass to the language server. + pub initialization_options: Option, + /// The settings to pass to language server. + pub settings: Option, +} + +/// The settings for a language server binary. +#[derive(Debug, Serialize, Deserialize)] +pub struct BinarySettings { + /// The path to the binary. + pub path: Option, + /// The arguments to pass to the binary. + pub arguments: Option>, +} diff --git a/crates/extension_api/wit/since_v0.2.0/slash-command.wit b/crates/extension_api/wit/since_v0.2.0/slash-command.wit new file mode 100644 index 0000000000..f52561c2ef --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/slash-command.wit @@ -0,0 +1,41 @@ +interface slash-command { + use common.{range}; + + /// A slash command for use in the Assistant. + record slash-command { + /// The name of the slash command. + name: string, + /// The description of the slash command. + description: string, + /// The tooltip text to display for the run button. + tooltip-text: string, + /// Whether this slash command requires an argument. + requires-argument: bool, + } + + /// The output of a slash command. + record slash-command-output { + /// The text produced by the slash command. 
+ text: string, + /// The list of sections to show in the slash command placeholder. + sections: list, + } + + /// A section in the slash command output. + record slash-command-output-section { + /// The range this section occupies. + range: range, + /// The label to display in the placeholder for this section. + label: string, + } + + /// A completion for a slash command argument. + record slash-command-argument-completion { + /// The label to display for this completion. + label: string, + /// The new text that should be inserted into the command when this completion is accepted. + new-text: string, + /// Whether the command should be run when accepting this completion. + run-command: bool, + } +} diff --git a/extensions/test-extension/Cargo.toml b/extensions/test-extension/Cargo.toml index 094302e89f..5e17a9a6a3 100644 --- a/extensions/test-extension/Cargo.toml +++ b/extensions/test-extension/Cargo.toml @@ -13,4 +13,4 @@ path = "src/test_extension.rs" crate-type = ["cdylib"] [dependencies] -zed_extension_api = "0.1.0" +zed_extension_api = { path = "../../crates/extension_api" } From 91ffa02e2c7ee30b9a172ce5944ad96a747a453e Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 13 Sep 2024 13:17:49 -0400 Subject: [PATCH 072/270] /auto (#16696) Add `/auto` behind a feature flag that's disabled for now, even for staff. We've decided on a different design for context inference, but there are parts of /auto that will be useful for that, so we want them in the code base even if they're unused for now. 
Release Notes: - N/A --------- Co-authored-by: Antonio Scandurra Co-authored-by: Marshall Bowers --- Cargo.lock | 27 + Cargo.toml | 2 + crates/assistant/src/assistant.rs | 23 +- crates/assistant/src/assistant_panel.rs | 14 + crates/assistant/src/assistant_settings.rs | 1 + crates/assistant/src/slash_command.rs | 1 + .../src/slash_command/auto_command.rs | 360 ++++++ .../slash_command/prompt_after_summary.txt | 24 + .../slash_command/prompt_before_summary.txt | 31 + .../src/slash_command/search_command.rs | 9 +- crates/collab/k8s/collab.template.yml | 10 +- crates/collab/src/db/queries/projects.rs | 5 + crates/collab/src/db/queries/rooms.rs | 5 + crates/collab/src/lib.rs | 8 +- crates/collab/src/llm.rs | 4 +- crates/collab/src/llm/db/queries/providers.rs | 13 +- crates/collab/src/llm/db/seed.rs | 9 + crates/collab/src/tests/test_server.rs | 4 +- crates/feature_flags/Cargo.toml | 1 + crates/feature_flags/src/feature_flags.rs | 58 +- crates/fs/src/fs.rs | 20 +- crates/git/src/status.rs | 1 - crates/http_client/src/http_client.rs | 4 + .../language_model/src/model/cloud_model.rs | 4 +- .../language_model/src/provider/anthropic.rs | 2 +- crates/language_model/src/provider/google.rs | 4 +- crates/language_model/src/provider/open_ai.rs | 2 +- crates/language_model/src/registry.rs | 6 +- crates/project_panel/src/project_panel.rs | 1 + crates/proto/proto/zed.proto | 1 + crates/semantic_index/Cargo.toml | 4 + crates/semantic_index/examples/index.rs | 5 +- crates/semantic_index/src/embedding.rs | 12 +- crates/semantic_index/src/embedding_index.rs | 469 +++++++ crates/semantic_index/src/indexing.rs | 49 + crates/semantic_index/src/project_index.rs | 523 ++++++++ .../src/project_index_debug_view.rs | 16 +- crates/semantic_index/src/semantic_index.rs | 1135 ++--------------- crates/semantic_index/src/summary_backlog.rs | 48 + crates/semantic_index/src/summary_index.rs | 693 ++++++++++ crates/semantic_index/src/worktree_index.rs | 217 ++++ crates/worktree/src/worktree.rs | 5 + 42 
files changed, 2776 insertions(+), 1054 deletions(-) create mode 100644 crates/assistant/src/slash_command/auto_command.rs create mode 100644 crates/assistant/src/slash_command/prompt_after_summary.txt create mode 100644 crates/assistant/src/slash_command/prompt_before_summary.txt create mode 100644 crates/semantic_index/src/embedding_index.rs create mode 100644 crates/semantic_index/src/indexing.rs create mode 100644 crates/semantic_index/src/project_index.rs create mode 100644 crates/semantic_index/src/summary_backlog.rs create mode 100644 crates/semantic_index/src/summary_index.rs create mode 100644 crates/semantic_index/src/worktree_index.rs diff --git a/Cargo.lock b/Cargo.lock index 5eaf3ddde1..793cb66ad7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -304,6 +304,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "as-raw-xcb-connection" @@ -1709,6 +1712,19 @@ dependencies = [ "profiling", ] +[[package]] +name = "blake3" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + [[package]] name = "block" version = "0.1.6" @@ -2752,6 +2768,12 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "context_servers" version = "0.1.0" @@ -4187,6 +4209,7 @@ dependencies = [ name = "feature_flags" version = "0.1.0" dependencies = [ + "futures 0.3.30", "gpui", ] @@ -9814,10 +9837,13 @@ name = "semantic_index" version = "0.1.0" dependencies = [ "anyhow", + "arrayvec", + "blake3", "client", 
"clock", "collections", "env_logger", + "feature_flags", "fs", "futures 0.3.30", "futures-batch", @@ -9825,6 +9851,7 @@ dependencies = [ "heed", "http_client", "language", + "language_model", "languages", "log", "open_ai", diff --git a/Cargo.toml b/Cargo.toml index 79f5ce2dcf..53109002fa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -309,6 +309,7 @@ aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "91d034ff8b53867143c005acfaa14609147c9a2c" } any_vec = "0.14" anyhow = "1.0.86" +arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = "0.9.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" @@ -325,6 +326,7 @@ bitflags = "2.6.0" blade-graphics = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } blade-macros = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } blade-util = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } +blake3 = "1.5.3" cargo_metadata = "0.18" cargo_toml = "0.20" chrono = { version = "0.4", features = ["serde"] } diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 70e37ba239..7a73c188ec 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -37,13 +37,13 @@ use language_model::{ pub(crate) use model_selector::*; pub use prompts::PromptBuilder; use prompts::PromptLoadingParams; -use semantic_index::{CloudEmbeddingProvider, SemanticIndex}; +use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - context_server_command, default_command, diagnostics_command, docs_command, fetch_command, - file_command, now_command, project_command, prompt_command, search_command, symbols_command, - tab_command, terminal_command, workflow_command, + 
auto_command, context_server_command, default_command, diagnostics_command, docs_command, + fetch_command, file_command, now_command, project_command, prompt_command, search_command, + symbols_command, tab_command, terminal_command, workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -210,12 +210,13 @@ pub fn init( let client = client.clone(); async move { let embedding_provider = CloudEmbeddingProvider::new(client.clone()); - let semantic_index = SemanticIndex::new( + let semantic_index = SemanticDb::new( paths::embeddings_dir().join("semantic-index-db.0.mdb"), Arc::new(embedding_provider), &mut cx, ) .await?; + cx.update(|cx| cx.set_global(semantic_index)) } }) @@ -364,6 +365,7 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) { fn register_slash_commands(prompt_builder: Option>, cx: &mut AppContext) { let slash_command_registry = SlashCommandRegistry::global(cx); + slash_command_registry.register_command(file_command::FileSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); slash_command_registry.register_command(tab_command::TabSlashCommand, true); @@ -382,6 +384,17 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut } slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + // [#auto-staff-ship] TODO remove this when /auto is no longer staff-shipped + slash_command_registry.register_command(auto_command::AutoCommand, true); + } + } + }) + .detach(); + update_slash_commands_from_settings(cx); cx.observe_global::(update_slash_commands_from_settings) .detach(); diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 634f2231cd..51c9aa9b4e 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -4723,6 +4723,20 @@ impl 
Render for ContextEditorToolbarItem { let weak_self = cx.view().downgrade(); let right_side = h_flex() .gap_2() + // TODO display this in a nicer way, once we have a design for it. + // .children({ + // let project = self + // .workspace + // .upgrade() + // .map(|workspace| workspace.read(cx).project().downgrade()); + // + // let scan_items_remaining = cx.update_global(|db: &mut SemanticDb, cx| { + // project.and_then(|project| db.remaining_summaries(&project, cx)) + // }); + + // scan_items_remaining + // .map(|remaining_items| format!("Files to scan: {}", remaining_items)) + // }) .child( ModelSelector::new( self.fs.clone(), diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index 3e326886d5..7939eacd93 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -519,6 +519,7 @@ impl Settings for AssistantSettings { &mut settings.default_model, value.default_model.map(Into::into), ); + // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference } Ok(settings) diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index b1a97688b2..387e8231e4 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -19,6 +19,7 @@ use std::{ use ui::ActiveTheme; use workspace::Workspace; +pub mod auto_command; pub mod context_server_command; pub mod default_command; pub mod diagnostics_command; diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs new file mode 100644 index 0000000000..cedfc63702 --- /dev/null +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -0,0 +1,360 @@ +use super::create_label_for_command; +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Result}; +use assistant_slash_command::ArgumentCompletion; +use feature_flags::FeatureFlag; +use 
futures::StreamExt; +use gpui::{AppContext, AsyncAppContext, Task, WeakView}; +use language::{CodeLabel, LspAdapterDelegate}; +use language_model::{ + LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, +}; +use semantic_index::{FileSummary, SemanticDb}; +use smol::channel; +use std::sync::{atomic::AtomicBool, Arc}; +use ui::{BorrowAppContext, WindowContext}; +use util::ResultExt; +use workspace::Workspace; + +pub struct AutoSlashCommandFeatureFlag; + +impl FeatureFlag for AutoSlashCommandFeatureFlag { + const NAME: &'static str = "auto-slash-command"; +} + +pub(crate) struct AutoCommand; + +impl SlashCommand for AutoCommand { + fn name(&self) -> String { + "auto".into() + } + + fn description(&self) -> String { + "Automatically infer what context to add, based on your prompt".into() + } + + fn menu_text(&self) -> String { + "Automatically Infer Context".into() + } + + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("auto", &["--prompt"], cx) + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + workspace: Option>, + cx: &mut WindowContext, + ) -> Task>> { + // There's no autocomplete for a prompt, since it's arbitrary text. + // However, we can use this opportunity to kick off a drain of the backlog. + // That way, it can hopefully be done resummarizing by the time we've actually + // typed out our prompt. This re-runs on every keystroke during autocomplete, + // but in the future, we could instead do it only once, when /auto is first entered. 
+ let Some(workspace) = workspace.and_then(|ws| ws.upgrade()) else { + log::warn!("workspace was dropped or unavailable during /auto autocomplete"); + + return Task::ready(Ok(Vec::new())); + }; + + let project = workspace.read(cx).project().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow!("No project indexer, cannot use /auto"))); + }; + + let cx: &mut AppContext = cx; + + cx.spawn(|cx: gpui::AsyncAppContext| async move { + let task = project_index.read_with(&cx, |project_index, cx| { + project_index.flush_summary_backlogs(cx) + })?; + + cx.background_executor().spawn(task).await; + + anyhow::Ok(Vec::new()) + }) + } + + fn requires_argument(&self) -> bool { + true + } + + fn run( + self: Arc, + arguments: &[String], + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + if arguments.is_empty() { + return Task::ready(Err(anyhow!("missing prompt"))); + }; + let argument = arguments.join(" "); + let original_prompt = argument.to_string(); + let project = workspace.read(cx).project().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow!("no project indexer"))); + }; + + let task = cx.spawn(|cx: gpui::AsyncWindowContext| async move { + let summaries = project_index + .read_with(&cx, |project_index, cx| project_index.all_summaries(cx))? + .await?; + + commands_for_summaries(&summaries, &original_prompt, &cx).await + }); + + // As a convenience, append /auto's argument to the end of the prompt + // so you don't have to write it again. 
+ let original_prompt = argument.to_string(); + + cx.background_executor().spawn(async move { + let commands = task.await?; + let mut prompt = String::new(); + + log::info!( + "Translating this response into slash-commands: {:?}", + commands + ); + + for command in commands { + prompt.push('/'); + prompt.push_str(&command.name); + prompt.push(' '); + prompt.push_str(&command.arg); + prompt.push('\n'); + } + + prompt.push('\n'); + prompt.push_str(&original_prompt); + + Ok(SlashCommandOutput { + text: prompt, + sections: Vec::new(), + run_commands_in_text: true, + }) + }) + } +} + +const PROMPT_INSTRUCTIONS_BEFORE_SUMMARY: &str = include_str!("prompt_before_summary.txt"); +const PROMPT_INSTRUCTIONS_AFTER_SUMMARY: &str = include_str!("prompt_after_summary.txt"); + +fn summaries_prompt(summaries: &[FileSummary], original_prompt: &str) -> String { + let json_summaries = serde_json::to_string(summaries).unwrap(); + + format!("{PROMPT_INSTRUCTIONS_BEFORE_SUMMARY}\n{json_summaries}\n{PROMPT_INSTRUCTIONS_AFTER_SUMMARY}\n{original_prompt}") +} + +/// The slash commands that the model is told about, and which we look for in the inference response. +const SUPPORTED_SLASH_COMMANDS: &[&str] = &["search", "file"]; + +#[derive(Debug, Clone)] +struct CommandToRun { + name: String, + arg: String, +} + +/// Given the pre-indexed file summaries for this project, as well as the original prompt +/// string passed to `/auto`, get a list of slash commands to run, along with their arguments. +/// +/// The prompt's output does not include the slashes (to reduce the chance that it makes a mistake), +/// so taking one of these returned Strings and turning it into a real slash-command-with-argument +/// involves prepending a slash to it. +/// +/// This function will validate that each of the returned lines begins with one of SUPPORTED_SLASH_COMMANDS. +/// Any other lines it encounters will be discarded, with a warning logged. 
+async fn commands_for_summaries( + summaries: &[FileSummary], + original_prompt: &str, + cx: &AsyncAppContext, +) -> Result> { + if summaries.is_empty() { + log::warn!("Inferring no context because there were no summaries available."); + return Ok(Vec::new()); + } + + // Use the globally configured model to translate the summaries into slash-commands, + // because Qwen2-7B-Instruct has not done a good job at that task. + let Some(model) = cx.update(|cx| LanguageModelRegistry::read_global(cx).active_model())? else { + log::warn!("Can't infer context because there's no active model."); + return Ok(Vec::new()); + }; + // Only go up to 90% of the actual max token count, to reduce chances of + // exceeding the token count due to inaccuracies in the token counting heuristic. + let max_token_count = (model.max_token_count() * 9) / 10; + + // Rather than recursing (which would require this async function use a pinned box), + // we use an explicit stack of arguments and answers for when we need to "recurse." + let mut stack = vec![summaries]; + let mut final_response = Vec::new(); + let mut prompts = Vec::new(); + + // TODO We only need to create multiple Requests because we currently + // don't have the ability to tell if a CompletionProvider::complete response + // was a "too many tokens in this request" error. If we had that, then + // we could try the request once, instead of having to make separate requests + // to check the token count and then afterwards to run the actual prompt. + let make_request = |prompt: String| LanguageModelRequest { + messages: vec![LanguageModelRequestMessage { + role: Role::User, + content: vec![prompt.into()], + // Nothing in here will benefit from caching + cache: false, + }], + tools: Vec::new(), + stop: Vec::new(), + temperature: 1.0, + }; + + while let Some(current_summaries) = stack.pop() { + // The split can result in one slice being empty and the other having one element. + // Whenever that happens, skip the empty one. 
+ if current_summaries.is_empty() { + continue; + } + + log::info!( + "Inferring prompt context using {} file summaries", + current_summaries.len() + ); + + let prompt = summaries_prompt(&current_summaries, original_prompt); + let start = std::time::Instant::now(); + // Per OpenAI, 1 token ~= 4 chars in English (we go with 4.5 to overestimate a bit, because failed API requests cost a lot of perf) + // Verifying this against an actual model.count_tokens() confirms that it's usually within ~5% of the correct answer, whereas + // getting the correct answer from tiktoken takes hundreds of milliseconds (compared to this arithmetic being ~free). + // source: https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them + let token_estimate = prompt.len() * 2 / 9; + let duration = start.elapsed(); + log::info!( + "Time taken to count tokens for prompt of length {:?}B: {:?}", + prompt.len(), + duration + ); + + if token_estimate < max_token_count { + prompts.push(prompt); + } else if current_summaries.len() == 1 { + log::warn!("Inferring context for a single file's summary failed because the prompt's token length exceeded the model's token limit."); + } else { + log::info!( + "Context inference using file summaries resulted in a prompt containing {token_estimate} tokens, which exceeded the model's max of {max_token_count}. 
Retrying as two separate prompts, each including half the number of summaries.", + ); + let (left, right) = current_summaries.split_at(current_summaries.len() / 2); + stack.push(right); + stack.push(left); + } + } + + let all_start = std::time::Instant::now(); + + let (tx, rx) = channel::bounded(1024); + + let completion_streams = prompts + .into_iter() + .map(|prompt| { + let request = make_request(prompt.clone()); + let model = model.clone(); + let tx = tx.clone(); + let stream = model.stream_completion(request, &cx); + + (stream, tx) + }) + .collect::>(); + + cx.background_executor() + .spawn(async move { + let futures = completion_streams + .into_iter() + .enumerate() + .map(|(ix, (stream, tx))| async move { + let start = std::time::Instant::now(); + let events = stream.await?; + log::info!("Time taken for awaiting /await chunk stream #{ix}: {:?}", start.elapsed()); + + let completion: String = events + .filter_map(|event| async { + if let Ok(LanguageModelCompletionEvent::Text(text)) = event { + Some(text) + } else { + None + } + }) + .collect() + .await; + + log::info!("Time taken for all /auto chunks to come back for #{ix}: {:?}", start.elapsed()); + + for line in completion.split('\n') { + if let Some(first_space) = line.find(' ') { + let command = &line[..first_space].trim(); + let arg = &line[first_space..].trim(); + + tx.send(CommandToRun { + name: command.to_string(), + arg: arg.to_string(), + }) + .await?; + } else if !line.trim().is_empty() { + // All slash-commands currently supported in context inference need a space for the argument. 
+ log::warn!( + "Context inference returned a non-blank line that contained no spaces (meaning no argument for the slash command): {:?}", + line + ); + } + } + + anyhow::Ok(()) + }) + .collect::>(); + + let _ = futures::future::try_join_all(futures).await.log_err(); + + let duration = all_start.elapsed(); + eprintln!("All futures completed in {:?}", duration); + }) + .await; + + drop(tx); // Close the channel so that rx.collect() won't hang. This is safe because all futures have completed. + let results = rx.collect::>().await; + eprintln!( + "Finished collecting from the channel with {} results", + results.len() + ); + for command in results { + // Don't return empty or duplicate commands + if !command.name.is_empty() + && !final_response + .iter() + .any(|cmd: &CommandToRun| cmd.name == command.name && cmd.arg == command.arg) + { + if SUPPORTED_SLASH_COMMANDS + .iter() + .any(|supported| &command.name == supported) + { + final_response.push(command); + } else { + log::warn!( + "Context inference returned an unrecognized slash command: {:?}", + command + ); + } + } + } + + // Sort the commands by name (reversed just so that /search appears before /file) + final_response.sort_by(|cmd1, cmd2| cmd1.name.cmp(&cmd2.name).reverse()); + + Ok(final_response) +} diff --git a/crates/assistant/src/slash_command/prompt_after_summary.txt b/crates/assistant/src/slash_command/prompt_after_summary.txt new file mode 100644 index 0000000000..fc139a1fcb --- /dev/null +++ b/crates/assistant/src/slash_command/prompt_after_summary.txt @@ -0,0 +1,24 @@ +Actions have a cost, so only include actions that you think +will be helpful to you in doing a great job answering the +prompt in the future. + +You must respond ONLY with a list of actions you would like to +perform. Each action should be on its own line, and followed by a space and then its parameter. + +Actions can be performed more than once with different parameters. 
+Here is an example valid response: + +``` +file path/to/my/file.txt +file path/to/another/file.txt +search something to search for +search something else to search for +``` + +Once again, do not forget: you must respond ONLY in the format of +one action per line, and the action name should be followed by +its parameter. Your response must not include anything other +than a list of actions, with one action per line, in this format. +It is extremely important that you do not deviate from this format even slightly! + +This is the end of my instructions for how to respond. The rest is the prompt: diff --git a/crates/assistant/src/slash_command/prompt_before_summary.txt b/crates/assistant/src/slash_command/prompt_before_summary.txt new file mode 100644 index 0000000000..5d8db1b8f7 --- /dev/null +++ b/crates/assistant/src/slash_command/prompt_before_summary.txt @@ -0,0 +1,31 @@ +I'm going to give you a prompt. I don't want you to respond +to the prompt itself. I want you to figure out which of the following +actions on my project, if any, would help you answer the prompt. + +Here are the actions: + +## file + +This action's parameter is a file path to one of the files +in the project. If you ask for this action, I will tell you +the full contents of the file, so you can learn all the +details of the file. + +## search + +This action's parameter is a string to do a semantic search for +across the files in the project. (You will have a JSON summary +of all the files in the project.) It will tell you which files this string +(or similar strings; it is a semantic search) appear in, +as well as some context of the lines surrounding each result. +It's very important that you only use this action when you think +that searching across the specific files in this project for the query +in question will be useful. 
For example, don't use this command to search +for queries you might put into a general Web search engine, because those +will be too general to give useful results in this project-specific search. + +--- + +That was the end of the list of actions. + +Here is a JSON summary of each of the files in my project: diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 4da8a5585f..3a513ed9ad 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -8,7 +8,7 @@ use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; use language::{CodeLabel, LineEnding, LspAdapterDelegate}; -use semantic_index::SemanticIndex; +use semantic_index::SemanticDb; use std::{ fmt::Write, path::PathBuf, @@ -92,8 +92,11 @@ impl SlashCommand for SearchSlashCommand { let project = workspace.read(cx).project().clone(); let fs = project.read(cx).fs().clone(); - let project_index = - cx.update_global(|index: &mut SemanticIndex, cx| index.project_index(project, cx)); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; cx.spawn(|cx| async move { let results = project_index diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml index dcd935166a..f5e454c3fc 100644 --- a/crates/collab/k8s/collab.template.yml +++ b/crates/collab/k8s/collab.template.yml @@ -149,16 +149,16 @@ spec: secretKeyRef: name: google-ai key: api_key - - name: QWEN2_7B_API_KEY + - name: RUNPOD_API_KEY valueFrom: secretKeyRef: - name: hugging-face + name: runpod key: api_key - - name: QWEN2_7B_API_URL + - name: RUNPOD_API_SUMMARY_URL valueFrom: secretKeyRef: - name: hugging-face - key: qwen2_api_url + name: runpod + key: summary - 
name: BLOB_STORE_ACCESS_KEY valueFrom: secretKeyRef: diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index a6956c8496..c6db54b572 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -728,6 +728,11 @@ impl Database { is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, git_status: db_entry.git_status.map(|status| status as i32), + // This is only used in the summarization backlog, so if it's None, + // that just means we won't be able to detect when to resummarize + // based on total number of backlogged bytes - instead, we'd go + // on number of files only. That shouldn't be a huge deal in practice. + size: None, is_fifo: db_entry.is_fifo, }); } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 1669ddbb3b..635e2d232f 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -663,6 +663,11 @@ impl Database { is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, git_status: db_entry.git_status.map(|status| status as i32), + // This is only used in the summarization backlog, so if it's None, + // that just means we won't be able to detect when to resummarize + // based on total number of backlogged bytes - instead, we'd go + // on number of files only. That shouldn't be a huge deal in practice. 
+ size: None, is_fifo: db_entry.is_fifo, }); } diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 461adc3575..81ff3ff21f 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -170,8 +170,8 @@ pub struct Config { pub anthropic_api_key: Option>, pub anthropic_staff_api_key: Option>, pub llm_closed_beta_model_name: Option>, - pub qwen2_7b_api_key: Option>, - pub qwen2_7b_api_url: Option>, + pub runpod_api_key: Option>, + pub runpod_api_summary_url: Option>, pub zed_client_checksum_seed: Option, pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, @@ -235,8 +235,8 @@ impl Config { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - qwen2_7b_api_key: None, - qwen2_7b_api_url: None, + runpod_api_key: None, + runpod_api_summary_url: None, user_backfiller_github_access_token: None, } } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index e1a3454368..def4499ae4 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -402,12 +402,12 @@ async fn perform_completion( LanguageModelProvider::Zed => { let api_key = state .config - .qwen2_7b_api_key + .runpod_api_key .as_ref() .context("no Qwen2-7B API key configured on the server")?; let api_url = state .config - .qwen2_7b_api_url + .runpod_api_summary_url .as_ref() .context("no Qwen2-7B URL configured on the server")?; let chunks = open_ai::stream_completion( diff --git a/crates/collab/src/llm/db/queries/providers.rs b/crates/collab/src/llm/db/queries/providers.rs index 8a73b399c6..7e51061cee 100644 --- a/crates/collab/src/llm/db/queries/providers.rs +++ b/crates/collab/src/llm/db/queries/providers.rs @@ -1,5 +1,5 @@ use super::*; -use sea_orm::QueryOrder; +use sea_orm::{sea_query::OnConflict, QueryOrder}; use std::str::FromStr; use strum::IntoEnumIterator as _; @@ -99,6 +99,17 @@ impl LlmDatabase { ..Default::default() } })) + .on_conflict( + OnConflict::columns([model::Column::ProviderId, model::Column::Name]) + 
.update_columns([ + model::Column::MaxRequestsPerMinute, + model::Column::MaxTokensPerMinute, + model::Column::MaxTokensPerDay, + model::Column::PricePerMillionInputTokens, + model::Column::PricePerMillionOutputTokens, + ]) + .to_owned(), + ) .exec_without_returning(&*tx) .await?; Ok(()) diff --git a/crates/collab/src/llm/db/seed.rs b/crates/collab/src/llm/db/seed.rs index 55c6c30cd5..24bc224227 100644 --- a/crates/collab/src/llm/db/seed.rs +++ b/crates/collab/src/llm/db/seed.rs @@ -40,6 +40,15 @@ pub async fn seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool) price_per_million_input_tokens: 25, // $0.25/MTok price_per_million_output_tokens: 125, // $1.25/MTok }, + ModelParams { + provider: LanguageModelProvider::Zed, + name: "Qwen/Qwen2-7B-Instruct".into(), + max_requests_per_minute: 5, + max_tokens_per_minute: 25_000, // These are arbitrary limits we've set to cap costs; we control this number + max_tokens_per_day: 300_000, + price_per_million_input_tokens: 25, + price_per_million_output_tokens: 125, + }, ]) .await } diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index e691afceda..1421e4c7f7 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -679,8 +679,8 @@ impl TestServer { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - qwen2_7b_api_key: None, - qwen2_7b_api_url: None, + runpod_api_key: None, + runpod_api_summary_url: None, user_backfiller_github_access_token: None, }, }) diff --git a/crates/feature_flags/Cargo.toml b/crates/feature_flags/Cargo.toml index 101e90c646..834e315af3 100644 --- a/crates/feature_flags/Cargo.toml +++ b/crates/feature_flags/Cargo.toml @@ -13,3 +13,4 @@ path = "src/feature_flags.rs" [dependencies] gpui.workspace = true +futures.workspace = true diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index 29768138af..fb4e192023 100644 --- 
a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -1,4 +1,10 @@ +use futures::{channel::oneshot, FutureExt as _}; use gpui::{AppContext, Global, Subscription, ViewContext}; +use std::{ + future::Future, + pin::Pin, + task::{Context, Poll}, +}; #[derive(Default)] struct FeatureFlags { @@ -53,6 +59,15 @@ impl FeatureFlag for ZedPro { const NAME: &'static str = "zed-pro"; } +pub struct AutoCommand {} +impl FeatureFlag for AutoCommand { + const NAME: &'static str = "auto-command"; + + fn enabled_for_staff() -> bool { + false + } +} + pub trait FeatureFlagViewExt { fn observe_flag(&mut self, callback: F) -> Subscription where @@ -75,6 +90,7 @@ where } pub trait FeatureFlagAppExt { + fn wait_for_flag(&mut self) -> WaitForFlag; fn update_flags(&mut self, staff: bool, flags: Vec); fn set_staff(&mut self, staff: bool); fn has_flag(&self) -> bool; @@ -82,7 +98,7 @@ pub trait FeatureFlagAppExt { fn observe_flag(&mut self, callback: F) -> Subscription where - F: Fn(bool, &mut AppContext) + 'static; + F: FnMut(bool, &mut AppContext) + 'static; } impl FeatureFlagAppExt for AppContext { @@ -109,13 +125,49 @@ impl FeatureFlagAppExt for AppContext { .unwrap_or(false) } - fn observe_flag(&mut self, callback: F) -> Subscription + fn observe_flag(&mut self, mut callback: F) -> Subscription where - F: Fn(bool, &mut AppContext) + 'static, + F: FnMut(bool, &mut AppContext) + 'static, { self.observe_global::(move |cx| { let feature_flags = cx.global::(); callback(feature_flags.has_flag::(), cx); }) } + + fn wait_for_flag(&mut self) -> WaitForFlag { + let (tx, rx) = oneshot::channel::(); + let mut tx = Some(tx); + let subscription: Option; + + match self.try_global::() { + Some(feature_flags) => { + subscription = None; + tx.take().unwrap().send(feature_flags.has_flag::()).ok(); + } + None => { + subscription = Some(self.observe_global::(move |cx| { + let feature_flags = cx.global::(); + if let Some(tx) = tx.take() { + 
tx.send(feature_flags.has_flag::()).ok(); + } + })); + } + } + + WaitForFlag(rx, subscription) + } +} + +pub struct WaitForFlag(oneshot::Receiver, Option); + +impl Future for WaitForFlag { + type Output = bool; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + self.0.poll_unpin(cx).map(|result| { + self.1.take(); + result.unwrap_or(false) + }) + } } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 0ec5a4c601..b649831fd2 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -171,6 +171,7 @@ pub struct Metadata { pub mtime: SystemTime, pub is_symlink: bool, pub is_dir: bool, + pub len: u64, pub is_fifo: bool, } @@ -497,6 +498,7 @@ impl Fs for RealFs { Ok(Some(Metadata { inode, mtime: metadata.modified().unwrap(), + len: metadata.len(), is_symlink, is_dir: metadata.file_type().is_dir(), is_fifo, @@ -800,11 +802,13 @@ enum FakeFsEntry { File { inode: u64, mtime: SystemTime, + len: u64, content: Vec, }, Dir { inode: u64, mtime: SystemTime, + len: u64, entries: BTreeMap>>, git_repo_state: Option>>, }, @@ -935,6 +939,7 @@ impl FakeFs { root: Arc::new(Mutex::new(FakeFsEntry::Dir { inode: 0, mtime: SystemTime::UNIX_EPOCH, + len: 0, entries: Default::default(), git_repo_state: None, })), @@ -969,6 +974,7 @@ impl FakeFs { inode: new_inode, mtime: new_mtime, content: Vec::new(), + len: 0, }))); } btree_map::Entry::Occupied(mut e) => match &mut *e.get_mut().lock() { @@ -1016,6 +1022,7 @@ impl FakeFs { let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, + len: content.len() as u64, content, })); let mut kind = None; @@ -1369,6 +1376,7 @@ impl Fs for FakeFs { Arc::new(Mutex::new(FakeFsEntry::Dir { inode, mtime, + len: 0, entries: Default::default(), git_repo_state: None, })) @@ -1391,6 +1399,7 @@ impl Fs for FakeFs { let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, + len: 0, content: Vec::new(), })); let mut kind = Some(PathEventKind::Created); @@ -1539,6 +1548,7 @@ impl Fs for FakeFs { 
e.insert(Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, + len: content.len() as u64, content: Vec::new(), }))) .clone(), @@ -1694,16 +1704,22 @@ impl Fs for FakeFs { let entry = entry.lock(); Ok(Some(match &*entry { - FakeFsEntry::File { inode, mtime, .. } => Metadata { + FakeFsEntry::File { + inode, mtime, len, .. + } => Metadata { inode: *inode, mtime: *mtime, + len: *len, is_dir: false, is_symlink, is_fifo: false, }, - FakeFsEntry::Dir { inode, mtime, .. } => Metadata { + FakeFsEntry::Dir { + inode, mtime, len, .. + } => Metadata { inode: *inode, mtime: *mtime, + len: *len, is_dir: true, is_symlink, is_fifo: false, diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index e6098ffd3c..6eb98ecefe 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -57,7 +57,6 @@ impl GitStatus { let stderr = String::from_utf8_lossy(&output.stderr); return Err(anyhow!("git status process failed: {}", stderr)); } - let stdout = String::from_utf8_lossy(&output.stdout); let mut entries = stdout .split('\0') diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 452be0a243..1841a1f394 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -221,6 +221,10 @@ impl HttpClient for HttpClientWithUrl { pub fn client(user_agent: Option, proxy: Option) -> Arc { let mut builder = isahc::HttpClient::builder() + // Some requests to Qwen2 models on Runpod can take 32+ seconds, + // especially if there's a cold boot involved. We may need to have + // those requests use a different http client, because global timeouts + // of 50 and 60 seconds, respectively, would be very high! 
.connect_timeout(Duration::from_secs(5)) .low_speed_timeout(100, Duration::from_secs(5)) .proxy(proxy.clone()); diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index f36b6b2788..be0812eab9 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -17,14 +17,14 @@ pub enum CloudModel { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)] pub enum ZedModel { - #[serde(rename = "qwen2-7b-instruct")] + #[serde(rename = "Qwen/Qwen2-7B-Instruct")] Qwen2_7bInstruct, } impl ZedModel { pub fn id(&self) -> &str { match self { - ZedModel::Qwen2_7bInstruct => "qwen2-7b-instruct", + ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct", } } diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index eac4ad3021..9f7135aef7 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -319,7 +319,7 @@ impl AnthropicModel { }; async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?; let request = anthropic::stream_completion( http_client.as_ref(), &api_url, diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs index fc4a7a7a34..005f35ff8b 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_model/src/provider/google.rs @@ -265,7 +265,7 @@ impl LanguageModel for GoogleLanguageModel { let low_speed_timeout = settings.low_speed_timeout; async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing Google API key"))?; let response = google_ai::count_tokens( http_client.as_ref(), &api_url, @@ -304,7 +304,7 @@ impl LanguageModel for GoogleLanguageModel { }; let future = 
self.rate_limiter.stream(async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing Google API Key"))?; let response = stream_generate_content( http_client.as_ref(), &api_url, diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 3a371499eb..fe5e60caec 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -239,7 +239,7 @@ impl OpenAiLanguageModel { }; let future = self.request_limiter.stream(async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing OpenAI API Key"))?; let request = stream_completion( http_client.as_ref(), &api_url, diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 589dfe776a..b3c8ef5f57 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -159,11 +159,13 @@ impl LanguageModelRegistry { providers } - pub fn available_models(&self, cx: &AppContext) -> Vec> { + pub fn available_models<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator> + 'a { self.providers .values() .flat_map(|provider| provider.provided_models(cx)) - .collect() } pub fn provider(&self, id: &LanguageModelProviderId) -> Option> { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c77a2170dd..c8e1ce28eb 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1823,6 +1823,7 @@ impl ProjectPanel { path: entry.path.join("\0").into(), inode: 0, mtime: entry.mtime, + size: entry.size, is_ignored: entry.is_ignored, is_external: false, is_private: false, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index e5d767fffb..f59e8146b6 100644 --- a/crates/proto/proto/zed.proto +++ 
b/crates/proto/proto/zed.proto @@ -1855,6 +1855,7 @@ message Entry { bool is_external = 8; optional GitStatus git_status = 9; bool is_fifo = 10; + optional uint64 size = 11; } message RepositoryEntry { diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 4fd3a86b29..c8dbb6a9f5 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -19,14 +19,18 @@ crate-type = ["bin"] [dependencies] anyhow.workspace = true +arrayvec.workspace = true +blake3.workspace = true client.workspace = true clock.workspace = true collections.workspace = true +feature_flags.workspace = true fs.workspace = true futures.workspace = true futures-batch.workspace = true gpui.workspace = true language.workspace = true +language_model.workspace = true log.workspace = true heed.workspace = true http_client.workspace = true diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index e536ea1db6..977473d1dc 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -4,7 +4,7 @@ use gpui::App; use http_client::HttpClientWithUrl; use language::language_settings::AllLanguageSettings; use project::Project; -use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticIndex}; +use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; use settings::SettingsStore; use std::{ path::{Path, PathBuf}, @@ -50,7 +50,7 @@ fn main() { )); cx.spawn(|mut cx| async move { - let semantic_index = SemanticIndex::new( + let semantic_index = SemanticDb::new( PathBuf::from("/tmp/semantic-index-db.mdb"), embedding_provider, &mut cx, @@ -71,6 +71,7 @@ fn main() { let project_index = cx .update(|cx| semantic_index.project_index(project.clone(), cx)) + .unwrap() .unwrap(); let (tx, rx) = oneshot::channel(); diff --git a/crates/semantic_index/src/embedding.rs b/crates/semantic_index/src/embedding.rs index b5195c8911..b05c4ac9da 100644 --- 
a/crates/semantic_index/src/embedding.rs +++ b/crates/semantic_index/src/embedding.rs @@ -12,6 +12,12 @@ use futures::{future::BoxFuture, FutureExt}; use serde::{Deserialize, Serialize}; use std::{fmt, future}; +/// Trait for embedding providers. Texts in, vectors out. +pub trait EmbeddingProvider: Sync + Send { + fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result>>; + fn batch_size(&self) -> usize; +} + #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] pub struct Embedding(Vec); @@ -68,12 +74,6 @@ impl fmt::Display for Embedding { } } -/// Trait for embedding providers. Texts in, vectors out. -pub trait EmbeddingProvider: Sync + Send { - fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result>>; - fn batch_size(&self) -> usize; -} - #[derive(Debug)] pub struct TextToEmbed<'a> { pub text: &'a str, diff --git a/crates/semantic_index/src/embedding_index.rs b/crates/semantic_index/src/embedding_index.rs new file mode 100644 index 0000000000..dd7c58dc11 --- /dev/null +++ b/crates/semantic_index/src/embedding_index.rs @@ -0,0 +1,469 @@ +use crate::{ + chunking::{self, Chunk}, + embedding::{Embedding, EmbeddingProvider, TextToEmbed}, + indexing::{IndexingEntryHandle, IndexingEntrySet}, +}; +use anyhow::{anyhow, Context as _, Result}; +use collections::Bound; +use fs::Fs; +use futures::stream::StreamExt; +use futures_batch::ChunksTimeoutStreamExt; +use gpui::{AppContext, Model, Task}; +use heed::types::{SerdeBincode, Str}; +use language::LanguageRegistry; +use log; +use project::{Entry, UpdatedEntriesSet, Worktree}; +use serde::{Deserialize, Serialize}; +use smol::channel; +use std::{ + cmp::Ordering, + future::Future, + iter, + path::Path, + sync::Arc, + time::{Duration, SystemTime}, +}; +use util::ResultExt; +use worktree::Snapshot; + +pub struct EmbeddingIndex { + worktree: Model, + db_connection: heed::Env, + db: heed::Database>, + fs: Arc, + language_registry: Arc, + embedding_provider: Arc, + 
entry_ids_being_indexed: Arc, +} + +impl EmbeddingIndex { + pub fn new( + worktree: Model, + fs: Arc, + db_connection: heed::Env, + embedding_db: heed::Database>, + language_registry: Arc, + embedding_provider: Arc, + entry_ids_being_indexed: Arc, + ) -> Self { + Self { + worktree, + fs, + db_connection, + db: embedding_db, + language_registry, + embedding_provider, + entry_ids_being_indexed, + } + } + + pub fn db(&self) -> &heed::Database> { + &self.db + } + + pub fn index_entries_changed_on_disk( + &self, + cx: &AppContext, + ) -> impl Future> { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + let scan = self.scan_entries(worktree, cx); + let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); + let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); + let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); + async move { + futures::try_join!(scan.task, chunk.task, embed.task, persist)?; + Ok(()) + } + } + + pub fn index_updated_entries( + &self, + updated_entries: UpdatedEntriesSet, + cx: &AppContext, + ) -> impl Future> { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx); + let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); + let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); + let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); + async move { + futures::try_join!(scan.task, chunk.task, embed.task, persist)?; + Ok(()) + } + } + + fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> ScanEntries { + let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); + let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); + let db_connection = self.db_connection.clone(); + let db = 
self.db; + let entries_being_indexed = self.entry_ids_being_indexed.clone(); + let task = cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + let mut db_entries = db + .iter(&txn) + .context("failed to create iterator")? + .move_between_keys() + .peekable(); + + let mut deletion_range: Option<(Bound<&str>, Bound<&str>)> = None; + for entry in worktree.files(false, 0) { + log::trace!("scanning for embedding index: {:?}", &entry.path); + + let entry_db_key = db_key_for_path(&entry.path); + + let mut saved_mtime = None; + while let Some(db_entry) = db_entries.peek() { + match db_entry { + Ok((db_path, db_embedded_file)) => match (*db_path).cmp(&entry_db_key) { + Ordering::Less => { + if let Some(deletion_range) = deletion_range.as_mut() { + deletion_range.1 = Bound::Included(db_path); + } else { + deletion_range = + Some((Bound::Included(db_path), Bound::Included(db_path))); + } + + db_entries.next(); + } + Ordering::Equal => { + if let Some(deletion_range) = deletion_range.take() { + deleted_entry_ranges_tx + .send(( + deletion_range.0.map(ToString::to_string), + deletion_range.1.map(ToString::to_string), + )) + .await?; + } + saved_mtime = db_embedded_file.mtime; + db_entries.next(); + break; + } + Ordering::Greater => { + break; + } + }, + Err(_) => return Err(db_entries.next().unwrap().unwrap_err())?, + } + } + + if entry.mtime != saved_mtime { + let handle = entries_being_indexed.insert(entry.id); + updated_entries_tx.send((entry.clone(), handle)).await?; + } + } + + if let Some(db_entry) = db_entries.next() { + let (db_path, _) = db_entry?; + deleted_entry_ranges_tx + .send((Bound::Included(db_path.to_string()), Bound::Unbounded)) + .await?; + } + + Ok(()) + }); + + ScanEntries { + updated_entries: updated_entries_rx, + deleted_entry_ranges: deleted_entry_ranges_rx, + task, + } + } + + fn scan_updated_entries( + &self, + worktree: Snapshot, + updated_entries: UpdatedEntriesSet, + 
cx: &AppContext, + ) -> ScanEntries { + let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); + let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); + let entries_being_indexed = self.entry_ids_being_indexed.clone(); + let task = cx.background_executor().spawn(async move { + for (path, entry_id, status) in updated_entries.iter() { + match status { + project::PathChange::Added + | project::PathChange::Updated + | project::PathChange::AddedOrUpdated => { + if let Some(entry) = worktree.entry_for_id(*entry_id) { + if entry.is_file() { + let handle = entries_being_indexed.insert(entry.id); + updated_entries_tx.send((entry.clone(), handle)).await?; + } + } + } + project::PathChange::Removed => { + let db_path = db_key_for_path(path); + deleted_entry_ranges_tx + .send((Bound::Included(db_path.clone()), Bound::Included(db_path))) + .await?; + } + project::PathChange::Loaded => { + // Do nothing. + } + } + } + + Ok(()) + }); + + ScanEntries { + updated_entries: updated_entries_rx, + deleted_entry_ranges: deleted_entry_ranges_rx, + task, + } + } + + fn chunk_files( + &self, + worktree_abs_path: Arc, + entries: channel::Receiver<(Entry, IndexingEntryHandle)>, + cx: &AppContext, + ) -> ChunkFiles { + let language_registry = self.language_registry.clone(); + let fs = self.fs.clone(); + let (chunked_files_tx, chunked_files_rx) = channel::bounded(2048); + let task = cx.spawn(|cx| async move { + cx.background_executor() + .scoped(|cx| { + for _ in 0..cx.num_cpus() { + cx.spawn(async { + while let Ok((entry, handle)) = entries.recv().await { + let entry_abs_path = worktree_abs_path.join(&entry.path); + match fs.load(&entry_abs_path).await { + Ok(text) => { + let language = language_registry + .language_for_file_path(&entry.path) + .await + .ok(); + let chunked_file = ChunkedFile { + chunks: chunking::chunk_text( + &text, + language.as_ref(), + &entry.path, + ), + handle, + path: entry.path, + mtime: entry.mtime, + text, + }; + + if 
chunked_files_tx.send(chunked_file).await.is_err() { + return; + } + } + Err(_)=> { + log::error!("Failed to read contents into a UTF-8 string: {entry_abs_path:?}"); + } + } + } + }); + } + }) + .await; + Ok(()) + }); + + ChunkFiles { + files: chunked_files_rx, + task, + } + } + + pub fn embed_files( + embedding_provider: Arc, + chunked_files: channel::Receiver, + cx: &AppContext, + ) -> EmbedFiles { + let embedding_provider = embedding_provider.clone(); + let (embedded_files_tx, embedded_files_rx) = channel::bounded(512); + let task = cx.background_executor().spawn(async move { + let mut chunked_file_batches = + chunked_files.chunks_timeout(512, Duration::from_secs(2)); + while let Some(chunked_files) = chunked_file_batches.next().await { + // View the batch of files as a vec of chunks + // Flatten out to a vec of chunks that we can subdivide into batch sized pieces + // Once those are done, reassemble them back into the files in which they belong + // If any embeddings fail for a file, the entire file is discarded + + let chunks: Vec = chunked_files + .iter() + .flat_map(|file| { + file.chunks.iter().map(|chunk| TextToEmbed { + text: &file.text[chunk.range.clone()], + digest: chunk.digest, + }) + }) + .collect::>(); + + let mut embeddings: Vec> = Vec::new(); + for embedding_batch in chunks.chunks(embedding_provider.batch_size()) { + if let Some(batch_embeddings) = + embedding_provider.embed(embedding_batch).await.log_err() + { + if batch_embeddings.len() == embedding_batch.len() { + embeddings.extend(batch_embeddings.into_iter().map(Some)); + continue; + } + log::error!( + "embedding provider returned unexpected embedding count {}, expected {}", + batch_embeddings.len(), embedding_batch.len() + ); + } + + embeddings.extend(iter::repeat(None).take(embedding_batch.len())); + } + + let mut embeddings = embeddings.into_iter(); + for chunked_file in chunked_files { + let mut embedded_file = EmbeddedFile { + path: chunked_file.path, + mtime: chunked_file.mtime, + 
chunks: Vec::new(), + }; + + let mut embedded_all_chunks = true; + for (chunk, embedding) in + chunked_file.chunks.into_iter().zip(embeddings.by_ref()) + { + if let Some(embedding) = embedding { + embedded_file + .chunks + .push(EmbeddedChunk { chunk, embedding }); + } else { + embedded_all_chunks = false; + } + } + + if embedded_all_chunks { + embedded_files_tx + .send((embedded_file, chunked_file.handle)) + .await?; + } + } + } + Ok(()) + }); + + EmbedFiles { + files: embedded_files_rx, + task, + } + } + + fn persist_embeddings( + &self, + mut deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, + embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, + cx: &AppContext, + ) -> Task> { + let db_connection = self.db_connection.clone(); + let db = self.db; + cx.background_executor().spawn(async move { + while let Some(deletion_range) = deleted_entry_ranges.next().await { + let mut txn = db_connection.write_txn()?; + let start = deletion_range.0.as_ref().map(|start| start.as_str()); + let end = deletion_range.1.as_ref().map(|end| end.as_str()); + log::debug!("deleting embeddings in range {:?}", &(start, end)); + db.delete_range(&mut txn, &(start, end))?; + txn.commit()?; + } + + let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2)); + while let Some(embedded_files) = embedded_files.next().await { + let mut txn = db_connection.write_txn()?; + for (file, _) in &embedded_files { + log::debug!("saving embedding for file {:?}", file.path); + let key = db_key_for_path(&file.path); + db.put(&mut txn, &key, file)?; + } + txn.commit()?; + + drop(embedded_files); + log::debug!("committed"); + } + + Ok(()) + }) + } + + pub fn paths(&self, cx: &AppContext) -> Task>>> { + let connection = self.db_connection.clone(); + let db = self.db; + cx.background_executor().spawn(async move { + let tx = connection + .read_txn() + .context("failed to create read transaction")?; + let result = db + .iter(&tx)? 
+ .map(|entry| Ok(entry?.1.path.clone())) + .collect::>>>(); + drop(tx); + result + }) + } + + pub fn chunks_for_path( + &self, + path: Arc, + cx: &AppContext, + ) -> Task>> { + let connection = self.db_connection.clone(); + let db = self.db; + cx.background_executor().spawn(async move { + let tx = connection + .read_txn() + .context("failed to create read transaction")?; + Ok(db + .get(&tx, &db_key_for_path(&path))? + .ok_or_else(|| anyhow!("no such path"))? + .chunks + .clone()) + }) + } +} + +struct ScanEntries { + updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>, + deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, + task: Task>, +} + +struct ChunkFiles { + files: channel::Receiver, + task: Task>, +} + +pub struct ChunkedFile { + pub path: Arc, + pub mtime: Option, + pub handle: IndexingEntryHandle, + pub text: String, + pub chunks: Vec, +} + +pub struct EmbedFiles { + pub files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, + pub task: Task>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EmbeddedFile { + pub path: Arc, + pub mtime: Option, + pub chunks: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct EmbeddedChunk { + pub chunk: Chunk, + pub embedding: Embedding, +} + +fn db_key_for_path(path: &Arc) -> String { + path.to_string_lossy().replace('/', "\0") +} diff --git a/crates/semantic_index/src/indexing.rs b/crates/semantic_index/src/indexing.rs new file mode 100644 index 0000000000..aca9504891 --- /dev/null +++ b/crates/semantic_index/src/indexing.rs @@ -0,0 +1,49 @@ +use collections::HashSet; +use parking_lot::Mutex; +use project::ProjectEntryId; +use smol::channel; +use std::sync::{Arc, Weak}; + +/// The set of entries that are currently being indexed. +pub struct IndexingEntrySet { + entry_ids: Mutex>, + tx: channel::Sender<()>, +} + +/// When dropped, removes the entry from the set of entries that are being indexed. 
+#[derive(Clone)] +pub(crate) struct IndexingEntryHandle { + entry_id: ProjectEntryId, + set: Weak, +} + +impl IndexingEntrySet { + pub fn new(tx: channel::Sender<()>) -> Self { + Self { + entry_ids: Default::default(), + tx, + } + } + + pub fn insert(self: &Arc, entry_id: ProjectEntryId) -> IndexingEntryHandle { + self.entry_ids.lock().insert(entry_id); + self.tx.send_blocking(()).ok(); + IndexingEntryHandle { + entry_id, + set: Arc::downgrade(self), + } + } + + pub fn len(&self) -> usize { + self.entry_ids.lock().len() + } +} + +impl Drop for IndexingEntryHandle { + fn drop(&mut self) { + if let Some(set) = self.set.upgrade() { + set.tx.send_blocking(()).ok(); + set.entry_ids.lock().remove(&self.entry_id); + } + } +} diff --git a/crates/semantic_index/src/project_index.rs b/crates/semantic_index/src/project_index.rs new file mode 100644 index 0000000000..84a72c1a3d --- /dev/null +++ b/crates/semantic_index/src/project_index.rs @@ -0,0 +1,523 @@ +use crate::{ + embedding::{EmbeddingProvider, TextToEmbed}, + summary_index::FileSummary, + worktree_index::{WorktreeIndex, WorktreeIndexHandle}, +}; +use anyhow::{anyhow, Context, Result}; +use collections::HashMap; +use fs::Fs; +use futures::{stream::StreamExt, FutureExt}; +use gpui::{ + AppContext, Entity, EntityId, EventEmitter, Model, ModelContext, Subscription, Task, WeakModel, +}; +use language::LanguageRegistry; +use log; +use project::{Project, Worktree, WorktreeId}; +use serde::{Deserialize, Serialize}; +use smol::channel; +use std::{cmp::Ordering, future::Future, num::NonZeroUsize, ops::Range, path::Path, sync::Arc}; +use util::ResultExt; + +#[derive(Debug)] +pub struct SearchResult { + pub worktree: Model, + pub path: Arc, + pub range: Range, + pub score: f32, +} + +pub struct WorktreeSearchResult { + pub worktree_id: WorktreeId, + pub path: Arc, + pub range: Range, + pub score: f32, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub enum Status { + Idle, + Loading, + Scanning { 
remaining_count: NonZeroUsize }, +} + +pub struct ProjectIndex { + db_connection: heed::Env, + project: WeakModel, + worktree_indices: HashMap, + language_registry: Arc, + fs: Arc, + last_status: Status, + status_tx: channel::Sender<()>, + embedding_provider: Arc, + _maintain_status: Task<()>, + _subscription: Subscription, +} + +impl ProjectIndex { + pub fn new( + project: Model, + db_connection: heed::Env, + embedding_provider: Arc, + cx: &mut ModelContext, + ) -> Self { + let language_registry = project.read(cx).languages().clone(); + let fs = project.read(cx).fs().clone(); + let (status_tx, mut status_rx) = channel::unbounded(); + let mut this = ProjectIndex { + db_connection, + project: project.downgrade(), + worktree_indices: HashMap::default(), + language_registry, + fs, + status_tx, + last_status: Status::Idle, + embedding_provider, + _subscription: cx.subscribe(&project, Self::handle_project_event), + _maintain_status: cx.spawn(|this, mut cx| async move { + while status_rx.next().await.is_some() { + if this + .update(&mut cx, |this, cx| this.update_status(cx)) + .is_err() + { + break; + } + } + }), + }; + this.update_worktree_indices(cx); + this + } + + pub fn status(&self) -> Status { + self.last_status + } + + pub fn project(&self) -> WeakModel { + self.project.clone() + } + + pub fn fs(&self) -> Arc { + self.fs.clone() + } + + fn handle_project_event( + &mut self, + _: Model, + event: &project::Event, + cx: &mut ModelContext, + ) { + match event { + project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => { + self.update_worktree_indices(cx); + } + _ => {} + } + } + + fn update_worktree_indices(&mut self, cx: &mut ModelContext) { + let Some(project) = self.project.upgrade() else { + return; + }; + + let worktrees = project + .read(cx) + .visible_worktrees(cx) + .filter_map(|worktree| { + if worktree.read(cx).is_local() { + Some((worktree.entity_id(), worktree)) + } else { + None + } + }) + .collect::>(); + + self.worktree_indices + 
.retain(|worktree_id, _| worktrees.contains_key(worktree_id)); + for (worktree_id, worktree) in worktrees { + self.worktree_indices.entry(worktree_id).or_insert_with(|| { + let worktree_index = WorktreeIndex::load( + worktree.clone(), + self.db_connection.clone(), + self.language_registry.clone(), + self.fs.clone(), + self.status_tx.clone(), + self.embedding_provider.clone(), + cx, + ); + + let load_worktree = cx.spawn(|this, mut cx| async move { + let result = match worktree_index.await { + Ok(worktree_index) => { + this.update(&mut cx, |this, _| { + this.worktree_indices.insert( + worktree_id, + WorktreeIndexHandle::Loaded { + index: worktree_index.clone(), + }, + ); + })?; + Ok(worktree_index) + } + Err(error) => { + this.update(&mut cx, |this, _cx| { + this.worktree_indices.remove(&worktree_id) + })?; + Err(Arc::new(error)) + } + }; + + this.update(&mut cx, |this, cx| this.update_status(cx))?; + + result + }); + + WorktreeIndexHandle::Loading { + index: load_worktree.shared(), + } + }); + } + + self.update_status(cx); + } + + fn update_status(&mut self, cx: &mut ModelContext) { + let mut indexing_count = 0; + let mut any_loading = false; + + for index in self.worktree_indices.values_mut() { + match index { + WorktreeIndexHandle::Loading { .. } => { + any_loading = true; + break; + } + WorktreeIndexHandle::Loaded { index, .. 
} => { + indexing_count += index.read(cx).entry_ids_being_indexed().len(); + } + } + } + + let status = if any_loading { + Status::Loading + } else if let Some(remaining_count) = NonZeroUsize::new(indexing_count) { + Status::Scanning { remaining_count } + } else { + Status::Idle + }; + + if status != self.last_status { + self.last_status = status; + cx.emit(status); + } + } + + pub fn search( + &self, + query: String, + limit: usize, + cx: &AppContext, + ) -> Task>> { + let (chunks_tx, chunks_rx) = channel::bounded(1024); + let mut worktree_scan_tasks = Vec::new(); + for worktree_index in self.worktree_indices.values() { + let worktree_index = worktree_index.clone(); + let chunks_tx = chunks_tx.clone(); + worktree_scan_tasks.push(cx.spawn(|cx| async move { + let index = match worktree_index { + WorktreeIndexHandle::Loading { index } => { + index.clone().await.map_err(|error| anyhow!(error))? + } + WorktreeIndexHandle::Loaded { index } => index.clone(), + }; + + index + .read_with(&cx, |index, cx| { + let worktree_id = index.worktree().read(cx).id(); + let db_connection = index.db_connection().clone(); + let db = *index.embedding_index().db(); + cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + let db_entries = db.iter(&txn).context("failed to iterate database")?; + for db_entry in db_entries { + let (_key, db_embedded_file) = db_entry?; + for chunk in db_embedded_file.chunks { + chunks_tx + .send((worktree_id, db_embedded_file.path.clone(), chunk)) + .await?; + } + } + anyhow::Ok(()) + }) + })? 
+ .await + })); + } + drop(chunks_tx); + + let project = self.project.clone(); + let embedding_provider = self.embedding_provider.clone(); + cx.spawn(|cx| async move { + #[cfg(debug_assertions)] + let embedding_query_start = std::time::Instant::now(); + log::info!("Searching for {query}"); + + let query_embeddings = embedding_provider + .embed(&[TextToEmbed::new(&query)]) + .await?; + let query_embedding = query_embeddings + .into_iter() + .next() + .ok_or_else(|| anyhow!("no embedding for query"))?; + + let mut results_by_worker = Vec::new(); + for _ in 0..cx.background_executor().num_cpus() { + results_by_worker.push(Vec::::new()); + } + + #[cfg(debug_assertions)] + let search_start = std::time::Instant::now(); + + cx.background_executor() + .scoped(|cx| { + for results in results_by_worker.iter_mut() { + cx.spawn(async { + while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { + let score = chunk.embedding.similarity(&query_embedding); + let ix = match results.binary_search_by(|probe| { + score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) + }) { + Ok(ix) | Err(ix) => ix, + }; + results.insert( + ix, + WorktreeSearchResult { + worktree_id, + path: path.clone(), + range: chunk.chunk.range.clone(), + score, + }, + ); + results.truncate(limit); + } + }); + } + }) + .await; + + for scan_task in futures::future::join_all(worktree_scan_tasks).await { + scan_task.log_err(); + } + + project.read_with(&cx, |project, cx| { + let mut search_results = Vec::with_capacity(results_by_worker.len() * limit); + for worker_results in results_by_worker { + search_results.extend(worker_results.into_iter().filter_map(|result| { + Some(SearchResult { + worktree: project.worktree_for_id(result.worktree_id, cx)?, + path: result.path, + range: result.range, + score: result.score, + }) + })); + } + search_results.sort_unstable_by(|a, b| { + b.score.partial_cmp(&a.score).unwrap_or(Ordering::Equal) + }); + search_results.truncate(limit); + + #[cfg(debug_assertions)] + { 
+ let search_elapsed = search_start.elapsed(); + log::debug!( + "searched {} entries in {:?}", + search_results.len(), + search_elapsed + ); + let embedding_query_elapsed = embedding_query_start.elapsed(); + log::debug!("embedding query took {:?}", embedding_query_elapsed); + } + + search_results + }) + }) + } + + #[cfg(test)] + pub fn path_count(&self, cx: &AppContext) -> Result { + let mut result = 0; + for worktree_index in self.worktree_indices.values() { + if let WorktreeIndexHandle::Loaded { index, .. } = worktree_index { + result += index.read(cx).path_count()?; + } + } + Ok(result) + } + + pub(crate) fn worktree_index( + &self, + worktree_id: WorktreeId, + cx: &AppContext, + ) -> Option> { + for index in self.worktree_indices.values() { + if let WorktreeIndexHandle::Loaded { index, .. } = index { + if index.read(cx).worktree().read(cx).id() == worktree_id { + return Some(index.clone()); + } + } + } + None + } + + pub(crate) fn worktree_indices(&self, cx: &AppContext) -> Vec> { + let mut result = self + .worktree_indices + .values() + .filter_map(|index| { + if let WorktreeIndexHandle::Loaded { index, .. } = index { + Some(index.clone()) + } else { + None + } + }) + .collect::>(); + result.sort_by_key(|index| index.read(cx).worktree().read(cx).id()); + result + } + + pub fn all_summaries(&self, cx: &AppContext) -> Task>> { + let (summaries_tx, summaries_rx) = channel::bounded(1024); + let mut worktree_scan_tasks = Vec::new(); + for worktree_index in self.worktree_indices.values() { + let worktree_index = worktree_index.clone(); + let summaries_tx: channel::Sender<(String, String)> = summaries_tx.clone(); + worktree_scan_tasks.push(cx.spawn(|cx| async move { + let index = match worktree_index { + WorktreeIndexHandle::Loading { index } => { + index.clone().await.map_err(|error| anyhow!(error))? 
+ } + WorktreeIndexHandle::Loaded { index } => index.clone(), + }; + + index + .read_with(&cx, |index, cx| { + let db_connection = index.db_connection().clone(); + let summary_index = index.summary_index(); + let file_digest_db = summary_index.file_digest_db(); + let summary_db = summary_index.summary_db(); + + cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create db read transaction")?; + let db_entries = file_digest_db + .iter(&txn) + .context("failed to iterate database")?; + for db_entry in db_entries { + let (file_path, db_file) = db_entry?; + + match summary_db.get(&txn, &db_file.digest) { + Ok(opt_summary) => { + // Currently, we only use summaries we already have. If the file hasn't been + // summarized yet, then we skip it and don't include it in the inferred context. + // If we want to do just-in-time summarization, this would be the place to do it! + if let Some(summary) = opt_summary { + summaries_tx + .send((file_path.to_string(), summary.to_string())) + .await?; + } else { + log::warn!("No summary found for {:?}", &db_file); + } + } + Err(err) => { + log::error!( + "Error reading from summary database: {:?}", + err + ); + } + } + } + anyhow::Ok(()) + }) + })? 
+ .await + })); + } + drop(summaries_tx); + + let project = self.project.clone(); + cx.spawn(|cx| async move { + let mut results_by_worker = Vec::new(); + for _ in 0..cx.background_executor().num_cpus() { + results_by_worker.push(Vec::::new()); + } + + cx.background_executor() + .scoped(|cx| { + for results in results_by_worker.iter_mut() { + cx.spawn(async { + while let Ok((filename, summary)) = summaries_rx.recv().await { + results.push(FileSummary { filename, summary }); + } + }); + } + }) + .await; + + for scan_task in futures::future::join_all(worktree_scan_tasks).await { + scan_task.log_err(); + } + + project.read_with(&cx, |_project, _cx| { + results_by_worker.into_iter().flatten().collect() + }) + }) + } + + /// Empty out the backlogs of all the worktrees in the project + pub fn flush_summary_backlogs(&self, cx: &AppContext) -> impl Future { + let flush_start = std::time::Instant::now(); + + futures::future::join_all(self.worktree_indices.values().map(|worktree_index| { + let worktree_index = worktree_index.clone(); + + cx.spawn(|cx| async move { + let index = match worktree_index { + WorktreeIndexHandle::Loading { index } => { + index.clone().await.map_err(|error| anyhow!(error))? + } + WorktreeIndexHandle::Loaded { index } => index.clone(), + }; + let worktree_abs_path = + cx.update(|cx| index.read(cx).worktree().read(cx).abs_path())?; + + index + .read_with(&cx, |index, cx| { + cx.background_executor() + .spawn(index.summary_index().flush_backlog(worktree_abs_path, cx)) + })? + .await + }) + })) + .map(move |results| { + // Log any errors, but don't block the user. These summaries are supposed to + // improve quality by providing extra context, but they aren't hard requirements! 
+ for result in results { + if let Err(err) = result { + log::error!("Error flushing summary backlog: {:?}", err); + } + } + + log::info!("Summary backlog flushed in {:?}", flush_start.elapsed()); + }) + } + + pub fn remaining_summaries(&self, cx: &mut ModelContext) -> usize { + self.worktree_indices(cx) + .iter() + .map(|index| index.read(cx).summary_index().backlog_len()) + .sum() + } +} + +impl EventEmitter for ProjectIndex {} diff --git a/crates/semantic_index/src/project_index_debug_view.rs b/crates/semantic_index/src/project_index_debug_view.rs index e5881a24e7..d6628064ac 100644 --- a/crates/semantic_index/src/project_index_debug_view.rs +++ b/crates/semantic_index/src/project_index_debug_view.rs @@ -55,8 +55,12 @@ impl ProjectIndexDebugView { for index in worktree_indices { let (root_path, worktree_id, worktree_paths) = index.read_with(&cx, |index, cx| { - let worktree = index.worktree.read(cx); - (worktree.abs_path(), worktree.id(), index.paths(cx)) + let worktree = index.worktree().read(cx); + ( + worktree.abs_path(), + worktree.id(), + index.embedding_index().paths(cx), + ) })?; rows.push(Row::Worktree(root_path)); rows.extend( @@ -82,10 +86,12 @@ impl ProjectIndexDebugView { cx: &mut ViewContext, ) -> Option<()> { let project_index = self.index.read(cx); - let fs = project_index.fs.clone(); + let fs = project_index.fs().clone(); let worktree_index = project_index.worktree_index(worktree_id, cx)?.read(cx); - let root_path = worktree_index.worktree.read(cx).abs_path(); - let chunks = worktree_index.chunks_for_path(file_path.clone(), cx); + let root_path = worktree_index.worktree().read(cx).abs_path(); + let chunks = worktree_index + .embedding_index() + .chunks_for_path(file_path.clone(), cx); cx.spawn(|this, mut cx| async move { let chunks = chunks.await?; diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index fad3a5d3e8..f2b325ead6 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ 
b/crates/semantic_index/src/semantic_index.rs @@ -1,48 +1,35 @@ mod chunking; mod embedding; +mod embedding_index; +mod indexing; +mod project_index; mod project_index_debug_view; +mod summary_backlog; +mod summary_index; +mod worktree_index; + +use anyhow::{Context as _, Result}; +use collections::HashMap; +use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; +use project::Project; +use project_index::ProjectIndex; +use std::{path::PathBuf, sync::Arc}; +use ui::ViewContext; +use workspace::Workspace; -use anyhow::{anyhow, Context as _, Result}; -use chunking::{chunk_text, Chunk}; -use collections::{Bound, HashMap, HashSet}; pub use embedding::*; -use fs::Fs; -use futures::{future::Shared, stream::StreamExt, FutureExt}; -use futures_batch::ChunksTimeoutStreamExt; -use gpui::{ - AppContext, AsyncAppContext, BorrowAppContext, Context, Entity, EntityId, EventEmitter, Global, - Model, ModelContext, Subscription, Task, WeakModel, -}; -use heed::types::{SerdeBincode, Str}; -use language::LanguageRegistry; -use parking_lot::Mutex; -use project::{Entry, Project, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId}; -use serde::{Deserialize, Serialize}; -use smol::channel; -use std::{ - cmp::Ordering, - future::Future, - iter, - num::NonZeroUsize, - ops::Range, - path::{Path, PathBuf}, - sync::{Arc, Weak}, - time::{Duration, SystemTime}, -}; -use util::ResultExt; -use worktree::Snapshot; - pub use project_index_debug_view::ProjectIndexDebugView; +pub use summary_index::FileSummary; -pub struct SemanticIndex { +pub struct SemanticDb { embedding_provider: Arc, db_connection: heed::Env, project_indices: HashMap, Model>, } -impl Global for SemanticIndex {} +impl Global for SemanticDb {} -impl SemanticIndex { +impl SemanticDb { pub async fn new( db_path: PathBuf, embedding_provider: Arc, @@ -62,7 +49,45 @@ impl SemanticIndex { .await .context("opening database connection")?; - Ok(SemanticIndex { + cx.update(|cx| { + 
cx.observe_new_views( + |workspace: &mut Workspace, cx: &mut ViewContext| { + let project = workspace.project().clone(); + + if cx.has_global::() { + cx.update_global::(|this, cx| { + let project_index = cx.new_model(|cx| { + ProjectIndex::new( + project.clone(), + this.db_connection.clone(), + this.embedding_provider.clone(), + cx, + ) + }); + + let project_weak = project.downgrade(); + this.project_indices + .insert(project_weak.clone(), project_index); + + cx.on_release(move |_, _, cx| { + if cx.has_global::() { + cx.update_global::(|this, _| { + this.project_indices.remove(&project_weak); + }) + } + }) + .detach(); + }) + } else { + log::info!("No SemanticDb, skipping project index") + } + }, + ) + .detach(); + }) + .ok(); + + Ok(SemanticDb { db_connection, embedding_provider, project_indices: HashMap::default(), @@ -72,985 +97,50 @@ impl SemanticIndex { pub fn project_index( &mut self, project: Model, + _cx: &mut AppContext, + ) -> Option> { + self.project_indices.get(&project.downgrade()).cloned() + } + + pub fn remaining_summaries( + &self, + project: &WeakModel, cx: &mut AppContext, - ) -> Model { - let project_weak = project.downgrade(); - project.update(cx, move |_, cx| { - cx.on_release(move |_, cx| { - if cx.has_global::() { - cx.update_global::(|this, _| { - this.project_indices.remove(&project_weak); - }) - } - }) - .detach(); - }); - - self.project_indices - .entry(project.downgrade()) - .or_insert_with(|| { - cx.new_model(|cx| { - ProjectIndex::new( - project, - self.db_connection.clone(), - self.embedding_provider.clone(), - cx, - ) - }) - }) - .clone() - } -} - -pub struct ProjectIndex { - db_connection: heed::Env, - project: WeakModel, - worktree_indices: HashMap, - language_registry: Arc, - fs: Arc, - last_status: Status, - status_tx: channel::Sender<()>, - embedding_provider: Arc, - _maintain_status: Task<()>, - _subscription: Subscription, -} - -#[derive(Clone)] -enum WorktreeIndexHandle { - Loading { - index: Shared, Arc>>>, - }, - Loaded { - 
index: Model, - }, -} - -impl ProjectIndex { - fn new( - project: Model, - db_connection: heed::Env, - embedding_provider: Arc, - cx: &mut ModelContext, - ) -> Self { - let language_registry = project.read(cx).languages().clone(); - let fs = project.read(cx).fs().clone(); - let (status_tx, mut status_rx) = channel::unbounded(); - let mut this = ProjectIndex { - db_connection, - project: project.downgrade(), - worktree_indices: HashMap::default(), - language_registry, - fs, - status_tx, - last_status: Status::Idle, - embedding_provider, - _subscription: cx.subscribe(&project, Self::handle_project_event), - _maintain_status: cx.spawn(|this, mut cx| async move { - while status_rx.next().await.is_some() { - if this - .update(&mut cx, |this, cx| this.update_status(cx)) - .is_err() - { - break; - } - } - }), - }; - this.update_worktree_indices(cx); - this - } - - pub fn status(&self) -> Status { - self.last_status - } - - pub fn project(&self) -> WeakModel { - self.project.clone() - } - - pub fn fs(&self) -> Arc { - self.fs.clone() - } - - fn handle_project_event( - &mut self, - _: Model, - event: &project::Event, - cx: &mut ModelContext, - ) { - match event { - project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => { - self.update_worktree_indices(cx); - } - _ => {} - } - } - - fn update_worktree_indices(&mut self, cx: &mut ModelContext) { - let Some(project) = self.project.upgrade() else { - return; - }; - - let worktrees = project - .read(cx) - .visible_worktrees(cx) - .filter_map(|worktree| { - if worktree.read(cx).is_local() { - Some((worktree.entity_id(), worktree)) - } else { - None - } - }) - .collect::>(); - - self.worktree_indices - .retain(|worktree_id, _| worktrees.contains_key(worktree_id)); - for (worktree_id, worktree) in worktrees { - self.worktree_indices.entry(worktree_id).or_insert_with(|| { - let worktree_index = WorktreeIndex::load( - worktree.clone(), - self.db_connection.clone(), - self.language_registry.clone(), - self.fs.clone(), - 
self.status_tx.clone(), - self.embedding_provider.clone(), - cx, - ); - - let load_worktree = cx.spawn(|this, mut cx| async move { - let result = match worktree_index.await { - Ok(worktree_index) => { - this.update(&mut cx, |this, _| { - this.worktree_indices.insert( - worktree_id, - WorktreeIndexHandle::Loaded { - index: worktree_index.clone(), - }, - ); - })?; - Ok(worktree_index) - } - Err(error) => { - this.update(&mut cx, |this, _cx| { - this.worktree_indices.remove(&worktree_id) - })?; - Err(Arc::new(error)) - } - }; - - this.update(&mut cx, |this, cx| this.update_status(cx))?; - - result - }); - - WorktreeIndexHandle::Loading { - index: load_worktree.shared(), - } - }); - } - - self.update_status(cx); - } - - fn update_status(&mut self, cx: &mut ModelContext) { - let mut indexing_count = 0; - let mut any_loading = false; - - for index in self.worktree_indices.values_mut() { - match index { - WorktreeIndexHandle::Loading { .. } => { - any_loading = true; - break; - } - WorktreeIndexHandle::Loaded { index, .. } => { - indexing_count += index.read(cx).entry_ids_being_indexed.len(); - } - } - } - - let status = if any_loading { - Status::Loading - } else if let Some(remaining_count) = NonZeroUsize::new(indexing_count) { - Status::Scanning { remaining_count } - } else { - Status::Idle - }; - - if status != self.last_status { - self.last_status = status; - cx.emit(status); - } - } - - pub fn search( - &self, - query: String, - limit: usize, - cx: &AppContext, - ) -> Task>> { - let (chunks_tx, chunks_rx) = channel::bounded(1024); - let mut worktree_scan_tasks = Vec::new(); - for worktree_index in self.worktree_indices.values() { - let worktree_index = worktree_index.clone(); - let chunks_tx = chunks_tx.clone(); - worktree_scan_tasks.push(cx.spawn(|cx| async move { - let index = match worktree_index { - WorktreeIndexHandle::Loading { index } => { - index.clone().await.map_err(|error| anyhow!(error))? 
- } - WorktreeIndexHandle::Loaded { index } => index.clone(), - }; - - index - .read_with(&cx, |index, cx| { - let worktree_id = index.worktree.read(cx).id(); - let db_connection = index.db_connection.clone(); - let db = index.db; - cx.background_executor().spawn(async move { - let txn = db_connection - .read_txn() - .context("failed to create read transaction")?; - let db_entries = db.iter(&txn).context("failed to iterate database")?; - for db_entry in db_entries { - let (_key, db_embedded_file) = db_entry?; - for chunk in db_embedded_file.chunks { - chunks_tx - .send((worktree_id, db_embedded_file.path.clone(), chunk)) - .await?; - } - } - anyhow::Ok(()) - }) - })? - .await - })); - } - drop(chunks_tx); - - let project = self.project.clone(); - let embedding_provider = self.embedding_provider.clone(); - cx.spawn(|cx| async move { - #[cfg(debug_assertions)] - let embedding_query_start = std::time::Instant::now(); - log::info!("Searching for {query}"); - - let query_embeddings = embedding_provider - .embed(&[TextToEmbed::new(&query)]) - .await?; - let query_embedding = query_embeddings - .into_iter() - .next() - .ok_or_else(|| anyhow!("no embedding for query"))?; - - let mut results_by_worker = Vec::new(); - for _ in 0..cx.background_executor().num_cpus() { - results_by_worker.push(Vec::::new()); - } - - #[cfg(debug_assertions)] - let search_start = std::time::Instant::now(); - - cx.background_executor() - .scoped(|cx| { - for results in results_by_worker.iter_mut() { - cx.spawn(async { - while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { - let score = chunk.embedding.similarity(&query_embedding); - let ix = match results.binary_search_by(|probe| { - score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) - }) { - Ok(ix) | Err(ix) => ix, - }; - results.insert( - ix, - WorktreeSearchResult { - worktree_id, - path: path.clone(), - range: chunk.chunk.range.clone(), - score, - }, - ); - results.truncate(limit); - } - }); - } - }) - .await; - - for 
scan_task in futures::future::join_all(worktree_scan_tasks).await { - scan_task.log_err(); - } - - project.read_with(&cx, |project, cx| { - let mut search_results = Vec::with_capacity(results_by_worker.len() * limit); - for worker_results in results_by_worker { - search_results.extend(worker_results.into_iter().filter_map(|result| { - Some(SearchResult { - worktree: project.worktree_for_id(result.worktree_id, cx)?, - path: result.path, - range: result.range, - score: result.score, - }) - })); - } - search_results.sort_unstable_by(|a, b| { - b.score.partial_cmp(&a.score).unwrap_or(Ordering::Equal) - }); - search_results.truncate(limit); - - #[cfg(debug_assertions)] - { - let search_elapsed = search_start.elapsed(); - log::debug!( - "searched {} entries in {:?}", - search_results.len(), - search_elapsed - ); - let embedding_query_elapsed = embedding_query_start.elapsed(); - log::debug!("embedding query took {:?}", embedding_query_elapsed); - } - - search_results + ) -> Option { + self.project_indices.get(project).map(|project_index| { + project_index.update(cx, |project_index, cx| { + project_index.remaining_summaries(cx) }) }) } - - #[cfg(test)] - pub fn path_count(&self, cx: &AppContext) -> Result { - let mut result = 0; - for worktree_index in self.worktree_indices.values() { - if let WorktreeIndexHandle::Loaded { index, .. } = worktree_index { - result += index.read(cx).path_count()?; - } - } - Ok(result) - } - - pub(crate) fn worktree_index( - &self, - worktree_id: WorktreeId, - cx: &AppContext, - ) -> Option> { - for index in self.worktree_indices.values() { - if let WorktreeIndexHandle::Loaded { index, .. } = index { - if index.read(cx).worktree.read(cx).id() == worktree_id { - return Some(index.clone()); - } - } - } - None - } - - pub(crate) fn worktree_indices(&self, cx: &AppContext) -> Vec> { - let mut result = self - .worktree_indices - .values() - .filter_map(|index| { - if let WorktreeIndexHandle::Loaded { index, .. 
} = index { - Some(index.clone()) - } else { - None - } - }) - .collect::>(); - result.sort_by_key(|index| index.read(cx).worktree.read(cx).id()); - result - } -} - -pub struct SearchResult { - pub worktree: Model, - pub path: Arc, - pub range: Range, - pub score: f32, -} - -pub struct WorktreeSearchResult { - pub worktree_id: WorktreeId, - pub path: Arc, - pub range: Range, - pub score: f32, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub enum Status { - Idle, - Loading, - Scanning { remaining_count: NonZeroUsize }, -} - -impl EventEmitter for ProjectIndex {} - -struct WorktreeIndex { - worktree: Model, - db_connection: heed::Env, - db: heed::Database>, - language_registry: Arc, - fs: Arc, - embedding_provider: Arc, - entry_ids_being_indexed: Arc, - _index_entries: Task>, - _subscription: Subscription, -} - -impl WorktreeIndex { - pub fn load( - worktree: Model, - db_connection: heed::Env, - language_registry: Arc, - fs: Arc, - status_tx: channel::Sender<()>, - embedding_provider: Arc, - cx: &mut AppContext, - ) -> Task>> { - let worktree_abs_path = worktree.read(cx).abs_path(); - cx.spawn(|mut cx| async move { - let db = cx - .background_executor() - .spawn({ - let db_connection = db_connection.clone(); - async move { - let mut txn = db_connection.write_txn()?; - let db_name = worktree_abs_path.to_string_lossy(); - let db = db_connection.create_database(&mut txn, Some(&db_name))?; - txn.commit()?; - anyhow::Ok(db) - } - }) - .await?; - cx.new_model(|cx| { - Self::new( - worktree, - db_connection, - db, - status_tx, - language_registry, - fs, - embedding_provider, - cx, - ) - }) - }) - } - - #[allow(clippy::too_many_arguments)] - fn new( - worktree: Model, - db_connection: heed::Env, - db: heed::Database>, - status: channel::Sender<()>, - language_registry: Arc, - fs: Arc, - embedding_provider: Arc, - cx: &mut ModelContext, - ) -> Self { - let (updated_entries_tx, updated_entries_rx) = channel::unbounded(); - let _subscription = 
cx.subscribe(&worktree, move |_this, _worktree, event, _cx| { - if let worktree::Event::UpdatedEntries(update) = event { - _ = updated_entries_tx.try_send(update.clone()); - } - }); - - Self { - db_connection, - db, - worktree, - language_registry, - fs, - embedding_provider, - entry_ids_being_indexed: Arc::new(IndexingEntrySet::new(status)), - _index_entries: cx.spawn(|this, cx| Self::index_entries(this, updated_entries_rx, cx)), - _subscription, - } - } - - async fn index_entries( - this: WeakModel, - updated_entries: channel::Receiver, - mut cx: AsyncAppContext, - ) -> Result<()> { - let index = this.update(&mut cx, |this, cx| this.index_entries_changed_on_disk(cx))?; - index.await.log_err(); - - while let Ok(updated_entries) = updated_entries.recv().await { - let index = this.update(&mut cx, |this, cx| { - this.index_updated_entries(updated_entries, cx) - })?; - index.await.log_err(); - } - - Ok(()) - } - - fn index_entries_changed_on_disk(&self, cx: &AppContext) -> impl Future> { - let worktree = self.worktree.read(cx).snapshot(); - let worktree_abs_path = worktree.abs_path().clone(); - let scan = self.scan_entries(worktree, cx); - let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); - let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); - let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); - async move { - futures::try_join!(scan.task, chunk.task, embed.task, persist)?; - Ok(()) - } - } - - fn index_updated_entries( - &self, - updated_entries: UpdatedEntriesSet, - cx: &AppContext, - ) -> impl Future> { - let worktree = self.worktree.read(cx).snapshot(); - let worktree_abs_path = worktree.abs_path().clone(); - let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx); - let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); - let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); - let persist = 
self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); - async move { - futures::try_join!(scan.task, chunk.task, embed.task, persist)?; - Ok(()) - } - } - - fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> ScanEntries { - let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); - let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); - let db_connection = self.db_connection.clone(); - let db = self.db; - let entries_being_indexed = self.entry_ids_being_indexed.clone(); - let task = cx.background_executor().spawn(async move { - let txn = db_connection - .read_txn() - .context("failed to create read transaction")?; - let mut db_entries = db - .iter(&txn) - .context("failed to create iterator")? - .move_between_keys() - .peekable(); - - let mut deletion_range: Option<(Bound<&str>, Bound<&str>)> = None; - for entry in worktree.files(false, 0) { - let entry_db_key = db_key_for_path(&entry.path); - - let mut saved_mtime = None; - while let Some(db_entry) = db_entries.peek() { - match db_entry { - Ok((db_path, db_embedded_file)) => match (*db_path).cmp(&entry_db_key) { - Ordering::Less => { - if let Some(deletion_range) = deletion_range.as_mut() { - deletion_range.1 = Bound::Included(db_path); - } else { - deletion_range = - Some((Bound::Included(db_path), Bound::Included(db_path))); - } - - db_entries.next(); - } - Ordering::Equal => { - if let Some(deletion_range) = deletion_range.take() { - deleted_entry_ranges_tx - .send(( - deletion_range.0.map(ToString::to_string), - deletion_range.1.map(ToString::to_string), - )) - .await?; - } - saved_mtime = db_embedded_file.mtime; - db_entries.next(); - break; - } - Ordering::Greater => { - break; - } - }, - Err(_) => return Err(db_entries.next().unwrap().unwrap_err())?, - } - } - - if entry.mtime != saved_mtime { - let handle = entries_being_indexed.insert(entry.id); - updated_entries_tx.send((entry.clone(), handle)).await?; - } - } - - if let Some(db_entry) = 
db_entries.next() { - let (db_path, _) = db_entry?; - deleted_entry_ranges_tx - .send((Bound::Included(db_path.to_string()), Bound::Unbounded)) - .await?; - } - - Ok(()) - }); - - ScanEntries { - updated_entries: updated_entries_rx, - deleted_entry_ranges: deleted_entry_ranges_rx, - task, - } - } - - fn scan_updated_entries( - &self, - worktree: Snapshot, - updated_entries: UpdatedEntriesSet, - cx: &AppContext, - ) -> ScanEntries { - let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); - let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); - let entries_being_indexed = self.entry_ids_being_indexed.clone(); - let task = cx.background_executor().spawn(async move { - for (path, entry_id, status) in updated_entries.iter() { - match status { - project::PathChange::Added - | project::PathChange::Updated - | project::PathChange::AddedOrUpdated => { - if let Some(entry) = worktree.entry_for_id(*entry_id) { - if entry.is_file() { - let handle = entries_being_indexed.insert(entry.id); - updated_entries_tx.send((entry.clone(), handle)).await?; - } - } - } - project::PathChange::Removed => { - let db_path = db_key_for_path(path); - deleted_entry_ranges_tx - .send((Bound::Included(db_path.clone()), Bound::Included(db_path))) - .await?; - } - project::PathChange::Loaded => { - // Do nothing. 
- } - } - } - - Ok(()) - }); - - ScanEntries { - updated_entries: updated_entries_rx, - deleted_entry_ranges: deleted_entry_ranges_rx, - task, - } - } - - fn chunk_files( - &self, - worktree_abs_path: Arc, - entries: channel::Receiver<(Entry, IndexingEntryHandle)>, - cx: &AppContext, - ) -> ChunkFiles { - let language_registry = self.language_registry.clone(); - let fs = self.fs.clone(); - let (chunked_files_tx, chunked_files_rx) = channel::bounded(2048); - let task = cx.spawn(|cx| async move { - cx.background_executor() - .scoped(|cx| { - for _ in 0..cx.num_cpus() { - cx.spawn(async { - while let Ok((entry, handle)) = entries.recv().await { - let entry_abs_path = worktree_abs_path.join(&entry.path); - let Some(text) = fs - .load(&entry_abs_path) - .await - .with_context(|| { - format!("failed to read path {entry_abs_path:?}") - }) - .log_err() - else { - continue; - }; - let language = language_registry - .language_for_file_path(&entry.path) - .await - .ok(); - let chunked_file = ChunkedFile { - chunks: chunk_text(&text, language.as_ref(), &entry.path), - handle, - path: entry.path, - mtime: entry.mtime, - text, - }; - - if chunked_files_tx.send(chunked_file).await.is_err() { - return; - } - } - }); - } - }) - .await; - Ok(()) - }); - - ChunkFiles { - files: chunked_files_rx, - task, - } - } - - fn embed_files( - embedding_provider: Arc, - chunked_files: channel::Receiver, - cx: &AppContext, - ) -> EmbedFiles { - let embedding_provider = embedding_provider.clone(); - let (embedded_files_tx, embedded_files_rx) = channel::bounded(512); - let task = cx.background_executor().spawn(async move { - let mut chunked_file_batches = - chunked_files.chunks_timeout(512, Duration::from_secs(2)); - while let Some(chunked_files) = chunked_file_batches.next().await { - // View the batch of files as a vec of chunks - // Flatten out to a vec of chunks that we can subdivide into batch sized pieces - // Once those are done, reassemble them back into the files in which they belong - // 
If any embeddings fail for a file, the entire file is discarded - - let chunks: Vec = chunked_files - .iter() - .flat_map(|file| { - file.chunks.iter().map(|chunk| TextToEmbed { - text: &file.text[chunk.range.clone()], - digest: chunk.digest, - }) - }) - .collect::>(); - - let mut embeddings: Vec> = Vec::new(); - for embedding_batch in chunks.chunks(embedding_provider.batch_size()) { - if let Some(batch_embeddings) = - embedding_provider.embed(embedding_batch).await.log_err() - { - if batch_embeddings.len() == embedding_batch.len() { - embeddings.extend(batch_embeddings.into_iter().map(Some)); - continue; - } - log::error!( - "embedding provider returned unexpected embedding count {}, expected {}", - batch_embeddings.len(), embedding_batch.len() - ); - } - - embeddings.extend(iter::repeat(None).take(embedding_batch.len())); - } - - let mut embeddings = embeddings.into_iter(); - for chunked_file in chunked_files { - let mut embedded_file = EmbeddedFile { - path: chunked_file.path, - mtime: chunked_file.mtime, - chunks: Vec::new(), - }; - - let mut embedded_all_chunks = true; - for (chunk, embedding) in - chunked_file.chunks.into_iter().zip(embeddings.by_ref()) - { - if let Some(embedding) = embedding { - embedded_file - .chunks - .push(EmbeddedChunk { chunk, embedding }); - } else { - embedded_all_chunks = false; - } - } - - if embedded_all_chunks { - embedded_files_tx - .send((embedded_file, chunked_file.handle)) - .await?; - } - } - } - Ok(()) - }); - - EmbedFiles { - files: embedded_files_rx, - task, - } - } - - fn persist_embeddings( - &self, - mut deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, - embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, - cx: &AppContext, - ) -> Task> { - let db_connection = self.db_connection.clone(); - let db = self.db; - cx.background_executor().spawn(async move { - while let Some(deletion_range) = deleted_entry_ranges.next().await { - let mut txn = db_connection.write_txn()?; - let start = 
deletion_range.0.as_ref().map(|start| start.as_str()); - let end = deletion_range.1.as_ref().map(|end| end.as_str()); - log::debug!("deleting embeddings in range {:?}", &(start, end)); - db.delete_range(&mut txn, &(start, end))?; - txn.commit()?; - } - - let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2)); - while let Some(embedded_files) = embedded_files.next().await { - let mut txn = db_connection.write_txn()?; - for (file, _) in &embedded_files { - log::debug!("saving embedding for file {:?}", file.path); - let key = db_key_for_path(&file.path); - db.put(&mut txn, &key, file)?; - } - txn.commit()?; - - drop(embedded_files); - log::debug!("committed"); - } - - Ok(()) - }) - } - - fn paths(&self, cx: &AppContext) -> Task>>> { - let connection = self.db_connection.clone(); - let db = self.db; - cx.background_executor().spawn(async move { - let tx = connection - .read_txn() - .context("failed to create read transaction")?; - let result = db - .iter(&tx)? - .map(|entry| Ok(entry?.1.path.clone())) - .collect::>>>(); - drop(tx); - result - }) - } - - fn chunks_for_path( - &self, - path: Arc, - cx: &AppContext, - ) -> Task>> { - let connection = self.db_connection.clone(); - let db = self.db; - cx.background_executor().spawn(async move { - let tx = connection - .read_txn() - .context("failed to create read transaction")?; - Ok(db - .get(&tx, &db_key_for_path(&path))? - .ok_or_else(|| anyhow!("no such path"))? - .chunks - .clone()) - }) - } - - #[cfg(test)] - fn path_count(&self) -> Result { - let txn = self - .db_connection - .read_txn() - .context("failed to create read transaction")?; - Ok(self.db.len(&txn)?) 
- } -} - -struct ScanEntries { - updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>, - deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, - task: Task>, -} - -struct ChunkFiles { - files: channel::Receiver, - task: Task>, -} - -struct ChunkedFile { - pub path: Arc, - pub mtime: Option, - pub handle: IndexingEntryHandle, - pub text: String, - pub chunks: Vec, -} - -struct EmbedFiles { - files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, - task: Task>, -} - -#[derive(Debug, Serialize, Deserialize)] -struct EmbeddedFile { - path: Arc, - mtime: Option, - chunks: Vec, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -struct EmbeddedChunk { - chunk: Chunk, - embedding: Embedding, -} - -/// The set of entries that are currently being indexed. -struct IndexingEntrySet { - entry_ids: Mutex>, - tx: channel::Sender<()>, -} - -/// When dropped, removes the entry from the set of entries that are being indexed. -#[derive(Clone)] -struct IndexingEntryHandle { - entry_id: ProjectEntryId, - set: Weak, -} - -impl IndexingEntrySet { - fn new(tx: channel::Sender<()>) -> Self { - Self { - entry_ids: Default::default(), - tx, - } - } - - fn insert(self: &Arc, entry_id: ProjectEntryId) -> IndexingEntryHandle { - self.entry_ids.lock().insert(entry_id); - self.tx.send_blocking(()).ok(); - IndexingEntryHandle { - entry_id, - set: Arc::downgrade(self), - } - } - - pub fn len(&self) -> usize { - self.entry_ids.lock().len() - } -} - -impl Drop for IndexingEntryHandle { - fn drop(&mut self) { - if let Some(set) = self.set.upgrade() { - set.tx.send_blocking(()).ok(); - set.entry_ids.lock().remove(&self.entry_id); - } - } -} - -fn db_key_for_path(path: &Arc) -> String { - path.to_string_lossy().replace('/', "\0") } #[cfg(test)] mod tests { use super::*; + use anyhow::anyhow; + use chunking::Chunk; + use embedding_index::{ChunkedFile, EmbeddingIndex}; + use feature_flags::FeatureFlagAppExt; + use fs::FakeFs; use futures::{future::BoxFuture, FutureExt}; use 
gpui::TestAppContext; + use indexing::IndexingEntrySet; use language::language_settings::AllLanguageSettings; - use project::Project; + use project::{Project, ProjectEntryId}; + use serde_json::json; use settings::SettingsStore; + use smol::{channel, stream::StreamExt}; use std::{future, path::Path, sync::Arc}; fn init_test(cx: &mut TestAppContext) { + env_logger::try_init().ok(); + cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); language::init(cx); + cx.update_flags(false, vec![]); Project::init_settings(cx); SettingsStore::update(cx, |store, cx| { store.update_user_settings::(cx, |_| {}); @@ -1100,7 +190,7 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); - let mut semantic_index = SemanticIndex::new( + let mut semantic_index = SemanticDb::new( temp_dir.path().into(), Arc::new(TestEmbeddingProvider::new(16, |text| { let mut embedding = vec![0f32; 2]; @@ -1124,26 +214,57 @@ mod tests { .await .unwrap(); - let project_path = Path::new("./fixture"); + let fs = FakeFs::new(cx.executor()); + let project_path = Path::new("/fake_project"); - let project = cx - .spawn(|mut cx| async move { Project::example([project_path], &mut cx).await }) - .await; + fs.insert_tree( + project_path, + json!({ + "fixture": { + "main.rs": include_str!("../fixture/main.rs"), + "needle.md": include_str!("../fixture/needle.md"), + } + }), + ) + .await; + + let project = Project::test(fs, [project_path], cx).await; cx.update(|cx| { let language_registry = project.read(cx).languages().clone(); let node_runtime = project.read(cx).node_runtime().unwrap().clone(); languages::init(language_registry, node_runtime, cx); + + // Manually create and insert the ProjectIndex + let project_index = cx.new_model(|cx| { + ProjectIndex::new( + project.clone(), + semantic_index.db_connection.clone(), + semantic_index.embedding_provider.clone(), + cx, + ) + }); + semantic_index + .project_indices + .insert(project.downgrade(), project_index); }); - let project_index = 
cx.update(|cx| semantic_index.project_index(project.clone(), cx)); + let project_index = cx + .update(|_cx| { + semantic_index + .project_indices + .get(&project.downgrade()) + .cloned() + }) + .unwrap(); - while project_index - .read_with(cx, |index, cx| index.path_count(cx)) + cx.run_until_parked(); + while cx + .update(|cx| semantic_index.remaining_summaries(&project.downgrade(), cx)) .unwrap() - == 0 + > 0 { - project_index.next_event(cx).await; + cx.run_until_parked(); } let results = cx @@ -1155,7 +276,11 @@ mod tests { .await .unwrap(); - assert!(results.len() > 1, "should have found some results"); + assert!( + results.len() > 1, + "should have found some results, but only found {:?}", + results + ); for result in &results { println!("result: {:?}", result.path); @@ -1165,7 +290,7 @@ mod tests { // Find result that is greater than 0.5 let search_result = results.iter().find(|result| result.score > 0.9).unwrap(); - assert_eq!(search_result.path.to_string_lossy(), "needle.md"); + assert_eq!(search_result.path.to_string_lossy(), "fixture/needle.md"); let content = cx .update(|cx| { @@ -1236,7 +361,7 @@ mod tests { chunked_files_tx.close(); let embed_files_task = - cx.update(|cx| WorktreeIndex::embed_files(provider.clone(), chunked_files_rx, cx)); + cx.update(|cx| EmbeddingIndex::embed_files(provider.clone(), chunked_files_rx, cx)); embed_files_task.task.await.unwrap(); let mut embedded_files_rx = embed_files_task.files; diff --git a/crates/semantic_index/src/summary_backlog.rs b/crates/semantic_index/src/summary_backlog.rs new file mode 100644 index 0000000000..c6d8e33a45 --- /dev/null +++ b/crates/semantic_index/src/summary_backlog.rs @@ -0,0 +1,48 @@ +use collections::HashMap; +use std::{path::Path, sync::Arc, time::SystemTime}; + +const MAX_FILES_BEFORE_RESUMMARIZE: usize = 4; +const MAX_BYTES_BEFORE_RESUMMARIZE: u64 = 1_000_000; // 1 MB + +#[derive(Default, Debug)] +pub struct SummaryBacklog { + /// Key: path to a file that needs summarization, but that we 
haven't summarized yet. Value: that file's size on disk, in bytes, and its mtime. + files: HashMap, (u64, Option)>, + /// Cache of the sum of all values in `files`, so we don't have to traverse the whole map to check if we're over the byte limit. + total_bytes: u64, +} + +impl SummaryBacklog { + /// Store the given path in the backlog, along with how many bytes are in it. + pub fn insert(&mut self, path: Arc, bytes_on_disk: u64, mtime: Option) { + let (prev_bytes, _) = self + .files + .insert(path, (bytes_on_disk, mtime)) + .unwrap_or_default(); // Default to 0 prev_bytes + + // Update the cached total by subtracting out the old amount and adding the new one. + self.total_bytes = self.total_bytes - prev_bytes + bytes_on_disk; + } + + /// Returns true if the total number of bytes in the backlog exceeds a predefined threshold. + pub fn needs_drain(&self) -> bool { + self.files.len() > MAX_FILES_BEFORE_RESUMMARIZE || + // The whole purpose of the cached total_bytes is to make this comparison cheap. + // Otherwise we'd have to traverse the entire dictionary every time we wanted this answer. + self.total_bytes > MAX_BYTES_BEFORE_RESUMMARIZE + } + + /// Remove all the entries in the backlog and return the file paths as an iterator. 
+ #[allow(clippy::needless_lifetimes)] // Clippy thinks this 'a can be elided, but eliding it gives a compile error + pub fn drain<'a>(&'a mut self) -> impl Iterator, Option)> + 'a { + self.total_bytes = 0; + + self.files + .drain() + .map(|(path, (_size, mtime))| (path, mtime)) + } + + pub fn len(&self) -> usize { + self.files.len() + } +} diff --git a/crates/semantic_index/src/summary_index.rs b/crates/semantic_index/src/summary_index.rs new file mode 100644 index 0000000000..08f25ae028 --- /dev/null +++ b/crates/semantic_index/src/summary_index.rs @@ -0,0 +1,693 @@ +use anyhow::{anyhow, Context as _, Result}; +use arrayvec::ArrayString; +use fs::Fs; +use futures::{stream::StreamExt, TryFutureExt}; +use futures_batch::ChunksTimeoutStreamExt; +use gpui::{AppContext, Model, Task}; +use heed::{ + types::{SerdeBincode, Str}, + RoTxn, +}; +use language_model::{ + LanguageModelCompletionEvent, LanguageModelId, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, +}; +use log; +use parking_lot::Mutex; +use project::{Entry, UpdatedEntriesSet, Worktree}; +use serde::{Deserialize, Serialize}; +use smol::channel; +use std::{ + future::Future, + path::Path, + sync::Arc, + time::{Duration, Instant, SystemTime}, +}; +use util::ResultExt; +use worktree::Snapshot; + +use crate::{indexing::IndexingEntrySet, summary_backlog::SummaryBacklog}; + +#[derive(Serialize, Deserialize, Debug)] +pub struct FileSummary { + pub filename: String, + pub summary: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct UnsummarizedFile { + // Path to the file on disk + path: Arc, + // The mtime of the file on disk + mtime: Option, + // BLAKE3 hash of the source file's contents + digest: Blake3Digest, + // The source file's contents + contents: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct SummarizedFile { + // Path to the file on disk + path: String, + // The mtime of the file on disk + mtime: Option, + // BLAKE3 hash of the source file's 
contents + digest: Blake3Digest, + // The LLM's summary of the file's contents + summary: String, +} + +/// This is what blake3's to_hex() method returns - see https://docs.rs/blake3/1.5.3/src/blake3/lib.rs.html#246 +pub type Blake3Digest = ArrayString<{ blake3::OUT_LEN * 2 }>; + +#[derive(Debug, Serialize, Deserialize)] +pub struct FileDigest { + pub mtime: Option, + pub digest: Blake3Digest, +} + +struct NeedsSummary { + files: channel::Receiver, + task: Task>, +} + +struct SummarizeFiles { + files: channel::Receiver, + task: Task>, +} + +pub struct SummaryIndex { + worktree: Model, + fs: Arc, + db_connection: heed::Env, + file_digest_db: heed::Database>, // Key: file path. Val: BLAKE3 digest of its contents. + summary_db: heed::Database, Str>, // Key: BLAKE3 digest of a file's contents. Val: LLM summary of those contents. + backlog: Arc>, + _entry_ids_being_indexed: Arc, // TODO can this be removed? +} + +struct Backlogged { + paths_to_digest: channel::Receiver, Option)>>, + task: Task>, +} + +struct MightNeedSummaryFiles { + files: channel::Receiver, + task: Task>, +} + +impl SummaryIndex { + pub fn new( + worktree: Model, + fs: Arc, + db_connection: heed::Env, + file_digest_db: heed::Database>, + summary_db: heed::Database, Str>, + _entry_ids_being_indexed: Arc, + ) -> Self { + Self { + worktree, + fs, + db_connection, + file_digest_db, + summary_db, + _entry_ids_being_indexed, + backlog: Default::default(), + } + } + + pub fn file_digest_db(&self) -> heed::Database> { + self.file_digest_db + } + + pub fn summary_db(&self) -> heed::Database, Str> { + self.summary_db + } + + pub fn index_entries_changed_on_disk( + &self, + is_auto_available: bool, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let backlogged; + let digest; + let needs_summary; + let summaries; + let persist; + + if is_auto_available { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + + backlogged = 
self.scan_entries(worktree, cx); + digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx); + needs_summary = self.check_summary_cache(digest.files, cx); + summaries = self.summarize_files(needs_summary.files, cx); + persist = self.persist_summaries(summaries.files, cx); + } else { + // This feature is only staff-shipped, so make the rest of these no-ops. + backlogged = Backlogged { + paths_to_digest: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + digest = MightNeedSummaryFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + needs_summary = NeedsSummary { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + summaries = SummarizeFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + persist = Task::ready(Ok(())); + } + + async move { + futures::try_join!( + backlogged.task, + digest.task, + needs_summary.task, + summaries.task, + persist + )?; + + if is_auto_available { + log::info!( + "Summarizing everything that changed on disk took {:?}", + start.elapsed() + ); + } + + Ok(()) + } + } + + pub fn index_updated_entries( + &mut self, + updated_entries: UpdatedEntriesSet, + is_auto_available: bool, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let backlogged; + let digest; + let needs_summary; + let summaries; + let persist; + + if is_auto_available { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + + backlogged = self.scan_updated_entries(worktree, updated_entries.clone(), cx); + digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx); + needs_summary = self.check_summary_cache(digest.files, cx); + summaries = self.summarize_files(needs_summary.files, cx); + persist = self.persist_summaries(summaries.files, cx); + } else { + // This feature is only staff-shipped, so make the rest of these no-ops. 
+ backlogged = Backlogged { + paths_to_digest: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + digest = MightNeedSummaryFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + needs_summary = NeedsSummary { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + summaries = SummarizeFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + persist = Task::ready(Ok(())); + } + + async move { + futures::try_join!( + backlogged.task, + digest.task, + needs_summary.task, + summaries.task, + persist + )?; + + log::info!("Summarizing updated entries took {:?}", start.elapsed()); + + Ok(()) + } + } + + fn check_summary_cache( + &self, + mut might_need_summary: channel::Receiver, + cx: &AppContext, + ) -> NeedsSummary { + let db_connection = self.db_connection.clone(); + let db = self.summary_db; + let (needs_summary_tx, needs_summary_rx) = channel::bounded(512); + let task = cx.background_executor().spawn(async move { + while let Some(file) = might_need_summary.next().await { + let tx = db_connection + .read_txn() + .context("Failed to create read transaction for checking which hashes are in summary cache")?; + + match db.get(&tx, &file.digest) { + Ok(opt_answer) => { + if opt_answer.is_none() { + // It's not in the summary cache db, so we need to summarize it. 
+ log::debug!("File {:?} (digest {:?}) was NOT in the db cache and needs to be resummarized.", file.path.display(), &file.digest); + needs_summary_tx.send(file).await?; + } else { + log::debug!("File {:?} (digest {:?}) was in the db cache and does not need to be resummarized.", file.path.display(), &file.digest); + } + } + Err(err) => { + log::error!("Reading from the summaries database failed: {:?}", err); + } + } + } + + Ok(()) + }); + + NeedsSummary { + files: needs_summary_rx, + task, + } + } + + fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> Backlogged { + let (tx, rx) = channel::bounded(512); + let db_connection = self.db_connection.clone(); + let digest_db = self.file_digest_db; + let backlog = Arc::clone(&self.backlog); + let task = cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + + for entry in worktree.files(false, 0) { + let needs_summary = + Self::add_to_backlog(Arc::clone(&backlog), digest_db, &txn, entry); + + if !needs_summary.is_empty() { + tx.send(needs_summary).await?; + } + } + + // TODO delete db entries for deleted files + + Ok(()) + }); + + Backlogged { + paths_to_digest: rx, + task, + } + } + + fn add_to_backlog( + backlog: Arc>, + digest_db: heed::Database>, + txn: &RoTxn<'_>, + entry: &Entry, + ) -> Vec<(Arc, Option)> { + let entry_db_key = db_key_for_path(&entry.path); + + match digest_db.get(&txn, &entry_db_key) { + Ok(opt_saved_digest) => { + // The file path is the same, but the mtime is different. (Or there was no mtime.) + // It needs updating, so add it to the backlog! Then, if the backlog is full, drain it and summarize its contents. 
+ if entry.mtime != opt_saved_digest.and_then(|digest| digest.mtime) { + let mut backlog = backlog.lock(); + + log::info!( + "Inserting {:?} ({:?} bytes) into backlog", + &entry.path, + entry.size, + ); + backlog.insert(Arc::clone(&entry.path), entry.size, entry.mtime); + + if backlog.needs_drain() { + log::info!("Draining summary backlog..."); + return backlog.drain().collect(); + } + } + } + Err(err) => { + log::error!( + "Error trying to get file digest db entry {:?}: {:?}", + &entry_db_key, + err + ); + } + } + + Vec::new() + } + + fn scan_updated_entries( + &self, + worktree: Snapshot, + updated_entries: UpdatedEntriesSet, + cx: &AppContext, + ) -> Backlogged { + log::info!("Scanning for updated entries that might need summarization..."); + let (tx, rx) = channel::bounded(512); + // let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); + let db_connection = self.db_connection.clone(); + let digest_db = self.file_digest_db; + let backlog = Arc::clone(&self.backlog); + let task = cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + + for (path, entry_id, status) in updated_entries.iter() { + match status { + project::PathChange::Loaded + | project::PathChange::Added + | project::PathChange::Updated + | project::PathChange::AddedOrUpdated => { + if let Some(entry) = worktree.entry_for_id(*entry_id) { + if entry.is_file() { + let needs_summary = Self::add_to_backlog( + Arc::clone(&backlog), + digest_db, + &txn, + entry, + ); + + if !needs_summary.is_empty() { + tx.send(needs_summary).await?; + } + } + } + } + project::PathChange::Removed => { + let _db_path = db_key_for_path(path); + // TODO delete db entries for deleted files + // deleted_entry_ranges_tx + // .send((Bound::Included(db_path.clone()), Bound::Included(db_path))) + // .await?; + } + } + } + + Ok(()) + }); + + Backlogged { + paths_to_digest: rx, + // deleted_entry_ranges: 
deleted_entry_ranges_rx, + task, + } + } + + fn digest_files( + &self, + paths: channel::Receiver, Option)>>, + worktree_abs_path: Arc, + cx: &AppContext, + ) -> MightNeedSummaryFiles { + let fs = self.fs.clone(); + let (rx, tx) = channel::bounded(2048); + let task = cx.spawn(|cx| async move { + cx.background_executor() + .scoped(|cx| { + for _ in 0..cx.num_cpus() { + cx.spawn(async { + while let Ok(pairs) = paths.recv().await { + // Note: we could process all these files concurrently if desired. Might or might not speed things up. + for (path, mtime) in pairs { + let entry_abs_path = worktree_abs_path.join(&path); + + // Load the file's contents and compute its hash digest. + let unsummarized_file = { + let Some(contents) = fs + .load(&entry_abs_path) + .await + .with_context(|| { + format!("failed to read path {entry_abs_path:?}") + }) + .log_err() + else { + continue; + }; + + let digest = { + let mut hasher = blake3::Hasher::new(); + // Incorporate both the (relative) file path as well as the contents of the file into the hash. + // This is because in some languages and frameworks, identical files can do different things + // depending on their paths (e.g. Rails controllers). It's also why we send the path to the model. 
+ hasher.update(path.display().to_string().as_bytes()); + hasher.update(contents.as_bytes()); + hasher.finalize().to_hex() + }; + + UnsummarizedFile { + digest, + contents, + path, + mtime, + } + }; + + if let Err(err) = rx + .send(unsummarized_file) + .map_err(|error| anyhow!(error)) + .await + { + log::error!("Error: {:?}", err); + + return; + } + } + } + }); + } + }) + .await; + Ok(()) + }); + + MightNeedSummaryFiles { files: tx, task } + } + + fn summarize_files( + &self, + mut unsummarized_files: channel::Receiver, + cx: &AppContext, + ) -> SummarizeFiles { + let (summarized_tx, summarized_rx) = channel::bounded(512); + let task = cx.spawn(|cx| async move { + while let Some(file) = unsummarized_files.next().await { + log::debug!("Summarizing {:?}", file); + let summary = cx + .update(|cx| Self::summarize_code(&file.contents, &file.path, cx))? + .await + .unwrap_or_else(|err| { + // Log a warning because we'll continue anyway. + // In the future, we may want to try splitting it up into multiple requests and concatenating the summaries, + // but this might give bad summaries due to cutting off source code files in the middle. + log::warn!("Failed to summarize {} - {:?}", file.path.display(), err); + + String::new() + }); + + // Note that the summary could be empty because of an error talking to a cloud provider, + // e.g. because the context limit was exceeded. In that case, we return Ok(String::new()). + if !summary.is_empty() { + summarized_tx + .send(SummarizedFile { + path: file.path.display().to_string(), + digest: file.digest, + summary, + mtime: file.mtime, + }) + .await? + } + } + + Ok(()) + }); + + SummarizeFiles { + files: summarized_rx, + task, + } + } + + fn summarize_code( + code: &str, + path: &Path, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let (summary_model_id, use_cache): (LanguageModelId, bool) = ( + "Qwen/Qwen2-7B-Instruct".to_string().into(), // TODO read this from the user's settings. 
+ false, // qwen2 doesn't have a cache, but we should probably infer this from the model + ); + let Some(model) = LanguageModelRegistry::read_global(cx) + .available_models(cx) + .find(|model| &model.id() == &summary_model_id) + else { + return cx.background_executor().spawn(async move { + Err(anyhow!("Couldn't find the preferred summarization model ({:?}) in the language registry's available models", summary_model_id)) + }); + }; + let utf8_path = path.to_string_lossy(); + const PROMPT_BEFORE_CODE: &str = "Summarize what the code in this file does in 3 sentences, using no newlines or bullet points in the summary:"; + let prompt = format!("{PROMPT_BEFORE_CODE}\n{utf8_path}:\n{code}"); + + log::debug!( + "Summarizing code by sending this prompt to {:?}: {:?}", + model.name(), + &prompt + ); + + let request = LanguageModelRequest { + messages: vec![LanguageModelRequestMessage { + role: Role::User, + content: vec![prompt.into()], + cache: use_cache, + }], + tools: Vec::new(), + stop: Vec::new(), + temperature: 1.0, + }; + + let code_len = code.len(); + cx.spawn(|cx| async move { + let stream = model.stream_completion(request, &cx); + cx.background_executor() + .spawn(async move { + let answer: String = stream + .await? + .filter_map(|event| async { + if let Ok(LanguageModelCompletionEvent::Text(text)) = event { + Some(text) + } else { + None + } + }) + .collect() + .await; + + log::info!( + "It took {:?} to summarize {:?} bytes of code.", + start.elapsed(), + code_len + ); + + log::debug!("Summary was: {:?}", &answer); + + Ok(answer) + }) + .await + + // TODO if summarization failed, put it back in the backlog! 
+ }) + } + + fn persist_summaries( + &self, + summaries: channel::Receiver, + cx: &AppContext, + ) -> Task> { + let db_connection = self.db_connection.clone(); + let digest_db = self.file_digest_db; + let summary_db = self.summary_db; + cx.background_executor().spawn(async move { + let mut summaries = summaries.chunks_timeout(4096, Duration::from_secs(2)); + while let Some(summaries) = summaries.next().await { + let mut txn = db_connection.write_txn()?; + for file in &summaries { + log::debug!( + "Saving summary of {:?} - which is {} bytes of summary for content digest {:?}", + &file.path, + file.summary.len(), + file.digest + ); + digest_db.put( + &mut txn, + &file.path, + &FileDigest { + mtime: file.mtime, + digest: file.digest, + }, + )?; + summary_db.put(&mut txn, &file.digest, &file.summary)?; + } + txn.commit()?; + + drop(summaries); + log::debug!("committed summaries"); + } + + Ok(()) + }) + } + + /// Empty out the backlog of files that haven't been resummarized, and resummarize them immediately. 
+ pub(crate) fn flush_backlog( + &self, + worktree_abs_path: Arc, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let backlogged = { + let (tx, rx) = channel::bounded(512); + let needs_summary: Vec<(Arc, Option)> = { + let mut backlog = self.backlog.lock(); + + backlog.drain().collect() + }; + + let task = cx.background_executor().spawn(async move { + tx.send(needs_summary).await?; + Ok(()) + }); + + Backlogged { + paths_to_digest: rx, + task, + } + }; + + let digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx); + let needs_summary = self.check_summary_cache(digest.files, cx); + let summaries = self.summarize_files(needs_summary.files, cx); + let persist = self.persist_summaries(summaries.files, cx); + + async move { + futures::try_join!( + backlogged.task, + digest.task, + needs_summary.task, + summaries.task, + persist + )?; + + log::info!("Summarizing backlogged entries took {:?}", start.elapsed()); + + Ok(()) + } + } + + pub(crate) fn backlog_len(&self) -> usize { + self.backlog.lock().len() + } +} + +fn db_key_for_path(path: &Arc) -> String { + path.to_string_lossy().replace('/', "\0") +} diff --git a/crates/semantic_index/src/worktree_index.rs b/crates/semantic_index/src/worktree_index.rs new file mode 100644 index 0000000000..7ca5a49619 --- /dev/null +++ b/crates/semantic_index/src/worktree_index.rs @@ -0,0 +1,217 @@ +use crate::embedding::EmbeddingProvider; +use crate::embedding_index::EmbeddingIndex; +use crate::indexing::IndexingEntrySet; +use crate::summary_index::SummaryIndex; +use anyhow::Result; +use feature_flags::{AutoCommand, FeatureFlagAppExt}; +use fs::Fs; +use futures::future::Shared; +use gpui::{ + AppContext, AsyncAppContext, Context, Model, ModelContext, Subscription, Task, WeakModel, +}; +use language::LanguageRegistry; +use log; +use project::{UpdatedEntriesSet, Worktree}; +use smol::channel; +use std::sync::Arc; +use util::ResultExt; + +#[derive(Clone)] +pub enum WorktreeIndexHandle { + 
Loading { + index: Shared, Arc>>>, + }, + Loaded { + index: Model, + }, +} + +pub struct WorktreeIndex { + worktree: Model, + db_connection: heed::Env, + embedding_index: EmbeddingIndex, + summary_index: SummaryIndex, + entry_ids_being_indexed: Arc, + _index_entries: Task>, + _subscription: Subscription, +} + +impl WorktreeIndex { + pub fn load( + worktree: Model, + db_connection: heed::Env, + language_registry: Arc, + fs: Arc, + status_tx: channel::Sender<()>, + embedding_provider: Arc, + cx: &mut AppContext, + ) -> Task>> { + let worktree_for_index = worktree.clone(); + let worktree_for_summary = worktree.clone(); + let worktree_abs_path = worktree.read(cx).abs_path(); + let embedding_fs = Arc::clone(&fs); + let summary_fs = fs; + cx.spawn(|mut cx| async move { + let entries_being_indexed = Arc::new(IndexingEntrySet::new(status_tx)); + let (embedding_index, summary_index) = cx + .background_executor() + .spawn({ + let entries_being_indexed = Arc::clone(&entries_being_indexed); + let db_connection = db_connection.clone(); + async move { + let mut txn = db_connection.write_txn()?; + let embedding_index = { + let db_name = worktree_abs_path.to_string_lossy(); + let db = db_connection.create_database(&mut txn, Some(&db_name))?; + + EmbeddingIndex::new( + worktree_for_index, + embedding_fs, + db_connection.clone(), + db, + language_registry, + embedding_provider, + Arc::clone(&entries_being_indexed), + ) + }; + let summary_index = { + let file_digest_db = { + let db_name = + // Prepend something that wouldn't be found at the beginning of an + // absolute path, so we don't get db key namespace conflicts with + // embeddings, which use the abs path as a key. + format!("digests-{}", worktree_abs_path.to_string_lossy()); + db_connection.create_database(&mut txn, Some(&db_name))? 
+ }; + let summary_db = { + let db_name = + // Prepend something that wouldn't be found at the beginning of an + // absolute path, so we don't get db key namespace conflicts with + // embeddings, which use the abs path as a key. + format!("summaries-{}", worktree_abs_path.to_string_lossy()); + db_connection.create_database(&mut txn, Some(&db_name))? + }; + SummaryIndex::new( + worktree_for_summary, + summary_fs, + db_connection.clone(), + file_digest_db, + summary_db, + Arc::clone(&entries_being_indexed), + ) + }; + txn.commit()?; + anyhow::Ok((embedding_index, summary_index)) + } + }) + .await?; + + cx.new_model(|cx| { + Self::new( + worktree, + db_connection, + embedding_index, + summary_index, + entries_being_indexed, + cx, + ) + }) + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn new( + worktree: Model, + db_connection: heed::Env, + embedding_index: EmbeddingIndex, + summary_index: SummaryIndex, + entry_ids_being_indexed: Arc, + cx: &mut ModelContext, + ) -> Self { + let (updated_entries_tx, updated_entries_rx) = channel::unbounded(); + let _subscription = cx.subscribe(&worktree, move |_this, _worktree, event, _cx| { + if let worktree::Event::UpdatedEntries(update) = event { + log::debug!("Updating entries..."); + _ = updated_entries_tx.try_send(update.clone()); + } + }); + + Self { + db_connection, + embedding_index, + summary_index, + worktree, + entry_ids_being_indexed, + _index_entries: cx.spawn(|this, cx| Self::index_entries(this, updated_entries_rx, cx)), + _subscription, + } + } + + pub fn entry_ids_being_indexed(&self) -> &IndexingEntrySet { + self.entry_ids_being_indexed.as_ref() + } + + pub fn worktree(&self) -> &Model { + &self.worktree + } + + pub fn db_connection(&self) -> &heed::Env { + &self.db_connection + } + + pub fn embedding_index(&self) -> &EmbeddingIndex { + &self.embedding_index + } + + pub fn summary_index(&self) -> &SummaryIndex { + &self.summary_index + } + + async fn index_entries( + this: WeakModel, + updated_entries: 
channel::Receiver, + mut cx: AsyncAppContext, + ) -> Result<()> { + let is_auto_available = cx.update(|cx| cx.wait_for_flag::())?.await; + let index = this.update(&mut cx, |this, cx| { + futures::future::try_join( + this.embedding_index.index_entries_changed_on_disk(cx), + this.summary_index + .index_entries_changed_on_disk(is_auto_available, cx), + ) + })?; + index.await.log_err(); + + while let Ok(updated_entries) = updated_entries.recv().await { + let is_auto_available = cx + .update(|cx| cx.has_flag::()) + .unwrap_or(false); + + let index = this.update(&mut cx, |this, cx| { + futures::future::try_join( + this.embedding_index + .index_updated_entries(updated_entries.clone(), cx), + this.summary_index.index_updated_entries( + updated_entries, + is_auto_available, + cx, + ), + ) + })?; + index.await.log_err(); + } + + Ok(()) + } + + #[cfg(test)] + pub fn path_count(&self) -> Result { + use anyhow::Context; + + let txn = self + .db_connection + .read_txn() + .context("failed to create read transaction")?; + Ok(self.embedding_index().db().len(&txn)?) + } +} diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index c6e64deb59..584524a1d7 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -3227,6 +3227,8 @@ pub struct Entry { pub git_status: Option, /// Whether this entry is considered to be a `.env` file. pub is_private: bool, + /// The entry's size on disk, in bytes. 
+ pub size: u64, pub char_bag: CharBag, pub is_fifo: bool, } @@ -3282,6 +3284,7 @@ impl Entry { path, inode: metadata.inode, mtime: Some(metadata.mtime), + size: metadata.len, canonical_path, is_symlink: metadata.is_symlink, is_ignored: false, @@ -5210,6 +5213,7 @@ impl<'a> From<&'a Entry> for proto::Entry { is_external: entry.is_external, git_status: entry.git_status.map(git_status_to_proto), is_fifo: entry.is_fifo, + size: Some(entry.size), } } } @@ -5231,6 +5235,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry { path, inode: entry.inode, mtime: entry.mtime.map(|time| time.into()), + size: entry.size.unwrap_or(0), canonical_path: None, is_ignored: entry.is_ignored, is_external: entry.is_external, From de344c833bf5b61641eb0057127d17ac830f74b4 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 13 Sep 2024 14:49:50 -0400 Subject: [PATCH 073/270] zed_extension_api: Use v0.2.0 WIT types (#17802) This PR makes `zed_extension_api` use the WIT types from v0.2.0 of extension API. A follow-up from #17795, since I had forgotten to do it there. Release Notes: - N/A --- crates/extension_api/src/extension_api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs index f66c6ef59f..f0b5baab9a 100644 --- a/crates/extension_api/src/extension_api.rs +++ b/crates/extension_api/src/extension_api.rs @@ -186,7 +186,7 @@ mod wit { wit_bindgen::generate!({ skip: ["init-extension"], - path: "./wit/since_v0.1.0", + path: "./wit/since_v0.2.0", }); } From 8f833ea0294448c8b7e9e6a09541fa9537867e65 Mon Sep 17 00:00:00 2001 From: Barry Penner <34104395+skytwosea@users.noreply.github.com> Date: Fri, 13 Sep 2024 14:51:14 -0400 Subject: [PATCH 074/270] Fix missing on-mouseup when dragging the window on Linux (#17801) Zed Hackathon entry :D Release Notes: - Fixed a bug where Zed would initiate a window move and then refuse to release the mouse. 
Co-authored-by: Mikayla --- crates/title_bar/src/title_bar.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index fd3f01e5f7..e2d45a923b 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -182,6 +182,12 @@ impl Render for TitleBar { .on_mouse_down_out(cx.listener(move |this, _ev, _cx| { this.should_move = false; })) + .on_mouse_up( + gpui::MouseButton::Left, + cx.listener(move |this, _ev, _cx| { + this.should_move = false; + }), + ) .on_mouse_down( gpui::MouseButton::Left, cx.listener(move |this, _ev, _cx| { From adbe973f02b2e4cfe0645657ca10681a58f333a0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 13 Sep 2024 15:11:10 -0400 Subject: [PATCH 075/270] editor: In OpenFile check if file with path_suffix exists (#17805) Demo: https://github.com/user-attachments/assets/6acb6c1e-bb15-4205-9dcb-2aa4bb99dcf9 Release Notes: - When using `OpenFile` (`gf` in Vim mode) and the word under the cursor is not an existing file path, we now fall back and additionally check whether a file called `.` exists. That's similar to Vim's `suffixesadd` option. 
--------- Co-authored-by: Abdelhakim Qbaich Co-authored-by: Pete LeVasseur --- crates/editor/src/hover_links.rs | 69 +++++++++++++++++++++++++++----- crates/language/src/language.rs | 4 ++ crates/vim/src/command.rs | 19 +++++++++ 3 files changed, 83 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 86c17625e1..3f590273df 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -713,17 +713,42 @@ pub(crate) async fn find_file( cx: &mut AsyncWindowContext, ) -> Option<(Range, ResolvedPath)> { let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()).ok()?; - + let scope = snapshot.language_scope_at(position); let (range, candidate_file_path) = surrounding_filename(snapshot, position)?; - let existing_path = project - .update(cx, |project, cx| { - project.resolve_existing_file_path(&candidate_file_path, buffer, cx) - }) - .ok()? - .await?; + async fn check_path( + candidate_file_path: &str, + project: &Model, + buffer: &Model, + cx: &mut AsyncWindowContext, + ) -> Option { + project + .update(cx, |project, cx| { + project.resolve_existing_file_path(&candidate_file_path, buffer, cx) + }) + .ok()? + .await + } - Some((range, existing_path)) + if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await { + return Some((range, existing_path)); + } + + if let Some(scope) = scope { + for suffix in scope.path_suffixes() { + if candidate_file_path.ends_with(format!(".{suffix}").as_str()) { + continue; + } + + let suffixed_candidate = format!("{candidate_file_path}.{suffix}"); + if let Some(existing_path) = check_path(&suffixed_candidate, &project, buffer, cx).await + { + return Some((range, existing_path)); + } + } + } + + None } fn surrounding_filename( @@ -1490,7 +1515,8 @@ mod tests { You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. 
- Or go to /root/dir/file2.rs if project is local.ˇ + Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file.ˇ "}); // File does not exist @@ -1499,6 +1525,7 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); // No highlight @@ -1517,6 +1544,7 @@ mod tests { Go to fˇile2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); @@ -1525,6 +1553,7 @@ mod tests { Go to «file2.rsˇ» if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); // Moving the mouse over a relative path that does exist should highlight it @@ -1533,6 +1562,7 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/fˇile2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); @@ -1541,6 +1571,7 @@ mod tests { Go to file2.rs if you want. Or go to «../dir/file2.rsˇ» if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); // Moving the mouse over an absolute path that does exist should highlight it @@ -1549,6 +1580,7 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/diˇr/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); @@ -1557,6 +1589,25 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. 
Or go to «/root/dir/file2.rsˇ» if project is local. + Or go to /root/dir/file2 if this is a Rust file. + "}); + + // Moving the mouse over a path that exists, if we add the language-specific suffix, it should highlight it + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to /root/dir/file2.rs if project is local. + Or go to /root/diˇr/file2 if this is a Rust file. + "}); + + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights::(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to /root/dir/file2.rs if project is local. + Or go to «/root/dir/file2ˇ» if this is a Rust file. "}); cx.simulate_click(screen_coord, Modifiers::secondary_key()); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index cd39490d0b..3112d88aa5 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1410,6 +1410,10 @@ impl Language { } impl LanguageScope { + pub fn path_suffixes(&self) -> &[String] { + &self.language.path_suffixes() + } + pub fn language_name(&self) -> LanguageName { self.language.config.name.clone() } diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 06c5f0bd3f..67a674afa6 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -969,6 +969,9 @@ mod test { fs.as_fake() .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) .await; + fs.as_fake() + .insert_file("/root/dir/file3.rs", "go to file3".as_bytes().to_vec()) + .await; // Put the path to the second file into the currently open buffer cx.set_state(indoc! 
{"go to fiˇle2.rs"}, Mode::Normal); @@ -981,5 +984,21 @@ mod test { cx.workspace(|workspace, cx| { assert_active_item(workspace, "/root/dir/file2.rs", "This is file2.rs", cx); }); + + // Update editor to point to `file2.rs` + cx.editor = cx.workspace(|workspace, cx| workspace.active_item_as::(cx).unwrap()); + + // Put the path to the third file into the currently open buffer, + // but remove its suffix, because we want that lookup to happen automatically. + cx.set_state(indoc! {"go to fiˇle3"}, Mode::Normal); + + // Go to file3.rs + cx.simulate_keystrokes("g f"); + + // We now have three items + cx.workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 3)); + cx.workspace(|workspace, cx| { + assert_active_item(workspace, "/root/dir/file3.rs", "go to file3", cx); + }); } } From 1b36c62188884ae5fe5d5d3b07d36cb2ee730469 Mon Sep 17 00:00:00 2001 From: tepek2 Date: Fri, 13 Sep 2024 21:17:01 +0200 Subject: [PATCH 076/270] Add keybinding to swap pane items (#15583) - Rearrange tabs (left: `ctrl-shift-pageup`, right: `ctrl-shift-pagedown`) like Chrome Co-authored-by: Peter Tripp --- assets/keymaps/default-linux.json | 2 + assets/keymaps/default-macos.json | 2 + crates/workspace/src/pane.rs | 24 +++++++++ docs/src/key-bindings.md | 82 ++++++++++++++++--------------- 4 files changed, 70 insertions(+), 40 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 3c627d7803..bb5673dde6 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -245,6 +245,8 @@ "bindings": { "ctrl-pageup": "pane::ActivatePrevItem", "ctrl-pagedown": "pane::ActivateNextItem", + "ctrl-shift-pageup": "pane::SwapItemLeft", + "ctrl-shift-pagedown": "pane::SwapItemRight", "ctrl-w": "pane::CloseActiveItem", "ctrl-f4": "pane::CloseActiveItem", "alt-ctrl-t": "pane::CloseInactiveItems", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index ed6ece0556..9a0c08c3dc 100644 --- 
a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -285,6 +285,8 @@ "cmd-}": "pane::ActivateNextItem", "alt-cmd-left": "pane::ActivatePrevItem", "alt-cmd-right": "pane::ActivateNextItem", + "ctrl-shift-pageup": "pane::SwapItemLeft", + "ctrl-shift-pagedown": "pane::SwapItemRight", "cmd-w": "pane::CloseActiveItem", "alt-cmd-t": "pane::CloseInactiveItems", "ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes", diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index d0fa411381..09b4683c0c 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -158,6 +158,8 @@ actions!( SplitDown, SplitHorizontal, SplitVertical, + SwapItemLeft, + SwapItemRight, TogglePreviewTab, TogglePinTab, ] @@ -1054,6 +1056,26 @@ impl Pane { self.activate_item(index, activate_pane, activate_pane, cx); } + pub fn swap_item_left(&mut self, cx: &mut ViewContext) { + let index = self.active_item_index; + if index == 0 { + return; + } + + self.items.swap(index, index - 1); + self.activate_item(index - 1, true, true, cx); + } + + pub fn swap_item_right(&mut self, cx: &mut ViewContext) { + let index = self.active_item_index; + if index + 1 == self.items.len() { + return; + } + + self.items.swap(index, index + 1); + self.activate_item(index + 1, true, true, cx); + } + pub fn close_active_item( &mut self, action: &CloseActiveItem, @@ -2574,6 +2596,8 @@ impl Render for Pane { .on_action(cx.listener(|pane: &mut Pane, _: &ActivateNextItem, cx| { pane.activate_next_item(true, cx); })) + .on_action(cx.listener(|pane, _: &SwapItemLeft, cx| pane.swap_item_left(cx))) + .on_action(cx.listener(|pane, _: &SwapItemRight, cx| pane.swap_item_right(cx))) .on_action(cx.listener(|pane, action, cx| { pane.toggle_pin_tab(action, cx); })) diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 2a97bc62a8..989e101e7d 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -405,46 +405,48 @@ TBD: Add Column with Linux 
shortcuts #### Pane -| **Command** | **Target** | **Default Shortcut** | -| ----------------------------- | -------------- | ----------------------- | -| Activate item 1 | Pane | `Control + 1` | -| Activate item 2 | Pane | `Control + 2` | -| Activate item 3 | Pane | `Control + 3` | -| Activate item 4 | Pane | `Control + 4` | -| Activate item 5 | Pane | `Control + 5` | -| Activate item 6 | Pane | `Control + 6` | -| Activate item 7 | Pane | `Control + 7` | -| Activate item 8 | Pane | `Control + 8` | -| Activate item 9 | Pane | `Control + 9` | -| Activate last item | Pane | `Control + 0` | -| Activate next item | Pane | `Alt + ⌘ + Right` | -| Activate next item | Pane | `⌘ + }` | -| Activate prev item | Pane | `Alt + ⌘ + Left` | -| Activate prev item | Pane | `⌘ + {` | -| Close active item | Pane | `⌘ + W` | -| Close all items | Pane | `⌘ + K, ⌘ + W` | -| Close clean items | Pane | `⌘ + K, U` | -| Close inactive items | Pane | `Alt + ⌘ + T` | -| Go back | Pane | `Control + -` | -| Go forward | Pane | `Control + Shift + _` | -| Reopen closed item | Pane | `⌘ + Shift + T` | -| Split down | Pane | `⌘ + K, Down` | -| Split left | Pane | `⌘ + K, Left` | -| Split right | Pane | `⌘ + K, Right` | -| Split up | Pane | `⌘ + K, Up` | -| Toggle filters | Project Search | `Alt + ⌘ + F` | -| Toggle focus | Project Search | `⌘ + F` | -| Toggle focus | Project Search | `⌘ + Shift + F` | -| Activate regex mode | Search | `Alt + ⌘ + G` | -| Activate text mode | Search | `Alt + ⌘ + X` | -| Cycle mode | Search | `Alt + Tab` | -| Select all matches | Search | `Alt + Enter` | -| Select next match | Search | `⌘ + G` | -| Select prev match | Search | `⌘ + Shift + G` | -| Toggle case sensitive | Search | `Alt + ⌘ + C` | -| Toggle replace | Search | `⌘ + Shift + H` | -| Toggle whole word | Search | `Alt + ⌘ + W` | -| Close inactive tabs and panes | Workspace | `Control + Alt + ⌘ + W` | +| **Command** | **Target** | **Default Shortcut** | +| ----------------------------- | -------------- | 
----------------------------- | +| Activate item 1 | Pane | `Control + 1` | +| Activate item 2 | Pane | `Control + 2` | +| Activate item 3 | Pane | `Control + 3` | +| Activate item 4 | Pane | `Control + 4` | +| Activate item 5 | Pane | `Control + 5` | +| Activate item 6 | Pane | `Control + 6` | +| Activate item 7 | Pane | `Control + 7` | +| Activate item 8 | Pane | `Control + 8` | +| Activate item 9 | Pane | `Control + 9` | +| Activate last item | Pane | `Control + 0` | +| Activate next item | Pane | `Alt + ⌘ + Right` | +| Activate next item | Pane | `⌘ + }` | +| Activate prev item | Pane | `Alt + ⌘ + Left` | +| Activate prev item | Pane | `⌘ + {` | +| Swap item to left | Pane | `Control + Shift + Page Up` | +| Swap item to right | Pane | `Control + Shift + Page Down` | +| Close active item | Pane | `⌘ + W` | +| Close all items | Pane | `⌘ + K, ⌘ + W` | +| Close clean items | Pane | `⌘ + K, U` | +| Close inactive items | Pane | `Alt + ⌘ + T` | +| Go back | Pane | `Control + -` | +| Go forward | Pane | `Control + Shift + _` | +| Reopen closed item | Pane | `⌘ + Shift + T` | +| Split down | Pane | `⌘ + K, Down` | +| Split left | Pane | `⌘ + K, Left` | +| Split right | Pane | `⌘ + K, Right` | +| Split up | Pane | `⌘ + K, Up` | +| Toggle filters | Project Search | `Alt + ⌘ + F` | +| Toggle focus | Project Search | `⌘ + F` | +| Toggle focus | Project Search | `⌘ + Shift + F` | +| Activate regex mode | Search | `Alt + ⌘ + G` | +| Activate text mode | Search | `Alt + ⌘ + X` | +| Cycle mode | Search | `Alt + Tab` | +| Select all matches | Search | `Alt + Enter` | +| Select next match | Search | `⌘ + G` | +| Select prev match | Search | `⌘ + Shift + G` | +| Toggle case sensitive | Search | `Alt + ⌘ + C` | +| Toggle replace | Search | `⌘ + Shift + H` | +| Toggle whole word | Search | `Alt + ⌘ + W` | +| Close inactive tabs and panes | Workspace | `Control + Alt + ⌘ + W` | #### Buffer Search Bar From c71f052276d57c93358b700d8022b5c8f1338289 Mon Sep 17 00:00:00 2001 From: 
jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 13 Sep 2024 15:42:15 -0400 Subject: [PATCH 077/270] Add ability to use o1-preview and o1-mini as custom models (#17804) This is a barebones modification of the OpenAI provider code to accommodate non-streaming completions. This is specifically for the o1 models, which do not support streaming. Tested that this is working by running a `/workflow` with the following (arbitrarily chosen) settings: ```json { "language_models": { "openai": { "version": "1", "available_models": [ { "name": "o1-preview", "display_name": "o1-preview", "max_tokens": 128000, "max_completion_tokens": 30000 }, { "name": "o1-mini", "display_name": "o1-mini", "max_tokens": 128000, "max_completion_tokens": 20000 } ] } }, } ``` Release Notes: - Changed `low_speed_timeout_in_seconds` option to `600` for OpenAI provider to accommodate recent o1 model release. --------- Co-authored-by: Peter Co-authored-by: Bennet Co-authored-by: Marshall Bowers --- assets/settings/default.json | 3 +- crates/assistant/src/assistant_settings.rs | 2 + crates/assistant/src/inline_assistant.rs | 2 +- crates/language_model/src/provider/cloud.rs | 3 + crates/language_model/src/provider/open_ai.rs | 2 + crates/language_model/src/settings.rs | 2 + crates/open_ai/src/open_ai.rs | 126 +++++++++++++++++- 7 files changed, 136 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 65254afb7c..22dafb2890 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -916,7 +916,8 @@ }, "openai": { "version": "1", - "api_url": "https://api.openai.com/v1" + "api_url": "https://api.openai.com/v1", + "low_speed_timeout_in_seconds": 600 } }, // Zed's Prettier integration settings. 
diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index 7939eacd93..e2c6a8eb24 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -163,11 +163,13 @@ impl AssistantSettingsContent { display_name, max_tokens, max_output_tokens, + max_completion_tokens: None, } => Some(open_ai::AvailableModel { name, display_name, max_tokens, max_output_tokens, + max_completion_tokens: None, }), _ => None, }) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 246a408477..b01a712a7e 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2407,7 +2407,7 @@ impl Codegen { Ok(LanguageModelRequest { messages, tools: Vec::new(), - stop: vec!["|END|>".to_string()], + stop: Vec::new(), temperature: 1., }) } diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 0de7fb3feb..f8f64ff3b8 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -78,6 +78,8 @@ pub struct AvailableModel { pub max_tokens: usize, /// The maximum number of output tokens allowed by the model. pub max_output_tokens: Option, + /// The maximum number of completion tokens allowed by the model (o1-* only) + pub max_completion_tokens: Option, /// Override this model with a different Anthropic model for tool calls. pub tool_override: Option, /// Indicates whether this custom model supports caching. 
@@ -257,6 +259,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, + max_completion_tokens: model.max_completion_tokens, }), AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom { name: model.name.clone(), diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index fe5e60caec..98424a23aa 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -43,6 +43,7 @@ pub struct AvailableModel { pub display_name: Option, pub max_tokens: usize, pub max_output_tokens: Option, + pub max_completion_tokens: Option, } pub struct OpenAiLanguageModelProvider { @@ -175,6 +176,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider { display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, + max_completion_tokens: model.max_completion_tokens, }, ); } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 0059ed56c4..80749c0bdb 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -178,11 +178,13 @@ impl OpenAiSettingsContent { display_name, max_tokens, max_output_tokens, + max_completion_tokens, } => Some(provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens, display_name, + max_completion_tokens, }), _ => None, }) diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 5b621d6bb8..7b0294bd9c 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -1,12 +1,21 @@ mod supported_countries; use anyhow::{anyhow, Context, Result}; -use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; +use futures::{ + io::BufReader, + stream::{self, BoxStream}, + AsyncBufReadExt, 
AsyncReadExt, Stream, StreamExt, +}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; use isahc::config::Configurable; use serde::{Deserialize, Serialize}; use serde_json::Value; -use std::{convert::TryFrom, future::Future, pin::Pin, time::Duration}; +use std::{ + convert::TryFrom, + future::{self, Future}, + pin::Pin, + time::Duration, +}; use strum::EnumIter; pub use supported_countries::*; @@ -72,6 +81,7 @@ pub enum Model { display_name: Option, max_tokens: usize, max_output_tokens: Option, + max_completion_tokens: Option, }, } @@ -139,6 +149,7 @@ pub struct Request { pub stream: bool, #[serde(default, skip_serializing_if = "Option::is_none")] pub max_tokens: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] pub stop: Vec, pub temperature: f32, #[serde(default, skip_serializing_if = "Option::is_none")] @@ -263,6 +274,111 @@ pub struct ResponseStreamEvent { pub usage: Option, } +#[derive(Serialize, Deserialize, Debug)] +pub struct Response { + pub id: String, + pub object: String, + pub created: u64, + pub model: String, + pub choices: Vec, + pub usage: Usage, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct Choice { + pub index: u32, + pub message: RequestMessage, + pub finish_reason: Option, +} + +pub async fn complete( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: Request, + low_speed_timeout: Option, +) -> Result { + let uri = format!("{api_url}/chat/completions"); + let mut request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_key)); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + }; + + let mut request_body = request; + request_body.stream = false; + + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request_body)?))?; + let mut response = 
client.send(request).await?; + + if response.status().is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + let response: Response = serde_json::from_str(&body)?; + Ok(response) + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + #[derive(Deserialize)] + struct OpenAiResponse { + error: OpenAiError, + } + + #[derive(Deserialize)] + struct OpenAiError { + message: String, + } + + match serde_json::from_str::(&body) { + Ok(response) if !response.error.message.is_empty() => Err(anyhow!( + "Failed to connect to OpenAI API: {}", + response.error.message, + )), + + _ => Err(anyhow!( + "Failed to connect to OpenAI API: {} {}", + response.status(), + body, + )), + } + } +} + +fn adapt_response_to_stream(response: Response) -> ResponseStreamEvent { + ResponseStreamEvent { + created: response.created as u32, + model: response.model, + choices: response + .choices + .into_iter() + .map(|choice| ChoiceDelta { + index: choice.index, + delta: ResponseMessageDelta { + role: Some(match choice.message { + RequestMessage::Assistant { .. } => Role::Assistant, + RequestMessage::User { .. } => Role::User, + RequestMessage::System { .. } => Role::System, + RequestMessage::Tool { .. } => Role::Tool, + }), + content: match choice.message { + RequestMessage::Assistant { content, .. } => content, + RequestMessage::User { content } => Some(content), + RequestMessage::System { content } => Some(content), + RequestMessage::Tool { content, .. 
} => Some(content), + }, + tool_calls: None, + }, + finish_reason: choice.finish_reason, + }) + .collect(), + usage: Some(response.usage), + } +} + pub async fn stream_completion( client: &dyn HttpClient, api_url: &str, @@ -270,6 +386,12 @@ pub async fn stream_completion( request: Request, low_speed_timeout: Option, ) -> Result>> { + if request.model == "o1-preview" || request.model == "o1-mini" { + let response = complete(client, api_url, api_key, request, low_speed_timeout).await; + let response_stream_event = response.map(adapt_response_to_stream); + return Ok(stream::once(future::ready(response_stream_event)).boxed()); + } + let uri = format!("{api_url}/chat/completions"); let mut request_builder = HttpRequest::builder() .method(Method::POST) From e145c13f731269bd361dc64602bc9fddf61684bf Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 13 Sep 2024 16:05:34 -0400 Subject: [PATCH 078/270] Add stray UI polish to the SSH flow (#17798) Some super subtle refinement opportunities I spotted while playing around with this flow. There are mostly copywriting tweaks and some UI tweaks here and there (including editing the modal horizontal padding). 
--- Release Notes: - N/A --- crates/recent_projects/src/dev_servers.rs | 32 +++++++++++------------ crates/ui/src/components/modal.rs | 9 +++---- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index d8b10f31f9..491f378f30 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -929,7 +929,7 @@ impl DevServerProjects { .on_click( cx.listener(move |this, _, cx| this.delete_ssh_server(ix, cx)), ) - .tooltip(|cx| Tooltip::text("Remove dev server", cx)) + .tooltip(|cx| Tooltip::text("Remove Dev Server", cx)) })), ), ) @@ -1162,9 +1162,10 @@ impl DevServerProjects { }) }); - const MANUAL_SETUP_MESSAGE: &str = "Click create to generate a token for this server. The next step will provide instructions for setting zed up on that machine."; + const MANUAL_SETUP_MESSAGE: &str = + "Generate a token for this server and follow the steps to set Zed up on that machine."; const SSH_SETUP_MESSAGE: &str = - "Enter the command you use to ssh into this server.\nFor example: `ssh me@my.server` or `ssh me@secret-box:2222`."; + "Enter the command you use to SSH into this server.\nFor example: `ssh me@my.server` or `ssh me@secret-box:2222`."; Modal::new("create-dev-server", Some(self.scroll_handle.clone())) .header( @@ -1191,6 +1192,7 @@ impl DevServerProjects { .child( v_flex() .w_full() + .px_2() .gap_y(Spacing::Large.rems(cx)) .when(ssh_prompt.is_none(), |el| { el.child( @@ -1346,9 +1348,9 @@ impl DevServerProjects { ) -> Div { self.markdown.update(cx, |markdown, cx| { if kind == NewServerKind::Manual { - markdown.reset(format!("Please log into '{}'. If you don't yet have zed installed, run:\n```\ncurl https://zed.dev/install.sh | bash\n```\nThen to start zed in headless mode:\n```\nzed --dev-server-token {}\n```", dev_server_name, access_token), cx); + markdown.reset(format!("Please log into '{}'. 
If you don't yet have Zed installed, run:\n```\ncurl https://zed.dev/install.sh | bash\n```\nThen, to start Zed in headless mode:\n```\nzed --dev-server-token {}\n```", dev_server_name, access_token), cx); } else { - markdown.reset("Please wait while we connect over SSH.\n\nIf you run into problems, please [file a bug](https://github.com/zed-industries/zed), and in the meantime try using manual setup.".to_string(), cx); + markdown.reset("Please wait while we connect over SSH.\n\nIf you run into problems, please [file a bug](https://github.com/zed-industries/zed), and in the meantime try using the manual setup.".to_string(), cx); } }); @@ -1420,15 +1422,14 @@ impl DevServerProjects { ) .when(is_signed_out, |modal| { modal - .section(Section::new().child(v_flex().mb_4().child(Label::new( - "You are not currently signed in to Zed. Currently the remote development features are only available to signed in users. Please sign in to continue.", + .section(Section::new().child(div().child(Label::new( + "To continue with the remote development features, you need to sign in to Zed.", )))) .footer( ModalFooter::new().end_slot( - Button::new("sign_in", "Sign in") + Button::new("sign_in", "Sign in with GitHub") .icon(IconName::Github) .icon_position(IconPosition::Start) - .style(ButtonStyle::Filled) .full_width() .on_click(cx.listener(|_, _, cx| { let client = Client::global(cx).clone(); @@ -1447,17 +1448,15 @@ impl DevServerProjects { .when(!is_signed_out, |modal| { modal.section( Section::new().child( - div().mb_4().child( + div().child( List::new() - .empty_message("No dev servers registered.") + .empty_message("No dev servers registered yet.") .header(Some( ListHeader::new("Connections").end_slot( - Button::new("register-dev-server-button", "Connect") + Button::new("register-dev-server-button", "Connect New Server") .icon(IconName::Plus) .icon_position(IconPosition::Start) - .tooltip(|cx| { - Tooltip::text("Connect to a new server", cx) - }) + .icon_color(Color::Muted) 
.on_click(cx.listener(|this, _, cx| { this.mode = Mode::CreateDevServer( CreateDevServer { @@ -1524,6 +1523,7 @@ impl Render for DevServerProjects { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { div() .track_focus(&self.focus_handle) + .p_2() .elevation_3(cx) .key_context("DevServerModal") .on_action(cx.listener(Self::cancel)) @@ -1590,7 +1590,7 @@ pub fn reconnect_to_dev_server( cx: &mut WindowContext, ) -> Task> { let Some(ssh_connection_string) = dev_server.ssh_connection_string else { - return Task::ready(Err(anyhow!("can't reconnect, no ssh_connection_string"))); + return Task::ready(Err(anyhow!("Can't reconnect, no ssh_connection_string"))); }; let dev_server_store = dev_server_projects::Store::global(cx); let get_access_token = dev_server_store.update(cx, |store, cx| { diff --git a/crates/ui/src/components/modal.rs b/crates/ui/src/components/modal.rs index de85ee9daf..dec7a14a52 100644 --- a/crates/ui/src/components/modal.rs +++ b/crates/ui/src/components/modal.rs @@ -1,6 +1,6 @@ use crate::{ - h_flex, rems_from_px, v_flex, Clickable, Color, Headline, HeadlineSize, IconButton, - IconButtonShape, IconName, Label, LabelCommon, LabelSize, Spacing, + h_flex, v_flex, Clickable, Color, Headline, HeadlineSize, IconButton, IconButtonShape, + IconName, Label, LabelCommon, LabelSize, Spacing, }; use gpui::{prelude::FluentBuilder, *}; use smallvec::SmallVec; @@ -210,7 +210,7 @@ impl ParentElement for ModalRow { impl RenderOnce for ModalRow { fn render(self, _cx: &mut WindowContext) -> impl IntoElement { - h_flex().w_full().px_2().py_1().children(self.children) + h_flex().w_full().py_1().children(self.children) } } @@ -326,7 +326,6 @@ impl RenderOnce for Section { .border_color(cx.theme().colors().border) .bg(section_bg) .py(Spacing::Medium.rems(cx)) - .px(Spacing::Large.rems(cx) - rems_from_px(1.0)) .gap_y(Spacing::Small.rems(cx)) .child(div().flex().flex_1().size_full().children(self.children)), ) @@ -334,7 +333,7 @@ impl RenderOnce for Section { 
v_flex() .w_full() .gap_y(Spacing::Small.rems(cx)) - .px(Spacing::Large.rems(cx) + Spacing::Large.rems(cx)) + .px(Spacing::Medium.rems(cx) + Spacing::Medium.rems(cx)) .children(self.children) }; From d245f5e75cb095444cbe582ee7d4765e551faa99 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 13 Sep 2024 16:23:55 -0400 Subject: [PATCH 079/270] OpenAI o1-preview and o1-mini support (#17796) Release Notes: - Added support for OpenAI o1-mini and o1-preview models. --------- Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Bennet --- .../language_model/src/model/cloud_model.rs | 2 ++ crates/language_model/src/provider/open_ai.rs | 11 +++++--- crates/language_model/src/request.rs | 3 ++- crates/open_ai/src/open_ai.rs | 27 ++++++++++++++----- docs/src/assistant/configuration.md | 10 +++++-- 5 files changed, 39 insertions(+), 14 deletions(-) diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index be0812eab9..2ce48931f6 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -102,6 +102,8 @@ impl CloudModel { | open_ai::Model::FourTurbo | open_ai::Model::FourOmni | open_ai::Model::FourOmniMini + | open_ai::Model::O1Mini + | open_ai::Model::O1Preview | open_ai::Model::Custom { .. } => { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 98424a23aa..222c153041 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -372,10 +372,13 @@ pub fn count_open_ai_tokens( }) .collect::>(); - if let open_ai::Model::Custom { .. } = model { - tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) - } else { - tiktoken_rs::num_tokens_from_messages(model.id(), &messages) + match model { + open_ai::Model::Custom { .. 
} + | open_ai::Model::O1Mini + | open_ai::Model::O1Preview => { + tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) + } + _ => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), } }) .boxed() diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 4162e9df87..dd480b8aaf 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -241,6 +241,7 @@ pub struct LanguageModelRequest { impl LanguageModelRequest { pub fn into_open_ai(self, model: String, max_output_tokens: Option) -> open_ai::Request { + let stream = !model.starts_with("o1-"); open_ai::Request { model, messages: self @@ -259,7 +260,7 @@ impl LanguageModelRequest { }, }) .collect(), - stream: true, + stream, stop: self.stop, temperature: self.temperature, max_tokens: max_output_tokens, diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 7b0294bd9c..e67fe1af27 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -63,17 +63,22 @@ impl From for String { #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum Model { - #[serde(rename = "gpt-3.5-turbo", alias = "gpt-3.5-turbo-0613")] + #[serde(rename = "gpt-3.5-turbo", alias = "gpt-3.5-turbo")] ThreePointFiveTurbo, - #[serde(rename = "gpt-4", alias = "gpt-4-0613")] + #[serde(rename = "gpt-4", alias = "gpt-4")] Four, - #[serde(rename = "gpt-4-turbo-preview", alias = "gpt-4-1106-preview")] + #[serde(rename = "gpt-4-turbo", alias = "gpt-4-turbo")] FourTurbo, - #[serde(rename = "gpt-4o", alias = "gpt-4o-2024-05-13")] + #[serde(rename = "gpt-4o", alias = "gpt-4o")] #[default] FourOmni, - #[serde(rename = "gpt-4o-mini", alias = "gpt-4o-mini-2024-07-18")] + #[serde(rename = "gpt-4o-mini", alias = "gpt-4o-mini")] FourOmniMini, + #[serde(rename = "o1-preview", alias = "o1-preview")] + O1Preview, + #[serde(rename = "o1-mini", 
alias = "o1-mini")] + O1Mini, + #[serde(rename = "custom")] Custom { name: String, @@ -93,6 +98,8 @@ impl Model { "gpt-4-turbo-preview" => Ok(Self::FourTurbo), "gpt-4o" => Ok(Self::FourOmni), "gpt-4o-mini" => Ok(Self::FourOmniMini), + "o1-preview" => Ok(Self::O1Preview), + "o1-mini" => Ok(Self::O1Mini), _ => Err(anyhow!("invalid model id")), } } @@ -101,9 +108,11 @@ impl Model { match self { Self::ThreePointFiveTurbo => "gpt-3.5-turbo", Self::Four => "gpt-4", - Self::FourTurbo => "gpt-4-turbo-preview", + Self::FourTurbo => "gpt-4-turbo", Self::FourOmni => "gpt-4o", Self::FourOmniMini => "gpt-4o-mini", + Self::O1Preview => "o1-preview", + Self::O1Mini => "o1-mini", Self::Custom { name, .. } => name, } } @@ -115,6 +124,8 @@ impl Model { Self::FourTurbo => "gpt-4-turbo", Self::FourOmni => "gpt-4o", Self::FourOmniMini => "gpt-4o-mini", + Self::O1Preview => "o1-preview", + Self::O1Mini => "o1-mini", Self::Custom { name, display_name, .. } => display_name.as_ref().unwrap_or(name), @@ -123,11 +134,13 @@ impl Model { pub fn max_token_count(&self) -> usize { match self { - Self::ThreePointFiveTurbo => 4096, + Self::ThreePointFiveTurbo => 16385, Self::Four => 8192, Self::FourTurbo => 128000, Self::FourOmni => 128000, Self::FourOmniMini => 128000, + Self::O1Preview => 128000, + Self::O1Mini => 128000, Self::Custom { max_tokens, .. } => *max_tokens, } } diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index 0fd242c619..4d9870e896 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -165,7 +165,7 @@ Zed will also use the `OPENAI_API_KEY` environment variable if it's defined. #### OpenAI Custom Models {#openai-custom-models} -The Zed Assistant comes pre-configured to use the latest version for common models (GPT-3.5 Turbo, GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o mini). 
If you wish to use alternate models, perhaps a preview release or a dated model release, you can do so by adding the following to your Zed `settings.json`: +The Zed Assistant comes pre-configured to use the latest version for common models (GPT-3.5 Turbo, GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o mini). If you wish to use alternate models, perhaps a preview release or a dated model release or you wish to control the request parameters you can do so by adding the following to your Zed `settings.json`: ```json { @@ -176,6 +176,12 @@ The Zed Assistant comes pre-configured to use the latest version for common mode "provider": "openai", "name": "gpt-4o-2024-08-06", "max_tokens": 128000 + }, + { + "name": "o1-mini", + "display_name": "o1-mini", + "max_tokens": 128000, + "max_completion_tokens": 20000 } ] } @@ -183,7 +189,7 @@ The Zed Assistant comes pre-configured to use the latest version for common mode } ``` -You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). Custom models will be listed in the model dropdown in the assistant panel. +You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). OpenAI `o1` models should set `max_completion_tokens` as well to avoid incurring high reasoning token costs. Custom models will be listed in the model dropdown in the assistant panel. ### Advanced configuration {#advanced-configuration} From fac9ee5f861b501b4ca79ab04d410eeb49d9ec5e Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Fri, 13 Sep 2024 16:45:16 -0400 Subject: [PATCH 080/270] Add `ui_macros` crate & `DerivePathStr` derive macro (#17811) This PR adds the `ui_macros` crate to allow building supporting macros for the `ui` crate. Additionally, it implements the `DerivePathStr` derive macro and the `path_str` attribute macro. 
These macros work together to generate a `path` method for enum variants, which is useful for creating standardized string representations of enum variants. The `DerivePathStr` macro provides the following functionality: - Generates a `path` method for each enum variant. - Allows specifying a prefix (required) and suffix (optional) for all paths. - Supports `strum` attributes for case conversion (e.g., snake_case, lowercase). Usage example: ```rust #[derive(DerivePathStr)] #[path_str(prefix = "my_prefix", suffix = ".txt")] #[strum(serialize_all = "snake_case")] enum MyEnum { VariantOne, VariantTwo, } // Generated paths: // MyEnum::VariantOne.path() -> "my_prefix/variant_one.txt" // MyEnum::VariantTwo.path() -> "my_prefix/variant_two.txt" ``` In a later PR this will be used to automate the creation of icon & image paths in the `ui` crate. This gives the following benefits: 1. Ensures standard naming of assets as paths are not manually specified. 2. Makes adding new enum variants less tedious and error-prone. 3. Quickly catches missing or incorrect paths during compilation. 3. Adds a building block towards being able to lint for unused assets in the future. 
Release Notes: - N/A --- Cargo.lock | 11 +++ Cargo.toml | 3 + crates/editor/Cargo.toml | 2 +- crates/ui/Cargo.toml | 1 + crates/ui/src/path_str.rs | 33 ++++++++ crates/ui/src/ui.rs | 1 + crates/ui_macros/Cargo.toml | 19 +++++ crates/ui_macros/LICENSE-GPL | 1 + crates/ui_macros/src/derive_path_str.rs | 105 ++++++++++++++++++++++++ crates/ui_macros/src/ui_macros.rs | 53 ++++++++++++ 10 files changed, 228 insertions(+), 1 deletion(-) create mode 100644 crates/ui/src/path_str.rs create mode 100644 crates/ui_macros/Cargo.toml create mode 120000 crates/ui_macros/LICENSE-GPL create mode 100644 crates/ui_macros/src/derive_path_str.rs create mode 100644 crates/ui_macros/src/ui_macros.rs diff --git a/Cargo.lock b/Cargo.lock index 793cb66ad7..79f4e803a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12273,6 +12273,7 @@ dependencies = [ "story", "strum 0.25.0", "theme", + "ui_macros", "windows 0.58.0", ] @@ -12287,6 +12288,16 @@ dependencies = [ "ui", ] +[[package]] +name = "ui_macros" +version = "0.1.0" +dependencies = [ + "convert_case 0.6.0", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "unicase" version = "2.7.0" diff --git a/Cargo.toml b/Cargo.toml index 53109002fa..726ffe0cca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -118,6 +118,7 @@ members = [ "crates/title_bar", "crates/ui", "crates/ui_input", + "crates/ui_macros", "crates/util", "crates/vcs_menu", "crates/vim", @@ -292,6 +293,7 @@ time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } +ui_macros = { path = "crates/ui_macros" } util = { path = "crates/util" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } @@ -333,6 +335,7 @@ chrono = { version = "0.4", features = ["serde"] } clap = { version = "4.4", features = ["derive"] } clickhouse = "0.11.6" cocoa = "0.26" +convert_case = "0.6.0" core-foundation = "0.9.3" core-foundation-sys = "0.8.6" ctor = "0.2.6" diff --git 
a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 324201b41e..b1cc59ace6 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -35,7 +35,7 @@ chrono.workspace = true client.workspace = true clock.workspace = true collections.workspace = true -convert_case = "0.6.0" +convert_case.workspace = true db.workspace = true emojis.workspace = true file_icons.workspace = true diff --git a/crates/ui/Cargo.toml b/crates/ui/Cargo.toml index 71e67cb184..594814ae2a 100644 --- a/crates/ui/Cargo.toml +++ b/crates/ui/Cargo.toml @@ -23,6 +23,7 @@ smallvec.workspace = true story = { workspace = true, optional = true } strum = { workspace = true, features = ["derive"] } theme.workspace = true +ui_macros.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/ui/src/path_str.rs b/crates/ui/src/path_str.rs new file mode 100644 index 0000000000..2ebb3fedb3 --- /dev/null +++ b/crates/ui/src/path_str.rs @@ -0,0 +1,33 @@ +#[cfg(test)] +mod tests { + use strum::EnumString; + use ui_macros::{path_str, DerivePathStr}; + + #[test] + fn test_derive_path_str_with_prefix() { + #[derive(Debug, EnumString, DerivePathStr)] + #[strum(serialize_all = "snake_case")] + #[path_str(prefix = "test_prefix")] + enum MyEnum { + FooBar, + Baz, + } + + assert_eq!(MyEnum::FooBar.path(), "test_prefix/foo_bar"); + assert_eq!(MyEnum::Baz.path(), "test_prefix/baz"); + } + + #[test] + fn test_derive_path_str_with_prefix_and_suffix() { + #[derive(Debug, EnumString, DerivePathStr)] + #[strum(serialize_all = "snake_case")] + #[path_str(prefix = "test_prefix", suffix = ".txt")] + enum MyEnum { + FooBar, + Baz, + } + + assert_eq!(MyEnum::FooBar.path(), "test_prefix/foo_bar.txt"); + assert_eq!(MyEnum::Baz.path(), "test_prefix/baz.txt"); + } +} diff --git a/crates/ui/src/ui.rs b/crates/ui/src/ui.rs index a0146c69fa..4f5d6314be 100644 --- a/crates/ui/src/ui.rs +++ b/crates/ui/src/ui.rs @@ -8,6 +8,7 @@ mod components; mod disableable; mod fixed; mod 
key_bindings; +mod path_str; pub mod prelude; mod selectable; mod styled_ext; diff --git a/crates/ui_macros/Cargo.toml b/crates/ui_macros/Cargo.toml new file mode 100644 index 0000000000..72009f1162 --- /dev/null +++ b/crates/ui_macros/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "ui_macros" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/ui_macros.rs" +proc-macro = true + +[dependencies] +proc-macro2 = "1.0.66" +quote = "1.0.9" +syn = { version = "1.0.72", features = ["full", "extra-traits"] } +convert_case.workspace = true diff --git a/crates/ui_macros/LICENSE-GPL b/crates/ui_macros/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/ui_macros/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/ui_macros/src/derive_path_str.rs b/crates/ui_macros/src/derive_path_str.rs new file mode 100644 index 0000000000..3988bc0c5c --- /dev/null +++ b/crates/ui_macros/src/derive_path_str.rs @@ -0,0 +1,105 @@ +use convert_case::{Case, Casing}; +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, Attribute, Data, DeriveInput, Lit, Meta, NestedMeta}; + +pub fn derive_path_str(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + let prefix = get_attr_value(&input.attrs, "prefix").expect("prefix attribute is required"); + let suffix = get_attr_value(&input.attrs, "suffix").unwrap_or_else(|| "".to_string()); + + let serialize_all = get_strum_serialize_all(&input.attrs); + let path_str_impl = impl_path_str(name, &input.data, &prefix, &suffix, serialize_all); + + let expanded = quote! 
{ + impl #name { + pub fn path(&self) -> &'static str { + #path_str_impl + } + } + }; + + TokenStream::from(expanded) +} + +fn impl_path_str( + name: &syn::Ident, + data: &Data, + prefix: &str, + suffix: &str, + serialize_all: Option, +) -> proc_macro2::TokenStream { + match *data { + Data::Enum(ref data) => { + let match_arms = data.variants.iter().map(|variant| { + let ident = &variant.ident; + let variant_name = if let Some(ref case) = serialize_all { + match case.as_str() { + "snake_case" => ident.to_string().to_case(Case::Snake), + "lowercase" => ident.to_string().to_lowercase(), + _ => ident.to_string(), + } + } else { + ident.to_string() + }; + let path = format!("{}/{}{}", prefix, variant_name, suffix); + quote! { + #name::#ident => #path, + } + }); + + quote! { + match self { + #(#match_arms)* + } + } + } + _ => panic!("DerivePathStr only supports enums"), + } +} + +fn get_strum_serialize_all(attrs: &[Attribute]) -> Option { + attrs + .iter() + .filter(|attr| attr.path.is_ident("strum")) + .find_map(|attr| { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + meta_list.nested.iter().find_map(|nested_meta| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = nested_meta { + if name_value.path.is_ident("serialize_all") { + if let Lit::Str(lit_str) = &name_value.lit { + return Some(lit_str.value()); + } + } + } + None + }) + } else { + None + } + }) +} + +fn get_attr_value(attrs: &[Attribute], key: &str) -> Option { + attrs + .iter() + .filter(|attr| attr.path.is_ident("path_str")) + .find_map(|attr| { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + meta_list.nested.iter().find_map(|nested_meta| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = nested_meta { + if name_value.path.is_ident(key) { + if let Lit::Str(lit_str) = &name_value.lit { + return Some(lit_str.value()); + } + } + } + None + }) + } else { + None + } + }) +} diff --git a/crates/ui_macros/src/ui_macros.rs b/crates/ui_macros/src/ui_macros.rs new file mode 100644 
index 0000000000..a625caefd5 --- /dev/null +++ b/crates/ui_macros/src/ui_macros.rs @@ -0,0 +1,53 @@ +mod derive_path_str; + +use proc_macro::TokenStream; + +/// Derives the `path` method for an enum. +/// +/// This macro generates a `path` method for each variant of the enum, which returns a string +/// representation of the enum variant's path. The path is constructed using a prefix and +/// optionally a suffix, which are specified using attributes. +/// +/// # Attributes +/// +/// - `#[path_str(prefix = "...")]`: Required. Specifies the prefix for all paths. +/// - `#[path_str(suffix = "...")]`: Optional. Specifies a suffix for all paths. +/// - `#[strum(serialize_all = "...")]`: Optional. Specifies the case conversion for variant names. +/// +/// # Example +/// +/// ``` +/// use strum::EnumString; +/// use ui_macros::{path_str, DerivePathStr}; +/// +/// #[derive(EnumString, DerivePathStr)] +/// #[path_str(prefix = "my_prefix", suffix = ".txt")] +/// #[strum(serialize_all = "snake_case")] +/// enum MyEnum { +/// VariantOne, +/// VariantTwo, +/// } +/// +/// // These assertions would work if we could instantiate the enum +/// // assert_eq!(MyEnum::VariantOne.path(), "my_prefix/variant_one.txt"); +/// // assert_eq!(MyEnum::VariantTwo.path(), "my_prefix/variant_two.txt"); +/// ``` +/// +/// # Panics +/// +/// This macro will panic if used on anything other than an enum. +#[proc_macro_derive(DerivePathStr, attributes(path_str))] +pub fn derive_path_str(input: TokenStream) -> TokenStream { + derive_path_str::derive_path_str(input) +} + +/// A marker attribute for use with `DerivePathStr`. +/// +/// This attribute is used to specify the prefix and suffix for the `path` method +/// generated by `DerivePathStr`. It doesn't modify the input and is only used as a +/// marker for the derive macro. 
+#[proc_macro_attribute] +pub fn path_str(_args: TokenStream, input: TokenStream) -> TokenStream { + // This attribute doesn't modify the input, it's just a marker + input +} From ce848375fe6511c2e167a9d002af8af4772bf08c Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Fri, 13 Sep 2024 17:44:16 -0400 Subject: [PATCH 081/270] add `ui::Vector` and separate images from icons (#17815) This PR pulls non-icon assets out of `ui::components::icon` in preparation for icon standardization. In the future icons will have standard names and sizes, and these image assets won't conform to those constraints. We can also add a `ui::components::image::Image` wrapper around the `gpui::img` element in the future for any Zed-specific image styling we want to enforce. Of note: ```rust #[derive(Debug, PartialEq, Eq, Copy, Clone, EnumIter, EnumString, IntoStaticStr, Serialize, Deserialize, DerivePathStr)] #[strum(serialize_all = "snake_case")] #[path_str(prefix = "images", suffix = ".svg")] pub enum VectorName { ZedLogo, ZedXCopilot, } ``` You can see in the above code we no longer need to manually specify paths for image/icon enums like we currently do in `ui::components::icon`. 
The icon component will get this same treatment in the future, once we: - do the design work needed to standardize the icons - remove unused icons - update icon names Release Notes: - N/A --- assets/images/zed_logo.svg | 10 ++ assets/images/zed_x_copilot.svg | 14 +++ crates/assets/src/assets.rs | 1 + crates/copilot/src/sign_in.rs | 12 +- .../gpui_macros/src/derive_path_static_str.rs | 73 +++++++++++ crates/gpui_macros/src/gpui_macros.rs | 7 ++ crates/storybook/src/assets.rs | 1 + crates/storybook/src/story_selector.rs | 2 + crates/ui/src/components.rs | 4 + crates/ui/src/components/icon.rs | 2 - crates/ui/src/components/image.rs | 115 ++++++++++++++++++ 11 files changed, 231 insertions(+), 10 deletions(-) create mode 100644 assets/images/zed_logo.svg create mode 100644 assets/images/zed_x_copilot.svg create mode 100644 crates/gpui_macros/src/derive_path_static_str.rs create mode 100644 crates/ui/src/components/image.rs diff --git a/assets/images/zed_logo.svg b/assets/images/zed_logo.svg new file mode 100644 index 0000000000..d1769449c1 --- /dev/null +++ b/assets/images/zed_logo.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/images/zed_x_copilot.svg b/assets/images/zed_x_copilot.svg new file mode 100644 index 0000000000..3c5be71074 --- /dev/null +++ b/assets/images/zed_x_copilot.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/crates/assets/src/assets.rs b/crates/assets/src/assets.rs index 395cbf62f6..ee990085f6 100644 --- a/crates/assets/src/assets.rs +++ b/crates/assets/src/assets.rs @@ -8,6 +8,7 @@ use rust_embed::RustEmbed; #[folder = "../../assets"] #[include = "fonts/**/*"] #[include = "icons/**/*"] +#[include = "images/**/*"] #[include = "themes/**/*"] #[exclude = "themes/src/*"] #[include = "sounds/**/*"] diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index 1d14e5c1aa..da6b969b72 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -1,10 +1,10 @@ use 
crate::{request::PromptUserDeviceFlow, Copilot, Status}; use gpui::{ - div, svg, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, + div, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, FocusableView, InteractiveElement, IntoElement, Model, MouseDownEvent, ParentElement, Render, Styled, Subscription, ViewContext, }; -use ui::{prelude::*, Button, IconName, Label}; +use ui::{prelude::*, Button, Label, Vector, VectorName}; use workspace::ModalView; const COPILOT_SIGN_UP_URL: &str = "https://github.com/features/copilot"; @@ -198,12 +198,8 @@ impl Render for CopilotCodeVerification { cx.focus(&this.focus_handle); })) .child( - svg() - .w_32() - .h_16() - .flex_none() - .path(IconName::ZedXCopilot.path()) - .text_color(cx.theme().colors().icon), + Vector::new(VectorName::ZedXCopilot, rems(8.), rems(4.)) + .color(Color::Custom(cx.theme().colors().icon)), ) .child(prompt) } diff --git a/crates/gpui_macros/src/derive_path_static_str.rs b/crates/gpui_macros/src/derive_path_static_str.rs new file mode 100644 index 0000000000..25531fd2ad --- /dev/null +++ b/crates/gpui_macros/src/derive_path_static_str.rs @@ -0,0 +1,73 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, Attribute, Data, DeriveInput, Lit, Meta, NestedMeta}; + +pub fn derive_path_static_str(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + let prefix = get_attr_value(&input.attrs, "prefix").unwrap_or_else(|| "".to_string()); + let suffix = get_attr_value(&input.attrs, "suffix").unwrap_or_else(|| "".to_string()); + let delimiter = get_attr_value(&input.attrs, "delimiter").unwrap_or_else(|| "/".to_string()); + + let path_str_impl = impl_path_str(name, &input.data, &prefix, &suffix, &delimiter); + + let expanded = quote! 
{ + impl #name { + pub fn path_str(&self) -> &'static str { + #path_str_impl + } + } + }; + + TokenStream::from(expanded) +} + +fn impl_path_str( + name: &syn::Ident, + data: &Data, + prefix: &str, + suffix: &str, + delimiter: &str, +) -> proc_macro2::TokenStream { + match *data { + Data::Enum(ref data) => { + let match_arms = data.variants.iter().map(|variant| { + let ident = &variant.ident; + let path = format!("{}{}{}{}{}", prefix, delimiter, ident, delimiter, suffix); + quote! { + #name::#ident => #path, + } + }); + + quote! { + match self { + #(#match_arms)* + } + } + } + _ => panic!("DerivePathStr only supports enums"), + } +} + +fn get_attr_value(attrs: &[Attribute], key: &str) -> Option { + attrs + .iter() + .filter(|attr| attr.path.is_ident("derive_path_static_str")) + .find_map(|attr| { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + meta_list.nested.iter().find_map(|nested_meta| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = nested_meta { + if name_value.path.is_ident(key) { + if let Lit::Str(lit_str) = &name_value.lit { + return Some(lit_str.value()); + } + } + } + None + }) + } else { + None + } + }) +} diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index c4cf5358b3..09cf4027d2 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -1,4 +1,5 @@ mod derive_into_element; +mod derive_path_static_str; mod derive_render; mod register_action; mod styles; @@ -27,6 +28,12 @@ pub fn derive_render(input: TokenStream) -> TokenStream { derive_render::derive_render(input) } +#[proc_macro_derive(PathStaticStr)] +#[doc(hidden)] +pub fn derive_path_static_str(input: TokenStream) -> TokenStream { + derive_path_static_str::derive_path_static_str(input) +} + /// Used by GPUI to generate the style helpers. 
#[proc_macro] #[doc(hidden)] diff --git a/crates/storybook/src/assets.rs b/crates/storybook/src/assets.rs index da874e5f2d..f45d1457df 100644 --- a/crates/storybook/src/assets.rs +++ b/crates/storybook/src/assets.rs @@ -8,6 +8,7 @@ use rust_embed::RustEmbed; #[folder = "../../assets"] #[include = "fonts/**/*"] #[include = "icons/**/*"] +#[include = "images/**/*"] #[include = "themes/**/*"] #[include = "sounds/**/*"] #[include = "*.md"] diff --git a/crates/storybook/src/story_selector.rs b/crates/storybook/src/story_selector.rs index 5df02b1df2..881fd83f8f 100644 --- a/crates/storybook/src/story_selector.rs +++ b/crates/storybook/src/story_selector.rs @@ -40,6 +40,7 @@ pub enum ComponentStory { ToolStrip, ViewportUnits, WithRemSize, + Vector, } impl ComponentStory { @@ -75,6 +76,7 @@ impl ComponentStory { Self::ToolStrip => cx.new_view(|_| ui::ToolStripStory).into(), Self::ViewportUnits => cx.new_view(|_| crate::stories::ViewportUnitsStory).into(), Self::WithRemSize => cx.new_view(|_| crate::stories::WithRemSizeStory).into(), + Self::Vector => cx.new_view(|_| ui::VectorStory).into(), } } } diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 3a56e46eae..fe63b03502 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -7,6 +7,7 @@ mod divider; mod dropdown_menu; mod facepile; mod icon; +mod image; mod indicator; mod keybinding; mod label; @@ -37,6 +38,7 @@ pub use divider::*; pub use dropdown_menu::*; pub use facepile::*; pub use icon::*; +pub use image::*; pub use indicator::*; pub use keybinding::*; pub use label::*; @@ -55,5 +57,7 @@ pub use tab_bar::*; pub use tool_strip::*; pub use tooltip::*; +#[cfg(feature = "stories")] +pub use image::story::*; #[cfg(feature = "stories")] pub use stories::*; diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index 0001ab4a2b..fd4f17ac0e 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -271,7 +271,6 @@ pub enum 
IconName { XCircle, ZedAssistant, ZedAssistantFilled, - ZedXCopilot, Visible, } @@ -443,7 +442,6 @@ impl IconName { IconName::XCircle => "icons/error.svg", IconName::ZedAssistant => "icons/zed_assistant.svg", IconName::ZedAssistantFilled => "icons/zed_assistant_filled.svg", - IconName::ZedXCopilot => "icons/zed_x_copilot.svg", IconName::Visible => "icons/visible.svg", } } diff --git a/crates/ui/src/components/image.rs b/crates/ui/src/components/image.rs new file mode 100644 index 0000000000..286fe7f56f --- /dev/null +++ b/crates/ui/src/components/image.rs @@ -0,0 +1,115 @@ +use gpui::{svg, IntoElement, Rems, RenderOnce, Size, Styled, WindowContext}; +use serde::{Deserialize, Serialize}; +use strum::{EnumIter, EnumString, IntoStaticStr}; +use ui_macros::{path_str, DerivePathStr}; + +use crate::Color; + +#[derive( + Debug, + PartialEq, + Eq, + Copy, + Clone, + EnumIter, + EnumString, + IntoStaticStr, + Serialize, + Deserialize, + DerivePathStr, +)] +#[strum(serialize_all = "snake_case")] +#[path_str(prefix = "images", suffix = ".svg")] +pub enum VectorName { + ZedLogo, + ZedXCopilot, +} + +/// A vector image, such as an SVG. +/// +/// A [Vector] is different from an [Icon] in that it is intended +/// to be displayed at a specific size, or series of sizes, rather +/// than conforming to the standard size of an icons. +#[derive(IntoElement)] +pub struct Vector { + path: &'static str, + color: Color, + size: Size, +} + +impl Vector { + /// Create a new [Vector] image with the given [VectorName] and size. + pub fn new(vector: VectorName, width: Rems, height: Rems) -> Self { + Self { + path: vector.path(), + color: Color::default(), + size: Size { width, height }, + } + } + + /// Create a new [Vector] image where the width and height are the same. 
+ pub fn square(vector: VectorName, size: Rems) -> Self { + Self::new(vector, size, size) + } + + /// Set the image color + pub fn color(mut self, color: Color) -> Self { + self.color = color; + self + } + + /// Set the image size + pub fn size(mut self, size: impl Into>) -> Self { + let size = size.into(); + + self.size = size; + self + } +} + +impl RenderOnce for Vector { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let width = self.size.width; + let height = self.size.height; + + svg() + // By default, prevent the SVG from stretching + // to fill its container. + .flex_none() + .w(width) + .h(height) + .path(self.path) + .text_color(self.color.color(cx)) + } +} + +#[cfg(feature = "stories")] +pub mod story { + use gpui::Render; + use story::{Story, StoryItem, StorySection}; + use strum::IntoEnumIterator; + + use crate::prelude::*; + + use super::{Vector, VectorName}; + + pub struct VectorStory; + + impl Render for VectorStory { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + Story::container().child(StorySection::new().children(VectorName::iter().map( + |vector| StoryItem::new(format!("{:?}", vector), Vector::square(vector, rems(8.))), + ))) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn vector_path() { + assert_eq!(VectorName::ZedLogo.path(), "images/zed_logo.svg"); + } +} From e8a2dd92c82f56d01067aa9966eaaf095a7e657c Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Fri, 13 Sep 2024 21:12:29 -0400 Subject: [PATCH 082/270] Derive icon paths (#17816) This PR improves adding and working with icons by using the new `DerivePathStr` to derive icon paths. This means paths no longer need to be manually specified, and the `IconName` and file name will always be consistent between icons. This PR does not do any work to standardize icons visually, remove unused icons, or any other such cleanup. 
Release Notes: - N/A --- assets/icons/audio_off.svg | 1 + assets/icons/audio_on.svg | 1 + ...ase_insensitive.svg => case_sensitive.svg} | 0 assets/icons/{x.svg => close.svg} | 0 .../{text_select.svg => cursor_i_beam.svg} | 0 .../{text-cursor.svg => cursor_text.svg} | 0 assets/icons/{feedback.svg => envelope.svg} | 0 assets/icons/file_doc.svg | 6 + assets/icons/file_generic.svg | 5 + assets/icons/file_git.svg | 6 + assets/icons/file_lock.svg | 4 + assets/icons/file_rust.svg | 4 + assets/icons/file_toml.svg | 5 + assets/icons/{project.svg => file_tree.svg} | 0 assets/icons/folder.svg | 3 + assets/icons/folder_open.svg | 4 + .../icons/{stop_sharing.svg => folder_x.svg} | 0 ...{conversations.svg => message_bubbles.svg} | 0 assets/icons/{desktop.svg => screen.svg} | 0 assets/icons/settings.svg | 4 + .../{sliders-alt.svg => settings_alt.svg} | 0 assets/icons/speaker_off.svg | 8 - .../{user_group_16.svg => user_group.svg} | 0 .../icons/{word_search.svg => whole_word.svg} | 0 assets/icons/{error.svg => x_circle.svg} | 0 .../src/activity_indicator.rs | 6 +- crates/assistant/src/assistant_panel.rs | 6 +- crates/assistant/src/inline_assistant.rs | 2 +- .../src/slash_command/diagnostics_command.rs | 4 +- .../src/terminal_inline_assistant.rs | 2 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/diagnostics/src/diagnostics.rs | 4 +- crates/diagnostics/src/items.rs | 4 +- crates/diagnostics/src/toolbar_controls.rs | 2 +- .../quick_action_bar/src/quick_action_bar.rs | 2 +- crates/terminal_view/src/terminal_view.rs | 2 +- crates/ui/src/components/icon.rs | 219 +++--------------- .../ui/src/components/stories/list_header.rs | 2 +- crates/workspace/src/notifications.rs | 14 +- 39 files changed, 96 insertions(+), 226 deletions(-) create mode 100644 assets/icons/audio_off.svg create mode 100644 assets/icons/audio_on.svg rename assets/icons/{case_insensitive.svg => case_sensitive.svg} (100%) rename assets/icons/{x.svg => close.svg} (100%) rename assets/icons/{text_select.svg => 
cursor_i_beam.svg} (100%) rename assets/icons/{text-cursor.svg => cursor_text.svg} (100%) rename assets/icons/{feedback.svg => envelope.svg} (100%) create mode 100644 assets/icons/file_doc.svg create mode 100644 assets/icons/file_generic.svg create mode 100644 assets/icons/file_git.svg create mode 100644 assets/icons/file_lock.svg create mode 100644 assets/icons/file_rust.svg create mode 100644 assets/icons/file_toml.svg rename assets/icons/{project.svg => file_tree.svg} (100%) create mode 100644 assets/icons/folder.svg create mode 100644 assets/icons/folder_open.svg rename assets/icons/{stop_sharing.svg => folder_x.svg} (100%) rename assets/icons/{conversations.svg => message_bubbles.svg} (100%) rename assets/icons/{desktop.svg => screen.svg} (100%) create mode 100644 assets/icons/settings.svg rename assets/icons/{sliders-alt.svg => settings_alt.svg} (100%) delete mode 100644 assets/icons/speaker_off.svg rename assets/icons/{user_group_16.svg => user_group.svg} (100%) rename assets/icons/{word_search.svg => whole_word.svg} (100%) rename assets/icons/{error.svg => x_circle.svg} (100%) diff --git a/assets/icons/audio_off.svg b/assets/icons/audio_off.svg new file mode 100644 index 0000000000..93b98471ca --- /dev/null +++ b/assets/icons/audio_off.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/audio_on.svg b/assets/icons/audio_on.svg new file mode 100644 index 0000000000..42310ea32c --- /dev/null +++ b/assets/icons/audio_on.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/case_insensitive.svg b/assets/icons/case_sensitive.svg similarity index 100% rename from assets/icons/case_insensitive.svg rename to assets/icons/case_sensitive.svg diff --git a/assets/icons/x.svg b/assets/icons/close.svg similarity index 100% rename from assets/icons/x.svg rename to assets/icons/close.svg diff --git a/assets/icons/text_select.svg b/assets/icons/cursor_i_beam.svg similarity index 100% rename from assets/icons/text_select.svg rename to assets/icons/cursor_i_beam.svg diff --git 
a/assets/icons/text-cursor.svg b/assets/icons/cursor_text.svg similarity index 100% rename from assets/icons/text-cursor.svg rename to assets/icons/cursor_text.svg diff --git a/assets/icons/feedback.svg b/assets/icons/envelope.svg similarity index 100% rename from assets/icons/feedback.svg rename to assets/icons/envelope.svg diff --git a/assets/icons/file_doc.svg b/assets/icons/file_doc.svg new file mode 100644 index 0000000000..3b11995f36 --- /dev/null +++ b/assets/icons/file_doc.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_generic.svg b/assets/icons/file_generic.svg new file mode 100644 index 0000000000..3c72bd3320 --- /dev/null +++ b/assets/icons/file_generic.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_git.svg b/assets/icons/file_git.svg new file mode 100644 index 0000000000..197db2e9e6 --- /dev/null +++ b/assets/icons/file_git.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_lock.svg b/assets/icons/file_lock.svg new file mode 100644 index 0000000000..6bfef249b4 --- /dev/null +++ b/assets/icons/file_lock.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_rust.svg b/assets/icons/file_rust.svg new file mode 100644 index 0000000000..5db753628a --- /dev/null +++ b/assets/icons/file_rust.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_toml.svg b/assets/icons/file_toml.svg new file mode 100644 index 0000000000..9ab78af50f --- /dev/null +++ b/assets/icons/file_toml.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/project.svg b/assets/icons/file_tree.svg similarity index 100% rename from assets/icons/project.svg rename to assets/icons/file_tree.svg diff --git a/assets/icons/folder.svg b/assets/icons/folder.svg new file mode 100644 index 0000000000..a76dc63d1a --- /dev/null +++ b/assets/icons/folder.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/folder_open.svg b/assets/icons/folder_open.svg new file mode 100644 index 0000000000..ef37f55f83 --- /dev/null +++ b/assets/icons/folder_open.svg @@ 
-0,0 +1,4 @@ + + + + diff --git a/assets/icons/stop_sharing.svg b/assets/icons/folder_x.svg similarity index 100% rename from assets/icons/stop_sharing.svg rename to assets/icons/folder_x.svg diff --git a/assets/icons/conversations.svg b/assets/icons/message_bubbles.svg similarity index 100% rename from assets/icons/conversations.svg rename to assets/icons/message_bubbles.svg diff --git a/assets/icons/desktop.svg b/assets/icons/screen.svg similarity index 100% rename from assets/icons/desktop.svg rename to assets/icons/screen.svg diff --git a/assets/icons/settings.svg b/assets/icons/settings.svg new file mode 100644 index 0000000000..081d25bf48 --- /dev/null +++ b/assets/icons/settings.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/sliders-alt.svg b/assets/icons/settings_alt.svg similarity index 100% rename from assets/icons/sliders-alt.svg rename to assets/icons/settings_alt.svg diff --git a/assets/icons/speaker_off.svg b/assets/icons/speaker_off.svg deleted file mode 100644 index f60c35de7f..0000000000 --- a/assets/icons/speaker_off.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - diff --git a/assets/icons/user_group_16.svg b/assets/icons/user_group.svg similarity index 100% rename from assets/icons/user_group_16.svg rename to assets/icons/user_group.svg diff --git a/assets/icons/word_search.svg b/assets/icons/whole_word.svg similarity index 100% rename from assets/icons/word_search.svg rename to assets/icons/whole_word.svg diff --git a/assets/icons/error.svg b/assets/icons/x_circle.svg similarity index 100% rename from assets/icons/error.svg rename to assets/icons/x_circle.svg diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 4b6508edb0..3f567c9e80 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -262,7 +262,7 @@ impl ActivityIndicator { if !failed.is_empty() { return Some(Content { icon: Some( - 
Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), @@ -280,7 +280,7 @@ impl ActivityIndicator { if let Some(failure) = self.project.read(cx).last_formatting_failure() { return Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), @@ -333,7 +333,7 @@ impl ActivityIndicator { }), AutoUpdateStatus::Errored => Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 51c9aa9b4e..af3abec67c 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -4110,7 +4110,7 @@ impl ContextEditor { h_flex() .gap_3() .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) @@ -5235,7 +5235,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakView) -> ButtonLike::new(fold_id) .style(ButtonStyle::Filled) .layer(ElevationIndex::ElevatedSurface) - .child(Icon::new(IconName::TextSelect)) + .child(Icon::new(IconName::CursorIBeam)) .child(Label::new(title.clone()).single_line()) .on_click(move |_, cx| { editor @@ -5339,7 +5339,7 @@ fn render_docs_slash_command_trailer( div() .id(("latest-error", row.0)) .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index b01a712a7e..8b71e54746 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1478,7 +1478,7 @@ impl Render for PromptEditor { .child( ModelSelector::new( self.fs.clone(), - IconButton::new("context", IconName::SlidersAlt) + 
IconButton::new("context", IconName::SettingsAlt) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 6c821bd7b4..2105830651 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -193,11 +193,11 @@ impl SlashCommand for DiagnosticsSlashCommand { .map(|(range, placeholder_type)| SlashCommandOutputSection { range, icon: match placeholder_type { - PlaceholderType::Root(_, _) => IconName::ExclamationTriangle, + PlaceholderType::Root(_, _) => IconName::Warning, PlaceholderType::File(_) => IconName::File, PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle, PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => { - IconName::ExclamationTriangle + IconName::Warning } }, label: match placeholder_type { diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 61a8813f6c..d5c085b646 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -585,7 +585,7 @@ impl Render for PromptEditor { .gap_2() .child(ModelSelector::new( self.fs.clone(), - IconButton::new("context", IconName::SlidersAlt) + IconButton::new("context", IconName::SettingsAlt) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 7270110181..59f83e0654 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2831,7 +2831,7 @@ impl Panel for CollabPanel { fn icon(&self, cx: &gpui::WindowContext) -> Option { CollaborationPanelSettings::get_global(cx) .button - .then_some(ui::IconName::Collab) + .then_some(ui::IconName::UserGroup) } fn 
icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ced97be2dc..ddf39e0bfa 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -667,7 +667,7 @@ impl Item for ProjectDiagnosticsEditor { then.child( h_flex() .gap_1() - .child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning)) + .child(Icon::new(IconName::Warning).color(Color::Warning)) .child( Label::new(self.summary.warning_count.to_string()) .color(params.text_color()), @@ -804,7 +804,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock { icon.path(IconName::XCircle.path()) .text_color(Color::Error.color(cx)) } else { - icon.path(IconName::ExclamationTriangle.path()) + icon.path(IconName::Warning.path()) .text_color(Color::Warning.color(cx)) } }), diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 80b31b999c..72a4ac9bcf 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -30,7 +30,7 @@ impl Render for DiagnosticIndicator { (0, warning_count) => h_flex() .gap_1() .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) @@ -52,7 +52,7 @@ impl Render for DiagnosticIndicator { ) .child(Label::new(error_count.to_string()).size(LabelSize::Small)) .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index 64eb268304..b546db50a0 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -50,7 +50,7 @@ impl Render for ToolbarControls { ) }) .child( - IconButton::new("toggle-warnings", IconName::ExclamationTriangle) + IconButton::new("toggle-warnings", IconName::Warning) 
.tooltip(move |cx| Tooltip::text(tooltip, cx)) .on_click(cx.listener(|this, _, cx| { if let Some(editor) = this.editor() { diff --git a/crates/quick_action_bar/src/quick_action_bar.rs b/crates/quick_action_bar/src/quick_action_bar.rs index 0d530d6821..57418b54b7 100644 --- a/crates/quick_action_bar/src/quick_action_bar.rs +++ b/crates/quick_action_bar/src/quick_action_bar.rs @@ -150,7 +150,7 @@ impl Render for QuickActionBar { let focus = editor.focus_handle(cx); PopoverMenu::new("editor-selections-dropdown") .trigger( - IconButton::new("toggle_editor_selections_icon", IconName::TextCursor) + IconButton::new("toggle_editor_selections_icon", IconName::CursorIBeam) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 903e9eebd2..1869e33383 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -991,7 +991,7 @@ impl Item for TerminalView { Some(terminal_task) => match &terminal_task.status { TaskStatus::Running => (IconName::Play, Color::Disabled, None), TaskStatus::Unknown => ( - IconName::ExclamationTriangle, + IconName::Warning, Color::Warning, Some(rerun_button(terminal_task.id.clone())), ), diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index fd4f17ac0e..c2743ecbd9 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -1,6 +1,7 @@ use gpui::{svg, AnimationElement, Hsla, IntoElement, Rems, Transformation}; use serde::{Deserialize, Serialize}; use strum::{EnumIter, EnumString, IntoStaticStr}; +use ui_macros::DerivePathStr; use crate::{prelude::*, Indicator}; @@ -102,15 +103,27 @@ impl IconSize { } #[derive( - Debug, PartialEq, Eq, Copy, Clone, EnumIter, EnumString, IntoStaticStr, Serialize, Deserialize, + Debug, + PartialEq, + Eq, + Copy, + Clone, + EnumIter, + EnumString, + IntoStaticStr, + Serialize, + 
Deserialize, + DerivePathStr, )] +#[strum(serialize_all = "snake_case")] +#[path_str(prefix = "icons", suffix = ".svg")] pub enum IconName { Ai, AiAnthropic, AiAnthropicHosted, - AiOpenAi, AiGoogle, AiOllama, + AiOpenAi, AiZed, ArrowCircle, ArrowDown, @@ -135,15 +148,13 @@ pub enum IconName { CaseSensitive, Check, ChevronDown, - /// This chevron indicates a popover menu. - ChevronDownSmall, + ChevronDownSmall, // This chevron indicates a popover menu. ChevronLeft, ChevronRight, ChevronUp, ChevronUpDown, Close, Code, - Collab, Command, Context, Control, @@ -153,6 +164,8 @@ pub enum IconName { CopilotInit, Copy, CountdownTimer, + CursorIBeam, + CursorText, Dash, DatabaseZap, Delete, @@ -162,21 +175,20 @@ pub enum IconName { EllipsisVertical, Envelope, Escape, - ExclamationTriangle, Exit, ExpandVertical, ExternalLink, Eye, File, + FileCode, FileDoc, FileGeneric, FileGit, FileLock, FileRust, + FileText, FileToml, FileTree, - FileText, - FileCode, Filter, Folder, FolderOpen, @@ -184,11 +196,11 @@ pub enum IconName { Font, FontSize, FontWeight, - Github, - GenericMinimize, - GenericMaximize, GenericClose, + GenericMaximize, + GenericMinimize, GenericRestore, + Github, Hash, HistoryRerun, Indicator, @@ -228,21 +240,21 @@ pub enum IconName { Rerun, Return, Reveal, - Route, RotateCcw, RotateCw, + Route, Save, Screen, - SearchSelection, SearchCode, + SearchSelection, SelectAll, Server, Settings, + SettingsAlt, Shift, Slash, SlashSquare, Sliders, - SlidersAlt, Snip, Space, Sparkle, @@ -260,191 +272,18 @@ pub enum IconName { SupermavenInit, Tab, Terminal, - TextCursor, - TextSelect, Trash, TriangleRight, Undo, Unpin, Update, + UserGroup, + Visible, + Warning, WholeWord, XCircle, ZedAssistant, ZedAssistantFilled, - Visible, -} - -impl IconName { - pub fn path(self) -> &'static str { - match self { - IconName::Ai => "icons/ai.svg", - IconName::AiAnthropic => "icons/ai_anthropic.svg", - IconName::AiAnthropicHosted => "icons/ai_anthropic_hosted.svg", - IconName::AiOpenAi => 
"icons/ai_open_ai.svg", - IconName::AiGoogle => "icons/ai_google.svg", - IconName::AiOllama => "icons/ai_ollama.svg", - IconName::AiZed => "icons/ai_zed.svg", - IconName::ArrowCircle => "icons/arrow_circle.svg", - IconName::ArrowDown => "icons/arrow_down.svg", - IconName::ArrowDownFromLine => "icons/arrow_down_from_line.svg", - IconName::ArrowLeft => "icons/arrow_left.svg", - IconName::ArrowRight => "icons/arrow_right.svg", - IconName::ArrowUp => "icons/arrow_up.svg", - IconName::ArrowUpFromLine => "icons/arrow_up_from_line.svg", - IconName::ArrowUpRight => "icons/arrow_up_right.svg", - IconName::AtSign => "icons/at_sign.svg", - IconName::AudioOff => "icons/speaker_off.svg", - IconName::AudioOn => "icons/speaker_loud.svg", - IconName::Backspace => "icons/backspace.svg", - IconName::Bell => "icons/bell.svg", - IconName::BellDot => "icons/bell_dot.svg", - IconName::BellOff => "icons/bell_off.svg", - IconName::BellRing => "icons/bell_ring.svg", - IconName::Bolt => "icons/bolt.svg", - IconName::Book => "icons/book.svg", - IconName::BookCopy => "icons/book_copy.svg", - IconName::BookPlus => "icons/book_plus.svg", - IconName::CaseSensitive => "icons/case_insensitive.svg", - IconName::Check => "icons/check.svg", - IconName::ChevronDown => "icons/chevron_down.svg", - IconName::ChevronDownSmall => "icons/chevron_down_small.svg", - IconName::ChevronLeft => "icons/chevron_left.svg", - IconName::ChevronRight => "icons/chevron_right.svg", - IconName::ChevronUp => "icons/chevron_up.svg", - IconName::ChevronUpDown => "icons/chevron_up_down.svg", - IconName::Close => "icons/x.svg", - IconName::Code => "icons/code.svg", - IconName::Collab => "icons/user_group_16.svg", - IconName::Command => "icons/command.svg", - IconName::Context => "icons/context.svg", - IconName::Control => "icons/control.svg", - IconName::Copilot => "icons/copilot.svg", - IconName::CopilotDisabled => "icons/copilot_disabled.svg", - IconName::CopilotError => "icons/copilot_error.svg", - IconName::CopilotInit => 
"icons/copilot_init.svg", - IconName::Copy => "icons/copy.svg", - IconName::CountdownTimer => "icons/countdown_timer.svg", - IconName::Dash => "icons/dash.svg", - IconName::DatabaseZap => "icons/database_zap.svg", - IconName::Delete => "icons/delete.svg", - IconName::Disconnected => "icons/disconnected.svg", - IconName::Download => "icons/download.svg", - IconName::Ellipsis => "icons/ellipsis.svg", - IconName::EllipsisVertical => "icons/ellipsis_vertical.svg", - IconName::Envelope => "icons/feedback.svg", - IconName::Escape => "icons/escape.svg", - IconName::ExclamationTriangle => "icons/warning.svg", - IconName::Exit => "icons/exit.svg", - IconName::ExpandVertical => "icons/expand_vertical.svg", - IconName::ExternalLink => "icons/external_link.svg", - IconName::Eye => "icons/eye.svg", - IconName::File => "icons/file.svg", - IconName::FileDoc => "icons/file_icons/book.svg", - IconName::FileGeneric => "icons/file_icons/file.svg", - IconName::FileGit => "icons/file_icons/git.svg", - IconName::FileLock => "icons/file_icons/lock.svg", - IconName::FileRust => "icons/file_icons/rust.svg", - IconName::FileToml => "icons/file_icons/toml.svg", - IconName::FileTree => "icons/project.svg", - IconName::FileCode => "icons/file_code.svg", - IconName::FileText => "icons/file_text.svg", - IconName::Filter => "icons/filter.svg", - IconName::Folder => "icons/file_icons/folder.svg", - IconName::FolderOpen => "icons/file_icons/folder_open.svg", - IconName::FolderX => "icons/stop_sharing.svg", - IconName::Font => "icons/font.svg", - IconName::FontSize => "icons/font_size.svg", - IconName::FontWeight => "icons/font_weight.svg", - IconName::Github => "icons/github.svg", - IconName::GenericMinimize => "icons/generic_minimize.svg", - IconName::GenericMaximize => "icons/generic_maximize.svg", - IconName::GenericClose => "icons/generic_close.svg", - IconName::GenericRestore => "icons/generic_restore.svg", - IconName::Hash => "icons/hash.svg", - IconName::HistoryRerun => 
"icons/history_rerun.svg", - IconName::Indicator => "icons/indicator.svg", - IconName::IndicatorX => "icons/indicator_x.svg", - IconName::InlayHint => "icons/inlay_hint.svg", - IconName::Library => "icons/library.svg", - IconName::LineHeight => "icons/line_height.svg", - IconName::Link => "icons/link.svg", - IconName::ListTree => "icons/list_tree.svg", - IconName::MagnifyingGlass => "icons/magnifying_glass.svg", - IconName::MailOpen => "icons/mail_open.svg", - IconName::Maximize => "icons/maximize.svg", - IconName::Menu => "icons/menu.svg", - IconName::MessageBubbles => "icons/conversations.svg", - IconName::Mic => "icons/mic.svg", - IconName::MicMute => "icons/mic_mute.svg", - IconName::Microscope => "icons/microscope.svg", - IconName::Minimize => "icons/minimize.svg", - IconName::Option => "icons/option.svg", - IconName::PageDown => "icons/page_down.svg", - IconName::PageUp => "icons/page_up.svg", - IconName::Pencil => "icons/pencil.svg", - IconName::Person => "icons/person.svg", - IconName::Pin => "icons/pin.svg", - IconName::Play => "icons/play.svg", - IconName::Plus => "icons/plus.svg", - IconName::PocketKnife => "icons/pocket_knife.svg", - IconName::Public => "icons/public.svg", - IconName::PullRequest => "icons/pull_request.svg", - IconName::Quote => "icons/quote.svg", - IconName::Regex => "icons/regex.svg", - IconName::ReplNeutral => "icons/repl_neutral.svg", - IconName::Replace => "icons/replace.svg", - IconName::ReplaceAll => "icons/replace_all.svg", - IconName::ReplaceNext => "icons/replace_next.svg", - IconName::ReplyArrowRight => "icons/reply_arrow_right.svg", - IconName::Rerun => "icons/rerun.svg", - IconName::Return => "icons/return.svg", - IconName::Reveal => "icons/reveal.svg", - IconName::RotateCcw => "icons/rotate_ccw.svg", - IconName::RotateCw => "icons/rotate_cw.svg", - IconName::Route => "icons/route.svg", - IconName::Save => "icons/save.svg", - IconName::Screen => "icons/desktop.svg", - IconName::SearchSelection => 
"icons/search_selection.svg", - IconName::SearchCode => "icons/search_code.svg", - IconName::SelectAll => "icons/select_all.svg", - IconName::Server => "icons/server.svg", - IconName::Settings => "icons/file_icons/settings.svg", - IconName::Shift => "icons/shift.svg", - IconName::Slash => "icons/slash.svg", - IconName::SlashSquare => "icons/slash_square.svg", - IconName::Sliders => "icons/sliders.svg", - IconName::SlidersAlt => "icons/sliders-alt.svg", - IconName::Snip => "icons/snip.svg", - IconName::Space => "icons/space.svg", - IconName::Sparkle => "icons/sparkle.svg", - IconName::SparkleAlt => "icons/sparkle_alt.svg", - IconName::SparkleFilled => "icons/sparkle_filled.svg", - IconName::Spinner => "icons/spinner.svg", - IconName::Split => "icons/split.svg", - IconName::Star => "icons/star.svg", - IconName::StarFilled => "icons/star_filled.svg", - IconName::Stop => "icons/stop.svg", - IconName::Strikethrough => "icons/strikethrough.svg", - IconName::Supermaven => "icons/supermaven.svg", - IconName::SupermavenDisabled => "icons/supermaven_disabled.svg", - IconName::SupermavenError => "icons/supermaven_error.svg", - IconName::SupermavenInit => "icons/supermaven_init.svg", - IconName::Tab => "icons/tab.svg", - IconName::Terminal => "icons/terminal.svg", - IconName::TextCursor => "icons/text-cursor.svg", - IconName::TextSelect => "icons/text_select.svg", - IconName::Trash => "icons/trash.svg", - IconName::TriangleRight => "icons/triangle_right.svg", - IconName::Unpin => "icons/unpin.svg", - IconName::Update => "icons/update.svg", - IconName::Undo => "icons/undo.svg", - IconName::WholeWord => "icons/word_search.svg", - IconName::XCircle => "icons/error.svg", - IconName::ZedAssistant => "icons/zed_assistant.svg", - IconName::ZedAssistantFilled => "icons/zed_assistant_filled.svg", - IconName::Visible => "icons/visible.svg", - } - } } #[derive(IntoElement)] diff --git a/crates/ui/src/components/stories/list_header.rs b/crates/ui/src/components/stories/list_header.rs 
index 358dc26a87..afcae0215a 100644 --- a/crates/ui/src/components/stories/list_header.rs +++ b/crates/ui/src/components/stories/list_header.rs @@ -24,7 +24,7 @@ impl Render for ListHeaderStory { .child( ListHeader::new("Section 4") .end_slot(IconButton::new("action_1", IconName::Bolt)) - .end_slot(IconButton::new("action_2", IconName::ExclamationTriangle)) + .end_slot(IconButton::new("action_2", IconName::Warning)) .end_slot(IconButton::new("action_3", IconName::Plus)), ) } diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 2f8b14ddb7..ffab276dd1 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -311,15 +311,11 @@ impl Render for LanguageServerPrompt { .mt(px(-2.0)) .map(|icon| { if severity == DiagnosticSeverity::ERROR { - icon.path( - IconName::ExclamationTriangle.path(), - ) - .text_color(Color::Error.color(cx)) + icon.path(IconName::Warning.path()) + .text_color(Color::Error.color(cx)) } else { - icon.path( - IconName::ExclamationTriangle.path(), - ) - .text_color(Color::Warning.color(cx)) + icon.path(IconName::Warning.path()) + .text_color(Color::Warning.color(cx)) } }) }), @@ -421,7 +417,7 @@ impl Render for ErrorMessagePrompt { .mr_2() .mt(px(-2.0)) .map(|icon| { - icon.path(IconName::ExclamationTriangle.path()) + icon.path(IconName::Warning.path()) .text_color(Color::Error.color(cx)) }), ) From 4d8c3855c229d5aff02d39dd3eac10cf2007585c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 14 Sep 2024 15:44:54 -0400 Subject: [PATCH 083/270] theme: Clamp font sizes between 6px and 100px (#17829) This PR clamps the `ui_font_size` and `buffer_font_size` settings between 6px and 100px. Release Notes: - Changed `ui_font_size` and `buffer_font_size` to require values to be between 6px and 100px (inclusive). 
--- crates/theme/src/settings.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 5b88d5fcb5..4d0b4f0215 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -609,10 +609,14 @@ impl settings::Settings for ThemeSettings { this.apply_theme_overrides(); merge(&mut this.ui_font_size, value.ui_font_size.map(Into::into)); + this.ui_font_size = this.ui_font_size.clamp(px(6.), px(100.)); + merge( &mut this.buffer_font_size, value.buffer_font_size.map(Into::into), ); + this.buffer_font_size = this.buffer_font_size.clamp(px(6.), px(100.)); + merge(&mut this.buffer_line_height, value.buffer_line_height); // Clamp the `unnecessary_code_fade` to ensure text can't disappear entirely. From 00c0a7254a85c1728f8fdca06bce839360139db5 Mon Sep 17 00:00:00 2001 From: Zhang <17492978+zhang0098@users.noreply.github.com> Date: Sun, 15 Sep 2024 03:49:53 +0800 Subject: [PATCH 084/270] gpui: Allow TextInput example to lose and gain focus (#17823) Improved the input.rs example file in the gpui crate. The new code * allows this text field to lose and gain input focus. * changes TextInput's height from full to fixed. 
Release Notes: - N/A --- crates/gpui/examples/input.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index cdce4c61c7..7e7de269b1 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -467,9 +467,12 @@ impl Element for TextElement { let line = prepaint.line.take().unwrap(); line.paint(bounds.origin, cx.line_height(), cx).unwrap(); - if let Some(cursor) = prepaint.cursor.take() { - cx.paint_quad(cursor); + if focus_handle.is_focused(cx) { + if let Some(cursor) = prepaint.cursor.take() { + cx.paint_quad(cursor); + } } + self.input.update(cx, |input, _cx| { input.last_layout = Some(line); input.last_bounds = Some(bounds); @@ -499,7 +502,6 @@ impl Render for TextInput { .on_mouse_up_out(MouseButton::Left, cx.listener(Self::on_mouse_up)) .on_mouse_move(cx.listener(Self::on_mouse_move)) .bg(rgb(0xeeeeee)) - .size_full() .line_height(px(30.)) .text_size(px(24.)) .child( @@ -524,6 +526,13 @@ impl FocusableView for TextInput { struct InputExample { text_input: View, recent_keystrokes: Vec, + focus_handle: FocusHandle, +} + +impl FocusableView for InputExample { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } } impl InputExample { @@ -540,6 +549,7 @@ impl Render for InputExample { let num_keystrokes = self.recent_keystrokes.len(); div() .bg(rgb(0xaaaaaa)) + .track_focus(&self.focus_handle) .flex() .flex_col() .size_full() @@ -615,9 +625,10 @@ fn main() { last_bounds: None, is_selecting: false, }); - cx.new_view(|_| InputExample { + cx.new_view(|cx| InputExample { text_input, recent_keystrokes: vec![], + focus_handle: cx.focus_handle(), }) }, ) From 40a00fb2249fd9d359eb26a61e2af73950cd8f70 Mon Sep 17 00:00:00 2001 From: krizej <60076189+krizej@users.noreply.github.com> Date: Sat, 14 Sep 2024 22:06:03 +0200 Subject: [PATCH 085/270] Add missing operators and keywords to the C syntax highlighting (#17541) 
Based on https://en.cppreference.com/w/c/language/expressions#Operators Release Notes: - Added missing operators and keywords to the C syntax highlighting --- crates/languages/src/c/highlights.scm | 48 +++++++++++++++++++-------- 1 file changed, 35 insertions(+), 13 deletions(-) diff --git a/crates/languages/src/c/highlights.scm b/crates/languages/src/c/highlights.scm index 0a8c12f06f..634f8d81c4 100644 --- a/crates/languages/src/c/highlights.scm +++ b/crates/languages/src/c/highlights.scm @@ -9,6 +9,7 @@ "enum" "extern" "for" + "goto" "if" "inline" "return" @@ -35,27 +36,48 @@ ] @keyword [ - "--" - "-" - "-=" - "->" "=" - "!=" - "*" - "&" - "&&" - "+" - "++" "+=" - "<" - "==" - ">" + "-=" + "*=" + "/=" + "%=" + "&=" + "|=" + "^=" + "<<=" + ">>=" + "++" + "--" + "+" + "-" + "*" + "/" + "%" + "~" + "&" + "|" + "^" + "<<" + ">>" + "!" + "&&" "||" + "==" + "!=" + "<" + ">" + "<=" + ">=" + "->" + "?" + ":" ] @operator [ "." ";" + "," ] @punctuation.delimiter [ From d5268c519721241be876605dc37294edf7eed4a1 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Sun, 15 Sep 2024 04:17:38 +0800 Subject: [PATCH 086/270] docs: Add proxy settings (#17797) I'm not sure if I placed `Network Proxy` in the correct position. What I noticed is that the first half of the documentation seems to be organized alphabetically, but the second half is not. I tried to position `Network Proxy` in a spot that seemed reasonable while maintaining alphabetical order. If there's a better suggestion, I'd be happy to make adjustments. 
Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- docs/src/configuring-zed.md | 39 +++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 82f5a24484..1c4aee533d 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1061,6 +1061,45 @@ The following settings can be overridden for each specific language: These values take in the same options as the root-level settings with the same name. +## Network Proxy + +- Description: Configure a network proxy for Zed. +- Setting: `proxy` +- Default: `null` + +**Options** + +The proxy setting must contain a URL to the proxy. + +The following URI schemes are supported: + +- `http` +- `https` +- `socks4` +- `socks4a` +- `socks5` +- `socks5h` + +`http` will be used when no scheme is specified. + +By default no proxy will be used, or Zed will attempt to retrieve proxy settings from environment variables, such as `http_proxy`, `HTTP_PROXY`, `https_proxy`, `HTTPS_PROXY`, `all_proxy`, `ALL_PROXY`. + +For example, to set an `http` proxy, add the following to your settings: + +```json +{ + "proxy": "http://127.0.0.1:10809" +} +``` + +Or to set a `socks5` proxy: + +```json +{ + "proxy": "socks5://localhost:10808" +} +``` + ## Preview tabs - Description: From d56fa258304dacc3a18fb4da94f130047ffdb691 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 14 Sep 2024 17:00:37 -0400 Subject: [PATCH 087/270] context_servers: Hide actions when no context servers are configured (#17833) This PR filters out the context servers actions from the command palette when no context servers are configured. 
Release Notes: - N/A --- Cargo.lock | 1 + crates/context_servers/Cargo.toml | 1 + crates/context_servers/src/context_servers.rs | 3 ++ crates/context_servers/src/manager.rs | 50 +++++++++++++------ 4 files changed, 39 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 79f4e803a8..4cb7a85e35 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2780,6 +2780,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", + "command_palette_hooks", "futures 0.3.30", "gpui", "log", diff --git a/crates/context_servers/Cargo.toml b/crates/context_servers/Cargo.toml index 21bf6a1fc8..9c0336f121 100644 --- a/crates/context_servers/Cargo.toml +++ b/crates/context_servers/Cargo.toml @@ -14,6 +14,7 @@ path = "src/context_servers.rs" [dependencies] anyhow.workspace = true collections.workspace = true +command_palette_hooks.workspace = true futures.workspace = true gpui.workspace = true log.workspace = true diff --git a/crates/context_servers/src/context_servers.rs b/crates/context_servers/src/context_servers.rs index 3333f95f9f..55634bb77c 100644 --- a/crates/context_servers/src/context_servers.rs +++ b/crates/context_servers/src/context_servers.rs @@ -12,6 +12,9 @@ pub use registry::*; actions!(context_servers, [Restart]); +/// The namespace for the context servers actions. +const CONTEXT_SERVERS_NAMESPACE: &'static str = "context_servers"; + pub fn init(cx: &mut AppContext) { log::info!("initializing context server client"); manager::init(cx); diff --git a/crates/context_servers/src/manager.rs b/crates/context_servers/src/manager.rs index 1596a54eb9..08e403a434 100644 --- a/crates/context_servers/src/manager.rs +++ b/crates/context_servers/src/manager.rs @@ -15,6 +15,7 @@ //! and react to changes in settings. 
use collections::{HashMap, HashSet}; +use command_palette_hooks::CommandPaletteFilter; use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Task}; use log; use parking_lot::RwLock; @@ -24,6 +25,7 @@ use settings::{Settings, SettingsSources, SettingsStore}; use std::path::Path; use std::sync::Arc; +use crate::CONTEXT_SERVERS_NAMESPACE; use crate::{ client::{self, Client}, types, @@ -148,26 +150,28 @@ impl ContextServerManager { cx: &mut ModelContext, ) -> Task> { let server_id = config.id.clone(); - let server_id2 = config.id.clone(); if self.servers.contains_key(&server_id) || self.pending_servers.contains(&server_id) { return Task::ready(Ok(())); } - let task = cx.spawn(|this, mut cx| async move { - let server = Arc::new(ContextServer::new(config)); - server.start(&cx).await?; - this.update(&mut cx, |this, cx| { - this.servers.insert(server_id.clone(), server); - this.pending_servers.remove(&server_id); - cx.emit(Event::ServerStarted { - server_id: server_id.clone(), - }); - })?; - Ok(()) - }); + let task = { + let server_id = server_id.clone(); + cx.spawn(|this, mut cx| async move { + let server = Arc::new(ContextServer::new(config)); + server.start(&cx).await?; + this.update(&mut cx, |this, cx| { + this.servers.insert(server_id.clone(), server); + this.pending_servers.remove(&server_id); + cx.emit(Event::ServerStarted { + server_id: server_id.clone(), + }); + })?; + Ok(()) + }) + }; - self.pending_servers.insert(server_id2); + self.pending_servers.insert(server_id); task } @@ -243,15 +247,20 @@ impl GlobalContextServerManager { pub fn init(cx: &mut AppContext) { ContextServerSettings::register(cx); GlobalContextServerManager::register(cx); + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE); + }); + cx.observe_global::(|cx| { let manager = ContextServerManager::global(cx); cx.update_model(&manager, |manager, cx| { let settings = 
ContextServerSettings::get_global(cx); - let current_servers: HashMap = manager + let current_servers = manager .servers() .into_iter() .map(|server| (server.id.clone(), server.config.clone())) - .collect(); + .collect::>(); let new_servers = settings .servers @@ -279,6 +288,15 @@ pub fn init(cx: &mut AppContext) { for id in servers_to_remove { manager.remove_server(&id, cx).detach_and_log_err(cx); } + + let has_any_context_servers = !manager.servers().is_empty(); + CommandPaletteFilter::update_global(cx, |filter, _cx| { + if has_any_context_servers { + filter.show_namespace(CONTEXT_SERVERS_NAMESPACE); + } else { + filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE); + } + }); }) }) .detach(); From 6f337de440687bdc233c4e6f845f88d44a8f9740 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 14 Sep 2024 17:26:56 -0400 Subject: [PATCH 088/270] ui: Clean up doc comments for `Vector` (#17834) This PR cleans up the doc comments for the `Vector` component. Release Notes: - N/A --- crates/ui/src/components/image.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/ui/src/components/image.rs b/crates/ui/src/components/image.rs index 286fe7f56f..e7eefe5fea 100644 --- a/crates/ui/src/components/image.rs +++ b/crates/ui/src/components/image.rs @@ -27,9 +27,9 @@ pub enum VectorName { /// A vector image, such as an SVG. /// -/// A [Vector] is different from an [Icon] in that it is intended +/// A [`Vector`] is different from an [`Icon`] in that it is intended /// to be displayed at a specific size, or series of sizes, rather -/// than conforming to the standard size of an icons. +/// than conforming to the standard size of an icon. #[derive(IntoElement)] pub struct Vector { path: &'static str, @@ -38,7 +38,7 @@ pub struct Vector { } impl Vector { - /// Create a new [Vector] image with the given [VectorName] and size. + /// Creates a new [`Vector`] image with the given [`VectorName`] and size. 
pub fn new(vector: VectorName, width: Rems, height: Rems) -> Self { Self { path: vector.path(), @@ -47,18 +47,18 @@ impl Vector { } } - /// Create a new [Vector] image where the width and height are the same. + /// Creates a new [`Vector`] image where the width and height are the same. pub fn square(vector: VectorName, size: Rems) -> Self { Self::new(vector, size, size) } - /// Set the image color + /// Sets the vector color. pub fn color(mut self, color: Color) -> Self { self.color = color; self } - /// Set the image size + /// Sets the vector size. pub fn size(mut self, size: impl Into>) -> Self { let size = size.into(); From 4f251429c7e9596641d6716479fbd7a931245197 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 15 Sep 2024 19:45:06 -0400 Subject: [PATCH 089/270] Add perplexity extension readme (#17861) Release Notes: - N/A --- extensions/perplexity/README.md | 41 +++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 extensions/perplexity/README.md diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md new file mode 100644 index 0000000000..094a876885 --- /dev/null +++ b/extensions/perplexity/README.md @@ -0,0 +1,41 @@ +# Zed Perplexity Extension + +This example extension adds the `/perplexity` [slash command](https://zed.dev/docs/assistant/commands) to the Zed AI assistant. + +## Usage + +Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter: + +``` +/perplexity What's the weather in Boulder, CO tomorrow evening? +``` + +## Development Setup + +1. Install the rust toolchain and clone the zed repo: +``` +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + +mkdir -p ~/code +cd ~/code +git clone https://github.com/zed-industries/zed +``` + +2. Launch Zed and Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +3. Open Zed +4. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +5. Click "Install Dev Extension" +6. 
Navigate to the "extensions/perplexity" folder inside the zed git repo. +7. Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) +```sh +env |grep PERPLEXITY_API_KEY +``` +8. Quit and relaunch Zed + +## PERPLEXITY_API_KEY + +This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable. + +To obtain a Perplexity.ai API token, login to your Perplexity.ai account and go to [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently the cheapest model available, which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. + +Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Then close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`. From 524a1a6fec96d1824a2999c068df6ccc84956553 Mon Sep 17 00:00:00 2001 From: Chris Veness Date: Mon, 16 Sep 2024 00:45:48 +0100 Subject: [PATCH 090/270] Note in initial_user_settings.json how to access the command palette (#17854) Newcomers might not know / remember how to access the command palette. 
Release Notes: - N/A --- assets/settings/initial_user_settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/settings/initial_user_settings.json b/assets/settings/initial_user_settings.json index d8ac1a0021..71f3beb1d6 100644 --- a/assets/settings/initial_user_settings.json +++ b/assets/settings/initial_user_settings.json @@ -5,7 +5,7 @@ // // To see all of Zed's default settings without changing your // custom settings, run `zed: open default settings` from the -// command palette +// command palette (cmd-shift-p / ctrl-shift-p) { "ui_font_size": 16, "buffer_font_size": 16, From cdc3791544bbba0ee36c7b85b51f3df502511249 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Mon, 16 Sep 2024 00:24:18 -0400 Subject: [PATCH 091/270] Fix incorrect icons (#17856) --- assets/icons/cursor_i_beam.svg | 2 +- assets/icons/cursor_text.svg | 1 - assets/icons/text_snippet.svg | 1 + crates/assistant/src/assistant_panel.rs | 2 +- crates/ui/src/components/icon.rs | 2 +- 5 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 assets/icons/cursor_text.svg create mode 100644 assets/icons/text_snippet.svg diff --git a/assets/icons/cursor_i_beam.svg b/assets/icons/cursor_i_beam.svg index 255635de6a..2e7b95b203 100644 --- a/assets/icons/cursor_i_beam.svg +++ b/assets/icons/cursor_i_beam.svg @@ -1 +1 @@ - + diff --git a/assets/icons/cursor_text.svg b/assets/icons/cursor_text.svg deleted file mode 100644 index 2e7b95b203..0000000000 --- a/assets/icons/cursor_text.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/text_snippet.svg b/assets/icons/text_snippet.svg new file mode 100644 index 0000000000..255635de6a --- /dev/null +++ b/assets/icons/text_snippet.svg @@ -0,0 +1 @@ + diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index af3abec67c..8d158193a6 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -5235,7 +5235,7 @@ fn 
quote_selection_fold_placeholder(title: String, editor: WeakView) -> ButtonLike::new(fold_id) .style(ButtonStyle::Filled) .layer(ElevationIndex::ElevatedSurface) - .child(Icon::new(IconName::CursorIBeam)) + .child(Icon::new(IconName::TextSnippet)) .child(Label::new(title.clone()).single_line()) .on_click(move |_, cx| { editor diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index c2743ecbd9..a2f70244db 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -165,7 +165,7 @@ pub enum IconName { Copy, CountdownTimer, CursorIBeam, - CursorText, + TextSnippet, Dash, DatabaseZap, Delete, From 29a5def12ce233f5e98454f60af5ee4af20f25e5 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 16 Sep 2024 07:12:07 -0400 Subject: [PATCH 092/270] Refine assistant config UI (#17871) This PR does a little bit of a touch-up on the copywriting on the assistant config UI. I had friends reporting to me that some of the writing could be clearer, and hopefully, this goes into that direction! 
Release Notes: - N/A --- assets/icons/sliders_alt.svg | 6 ++++++ assets/icons/sliders_vertical.svg | 11 +++++++++++ crates/assistant/src/assistant_panel.rs | 4 +++- .../language_model/src/provider/anthropic.rs | 10 ++++------ .../src/provider/copilot_chat.rs | 2 +- crates/language_model/src/provider/google.rs | 10 ++++------ crates/language_model/src/provider/open_ai.rs | 18 +++++++++++------- crates/ui/src/components/icon.rs | 2 ++ 8 files changed, 42 insertions(+), 21 deletions(-) create mode 100644 assets/icons/sliders_alt.svg create mode 100644 assets/icons/sliders_vertical.svg diff --git a/assets/icons/sliders_alt.svg b/assets/icons/sliders_alt.svg new file mode 100644 index 0000000000..36c3feccfe --- /dev/null +++ b/assets/icons/sliders_alt.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/sliders_vertical.svg b/assets/icons/sliders_vertical.svg new file mode 100644 index 0000000000..ab61037a51 --- /dev/null +++ b/assets/icons/sliders_vertical.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 8d158193a6..59f5e81d05 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -4117,9 +4117,11 @@ impl ContextEditor { .child(Label::new(label)), ) .child( - Button::new("open-configuration", "Open configuration") + Button::new("open-configuration", "Configure Providers") .size(ButtonSize::Compact) + .icon(Some(IconName::SlidersVertical)) .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) .style(ButtonStyle::Filled) .on_click({ let focus_handle = self.focus_handle(cx).clone(); diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 9f7135aef7..1e3d275094 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -657,11 +657,10 @@ impl ConfigurationView { impl Render for ConfigurationView { 
fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { const ANTHROPIC_CONSOLE_URL: &str = "https://console.anthropic.com/settings/keys"; - const INSTRUCTIONS: [&str; 4] = [ - "To use the assistant panel or inline assistant, you need to add your Anthropic API key.", - "You can create an API key at:", - "", - "Paste your Anthropic API key below and hit enter to use the assistant:", + const INSTRUCTIONS: [&str; 3] = [ + "To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:", + "- Create one at:", + "- Paste your API key below and hit enter to use the assistant:", ]; let env_var_set = self.state.read(cx).api_key_from_env; @@ -682,7 +681,6 @@ impl Render for ConfigurationView { ) ) .child(Label::new(INSTRUCTIONS[2])) - .child(Label::new(INSTRUCTIONS[3])) .child( h_flex() .w_full() diff --git a/crates/language_model/src/provider/copilot_chat.rs b/crates/language_model/src/provider/copilot_chat.rs index e21060e54d..58b486921a 100644 --- a/crates/language_model/src/provider/copilot_chat.rs +++ b/crates/language_model/src/provider/copilot_chat.rs @@ -358,7 +358,7 @@ impl Render for ConfigurationView { } _ => { const LABEL: &str = - "To use the assistant panel or inline assistant, you must login to GitHub Copilot. Your GitHub account must have an active Copilot Chat subscription."; + "To use Zed's assistant with GitHub Copilot, you need to be logged in to GitHub. 
Note that your GitHub account must have an active Copilot Chat subscription."; v_flex().gap_6().child(Label::new(LABEL)).child( v_flex() .gap_2() diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs index 005f35ff8b..daa07cdc39 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_model/src/provider/google.rs @@ -446,11 +446,10 @@ impl ConfigurationView { impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { const GOOGLE_CONSOLE_URL: &str = "https://aistudio.google.com/app/apikey"; - const INSTRUCTIONS: [&str; 4] = [ - "To use the Google AI assistant, you need to add your Google AI API key.", - "You can create an API key at:", - "", - "Paste your Google AI API key below and hit enter to use the assistant:", + const INSTRUCTIONS: [&str; 3] = [ + "To use Zed's assistant with Google AI, you need to add an API key. Follow these steps:", + "- Create one by visiting:", + "- Paste your API key below and hit enter to use the assistant", ]; let env_var_set = self.state.read(cx).api_key_from_env; @@ -472,7 +471,6 @@ impl Render for ConfigurationView { ) ) .child(Label::new(INSTRUCTIONS[2])) - .child(Label::new(INSTRUCTIONS[3])) .child( h_flex() .w_full() diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 222c153041..a4efb3baf0 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -497,13 +497,11 @@ impl ConfigurationView { impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys"; - const INSTRUCTIONS: [&str; 6] = [ - "To use the assistant panel or inline assistant, you need to add your OpenAI API key.", - " - You can create an API key at: ", - " - Make sure your OpenAI account has credits", - " - Having a subscription 
for another service like GitHub Copilot won't work.", - "", - "Paste your OpenAI API key below and hit enter to use the assistant:", + const INSTRUCTIONS: [&str; 4] = [ + "To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:", + " - Create one by visiting:", + " - Ensure your OpenAI account has credits", + " - Paste your API key below and hit enter to start using the assistant", ]; let env_var_set = self.state.read(cx).api_key_from_env; @@ -543,6 +541,12 @@ impl Render for ConfigurationView { ) .size(LabelSize::Small), ) + .child( + Label::new( + "Note that having a subscription for another service like GitHub Copilot won't work.".to_string(), + ) + .size(LabelSize::Small), + ) .into_any() } else { h_flex() diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index a2f70244db..a71c3e9872 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -255,6 +255,7 @@ pub enum IconName { Slash, SlashSquare, Sliders, + SlidersVertical, Snip, Space, Sparkle, @@ -284,6 +285,7 @@ pub enum IconName { XCircle, ZedAssistant, ZedAssistantFilled, + ZedXCopilot, } #[derive(IntoElement)] From 96a5daaf3f5a4424840705ba282c11ed5c4e8e77 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 16 Sep 2024 07:12:24 -0400 Subject: [PATCH 093/270] Refine symbolic link project tooltip (#17869) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR uses the tooltip with meta to display the info that a project panel item is actually a symbolic link. 
| Before | After | |--------|--------| | Screenshot 2024-09-16 at 11 20 15 AM | Screenshot 2024-09-16 at 11 19 12 AM | Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c8e1ce28eb..cd4196dbc6 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2298,7 +2298,7 @@ impl ProjectPanel { .id("symlink_icon") .pr_3() .tooltip(move |cx| { - Tooltip::text(format!("{path} • Symbolic Link"), cx) + Tooltip::with_meta(path.to_string(), None, "Symbolic Link", cx) }) .child( Icon::new(IconName::ArrowUpRight) @@ -2767,7 +2767,6 @@ impl Render for ProjectPanel { .track_focus(&self.focus_handle) .child( Button::new("open_project", "Open a project") - .style(ButtonStyle::Filled) .full_width() .key_binding(KeyBinding::for_action(&workspace::Open, cx)) .on_click(cx.listener(|this, _, cx| { From 4e1bb68620f746a4683af4c335acdd207775a017 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 16 Sep 2024 08:28:27 -0400 Subject: [PATCH 094/270] Use buffer font for the inline assistant (#17875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/17738 This PR is motivated by having also noticed what the issue above mentions. Looked it further and it does seem the inline assistant had a slightly bigger font-size even though it was using the configured UI font-size as per https://github.com/zed-industries/zed/pull/17542. I'm not sure why that was, technically speaking. However, I ended up realizing that, given we're within the editor, it'd make more sense to use the buffer font instead. With this change, the size mismatch seems to be gone. 
Screenshot 2024-09-16 at 2 13 28 PM Release Notes: - N/A --- crates/assistant/src/inline_assistant.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 8b71e54746..30a5c98a54 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1465,7 +1465,7 @@ impl Render for PromptEditor { .border_y_1() .border_color(cx.theme().status().info_border) .size_full() - .py(cx.line_height() / 2.) + .py(cx.line_height() / 2.5) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) @@ -1918,12 +1918,11 @@ impl PromptEditor { } else { cx.theme().colors().text }, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: settings.ui_font_size.into(), - font_weight: settings.ui_font.weight, - line_height: relative(1.3), + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size.into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), ..Default::default() }; EditorElement::new( From 02dfe08ce8c025a6475905d3f233a37b61ffd3cf Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Mon, 16 Sep 2024 08:29:46 -0400 Subject: [PATCH 095/270] Welcome tweaks (#17874) This PR adds "Open Settings" and "Open Docs" to the welcome page, as well as some minor design polish. The welcome page needs a full redesign at some point so I didn't too to much here in terms of structure/content. Before | After: ![CleanShot 2024-09-16 at 08 12 23@2x](https://github.com/user-attachments/assets/722175ec-d129-4060-827f-f02f572115da) --- Release Notes: - Improved welcome page design and added additional links. 
--- Cargo.lock | 1 + crates/welcome/Cargo.toml | 3 ++- crates/welcome/src/welcome.rs | 43 ++++++++++++++++++++++++----------- 3 files changed, 33 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4cb7a85e35..b752fc8292 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13273,6 +13273,7 @@ dependencies = [ "util", "vim", "workspace", + "zed_actions", ] [[package]] diff --git a/crates/welcome/Cargo.toml b/crates/welcome/Cargo.toml index e747072cde..0db1af9252 100644 --- a/crates/welcome/Cargo.toml +++ b/crates/welcome/Cargo.toml @@ -17,11 +17,11 @@ test-support = [] [dependencies] anyhow.workspace = true client.workspace = true -inline_completion_button.workspace = true db.workspace = true extensions_ui.workspace = true fuzzy.workspace = true gpui.workspace = true +inline_completion_button.workspace = true install_cli.workspace = true picker.workspace = true project.workspace = true @@ -33,6 +33,7 @@ ui.workspace = true util.workspace = true vim.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index fc837c6867..1be2567c0a 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -25,6 +25,7 @@ pub use multibuffer_hint::*; actions!(welcome, [ResetHints]); pub const FIRST_OPEN: &str = "first_open"; +pub const DOCS_URL: &str = "https://zed.dev/docs/"; pub fn init(cx: &mut AppContext) { BaseKeymap::register(cx); @@ -74,27 +75,22 @@ impl Render for WelcomePage { .track_focus(&self.focus_handle) .child( v_flex() - .w_96() - .gap_4() + .w_80() + .gap_6() .mx_auto() .child( svg() .path("icons/logo_96.svg") - .text_color(gpui::white()) - .w(px(96.)) - .h(px(96.)) + .text_color(cx.theme().colors().icon_disabled) + .w(px(80.)) + .h(px(80.)) .mx_auto(), ) - .child( - h_flex() - .justify_center() - .child(Label::new("Code at the speed of thought")), - ) .child( 
v_flex() .gap_2() .child( - Button::new("choose-theme", "Choose a theme") + Button::new("choose-theme", "Choose Theme") .full_width() .on_click(cx.listener(|this, _, cx| { this.telemetry.report_app_event( @@ -112,7 +108,7 @@ impl Render for WelcomePage { })), ) .child( - Button::new("choose-keymap", "Choose a keymap") + Button::new("choose-keymap", "Choose Keymap") .full_width() .on_click(cx.listener(|this, _, cx| { this.telemetry.report_app_event( @@ -129,6 +125,27 @@ impl Render for WelcomePage { .ok(); })), ) + .child( + Button::new("edit settings", "Edit Settings") + .full_width() + .on_click(cx.listener(|this, _, cx| { + this.telemetry.report_app_event( + "welcome page: edit settings".to_string(), + ); + cx.dispatch_action(Box::new(zed_actions::OpenSettings)); + })), + ) + .child(Button::new("view docs", "View Docs").full_width().on_click( + cx.listener(|this, _, cx| { + this.telemetry + .report_app_event("welcome page: view docs".to_string()); + cx.open_url(DOCS_URL); + }), + )), + ) + .child( + v_flex() + .gap_2() .when(cfg!(target_os = "macos"), |el| { el.child( Button::new("install-cli", "Install the CLI") @@ -304,7 +321,7 @@ impl Item for WelcomePage { type Event = ItemEvent; fn tab_content_text(&self, _cx: &WindowContext) -> Option { - Some("Welcome to Zed!".into()) + Some("Welcome".into()) } fn telemetry_event_text(&self) -> Option<&'static str> { From f986513d0dc0f5ddc88089229970afcbb3f4453b Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 16 Sep 2024 09:00:20 -0400 Subject: [PATCH 096/270] gpui: Remove debug print statement (#17878) Release Notes: - N/A --- crates/gpui/src/app.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ac7d5eb47b..564b893489 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1504,9 +1504,3 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } - -impl Drop for AppContext { - fn 
drop(&mut self) { - println!("Dropping the App Context"); - } -} From e3d54b22118f54d13488707bc3cb9f0e9143b1c8 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 16 Sep 2024 09:01:20 -0400 Subject: [PATCH 097/270] vim: Fix ctrl-b not moving the cursor (#17808) Closes #17687 Release Notes: - Fixed `ctrl-b` not moving the cursor. --------- Co-authored-by: Abdelhakim Qbaich Co-authored-by: Pete LeVasseur --- crates/editor/src/scroll/scroll_amount.rs | 29 ++++++- crates/vim/src/normal/scroll.rs | 92 +++++++++++++++++++++-- crates/vim/test_data/test_ctrl_f_b.json | 24 ++++++ 3 files changed, 136 insertions(+), 9 deletions(-) create mode 100644 crates/vim/test_data/test_ctrl_f_b.json diff --git a/crates/editor/src/scroll/scroll_amount.rs b/crates/editor/src/scroll/scroll_amount.rs index d115be68a0..ee80b3d86f 100644 --- a/crates/editor/src/scroll/scroll_amount.rs +++ b/crates/editor/src/scroll/scroll_amount.rs @@ -1,6 +1,18 @@ use serde::Deserialize; use ui::{px, Pixels}; +#[derive(Debug)] +pub enum ScrollDirection { + Upwards, + Downwards, +} + +impl ScrollDirection { + pub fn is_upwards(&self) -> bool { + matches!(self, ScrollDirection::Upwards) + } +} + #[derive(Debug, Clone, PartialEq, Deserialize)] pub enum ScrollAmount { // Scroll N lines (positive is towards the end of the document) @@ -15,7 +27,7 @@ impl ScrollAmount { Self::Line(count) => *count, Self::Page(count) => { // for full pages subtract one to leave an anchor line - if count.abs() == 1.0 { + if self.is_full_page() { visible_line_count -= 1.0 } (visible_line_count * count).trunc() @@ -29,4 +41,19 @@ impl ScrollAmount { ScrollAmount::Page(x) => px(height.0 * x), } } + + pub fn is_full_page(&self) -> bool { + match self { + ScrollAmount::Page(count) if count.abs() == 1.0 => true, + _ => false, + } + } + + pub fn direction(&self) -> ScrollDirection { + match self { + Self::Line(amount) if amount.is_sign_positive() => ScrollDirection::Downwards, + Self::Page(amount) if amount.is_sign_positive() => 
ScrollDirection::Downwards, + _ => ScrollDirection::Upwards, + } + } } diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index f89faa3748..8d1443e633 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -73,14 +73,24 @@ fn scroll_editor( return; } - editor.scroll_screen(amount, cx); + let full_page_up = amount.is_full_page() && amount.direction().is_upwards(); + let amount = match (amount.is_full_page(), editor.visible_line_count()) { + (true, Some(visible_line_count)) => { + if amount.direction().is_upwards() { + ScrollAmount::Line(amount.lines(visible_line_count) + 1.0) + } else { + ScrollAmount::Line(amount.lines(visible_line_count) - 1.0) + } + } + _ => amount.clone(), + }; + + editor.scroll_screen(&amount, cx); if !should_move_cursor { return; } - let visible_line_count = if let Some(visible_line_count) = editor.visible_line_count() { - visible_line_count - } else { + let Some(visible_line_count) = editor.visible_line_count() else { return; }; @@ -115,11 +125,18 @@ fn scroll_editor( } else { DisplayRow(top.row().0 + vertical_scroll_margin) }; - let max_row = DisplayRow(map.max_point().row().0.max(top.row().0.saturating_add( - (visible_line_count as u32).saturating_sub(1 + vertical_scroll_margin), - ))); - let new_row = if head.row() < min_row { + let max_visible_row = top.row().0.saturating_add( + (visible_line_count as u32).saturating_sub(1 + vertical_scroll_margin), + ); + let max_row = DisplayRow(map.max_point().row().0.max(max_visible_row)); + + let new_row = if full_page_up { + // Special-casing ctrl-b/page-up, which is special-cased by Vim, it seems + // to always put the cursor on the last line of the page, even if the cursor + // was before that. 
+ DisplayRow(max_visible_row) + } else if head.row() < min_row { min_row } else if head.row() > max_row { max_row @@ -251,6 +268,7 @@ mod test { ) }); } + #[gpui::test] async fn test_ctrl_d_u(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; @@ -282,6 +300,64 @@ mod test { cx.shared_state().await.assert_matches(); } + #[gpui::test] + async fn test_ctrl_f_b(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + let visible_lines = 10; + cx.set_scroll_height(visible_lines).await; + + // First test without vertical scroll margin + cx.neovim.set_option(&format!("scrolloff={}", 0)).await; + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings::(cx, |s| { + s.vertical_scroll_margin = Some(0.0) + }); + }); + + let content = "ˇ".to_owned() + &sample_text(26, 2, 'a'); + cx.set_shared_state(&content).await; + + // scroll down: ctrl-f + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + // scroll up: ctrl-b + cx.simulate_shared_keystrokes("ctrl-b").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-b").await; + cx.shared_state().await.assert_matches(); + + // Now go back to start of file, and test with vertical scroll margin + cx.simulate_shared_keystrokes("g g").await; + cx.shared_state().await.assert_matches(); + + cx.neovim.set_option(&format!("scrolloff={}", 3)).await; + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings::(cx, |s| { + s.vertical_scroll_margin = Some(3.0) + }); + }); + + // scroll down: ctrl-f + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + // scroll up: ctrl-b + cx.simulate_shared_keystrokes("ctrl-b").await; + 
cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-b").await; + cx.shared_state().await.assert_matches(); + } + #[gpui::test] async fn test_scroll_beyond_last_line(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_ctrl_f_b.json b/crates/vim/test_data/test_ctrl_f_b.json new file mode 100644 index 0000000000..19c94d8b6e --- /dev/null +++ b/crates/vim/test_data/test_ctrl_f_b.json @@ -0,0 +1,24 @@ +{"SetOption":{"value":"scrolloff=3"}} +{"SetOption":{"value":"lines=12"}} +{"SetOption":{"value":"scrolloff=0"}} +{"Put":{"state":"ˇaa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nˇii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nˇqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nˇrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\nˇjj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"g"} +{"Key":"g"} +{"Get":{"state":"ˇaa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"SetOption":{"value":"scrolloff=3"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nˇll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\nˇtt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} 
+{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\nˇoo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\nˇgg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} From 2baa704af7e6150e99da63d19a9fa42223e8acce Mon Sep 17 00:00:00 2001 From: ihavecoke Date: Mon, 16 Sep 2024 22:29:42 +0800 Subject: [PATCH 098/270] Clamp `tab_size` setting between 1 and 16 (#17882) Release Notes: - Changed `tab_size` setting to require values be between 1 and 16 (inclusive). ### jetbrains settings #### Max value ![image](https://github.com/user-attachments/assets/54b772fd-e670-4d77-b3e9-757b08659f55) When the maximum value is exceeded, the configuration box turns red to remind the user ![image](https://github.com/user-attachments/assets/fcdb6313-be97-4528-b654-5900bcaeafec) If the user still saves, jetbrains does not process it and resets it to the system default value of 4 image Without restrictions, I feel not good. 
Here is a random setting of a relatively large value https://github.com/user-attachments/assets/c3bdf262-ba08-4bc2-996a-5ad2a37c567f --------- Co-authored-by: Marshall Bowers --- crates/language/src/language_settings.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index e1fcaaba28..b465173cee 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1027,6 +1027,10 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent } merge(&mut settings.tab_size, src.tab_size); + settings.tab_size = settings + .tab_size + .clamp(NonZeroU32::new(1).unwrap(), NonZeroU32::new(16).unwrap()); + merge(&mut settings.hard_tabs, src.hard_tabs); merge(&mut settings.soft_wrap, src.soft_wrap); merge(&mut settings.use_autoclose, src.use_autoclose); From 2cae6f3e088b1031a0a3ffe304613b1aaed454d1 Mon Sep 17 00:00:00 2001 From: Yohanes Bandung Bondowoso Date: Mon, 16 Sep 2024 22:03:51 +0700 Subject: [PATCH 099/270] dart: Respect LSP binary settings (#17494) Enable configuring Dart's LSP from other means of installation types. Some users don't install the `dart` binary, but uses version manager. In the example, I uses [FVM](https://fvm.app/) (short for "Flutter Version Manager"). I have tested this with "Install Dev Extensions". 
Release Notes: - N/A cc other maintainer: @agent3bood @flo80 --------- Co-authored-by: Marshall Bowers --- docs/src/languages/dart.md | 17 ++++++++++++ extensions/dart/extension.toml | 2 +- extensions/dart/src/dart.rs | 51 ++++++++++++++++++++++++++++++---- 3 files changed, 63 insertions(+), 7 deletions(-) diff --git a/docs/src/languages/dart.md b/docs/src/languages/dart.md index 6f80857c88..6571166b6d 100644 --- a/docs/src/languages/dart.md +++ b/docs/src/languages/dart.md @@ -5,6 +5,23 @@ Dart support is available through the [Dart extension](https://github.com/zed-in - Tree Sitter: [UserNobody14/tree-sitter-dart](https://github.com/UserNobody14/tree-sitter-dart) - Language Server: [dart language-server](https://github.com/dart-lang/sdk) +## Configuration + +The `dart` binary can be configured in a Zed settings file with: + +```json +{ + "lsp": { + "dart": { + "binary": { + "path": "/opt/homebrew/bin/fvm", + "arguments": ["dart", "language-server", "--protocol=lsp"] + } + } + } +} +``` + If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 5 +++-- crates/audio/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 111360b965..b31beee09c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9278,12 +9278,13 @@ dependencies = [ [[package]] name = "rodio" -version = "0.17.3" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b1bb7b48ee48471f55da122c0044fcc7600cfcc85db88240b89cb832935e611" +checksum = "6006a627c1a38d37f3d3a85c6575418cfe34a5392d60a686d0071e1c8d427acb" dependencies = [ "cpal", "hound", + "thiserror", ] [[package]] diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index bfe22de1f0..9502b58f93 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -18,5 +18,5 @@ collections.workspace = true derive_more.workspace = 
true gpui.workspace = true parking_lot.workspace = true -rodio = { version = "0.17.1", default-features = false, features = ["wav"] } +rodio = { version = "0.19.0", default-features = false, features = ["wav"] } util.workspace = true From 7d97855ed7fd7da2626a90baf9a6e7cba4f11439 Mon Sep 17 00:00:00 2001 From: Galen Elias Date: Mon, 16 Sep 2024 21:23:03 -0700 Subject: [PATCH 127/270] Use AppContext for UI font adjustments (#17858) Appologies if this PR is off base, I'm still not super familiar with the Zed codebase. I was trying to integrate with https://github.com/zed-industries/zed/pull/12940 and found it awkward to hook up global bindings to adjust the UI font size due to the fact it takes a WindowContext. Looking at the API, it seemed odd that it took a WindowContext, yet the editor font methods take an AppContext. I couldn't find a compelling reason for this to be tied to a WindowContext, so I personally think it makes sense to switch it. This does have a behavior change, which hopefully is actually desirable: Currently, if you have two open and visible Zed windows, and trigger a UI font adjustment in one, the non-active windows won't update. However, once you switch focus to the second one it will snap to the new UI font size. This is inconsistent with adjusting the editor font size, which applies to all open windows immediately. 
Release Notes: - N/A --- crates/theme/src/settings.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 4d0b4f0215..7fa9a870de 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -491,13 +491,13 @@ pub fn setup_ui_font(cx: &mut WindowContext) -> gpui::Font { ui_font } -pub fn get_ui_font_size(cx: &WindowContext) -> Pixels { +pub fn get_ui_font_size(cx: &AppContext) -> Pixels { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; cx.try_global::() .map_or(ui_font_size, |adjusted_size| adjusted_size.0) } -pub fn adjust_ui_font_size(cx: &mut WindowContext, f: fn(&mut Pixels)) { +pub fn adjust_ui_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; let mut adjusted_size = cx .try_global::() @@ -513,7 +513,7 @@ pub fn has_adjusted_ui_font_size(cx: &mut AppContext) -> bool { cx.has_global::() } -pub fn reset_ui_font_size(cx: &mut WindowContext) { +pub fn reset_ui_font_size(cx: &mut AppContext) { if cx.has_global::() { cx.remove_global::(); cx.refresh(); From d56e3d99b4a0dddbacd4e762c9be83d2f39f48ba Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 10:17:03 +0200 Subject: [PATCH 128/270] rust: Fix looking up `rust-analyzer` in `$PATH` by default (#17926) This is a follow-up to https://github.com/zed-industries/zed/pull/17885, which is reverted and fixed in this PR. This PR actually enables the behavior by default. Release Notes: - Changed `rust-analyzer` support to lookup `rust-analyzer` binaries by default in `$PATH`. That changes the default value to something users requested. 
--- crates/languages/src/rust.rs | 80 ++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 35 deletions(-) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index d102276e44..456ea8e449 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -38,45 +38,55 @@ impl LspAdapter for RustLspAdapter { delegate: &dyn LspAdapterDelegate, cx: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) - }); + let configured_binary = cx + .update(|cx| { + language_server_settings(delegate, Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) + }) + .ok()?; - match configured_binary { - Ok(Some(BinarySettings { - path, + let (path, env, arguments) = match configured_binary { + // If nothing is configured, or path_lookup explicitly enabled, + // we lookup the binary in the path. + None + | Some(BinarySettings { + path: None, + path_lookup: Some(true), + .. + }) + | Some(BinarySettings { + path: None, + path_lookup: None, + .. + }) => { + let path = delegate.which(Self::SERVER_NAME.as_ref()).await; + let env = delegate.shell_env().await; + (path, Some(env), None) + } + // Otherwise, we use the configured binary. + Some(BinarySettings { + path: Some(path), arguments, path_lookup, - })) => { - let (path, env) = match (path, path_lookup) { - (Some(path), lookup) => { - if lookup.is_some() { - log::warn!( - "Both `path` and `path_lookup` are set, ignoring `path_lookup`" - ); - } - (Some(path.into()), None) - } - (None, Some(true)) | (None, None) => { - // Try to lookup rust-analyzer in PATH by default. 
- let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; - let env = delegate.shell_env().await; - (Some(path), Some(env)) - } - (None, Some(false)) => (None, None), - }; - path.map(|path| LanguageServerBinary { - path, - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env, - }) + }) => { + if path_lookup.is_some() { + log::warn!("Both `path` and `path_lookup` are set, ignoring `path_lookup`"); + } + (Some(path.into()), None, arguments) } - _ => None, - } + + _ => (None, None, None), + }; + + path.map(|path| LanguageServerBinary { + path, + env, + arguments: arguments + .unwrap_or_default() + .iter() + .map(|arg| arg.into()) + .collect(), + }) } async fn fetch_latest_server_version( From 5f0925fb5d26704e37ad943859af2db3ffa57892 Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Tue, 17 Sep 2024 17:17:29 +0800 Subject: [PATCH 129/270] Add Python venv activation support for Windows and PowerShell (#17839) Release Notes: - Add Python venv activation support for Windows and PowerShell Additional: I discovered a related bug on my Windows system. When first opening the project, it fails to detect the virtual environment folder `.venv`. After expanding the .venv folder in the Project Panel, it then becomes able to detect the virtual environment folder. However, I don't know how to fix it. --- assets/settings/default.json | 2 +- crates/project/src/terminals.rs | 31 ++++++++++++++++++------ crates/terminal/src/terminal_settings.rs | 1 + 3 files changed, 26 insertions(+), 8 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 7c0dc831c3..7f8c823210 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -710,7 +710,7 @@ // to the current working directory. We recommend overriding this // in your project's settings, rather than globally. 
"directories": [".env", "env", ".venv", "venv"], - // Can also be `csh`, `fish`, and `nushell` + // Can also be `csh`, `fish`, `nushell` and `power_shell` "activate_script": "default" } }, diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 6793c028f7..136842d158 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -259,12 +259,16 @@ impl Project { cx: &AppContext, ) -> Option { let venv_settings = settings.detect_venv.as_option()?; + let bin_dir_name = match std::env::consts::OS { + "windows" => "Scripts", + _ => "bin", + }; venv_settings .directories .iter() .map(|virtual_environment_name| abs_path.join(virtual_environment_name)) .find(|venv_path| { - let bin_path = venv_path.join("bin"); + let bin_path = venv_path.join(bin_dir_name); self.find_worktree(&bin_path, cx) .and_then(|(worktree, relative_path)| { worktree.read(cx).entry_for_path(&relative_path) @@ -279,23 +283,36 @@ impl Project { settings: &TerminalSettings, ) -> Option { let venv_settings = settings.detect_venv.as_option()?; + let activate_keyword = match venv_settings.activate_script { + terminal_settings::ActivateScript::Default => match std::env::consts::OS { + "windows" => ".", + _ => "source", + }, + terminal_settings::ActivateScript::Nushell => "overlay use", + terminal_settings::ActivateScript::PowerShell => ".", + _ => "source", + }; let activate_script_name = match venv_settings.activate_script { terminal_settings::ActivateScript::Default => "activate", terminal_settings::ActivateScript::Csh => "activate.csh", terminal_settings::ActivateScript::Fish => "activate.fish", terminal_settings::ActivateScript::Nushell => "activate.nu", + terminal_settings::ActivateScript::PowerShell => "activate.ps1", }; let path = venv_base_directory - .join("bin") + .join(match std::env::consts::OS { + "windows" => "Scripts", + _ => "bin", + }) .join(activate_script_name) .to_string_lossy() .to_string(); let quoted = shlex::try_quote(&path).ok()?; 
- - Some(match venv_settings.activate_script { - terminal_settings::ActivateScript::Nushell => format!("overlay use {}\n", quoted), - _ => format!("source {}\n", quoted), - }) + let line_ending = match std::env::consts::OS { + "windows" => "\r", + _ => "\n", + }; + Some(format!("{} {}{}", activate_keyword, quoted, line_ending)) } fn activate_python_virtual_environment( diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 6c95052612..4051caf864 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -87,6 +87,7 @@ pub enum ActivateScript { Csh, Fish, Nushell, + PowerShell, } #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] From c34fc5c6e504bb66bc8973a2c95fc358b613cfea Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 11:47:17 +0200 Subject: [PATCH 130/270] lsp store: Refactor to use shared method to find buffer snapshot (#17929) Came across this code while investigating something else and I think we should use the same method. As far as I know, it does the same thing, except that `buffer_snapshot_for_lsp_version` also cleans up the stored snapshots. 
Release Notes: - N/A --- crates/project/src/lsp_store.rs | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ee02492dd8..fb3d52eb9e 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -6383,21 +6383,16 @@ impl LspStore { let buffer_id = buffer_to_edit.read(cx).remote_id(); let version = if let Some(buffer_version) = op.text_document.version { - this.buffer_snapshots - .get(&buffer_id) - .and_then(|server_to_snapshots| { - let all_snapshots = server_to_snapshots - .get(&language_server.server_id())?; - all_snapshots - .binary_search_by_key(&buffer_version, |snapshot| { - snapshot.version - }) - .ok() - .and_then(|index| all_snapshots.get(index)) - }) - .map(|lsp_snapshot| lsp_snapshot.snapshot.version()) + this.buffer_snapshot_for_lsp_version( + &buffer_to_edit, + language_server.server_id(), + Some(buffer_version), + cx, + ) + .ok() + .map(|snapshot| snapshot.version) } else { - Some(buffer_to_edit.read(cx).saved_version()) + Some(buffer_to_edit.read(cx).saved_version().clone()) }; let most_recent_edit = version.and_then(|version| { From 2165d52d3e97d0477d9769a2c4138b9e6d4b0531 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 14:33:53 +0200 Subject: [PATCH 131/270] project: Update variable and change comment (#17933) Previous this *was* the `cli_environment`, but now it's the project environment. 
Release Notes: - N/A --- crates/language/src/language_registry.rs | 15 ++++++++------- crates/project/src/lsp_store.rs | 4 ++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 4870c4ed53..918da4873f 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -860,7 +860,7 @@ impl LanguageRegistry { adapter: Arc, root_path: Arc, delegate: Arc, - cli_environment: Shared>>>, + project_environment: Shared>>>, cx: &mut AppContext, ) -> Option { let server_id = self.state.write().next_language_server_id(); @@ -881,7 +881,7 @@ impl LanguageRegistry { let task = cx.spawn({ let container_dir = container_dir.clone(); move |mut cx| async move { - let cli_environment = cli_environment.await; + let project_environment = project_environment.await; let binary_result = adapter .clone() @@ -892,15 +892,16 @@ impl LanguageRegistry { let mut binary = binary_result?; - // If this Zed project was opened from the CLI and the language server command itself + // If we do have a project environment (either by spawning a shell in in the project directory + // or by getting it from the CLI) and the language server command itself // doesn't have an environment (which it would have, if it was found in $PATH), then - // we pass along the CLI environment that we inherited. - if binary.env.is_none() && cli_environment.is_some() { + // we use the project environment. 
+ if binary.env.is_none() && project_environment.is_some() { log::info!( - "using CLI environment for language server {:?}, id: {server_id}", + "using project environment for language server {:?}, id: {server_id}", adapter.name.0 ); - binary.env = cli_environment.clone(); + binary.env = project_environment.clone(); } let options = adapter diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index fb3d52eb9e..e1e6001d24 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4646,7 +4646,7 @@ impl LspStore { let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); let lsp_adapter_delegate = ProjectLspAdapterDelegate::for_local(self, worktree_handle, cx); - let cli_environment = local.environment.update(cx, |environment, cx| { + let project_environment = local.environment.update(cx, |environment, cx| { environment.get_environment(Some(worktree_id), Some(worktree_path.clone()), cx) }); @@ -4656,7 +4656,7 @@ impl LspStore { adapter.clone(), Arc::clone(&worktree_path), lsp_adapter_delegate.clone(), - cli_environment, + project_environment, cx, ) { Some(pending_server) => pending_server, From 103f757c114c2712df5f98f05bb64ff1ff9ef2d0 Mon Sep 17 00:00:00 2001 From: Daste Date: Tue, 17 Sep 2024 14:48:05 +0200 Subject: [PATCH 132/270] tab_switcher: Add file and project search icons (#17115) I found tab switcher file icons to be missing. They were mentioned in the [initial tab switcher issue](https://github.com/zed-industries/zed/issues/7653), but left to be added later (mentioned in https://github.com/zed-industries/zed/pull/7987). I also noticed that the project search icon went missing, but I'm not sure if that's intentional. These changes re-introduce it, as it's provided by the generic `tab_icon()` function. There's a small difference between the terminal item and everything else, because terminal's `tab_content` returns a slightly different layout, which adds a little more space between the icon and text. 
I'll look into resolving this withouth changing too much stuff around in the terminal crate. If you have any ideas on how to do this well, please comment. The new `tab_switcher` config section only has a single boolean option - `show_icons`. It toggles between icons and not icons, but doesn't disable the terminal icon. Implementing this would probably also require some refactoring in terminal's `tab_content` function. Release Notes: - Added file icons to the tab switcher Screenshot: ![image](https://github.com/user-attachments/assets/17f3f4a3-1f95-4830-aef1-cda280726385) --- crates/diagnostics/src/diagnostics.rs | 59 ++++++++++++----------- crates/tab_switcher/src/tab_switcher.rs | 3 ++ crates/terminal_view/src/terminal_view.rs | 2 +- 3 files changed, 36 insertions(+), 28 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ddf39e0bfa..eec4f735ec 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -645,37 +645,42 @@ impl Item for ProjectDiagnosticsEditor { } fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement { - if self.summary.error_count == 0 && self.summary.warning_count == 0 { - Label::new("No problems") - .color(params.text_color()) - .into_any_element() - } else { - h_flex() - .gap_1() - .when(self.summary.error_count > 0, |then| { + h_flex() + .gap_1() + .when( + self.summary.error_count == 0 && self.summary.warning_count == 0, + |then| { then.child( h_flex() .gap_1() - .child(Icon::new(IconName::XCircle).color(Color::Error)) - .child( - Label::new(self.summary.error_count.to_string()) - .color(params.text_color()), - ), + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new("No problems").color(params.text_color())), ) - }) - .when(self.summary.warning_count > 0, |then| { - then.child( - h_flex() - .gap_1() - .child(Icon::new(IconName::Warning).color(Color::Warning)) - .child( - 
Label::new(self.summary.warning_count.to_string()) - .color(params.text_color()), - ), - ) - }) - .into_any_element() - } + }, + ) + .when(self.summary.error_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child( + Label::new(self.summary.error_count.to_string()) + .color(params.text_color()), + ), + ) + }) + .when(self.summary.warning_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Warning).color(Color::Warning)) + .child( + Label::new(self.summary.warning_count.to_string()) + .color(params.text_color()), + ), + ) + }) + .into_any_element() } fn telemetry_event_text(&self) -> Option<&'static str> { diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index e8966ac5b9..0b3eaf9e8a 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -378,6 +378,9 @@ impl PickerDelegate for TabSwitcherDelegate { .inset(true) .selected(selected) .child(h_flex().w_full().child(label)) + .when_some(tab_match.item.tab_icon(cx), |el, icon| { + el.start_slot(div().child(icon)) + }) .map(|el| { if self.selected_index == ix { el.end_slot::(close_button) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 1869e33383..f19bfa7010 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1008,7 +1008,7 @@ impl Item for TerminalView { }; h_flex() - .gap_2() + .gap_1() .group("term-tab-icon") .child( h_flex() From 4139a9a75862cbb8922a7f74537e09aad5ca1e62 Mon Sep 17 00:00:00 2001 From: iugo Date: Tue, 17 Sep 2024 21:23:48 +0800 Subject: [PATCH 133/270] docs: Document usage of deno fmt (#17918) Clarify in the settings description that the default formatter leverages `deno fmt`. This makes it clearer for users what to expect and how formatting is handled out of the box. 
Co-authored-by: Peter Tripp --- docs/src/languages/deno.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/deno.md b/docs/src/languages/deno.md index 5b92acfeeb..9ba1526ab0 100644 --- a/docs/src/languages/deno.md +++ b/docs/src/languages/deno.md @@ -26,7 +26,8 @@ To use the Deno Language Server with TypeScript and TSX files, you will likely w "!typescript-language-server", "!vtsls", "!eslint" - ] + ], + "formatter": "language_server" }, "TSX": { "language_servers": [ @@ -34,7 +35,8 @@ To use the Deno Language Server with TypeScript and TSX files, you will likely w "!typescript-language-server", "!vtsls", "!eslint" - ] + ], + "formatter": "language_server" } } } From f1d21362fa2b40406c2a71da7b4353d622095ea7 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 15:37:43 +0200 Subject: [PATCH 134/270] editor: Fix cursor_shape regression by not setting it to "bar" (#17934) This fixes the regression introduced here: https://github.com/zed-industries/zed/pull/17572#issuecomment-2355632615 Essentially: instead of always setting the value when saving settings, we don't set it by default, but fall back to the default value if it's not set. That fixes Vim mode's cursor being overwritten when settings change. Release Notes: - N/A --- assets/settings/default.json | 4 ++-- crates/editor/src/editor.rs | 8 ++++++-- crates/editor/src/editor_settings.rs | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 7f8c823210..35b2ca20f2 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -121,8 +121,8 @@ // 4. A box drawn around the following character // "hollow" // - // Default: bar - "cursor_shape": "bar", + // Default: not set, defaults to "bar" + "cursor_shape": null, // How to highlight the current line in the editor. // // 1. 
Don't highlight the current line: diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e583f2d963..c3c54e4951 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1904,7 +1904,9 @@ impl Editor { linked_editing_range_task: Default::default(), pending_rename: Default::default(), searchable: true, - cursor_shape: EditorSettings::get_global(cx).cursor_shape, + cursor_shape: EditorSettings::get_global(cx) + .cursor_shape + .unwrap_or_default(), current_line_highlight: None, autoindent_mode: Some(AutoindentMode::EachLine), collapse_matches: false, @@ -11820,7 +11822,9 @@ impl Editor { cx, ); let editor_settings = EditorSettings::get_global(cx); - self.cursor_shape = editor_settings.cursor_shape; + if let Some(cursor_shape) = editor_settings.cursor_shape { + self.cursor_shape = cursor_shape; + } self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index c6c5f111cb..d651e76c2c 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -7,7 +7,7 @@ use settings::{Settings, SettingsSources}; #[derive(Deserialize, Clone)] pub struct EditorSettings { pub cursor_blink: bool, - pub cursor_shape: CursorShape, + pub cursor_shape: Option, pub current_line_highlight: CurrentLineHighlight, pub hover_popover_enabled: bool, pub show_completions_on_input: bool, @@ -182,7 +182,7 @@ pub struct EditorSettingsContent { /// Cursor shape for the default editor. /// Can be "bar", "block", "underscore", or "hollow". /// - /// Default: bar + /// Default: None pub cursor_shape: Option, /// How to highlight the current line in the editor. 
/// From c48584fb7964d998b7bfe8f426f60f909db496d4 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 17 Sep 2024 07:04:33 -0700 Subject: [PATCH 135/270] supermaven: Fix incorrect offset calculation (#17925) Fixes a bug in https://github.com/zed-industries/zed/pull/17578 when computing the offset. Specifically, `offset.add_assign()` should be incremented on every loop match instead of only when the completion text is found. Before: ![image](https://github.com/user-attachments/assets/cc09dbf9-03e8-4453-a1c7-11f838c1d959) After: ![image](https://github.com/user-attachments/assets/f3513769-d9e1-451f-97dc-b9ad3a57ce3a) Release Notes: - Fixed a wrong offset calculation in the Supermaven inline completion provider. --- crates/supermaven/src/supermaven_completion_provider.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index 4119771714..261ce372d9 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -77,10 +77,10 @@ fn completion_state_from_diff( snapshot.anchor_after(offset), completion_text[i..i + k].into(), )); - offset.add_assign(j); } i += k + 1; j += 1; + offset.add_assign(1); } None => { // there are no more matching completions, so drop the remaining From a20c0eb62648adb99834c37cfba0961daf771d36 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 10:06:33 -0400 Subject: [PATCH 136/270] Improve error message when docs need fixing with Prettier (#17907) --- .github/workflows/docs.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index be0f3c5a82..13fe0411a0 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,5 +20,11 @@ jobs: with: version: 9 - - run: pnpm dlx prettier . --check + - run: | + pnpm dlx prettier . 
--check || { + echo "To fix, run from the root of the zed repo:" + echo " cd docs && pnpm dlx prettier . --write && cd .." + false + } + working-directory: ./docs From 54b8232be2013e955e7b7d2a61cc82dc18cea5fa Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 17 Sep 2024 08:47:08 -0600 Subject: [PATCH 137/270] Introduce a new `/delta` command (#17903) Release Notes: - Added a new `/delta` command to re-insert changed files that were previously included in a context. --------- Co-authored-by: Roy --- Cargo.lock | 1 + crates/assistant/src/assistant.rs | 7 +- crates/assistant/src/assistant_panel.rs | 17 +- crates/assistant/src/context.rs | 15 +- crates/assistant/src/context/context_tests.rs | 5 +- crates/assistant/src/slash_command.rs | 1 + .../src/slash_command/auto_command.rs | 4 +- .../slash_command/context_server_command.rs | 6 +- .../src/slash_command/default_command.rs | 5 +- .../src/slash_command/delta_command.rs | 109 ++++++++ .../src/slash_command/diagnostics_command.rs | 236 ++++++---------- .../src/slash_command/docs_command.rs | 5 +- .../src/slash_command/fetch_command.rs | 5 +- .../src/slash_command/file_command.rs | 258 ++++++++---------- .../src/slash_command/now_command.rs | 5 +- .../src/slash_command/project_command.rs | 5 +- .../src/slash_command/prompt_command.rs | 5 +- .../src/slash_command/search_command.rs | 3 + .../src/slash_command/symbols_command.rs | 5 +- .../src/slash_command/tab_command.rs | 47 +--- .../src/slash_command/terminal_command.rs | 5 +- .../src/slash_command/workflow_command.rs | 5 +- crates/assistant_slash_command/Cargo.toml | 1 + .../src/assistant_slash_command.rs | 13 +- .../extension/src/extension_slash_command.rs | 5 +- crates/proto/proto/zed.proto | 1 + 26 files changed, 408 insertions(+), 366 deletions(-) create mode 100644 crates/assistant/src/slash_command/delta_command.rs diff --git a/Cargo.lock b/Cargo.lock index b31beee09c..6d0f7f54a0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -455,6 +455,7 @@ dependencies = 
[ "language", "parking_lot", "serde", + "serde_json", "workspace", ] diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 7a73c188ec..af7f03ebb3 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,9 +41,9 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - auto_command, context_server_command, default_command, diagnostics_command, docs_command, - fetch_command, file_command, now_command, project_command, prompt_command, search_command, - symbols_command, tab_command, terminal_command, workflow_command, + auto_command, context_server_command, default_command, delta_command, diagnostics_command, + docs_command, fetch_command, file_command, now_command, project_command, prompt_command, + search_command, symbols_command, tab_command, terminal_command, workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -367,6 +367,7 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut let slash_command_registry = SlashCommandRegistry::global(cx); slash_command_registry.register_command(file_command::FileSlashCommand, true); + slash_command_registry.register_command(delta_command::DeltaSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); slash_command_registry.register_command(tab_command::TabSlashCommand, true); slash_command_registry.register_command(project_command::ProjectSlashCommand, true); diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 59f5e81d05..52838b5c77 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -1906,7 +1906,22 @@ impl ContextEditor { cx: &mut ViewContext, ) { if let Some(command) = SlashCommandRegistry::global(cx).command(name) { - let output = command.run(arguments, workspace, 
self.lsp_adapter_delegate.clone(), cx); + let context = self.context.read(cx); + let sections = context + .slash_command_output_sections() + .into_iter() + .filter(|section| section.is_valid(context.buffer().read(cx))) + .cloned() + .collect::>(); + let snapshot = context.buffer().read(cx).snapshot(); + let output = command.run( + arguments, + §ions, + snapshot, + workspace, + self.lsp_adapter_delegate.clone(), + cx, + ); self.context.update(cx, |context, cx| { context.insert_command_output( command_range, diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 38ccddb962..d55b1aee08 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -48,7 +48,7 @@ use std::{ }; use telemetry_events::AssistantKind; use text::BufferSnapshot; -use util::{post_inc, TryFutureExt}; +use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; #[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)] @@ -162,6 +162,9 @@ impl ContextOperation { )?, icon: section.icon_name.parse()?, label: section.label.into(), + metadata: section + .metadata + .and_then(|metadata| serde_json::from_str(&metadata).log_err()), }) }) .collect::>>()?, @@ -242,6 +245,9 @@ impl ContextOperation { )), icon_name: icon_name.to_string(), label: section.label.to_string(), + metadata: section.metadata.as_ref().and_then(|metadata| { + serde_json::to_string(metadata).log_err() + }), } }) .collect(), @@ -635,12 +641,13 @@ impl Context { .slash_command_output_sections .iter() .filter_map(|section| { - let range = section.range.to_offset(buffer); - if section.range.start.is_valid(buffer) && !range.is_empty() { + if section.is_valid(buffer) { + let range = section.range.to_offset(buffer); Some(assistant_slash_command::SlashCommandOutputSection { range, icon: section.icon, label: section.label.clone(), + metadata: section.metadata.clone(), }) } else { None @@ -1825,6 +1832,7 @@ impl Context { ..buffer.anchor_before(start + 
section.range.end), icon: section.icon, label: section.label, + metadata: section.metadata, }) .collect::>(); sections.sort_by(|a, b| a.range.cmp(&b.range, buffer)); @@ -2977,6 +2985,7 @@ impl SavedContext { ..buffer.anchor_before(section.range.end), icon: section.icon, label: section.label, + metadata: section.metadata, } }) .collect(), diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index c851ca7438..842ac05078 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -12,7 +12,7 @@ use assistant_slash_command::{ use collections::HashSet; use fs::FakeFs; use gpui::{AppContext, Model, SharedString, Task, TestAppContext, WeakView}; -use language::{Buffer, LanguageRegistry, LspAdapterDelegate}; +use language::{Buffer, BufferSnapshot, LanguageRegistry, LspAdapterDelegate}; use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Role}; use parking_lot::Mutex; use project::Project; @@ -1089,6 +1089,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std range: section_start..section_end, icon: ui::IconName::Ai, label: "section".into(), + metadata: None, }); } @@ -1425,6 +1426,8 @@ impl SlashCommand for FakeSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index 387e8231e4..cf957a15c6 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -22,6 +22,7 @@ use workspace::Workspace; pub mod auto_command; pub mod context_server_command; pub mod default_command; +pub mod delta_command; pub mod diagnostics_command; pub mod docs_command; pub mod fetch_command; diff --git 
a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index cedfc63702..e1f20c311b 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -1,7 +1,7 @@ use super::create_label_for_command; use super::{SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Result}; -use assistant_slash_command::ArgumentCompletion; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext, Task, WeakView}; @@ -87,6 +87,8 @@ impl SlashCommand for AutoCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, diff --git a/crates/assistant/src/slash_command/context_server_command.rs b/crates/assistant/src/slash_command/context_server_command.rs index 8ae9430a99..6b1ae39186 100644 --- a/crates/assistant/src/slash_command/context_server_command.rs +++ b/crates/assistant/src/slash_command/context_server_command.rs @@ -9,7 +9,7 @@ use context_servers::{ protocol::PromptInfo, }; use gpui::{Task, WeakView, WindowContext}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; use std::sync::atomic::AtomicBool; use std::sync::Arc; use text::LineEnding; @@ -96,7 +96,6 @@ impl SlashCommand for ContextServerSlashCommand { replace_previous_arguments: false, }) .collect(); - Ok(completions) }) } else { @@ -107,6 +106,8 @@ impl SlashCommand for ContextServerSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -141,6 +142,7 @@ impl SlashCommand for 
ContextServerSlashCommand { .description .unwrap_or(format!("Result from {}", prompt_name)), ), + metadata: None, }], text: prompt, run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/default_command.rs b/crates/assistant/src/slash_command/default_command.rs index 18db87b322..4199840300 100644 --- a/crates/assistant/src/slash_command/default_command.rs +++ b/crates/assistant/src/slash_command/default_command.rs @@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore; use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::{ fmt::Write, sync::{atomic::AtomicBool, Arc}, @@ -43,6 +43,8 @@ impl SlashCommand for DefaultSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -70,6 +72,7 @@ impl SlashCommand for DefaultSlashCommand { range: 0..text.len(), icon: IconName::Library, label: "Default".into(), + metadata: None, }], text, run_commands_in_text: true, diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs new file mode 100644 index 0000000000..6a66ad3f09 --- /dev/null +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -0,0 +1,109 @@ +use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand}; +use anyhow::Result; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, +}; +use collections::HashSet; +use futures::future; +use gpui::{Task, WeakView, WindowContext}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use std::sync::{atomic::AtomicBool, Arc}; +use text::OffsetRangeExt; +use workspace::Workspace; + +pub(crate) struct 
DeltaSlashCommand; + +impl SlashCommand for DeltaSlashCommand { + fn name(&self) -> String { + "delta".into() + } + + fn description(&self) -> String { + "re-insert changed files".into() + } + + fn menu_text(&self) -> String { + "Re-insert Changed Files".into() + } + + fn requires_argument(&self) -> bool { + false + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancellation_flag: Arc, + _workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + unimplemented!() + } + + fn run( + self: Arc, + _arguments: &[String], + context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: BufferSnapshot, + workspace: WeakView, + delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let mut paths = HashSet::default(); + let mut file_command_old_outputs = Vec::new(); + let mut file_command_new_outputs = Vec::new(); + for section in context_slash_command_output_sections.iter().rev() { + if let Some(metadata) = section + .metadata + .as_ref() + .and_then(|value| serde_json::from_value::(value.clone()).ok()) + { + if paths.insert(metadata.path.clone()) { + file_command_old_outputs.push( + context_buffer + .as_rope() + .slice(section.range.to_offset(&context_buffer)), + ); + file_command_new_outputs.push(Arc::new(FileSlashCommand).run( + &[metadata.path.clone()], + context_slash_command_output_sections, + context_buffer.clone(), + workspace.clone(), + delegate.clone(), + cx, + )); + } + } + } + + cx.background_executor().spawn(async move { + let mut output = SlashCommandOutput::default(); + + let file_command_new_outputs = future::join_all(file_command_new_outputs).await; + for (old_text, new_output) in file_command_old_outputs + .into_iter() + .zip(file_command_new_outputs) + { + if let Ok(new_output) = new_output { + if let Some(file_command_range) = new_output.sections.first() { + let new_text = &new_output.text[file_command_range.range.clone()]; + if old_text.chars().ne(new_text.chars()) { + 
output.sections.extend(new_output.sections.into_iter().map( + |section| SlashCommandOutputSection { + range: output.text.len() + section.range.start + ..output.text.len() + section.range.end, + icon: section.icon, + label: section.label, + metadata: section.metadata, + }, + )); + output.text.push_str(&new_output.text); + } + } + } + } + + Ok(output) + }) + } +} diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 2105830651..3f79c01675 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -9,10 +9,9 @@ use language::{ }; use project::{DiagnosticSummary, PathMatchCandidateSet, Project}; use rope::Point; -use std::fmt::Write; -use std::path::{Path, PathBuf}; use std::{ - ops::Range, + fmt::Write, + path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, }; use ui::prelude::*; @@ -163,6 +162,8 @@ impl SlashCommand for DiagnosticsSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -175,68 +176,7 @@ impl SlashCommand for DiagnosticsSlashCommand { let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx); - cx.spawn(move |_| async move { - let Some((text, sections)) = task.await? 
else { - return Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: 0..1, - icon: IconName::Library, - label: "No Diagnostics".into(), - }], - text: "\n".to_string(), - run_commands_in_text: true, - }); - }; - - let sections = sections - .into_iter() - .map(|(range, placeholder_type)| SlashCommandOutputSection { - range, - icon: match placeholder_type { - PlaceholderType::Root(_, _) => IconName::Warning, - PlaceholderType::File(_) => IconName::File, - PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle, - PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => { - IconName::Warning - } - }, - label: match placeholder_type { - PlaceholderType::Root(summary, source) => { - let mut label = String::new(); - label.push_str("Diagnostics"); - if let Some(source) = source { - write!(label, " ({})", source).unwrap(); - } - - if summary.error_count > 0 || summary.warning_count > 0 { - label.push(':'); - - if summary.error_count > 0 { - write!(label, " {} errors", summary.error_count).unwrap(); - if summary.warning_count > 0 { - label.push_str(","); - } - } - - if summary.warning_count > 0 { - write!(label, " {} warnings", summary.warning_count).unwrap(); - } - } - - label.into() - } - PlaceholderType::File(file_path) => file_path.into(), - PlaceholderType::Diagnostic(_, message) => message.into(), - }, - }) - .collect(); - - Ok(SlashCommandOutput { - text, - sections, - run_commands_in_text: false, - }) - }) + cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) }) } } @@ -277,7 +217,7 @@ fn collect_diagnostics( project: Model, options: Options, cx: &mut AppContext, -) -> Task, PlaceholderType)>)>>> { +) -> Task>> { let error_source = if let Some(path_matcher) = &options.path_matcher { debug_assert_eq!(path_matcher.sources().len(), 1); Some(path_matcher.sources().first().cloned().unwrap_or_default()) @@ -318,13 +258,13 @@ fn collect_diagnostics( .collect(); cx.spawn(|mut cx| async move { 
- let mut text = String::new(); + let mut output = SlashCommandOutput::default(); + if let Some(error_source) = error_source.as_ref() { - writeln!(text, "diagnostics: {}", error_source).unwrap(); + writeln!(output.text, "diagnostics: {}", error_source).unwrap(); } else { - writeln!(text, "diagnostics").unwrap(); + writeln!(output.text, "diagnostics").unwrap(); } - let mut sections: Vec<(Range, PlaceholderType)> = Vec::new(); let mut project_summary = DiagnosticSummary::default(); for (project_path, path, summary) in diagnostic_summaries { @@ -341,10 +281,10 @@ fn collect_diagnostics( continue; } - let last_end = text.len(); + let last_end = output.text.len(); let file_path = path.to_string_lossy().to_string(); if !glob_is_exact_file_match { - writeln!(&mut text, "{file_path}").unwrap(); + writeln!(&mut output.text, "{file_path}").unwrap(); } if let Some(buffer) = project_handle @@ -352,75 +292,73 @@ fn collect_diagnostics( .await .log_err() { - collect_buffer_diagnostics( - &mut text, - &mut sections, - cx.read_model(&buffer, |buffer, _| buffer.snapshot())?, - options.include_warnings, - ); + let snapshot = cx.read_model(&buffer, |buffer, _| buffer.snapshot())?; + collect_buffer_diagnostics(&mut output, &snapshot, options.include_warnings); } if !glob_is_exact_file_match { - sections.push(( - last_end..text.len().saturating_sub(1), - PlaceholderType::File(file_path), - )) + output.sections.push(SlashCommandOutputSection { + range: last_end..output.text.len().saturating_sub(1), + icon: IconName::File, + label: file_path.into(), + metadata: None, + }); } } // No diagnostics found - if sections.is_empty() { + if output.sections.is_empty() { return Ok(None); } - sections.push(( - 0..text.len(), - PlaceholderType::Root(project_summary, error_source), - )); - Ok(Some((text, sections))) + let mut label = String::new(); + label.push_str("Diagnostics"); + if let Some(source) = error_source { + write!(label, " ({})", source).unwrap(); + } + + if project_summary.error_count > 
0 || project_summary.warning_count > 0 { + label.push(':'); + + if project_summary.error_count > 0 { + write!(label, " {} errors", project_summary.error_count).unwrap(); + if project_summary.warning_count > 0 { + label.push_str(","); + } + } + + if project_summary.warning_count > 0 { + write!(label, " {} warnings", project_summary.warning_count).unwrap(); + } + } + + output.sections.insert( + 0, + SlashCommandOutputSection { + range: 0..output.text.len(), + icon: IconName::Warning, + label: label.into(), + metadata: None, + }, + ); + + Ok(Some(output)) }) } -pub fn buffer_has_error_diagnostics(snapshot: &BufferSnapshot) -> bool { - for (_, group) in snapshot.diagnostic_groups(None) { - let entry = &group.entries[group.primary_ix]; - if entry.diagnostic.severity == DiagnosticSeverity::ERROR { - return true; - } - } - false -} - -pub fn write_single_file_diagnostics( - output: &mut String, - path: Option<&Path>, +pub fn collect_buffer_diagnostics( + output: &mut SlashCommandOutput, snapshot: &BufferSnapshot, -) -> bool { - if let Some(path) = path { - if buffer_has_error_diagnostics(&snapshot) { - output.push_str("/diagnostics "); - output.push_str(&path.to_string_lossy()); - return true; - } - } - false -} - -fn collect_buffer_diagnostics( - text: &mut String, - sections: &mut Vec<(Range, PlaceholderType)>, - snapshot: BufferSnapshot, include_warnings: bool, ) { for (_, group) in snapshot.diagnostic_groups(None) { let entry = &group.entries[group.primary_ix]; - collect_diagnostic(text, sections, entry, &snapshot, include_warnings) + collect_diagnostic(output, entry, &snapshot, include_warnings) } } fn collect_diagnostic( - text: &mut String, - sections: &mut Vec<(Range, PlaceholderType)>, + output: &mut SlashCommandOutput, entry: &DiagnosticEntry, snapshot: &BufferSnapshot, include_warnings: bool, @@ -428,17 +366,17 @@ fn collect_diagnostic( const EXCERPT_EXPANSION_SIZE: u32 = 2; const MAX_MESSAGE_LENGTH: usize = 2000; - let ty = match entry.diagnostic.severity { + 
let (ty, icon) = match entry.diagnostic.severity { DiagnosticSeverity::WARNING => { if !include_warnings { return; } - DiagnosticType::Warning + ("warning", IconName::Warning) } - DiagnosticSeverity::ERROR => DiagnosticType::Error, + DiagnosticSeverity::ERROR => ("error", IconName::XCircle), _ => return, }; - let prev_len = text.len(); + let prev_len = output.text.len(); let range = entry.range.to_point(snapshot); let diagnostic_row_number = range.start.row + 1; @@ -448,11 +386,11 @@ fn collect_diagnostic( let excerpt_range = Point::new(start_row, 0).to_offset(&snapshot)..Point::new(end_row, 0).to_offset(&snapshot); - text.push_str("```"); + output.text.push_str("```"); if let Some(language_name) = snapshot.language().map(|l| l.code_fence_block_name()) { - text.push_str(&language_name); + output.text.push_str(&language_name); } - text.push('\n'); + output.text.push('\n'); let mut buffer_text = String::new(); for chunk in snapshot.text_for_range(excerpt_range) { @@ -461,46 +399,26 @@ fn collect_diagnostic( for (i, line) in buffer_text.lines().enumerate() { let line_number = start_row + i as u32 + 1; - writeln!(text, "{}", line).unwrap(); + writeln!(output.text, "{}", line).unwrap(); if line_number == diagnostic_row_number { - text.push_str("//"); - let prev_len = text.len(); - write!(text, " {}: ", ty.as_str()).unwrap(); - let padding = text.len() - prev_len; + output.text.push_str("//"); + let prev_len = output.text.len(); + write!(output.text, " {}: ", ty).unwrap(); + let padding = output.text.len() - prev_len; let message = util::truncate(&entry.diagnostic.message, MAX_MESSAGE_LENGTH) .replace('\n', format!("\n//{:padding$}", "").as_str()); - writeln!(text, "{message}").unwrap(); + writeln!(output.text, "{message}").unwrap(); } } - writeln!(text, "```").unwrap(); - sections.push(( - prev_len..text.len().saturating_sub(1), - PlaceholderType::Diagnostic(ty, entry.diagnostic.message.clone()), - )) -} - -#[derive(Clone)] -pub enum PlaceholderType { - 
Root(DiagnosticSummary, Option), - File(String), - Diagnostic(DiagnosticType, String), -} - -#[derive(Copy, Clone)] -pub enum DiagnosticType { - Warning, - Error, -} - -impl DiagnosticType { - pub fn as_str(&self) -> &'static str { - match self { - DiagnosticType::Warning => "warning", - DiagnosticType::Error => "error", - } - } + writeln!(output.text, "```").unwrap(); + output.sections.push(SlashCommandOutputSection { + range: prev_len..output.text.len().saturating_sub(1), + icon, + label: entry.diagnostic.message.clone().into(), + metadata: None, + }); } diff --git a/crates/assistant/src/slash_command/docs_command.rs b/crates/assistant/src/slash_command/docs_command.rs index e114cfeab7..399ede9d99 100644 --- a/crates/assistant/src/slash_command/docs_command.rs +++ b/crates/assistant/src/slash_command/docs_command.rs @@ -12,7 +12,7 @@ use indexed_docs::{ DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName, ProviderId, }; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use project::{Project, ProjectPath}; use ui::prelude::*; use util::{maybe, ResultExt}; @@ -269,6 +269,8 @@ impl SlashCommand for DocsSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -349,6 +351,7 @@ impl SlashCommand for DocsSlashCommand { range, icon: IconName::FileDoc, label: format!("docs ({provider}): {key}",).into(), + metadata: None, }) .collect(), run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 8ecb6de759..23d3c884a8 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ -11,7 +11,7 @@ use futures::AsyncReadExt; use gpui::{Task, WeakView}; use 
html_to_markdown::{convert_html_to_markdown, markdown, TagHandler}; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -128,6 +128,8 @@ impl SlashCommand for FetchSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -161,6 +163,7 @@ impl SlashCommand for FetchSlashCommand { range, icon: IconName::AtSign, label: format!("fetch {}", url).into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index e5d8f1b2d6..0df8b5d4e0 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -1,10 +1,11 @@ -use super::{diagnostics_command::write_single_file_diagnostics, SlashCommand, SlashCommandOutput}; +use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection}; use fuzzy::PathMatch; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate}; use project::{PathMatchCandidateSet, Project}; +use serde::{Deserialize, Serialize}; use std::{ fmt::Write, ops::Range, @@ -175,6 +176,8 @@ impl SlashCommand for FileSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -187,54 +190,15 @@ impl SlashCommand for FileSlashCommand { return 
Task::ready(Err(anyhow!("missing path"))); }; - let task = collect_files(workspace.read(cx).project().clone(), arguments, cx); - - cx.foreground_executor().spawn(async move { - let output = task.await?; - Ok(SlashCommandOutput { - text: output.completion_text, - sections: output - .files - .into_iter() - .map(|file| { - build_entry_output_section( - file.range_in_text, - Some(&file.path), - file.entry_type == EntryType::Directory, - None, - ) - }) - .collect(), - run_commands_in_text: true, - }) - }) + collect_files(workspace.read(cx).project().clone(), arguments, cx) } } -#[derive(Clone, Copy, PartialEq, Debug)] -enum EntryType { - File, - Directory, -} - -#[derive(Clone, PartialEq, Debug)] -struct FileCommandOutput { - completion_text: String, - files: Vec, -} - -#[derive(Clone, PartialEq, Debug)] -struct OutputFile { - range_in_text: Range, - path: PathBuf, - entry_type: EntryType, -} - fn collect_files( project: Model, glob_inputs: &[String], cx: &mut AppContext, -) -> Task> { +) -> Task> { let Ok(matchers) = glob_inputs .into_iter() .map(|glob_input| { @@ -254,8 +218,7 @@ fn collect_files( .collect::>(); cx.spawn(|mut cx| async move { - let mut text = String::new(); - let mut ranges = Vec::new(); + let mut output = SlashCommandOutput::default(); for snapshot in snapshots { let worktree_id = snapshot.id(); let mut directory_stack: Vec<(Arc, String, usize)> = Vec::new(); @@ -279,11 +242,12 @@ fn collect_files( break; } let (_, entry_name, start) = directory_stack.pop().unwrap(); - ranges.push(OutputFile { - range_in_text: start..text.len().saturating_sub(1), - path: PathBuf::from(entry_name), - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len().saturating_sub(1), + Some(&PathBuf::from(entry_name)), + true, + None, + )); } let filename = entry @@ -315,21 +279,23 @@ fn collect_files( continue; } let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/"); - let entry_start = 
text.len(); + let entry_start = output.text.len(); if prefix_paths.is_empty() { if is_top_level_directory { - text.push_str(&path_including_worktree_name.to_string_lossy()); + output + .text + .push_str(&path_including_worktree_name.to_string_lossy()); is_top_level_directory = false; } else { - text.push_str(&filename); + output.text.push_str(&filename); } directory_stack.push((entry.path.clone(), filename, entry_start)); } else { let entry_name = format!("{}/{}", prefix_paths, &filename); - text.push_str(&entry_name); + output.text.push_str(&entry_name); directory_stack.push((entry.path.clone(), entry_name, entry_start)); } - text.push('\n'); + output.text.push('\n'); } else if entry.is_file() { let Some(open_buffer_task) = project_handle .update(&mut cx, |project, cx| { @@ -340,28 +306,13 @@ fn collect_files( continue; }; if let Some(buffer) = open_buffer_task.await.log_err() { - let buffer_snapshot = - cx.read_model(&buffer, |buffer, _| buffer.snapshot())?; - let prev_len = text.len(); - collect_file_content( - &mut text, - &buffer_snapshot, - path_including_worktree_name.to_string_lossy().to_string(), - ); - text.push('\n'); - if !write_single_file_diagnostics( - &mut text, + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + append_buffer_to_output( + &snapshot, Some(&path_including_worktree_name), - &buffer_snapshot, - ) { - text.pop(); - } - ranges.push(OutputFile { - range_in_text: prev_len..text.len(), - path: path_including_worktree_name, - entry_type: EntryType::File, - }); - text.push('\n'); + &mut output, + ) + .log_err(); } } } @@ -371,42 +322,26 @@ fn collect_files( let mut root_path = PathBuf::new(); root_path.push(snapshot.root_name()); root_path.push(&dir); - ranges.push(OutputFile { - range_in_text: start..text.len(), - path: root_path, - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len(), + Some(&root_path), + true, + None, + )); } else { - 
ranges.push(OutputFile { - range_in_text: start..text.len(), - path: PathBuf::from(entry.as_str()), - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len(), + Some(&PathBuf::from(entry.as_str())), + true, + None, + )); } } } - Ok(FileCommandOutput { - completion_text: text, - files: ranges, - }) + Ok(output) }) } -fn collect_file_content(buffer: &mut String, snapshot: &BufferSnapshot, filename: String) { - let mut content = snapshot.text(); - LineEnding::normalize(&mut content); - buffer.reserve(filename.len() + content.len() + 9); - buffer.push_str(&codeblock_fence_for_path( - Some(&PathBuf::from(filename)), - None, - )); - buffer.push_str(&content); - if !buffer.ends_with('\n') { - buffer.push('\n'); - } - buffer.push_str("```"); -} - pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option>) -> String { let mut text = String::new(); write!(text, "```").unwrap(); @@ -429,6 +364,11 @@ pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option, path: Option<&Path>, @@ -454,6 +394,16 @@ pub fn build_entry_output_section( range, icon, label: label.into(), + metadata: if is_directory { + None + } else { + path.and_then(|path| { + serde_json::to_value(FileCommandMetadata { + path: path.to_string_lossy().to_string(), + }) + .ok() + }) + }, } } @@ -539,6 +489,36 @@ mod custom_path_matcher { } } +pub fn append_buffer_to_output( + buffer: &BufferSnapshot, + path: Option<&Path>, + output: &mut SlashCommandOutput, +) -> Result<()> { + let prev_len = output.text.len(); + + let mut content = buffer.text(); + LineEnding::normalize(&mut content); + output.text.push_str(&codeblock_fence_for_path(path, None)); + output.text.push_str(&content); + if !output.text.ends_with('\n') { + output.text.push('\n'); + } + output.text.push_str("```"); + output.text.push('\n'); + + let section_ix = output.sections.len(); + collect_buffer_diagnostics(output, buffer, false); + + output.sections.insert( + 
section_ix, + build_entry_output_section(prev_len..output.text.len(), path, false, None), + ); + + output.text.push('\n'); + + Ok(()) +} + #[cfg(test)] mod test { use fs::FakeFs; @@ -591,9 +571,9 @@ mod test { .await .unwrap(); - assert!(result_1.completion_text.starts_with("root/dir")); + assert!(result_1.text.starts_with("root/dir")); // 4 files + 2 directories - assert_eq!(6, result_1.files.len()); + assert_eq!(result_1.sections.len(), 6); let result_2 = cx .update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx)) @@ -607,9 +587,9 @@ mod test { .await .unwrap(); - assert!(result.completion_text.starts_with("root/dir")); + assert!(result.text.starts_with("root/dir")); // 5 files + 2 directories - assert_eq!(7, result.files.len()); + assert_eq!(result.sections.len(), 7); // Ensure that the project lasts until after the last await drop(project); @@ -654,36 +634,27 @@ mod test { .unwrap(); // Sanity check - assert!(result.completion_text.starts_with("zed/assets/themes\n")); - assert_eq!(7, result.files.len()); + assert!(result.text.starts_with("zed/assets/themes\n")); + assert_eq!(result.sections.len(), 7); // Ensure that full file paths are included in the real output - assert!(result - .completion_text - .contains("zed/assets/themes/andromeda/LICENSE")); - assert!(result - .completion_text - .contains("zed/assets/themes/ayu/LICENSE")); - assert!(result - .completion_text - .contains("zed/assets/themes/summercamp/LICENSE")); + assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE")); + assert!(result.text.contains("zed/assets/themes/ayu/LICENSE")); + assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE")); - assert_eq!("summercamp", result.files[5].path.to_string_lossy()); + assert_eq!(result.sections[5].label, "summercamp"); // Ensure that things are in descending order, with properly relativized paths assert_eq!( - "zed/assets/themes/andromeda/LICENSE", - result.files[0].path.to_string_lossy() + 
result.sections[0].label, + "zed/assets/themes/andromeda/LICENSE" ); - assert_eq!("andromeda", result.files[1].path.to_string_lossy()); + assert_eq!(result.sections[1].label, "andromeda"); + assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE"); + assert_eq!(result.sections[3].label, "ayu"); assert_eq!( - "zed/assets/themes/ayu/LICENSE", - result.files[2].path.to_string_lossy() - ); - assert_eq!("ayu", result.files[3].path.to_string_lossy()); - assert_eq!( - "zed/assets/themes/summercamp/LICENSE", - result.files[4].path.to_string_lossy() + result.sections[4].label, + "zed/assets/themes/summercamp/LICENSE" ); // Ensure that the project lasts until after the last await @@ -723,27 +694,24 @@ mod test { .await .unwrap(); - assert!(result.completion_text.starts_with("zed/assets/themes\n")); + assert!(result.text.starts_with("zed/assets/themes\n")); + assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE"); assert_eq!( - "zed/assets/themes/LICENSE", - result.files[0].path.to_string_lossy() + result.sections[1].label, + "zed/assets/themes/summercamp/LICENSE" ); assert_eq!( - "zed/assets/themes/summercamp/LICENSE", - result.files[1].path.to_string_lossy() + result.sections[2].label, + "zed/assets/themes/summercamp/subdir/LICENSE" ); assert_eq!( - "zed/assets/themes/summercamp/subdir/LICENSE", - result.files[2].path.to_string_lossy() + result.sections[3].label, + "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE" ); - assert_eq!( - "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE", - result.files[3].path.to_string_lossy() - ); - assert_eq!("subsubdir", result.files[4].path.to_string_lossy()); - assert_eq!("subdir", result.files[5].path.to_string_lossy()); - assert_eq!("summercamp", result.files[6].path.to_string_lossy()); - assert_eq!("zed/assets/themes", result.files[7].path.to_string_lossy()); + assert_eq!(result.sections[4].label, "subsubdir"); + assert_eq!(result.sections[5].label, "subdir"); + assert_eq!(result.sections[6].label, 
"summercamp"); + assert_eq!(result.sections[7].label, "zed/assets/themes"); // Ensure that the project lasts until after the last await drop(project); diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index eb6277a7d9..eb0ca926f0 100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -7,7 +7,7 @@ use assistant_slash_command::{ }; use chrono::Local; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -43,6 +43,8 @@ impl SlashCommand for NowSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, @@ -57,6 +59,7 @@ impl SlashCommand for NowSlashCommand { range, icon: IconName::CountdownTimer, label: now.to_rfc2822().into(), + metadata: None, }], run_commands_in_text: false, })) diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 8182734e72..3e8596d942 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -3,7 +3,7 @@ use anyhow::{anyhow, Context, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use fs::Fs; use gpui::{AppContext, Model, Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use project::{Project, ProjectPath}; use std::{ fmt::Write, @@ -118,6 +118,8 @@ impl SlashCommand for ProjectSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, 
@@ -140,6 +142,7 @@ impl SlashCommand for ProjectSlashCommand { range, icon: IconName::FileTree, label: "Project".into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index 4d64bba2ed..effbcc0f90 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore; use anyhow::{anyhow, Context, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::{atomic::AtomicBool, Arc}; use ui::prelude::*; use workspace::Workspace; @@ -56,6 +56,8 @@ impl SlashCommand for PromptSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -95,6 +97,7 @@ impl SlashCommand for PromptSlashCommand { range, icon: IconName::Library, label: title, + metadata: None, }], run_commands_in_text: true, }) diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 3a513ed9ad..72d86ec5c5 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -60,6 +60,8 @@ impl SlashCommand for SearchSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -168,6 +170,7 @@ impl SlashCommand for SearchSlashCommand { range: 0..text.len(), icon: IconName::MagnifyingGlass, label: query, + metadata: None, }); SlashCommandOutput { 
diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index c9582f2882..1cf8536c0d 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -3,7 +3,7 @@ use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use editor::Editor; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::Arc; use std::{path::Path, sync::atomic::AtomicBool}; use ui::{IconName, WindowContext}; @@ -41,6 +41,8 @@ impl SlashCommand for OutlineSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -77,6 +79,7 @@ impl SlashCommand for OutlineSlashCommand { range: 0..outline_text.len(), icon: IconName::ListTree, label: path.to_string_lossy().to_string().into(), + metadata: None, }], text: outline_text, run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index 1a6884b853..bdf8450d43 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ -1,21 +1,17 @@ -use super::{ - diagnostics_command::write_single_file_diagnostics, - file_command::{build_entry_output_section, codeblock_fence_for_path}, - SlashCommand, SlashCommandOutput, -}; +use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput}; use anyhow::{Context, Result}; -use assistant_slash_command::ArgumentCompletion; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use collections::{HashMap, HashSet}; use editor::Editor; use futures::future::join_all; use gpui::{Entity, Task, 
WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate}; use std::{ - fmt::Write, path::PathBuf, sync::{atomic::AtomicBool, Arc}, }; use ui::{ActiveTheme, WindowContext}; +use util::ResultExt; use workspace::Workspace; pub(crate) struct TabSlashCommand; @@ -131,6 +127,8 @@ impl SlashCommand for TabSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -144,40 +142,11 @@ impl SlashCommand for TabSlashCommand { ); cx.background_executor().spawn(async move { - let mut sections = Vec::new(); - let mut text = String::new(); - let mut has_diagnostics = false; + let mut output = SlashCommandOutput::default(); for (full_path, buffer, _) in tab_items_search.await? { - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None)); - for chunk in buffer.as_rope().chunks() { - text.push_str(chunk); - } - if !text.ends_with('\n') { - text.push('\n'); - } - writeln!(text, "```").unwrap(); - if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) { - has_diagnostics = true; - } - if !text.ends_with('\n') { - text.push('\n'); - } - - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - full_path.as_deref(), - false, - None, - )); + append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err(); } - - Ok(SlashCommandOutput { - text, - sections, - run_commands_in_text: has_diagnostics, - }) + Ok(output) }) } } diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 04baabd396..1d0293c235 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ b/crates/assistant/src/slash_command/terminal_command.rs @@ -6,7 +6,7 @@ use assistant_slash_command::{ 
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, }; use gpui::{AppContext, Task, View, WeakView}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; use terminal_view::{terminal_panel::TerminalPanel, TerminalView}; use ui::prelude::*; use workspace::{dock::Panel, Workspace}; @@ -57,6 +57,8 @@ impl SlashCommand for TerminalSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -91,6 +93,7 @@ impl SlashCommand for TerminalSlashCommand { range, icon: IconName::Terminal, label: "Terminal".into(), + metadata: None, }], run_commands_in_text: false, })) diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index f588fe848d..c66dd9bebf 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -8,7 +8,7 @@ use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, }; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -53,6 +53,8 @@ impl SlashCommand for WorkflowSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -68,6 +70,7 @@ impl SlashCommand for WorkflowSlashCommand { range, icon: IconName::Route, label: "Workflow".into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant_slash_command/Cargo.toml b/crates/assistant_slash_command/Cargo.toml index 3d764bb0be..a58a84312f 100644 --- 
a/crates/assistant_slash_command/Cargo.toml +++ b/crates/assistant_slash_command/Cargo.toml @@ -19,4 +19,5 @@ gpui.workspace = true language.workspace = true parking_lot.workspace = true serde.workspace = true +serde_json.workspace = true workspace.workspace = true diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index c5dece11ca..36e229d49a 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -2,7 +2,7 @@ mod slash_command_registry; use anyhow::Result; use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt}; use serde::{Deserialize, Serialize}; pub use slash_command_registry::*; use std::{ @@ -77,6 +77,8 @@ pub trait SlashCommand: 'static + Send + Sync { fn run( self: Arc, arguments: &[String], + context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: BufferSnapshot, workspace: WeakView, // TODO: We're just using the `LspAdapterDelegate` here because that is // what the extension API is already expecting. 
@@ -94,7 +96,7 @@ pub type RenderFoldPlaceholder = Arc< + Fn(ElementId, Arc, &mut WindowContext) -> AnyElement, >; -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct SlashCommandOutput { pub text: String, pub sections: Vec>, @@ -106,4 +108,11 @@ pub struct SlashCommandOutputSection { pub range: Range, pub icon: IconName, pub label: SharedString, + pub metadata: Option, +} + +impl SlashCommandOutputSection { + pub fn is_valid(&self, buffer: &language::TextBuffer) -> bool { + self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty() + } } diff --git a/crates/extension/src/extension_slash_command.rs b/crates/extension/src/extension_slash_command.rs index 60b027ef9d..3dfbc4c03d 100644 --- a/crates/extension/src/extension_slash_command.rs +++ b/crates/extension/src/extension_slash_command.rs @@ -6,7 +6,7 @@ use assistant_slash_command::{ }; use futures::FutureExt; use gpui::{Task, WeakView, WindowContext}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use wasmtime_wasi::WasiView; use workspace::Workspace; @@ -82,6 +82,8 @@ impl SlashCommand for ExtensionSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, delegate: Option>, cx: &mut WindowContext, @@ -121,6 +123,7 @@ impl SlashCommand for ExtensionSlashCommand { range: section.range.into(), icon: IconName::Code, label: section.label.into(), + metadata: None, }) .collect(), run_commands_in_text: false, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a10b3798a4..77942c8a94 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -2390,6 +2390,7 @@ message SlashCommandOutputSection { AnchorRange range = 1; string icon_name = 2; string label = 3; + optional string metadata = 4; } message ContextOperation { From 
ddaee2e8dd507d5eab009e789e57c0d4fba6c5bf Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 17:03:10 +0200 Subject: [PATCH 138/270] ssh: Handle BufferSaved ssh message (#17936) Release Notes: - N/A Co-authored-by: Conrad --- crates/project/src/project.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 14524180ab..5d9ac5e821 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -797,9 +797,8 @@ impl Project { ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer); client.add_model_message_handler(Self::handle_update_worktree); client.add_model_message_handler(Self::handle_create_buffer_for_peer); - client.add_model_message_handler(BufferStore::handle_update_buffer_file); - client.add_model_message_handler(BufferStore::handle_update_diff_base); client.add_model_request_handler(BufferStore::handle_update_buffer); + BufferStore::init(&client); LspStore::init(&client); SettingsObserver::init(&client); From ecd183079371d126e2740b4e5b838ce92cd45591 Mon Sep 17 00:00:00 2001 From: Erick Guan <297343+erickguan@users.noreply.github.com> Date: Tue, 17 Sep 2024 17:19:07 +0200 Subject: [PATCH 139/270] Fix opening file with colon (#17281) Closes #14100 Release Notes: - Fixed unable to open file with a colon from Zed CLI ----- I didn't make change to tests for the first two commits. I changed them to easily find offending test cases. Behavior changes are in last commit message. In the last commit, I changed how `PathWithPosition` should intreprete file paths. If my assumptions are off, please advise so that I can make another approach. I also believe further constraints would be better for `PathWithPosition`'s intention. But people can make future improvements to `PathWithPosition`. 
--- crates/util/src/paths.rs | 500 +++++++++++++++++++++++---------------- 1 file changed, 301 insertions(+), 199 deletions(-) diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index cd5beedf47..f4ecfefc52 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -98,10 +98,6 @@ impl> PathExt for T { /// A delimiter to use in `path_query:row_number:column_number` strings parsing. pub const FILE_ROW_COLUMN_DELIMITER: char = ':'; -/// Extracts filename and row-column suffixes. -/// Parenthesis format is used by [MSBuild](https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks) compatible tools -// NOTE: All cases need to have exactly three capture groups for extract(): file_name, row and column. -// Valid patterns that don't contain row and/or column should have empty groups in their place. const ROW_COL_CAPTURE_REGEX: &str = r"(?x) ([^\(]+)(?: \((\d+),(\d+)\) # filename(row,column) @@ -109,12 +105,12 @@ const ROW_COL_CAPTURE_REGEX: &str = r"(?x) \((\d+)\)() # filename(row) ) | - ([^\:]+)(?: - \:(\d+)\:(\d+) # filename:row:column + (.+?)(?: + \:+(\d+)\:(\d+)\:*$ # filename:row:column | - \:(\d+)() # filename:row + \:+(\d+)\:*()$ # filename:row | - \:()() # filename: + \:*()()$ # filename: )"; /// A representation of a path-like string with optional row and column numbers. @@ -136,9 +132,92 @@ impl PathWithPosition { column: None, } } + /// Parses a string that possibly has `:row:column` or `(row, column)` suffix. + /// Parenthesis format is used by [MSBuild](https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks) compatible tools /// Ignores trailing `:`s, so `test.rs:22:` is parsed as `test.rs:22`. /// If the suffix parsing fails, the whole string is parsed as a path. + /// + /// Be mindful that `test_file:10:1:` is a valid posix filename. + /// `PathWithPosition` class assumes that the ending position-like suffix is **not** part of the filename. 
+ /// + /// # Examples + /// + /// ``` + /// # use util::paths::PathWithPosition; + /// # use std::path::PathBuf; + /// assert_eq!(PathWithPosition::parse_str("test_file"), PathWithPosition { + /// path: PathBuf::from("test_file"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file:10"), PathWithPosition { + /// path: PathBuf::from("test_file"), + /// row: Some(10), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1:2"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: Some(2), + /// }); + /// ``` + /// + /// # Expected parsing results when encounter ill-formatted inputs. 
+ /// ``` + /// # use util::paths::PathWithPosition; + /// # use std::path::PathBuf; + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:a"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:a"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:a:b"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:a:b"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs::"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs::1"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1::"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs::1:2"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: Some(2), + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1::2"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:1"), + /// row: Some(2), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1:2:3"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:1"), + /// row: Some(2), + /// column: Some(3), + /// }); + /// ``` pub fn parse_str(s: &str) -> Self { let trimmed = s.trim(); let path = Path::new(trimmed); @@ -359,206 +438,229 @@ mod tests { } #[test] - fn path_with_position_parsing_positive() { - let input_and_expected = [ - ( - "test_file.rs", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: None, - column: None, - }, - ), - ( - "test_file.rs:1", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: None, - }, - ), - ( - 
"test_file.rs:1:2", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: Some(2), - }, - ), - ]; + fn path_with_position_parse_posix_path() { + // Test POSIX filename edge cases + // Read more at https://en.wikipedia.org/wiki/Filename + assert_eq!( + PathWithPosition::parse_str(" test_file"), + PathWithPosition { + path: PathBuf::from("test_file"), + row: None, + column: None + } + ); - for (input, expected) in input_and_expected { - let actual = PathWithPosition::parse_str(input); - assert_eq!( - actual, expected, - "For positive case input str '{input}', got a parse mismatch" - ); - } + assert_eq!( + PathWithPosition::parse_str("a:bc:.zip:1"), + PathWithPosition { + path: PathBuf::from("a:bc:.zip"), + row: Some(1), + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("one.second.zip:1"), + PathWithPosition { + path: PathBuf::from("one.second.zip"), + row: Some(1), + column: None + } + ); + + // Trim off trailing `:`s for otherwise valid input. 
+ assert_eq!( + PathWithPosition::parse_str("test_file:10:1:"), + PathWithPosition { + path: PathBuf::from("test_file"), + row: Some(10), + column: Some(1) + } + ); + + assert_eq!( + PathWithPosition::parse_str("test_file.rs:"), + PathWithPosition { + path: PathBuf::from("test_file.rs"), + row: None, + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("test_file.rs:1:"), + PathWithPosition { + path: PathBuf::from("test_file.rs"), + row: Some(1), + column: None + } + ); } #[test] - fn path_with_position_parsing_negative() { - for (input, row, column) in [ - ("test_file.rs:a", None, None), - ("test_file.rs:a:b", None, None), - ("test_file.rs::", None, None), - ("test_file.rs::1", None, None), - ("test_file.rs:1::", Some(1), None), - ("test_file.rs::1:2", None, None), - ("test_file.rs:1::2", Some(1), None), - ("test_file.rs:1:2:3", Some(1), Some(2)), - ] { - let actual = PathWithPosition::parse_str(input); - assert_eq!( - actual, - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row, - column, - }, - "For negative case input str '{input}', got a parse mismatch" - ); - } + #[cfg(not(target_os = "windows"))] + fn path_with_position_parse_posix_path_with_suffix() { + assert_eq!( + PathWithPosition::parse_str("app-editors:zed-0.143.6:20240710-201212.log:34:"), + PathWithPosition { + path: PathBuf::from("app-editors:zed-0.143.6:20240710-201212.log"), + row: Some(34), + column: None, + } + ); + + assert_eq!( + PathWithPosition::parse_str("crates/file_finder/src/file_finder.rs:1902:13:"), + PathWithPosition { + path: PathBuf::from("crates/file_finder/src/file_finder.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("crate/utils/src/test:today.log:34"), + PathWithPosition { + path: PathBuf::from("crate/utils/src/test:today.log"), + row: Some(34), + column: None, + } + ); } - // Trim off trailing `:`s for otherwise valid input. 
#[test] - fn path_with_position_parsing_special() { - #[cfg(not(target_os = "windows"))] - let input_and_expected = [ - ( - "test_file.rs:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: None, - column: None, - }, - ), - ( - "test_file.rs:1:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: None, - }, - ), - ( - "crates/file_finder/src/file_finder.rs:1902:13:", - PathWithPosition { - path: PathBuf::from("crates/file_finder/src/file_finder.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ]; + #[cfg(target_os = "windows")] + fn path_with_position_parse_windows_path() { + assert_eq!( + PathWithPosition::parse_str("crates\\utils\\paths.rs"), + PathWithPosition { + path: PathBuf::from("crates\\utils\\paths.rs"), + row: None, + column: None + } + ); - #[cfg(target_os = "windows")] - let input_and_expected = [ - ( - "test_file.rs:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: None, - column: None, - }, - ), - ( - "test_file.rs:1:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: None, - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:15:", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs:1902:::15:", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: None, - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs(1902,13):", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs(1902):", - PathWithPosition { - path: 
PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: None, - }, - ), - ( - "C:\\Users\\someone\\test_file.rs:1902:13:", - PathWithPosition { - path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "crates/utils/paths.rs", - PathWithPosition { - path: PathBuf::from("crates\\utils\\paths.rs"), - row: None, - column: None, - }, - ), - ( - "C:\\Users\\someone\\test_file.rs(1902,13):", - PathWithPosition { - path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "C:\\Users\\someone\\test_file.rs(1902):", - PathWithPosition { - path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: None, - }, - ), - ( - "crates/utils/paths.rs:101", - PathWithPosition { - path: PathBuf::from("crates\\utils\\paths.rs"), - row: Some(101), - column: None, - }, - ), - ]; + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: None, + column: None + } + ); + } - for (input, expected) in input_and_expected { - let actual = PathWithPosition::parse_str(input); - assert_eq!( - actual, expected, - "For special case input str '{input}', got a parse mismatch" - ); - } + #[test] + #[cfg(target_os = "windows")] + fn path_with_position_parse_windows_path_with_suffix() { + assert_eq!( + PathWithPosition::parse_str("crates\\utils\\paths.rs:101"), + PathWithPosition { + path: PathBuf::from("crates\\utils\\paths.rs"), + row: Some(101), + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1:20"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1), + column: Some(20) + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs(1902,13)"), + PathWithPosition { + path: 
PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13) + } + ); + + // Trim off trailing `:`s for otherwise valid input. + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13) + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:15:"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs:1902"), + row: Some(13), + column: Some(15) + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1902:::15:"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs:1902"), + row: Some(15), + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs(1902,13):"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs(1902):"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: None, + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs:1902:13:"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs(1902,13):"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs(1902):"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: None, + } + ); + + assert_eq!( + 
PathWithPosition::parse_str("crates/utils/paths.rs:101"), + PathWithPosition { + path: PathBuf::from("crates\\utils\\paths.rs"), + row: Some(101), + column: None, + } + ); } #[test] From 469dfe759c7489ffb1352a74689cb19cae51a84f Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 17:21:20 +0200 Subject: [PATCH 140/270] ssh: Handle `~` in ssh filenames (#17939) This allows users to open `ssh://user@host/~/my-home-dir-folder`. Release Notes: - N/A Co-authored-by: Conrad --- crates/project/src/worktree_store.rs | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 8b620de43e..07764d4a05 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -182,14 +182,23 @@ impl WorktreeStore { visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let abs_path = abs_path.as_ref(); - let root_name = abs_path.file_name().unwrap().to_string_lossy().to_string(); - let path = abs_path.to_string_lossy().to_string(); + let mut abs_path = abs_path.as_ref().to_string_lossy().to_string(); + // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` + // in which case want to strip the leading the `/` and expand the tilde. 
+ // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850 + if abs_path.starts_with("/~") { + abs_path = shellexpand::tilde(&abs_path[1..]).to_string(); + } + let root_name = PathBuf::from(abs_path.clone()) + .file_name() + .unwrap() + .to_string_lossy() + .to_string(); cx.spawn(|this, mut cx| async move { let response = client .request(proto::AddWorktree { project_id: SSH_PROJECT_ID, - path: path.clone(), + path: abs_path.clone(), }) .await?; let worktree = cx.update(|cx| { @@ -200,7 +209,7 @@ impl WorktreeStore { id: response.worktree_id, root_name, visible, - abs_path: path, + abs_path, }, client, cx, From 10cfaecffaa8c3705085d9faab714121a78e6f3d Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Tue, 17 Sep 2024 18:28:03 +0300 Subject: [PATCH 141/270] ocaml: Add Dune language support (#17886) This uses my fork of the dune tree-sitter grammar to include the generated files Release Notes: - N/A --- extensions/ocaml/extension.toml | 4 ++++ extensions/ocaml/languages/dune/config.toml | 8 ++++++++ extensions/ocaml/languages/dune/highlights.scm | 5 +++++ extensions/ocaml/languages/dune/injections.scm | 2 ++ 4 files changed, 19 insertions(+) create mode 100644 extensions/ocaml/languages/dune/config.toml create mode 100644 extensions/ocaml/languages/dune/highlights.scm create mode 100644 extensions/ocaml/languages/dune/injections.scm diff --git a/extensions/ocaml/extension.toml b/extensions/ocaml/extension.toml index 99f83d1263..0523ba83b6 100644 --- a/extensions/ocaml/extension.toml +++ b/extensions/ocaml/extension.toml @@ -19,3 +19,7 @@ path = "grammars/ocaml" repository = "https://github.com/tree-sitter/tree-sitter-ocaml" commit = "0b12614ded3ec7ed7ab7933a9ba4f695ba4c342e" path = "grammars/interface" + +[grammars.dune] +repository = "https://github.com/WHForks/tree-sitter-dune" +commit = "b3f7882e1b9a1d8811011bf6f0de1c74c9c93949" diff --git 
a/extensions/ocaml/languages/dune/config.toml b/extensions/ocaml/languages/dune/config.toml new file mode 100644 index 0000000000..332e3f112e --- /dev/null +++ b/extensions/ocaml/languages/dune/config.toml @@ -0,0 +1,8 @@ +name = "Dune" +grammar = "dune" +path_suffixes = ["dune", "dune-project"] +brackets = [ + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } +] +tab_size = 2 diff --git a/extensions/ocaml/languages/dune/highlights.scm b/extensions/ocaml/languages/dune/highlights.scm new file mode 100644 index 0000000000..e7a21cd2c5 --- /dev/null +++ b/extensions/ocaml/languages/dune/highlights.scm @@ -0,0 +1,5 @@ +(stanza_name) @function +(field_name) @property +(quoted_string) @string +(multiline_string) @string +(action_name) @keyword diff --git a/extensions/ocaml/languages/dune/injections.scm b/extensions/ocaml/languages/dune/injections.scm new file mode 100644 index 0000000000..441998290e --- /dev/null +++ b/extensions/ocaml/languages/dune/injections.scm @@ -0,0 +1,2 @@ +((ocaml_syntax) @injection.content + (#set! injection.language "ocaml")) From 7c54965b11c096341aeee64a0ed8079890c307d1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 17 Sep 2024 17:39:06 +0200 Subject: [PATCH 142/270] docs: Add dark mode (#17940) Closes https://github.com/zed-industries/zed/issues/17911 This PR enables dark mode on the documentation. This is done without any special plugins, just pure JavaScript and CSS variables. I may open fast-follow PRs to fine-tune design and code details that haven't been super polished yet. For example, when switching to dark mode, the `class` attribute on the `html` tag would change immediately, whereas other attributes such as `data-theme` and `data-color-scheme` would require a full refresh. This seems to be resolved, but not 100% confident yet. 
--- Release Notes: - Enabled dark mode on the documentation --- docs/theme/css/chrome.css | 47 +++++++++------- docs/theme/css/general.css | 14 ++--- docs/theme/css/variables.css | 105 ++++++++++++++++++++++++++++++++--- docs/theme/index.hbs | 38 ++++++++++--- docs/theme/page-toc.css | 2 +- docs/theme/plugins.css | 2 +- docs/theme/plugins.js | 62 +++++++++++++++++++++ 7 files changed, 224 insertions(+), 46 deletions(-) diff --git a/docs/theme/css/chrome.css b/docs/theme/css/chrome.css index 7afeabc704..11f16848d7 100644 --- a/docs/theme/css/chrome.css +++ b/docs/theme/css/chrome.css @@ -3,7 +3,7 @@ @import "variables.css"; html { - background-color: rgb(246, 245, 240); + background-color: var(--bg); scrollbar-color: var(--scrollbar) var(--bg); } #searchresults a, @@ -58,7 +58,7 @@ a > .hljs { height: var(--menu-bar-height); } #menu-bar.bordered { - border-block-end-color: var(--table-border-color); + border-block-end-color: var(--divider); } #menu-bar i, #menu-bar .icon-button { @@ -73,7 +73,7 @@ a > .hljs { transition: color 0.5s; } #menu-bar .icon-button:hover { - background-color: hsl(219, 93%, 42%, 0.15); + background-color: var(--icon-btn-bg-hover); } @media only screen and (max-width: 420px) { @@ -116,6 +116,7 @@ a > .hljs { align-items: center; flex: 1; overflow: hidden; + filter: var(--logo-brightness); } .js .menu-title { cursor: pointer; @@ -249,9 +250,10 @@ a:hover > .hljs { } pre { - background-color: white; - border: 1px rgba(8, 76, 207, 0.3) solid; - box-shadow: rgba(8, 76, 207, 0.07) 4px 4px 0px 0px; + background-color: var(--pre-bg); + border: 1px solid; + border-color: var(--pre-border); + box-shadow: var(--pre-shadow) 4px 4px 0px 0px; position: relative; } pre > .hljs { @@ -445,7 +447,8 @@ ul#searchresults span.teaser em { overscroll-behavior-y: contain; background-color: var(--sidebar-bg); color: var(--sidebar-fg); - border-right: 1px solid hsl(219, 93%, 42%, 0.15); + border-right: 1px solid; + border-color: var(--divider); } [dir="rtl"] .sidebar 
{ left: unset; @@ -606,7 +609,7 @@ ul#searchresults span.teaser em { margin: 5px 0px; } .chapter .spacer { - background-color: var(--sidebar-spacer); + background-color: var(--divider); } @media (-moz-touch-enabled: 1), (pointer: coarse) { @@ -628,11 +631,11 @@ ul#searchresults span.teaser em { .theme-popup { position: absolute; - left: 10px; - top: var(--menu-bar-height); + left: 32px; + top: calc(var(--menu-bar-height) - 12px); z-index: 1000; border-radius: 4px; - font-size: 0.7em; + font-size: 1.4rem; color: var(--fg); background: var(--theme-popup-bg); border: 1px solid var(--theme-popup-border); @@ -654,7 +657,7 @@ ul#searchresults span.teaser em { width: 100%; border: 0; margin: 0; - padding: 2px 20px; + padding: 2px 24px; line-height: 25px; white-space: nowrap; text-align: start; @@ -662,32 +665,36 @@ ul#searchresults span.teaser em { color: inherit; background: inherit; font-size: inherit; + font-family: inherit; } .theme-popup .theme:hover { background-color: var(--theme-hover); } .theme-selected::before { + font-family: Arial, Helvetica, sans-serif; + text-align: center; display: inline-block; content: "✓"; - margin-inline-start: -14px; - width: 14px; + margin-inline-start: -20px; + width: 20px; } .download-button { - background: hsl(220, 60%, 95%); - color: hsl(220, 60%, 30%); + background: var(--download-btn-bg); + color: var(--download-btn-color); padding: 4px 8px; - border: 1px solid hsla(220, 60%, 40%, 0.2); + border: 1px solid; + border-color: var(--download-btn-border); font-size: 1.4rem; border-radius: 4px; - box-shadow: hsla(220, 40%, 60%, 0.1) 0px -2px 0px 0px inset; + box-shadow: var(--download-btn-shadow) 0px -2px 0px 0px inset; transition: 100ms; transition-property: box-shadow, border-color, background-color; } .download-button:hover { - background: hsl(220, 60%, 93%); - border-color: hsla(220, 60%, 50%, 0.2); + background: var(--download-btn-bg); + border-color: var(--download-btn-border-hover); box-shadow: none; } diff --git 
a/docs/theme/css/general.css b/docs/theme/css/general.css index 9a20751f21..e6905b47bf 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -174,10 +174,10 @@ h6:target::before { } .content a { text-decoration: underline; - text-decoration-color: hsl(219, 93%, 42%, 0.2); + text-decoration-color: var(--link-line-decoration); } .content a:hover { - text-decoration-color: hsl(219, 93%, 42%, 0.5); + text-decoration-color: var(--link-line-decoration-hover); } .content img, .content video { @@ -219,7 +219,7 @@ table thead td { } table thead th { padding: 6px 12px; - color: #000; + color: var(--full-contrast); text-align: left; border: 1px var(--table-border-color) solid; } @@ -235,7 +235,7 @@ blockquote { margin: auto; margin-top: 1rem; padding: 1rem 1.25rem; - color: #000; + color: var(--full-contrast); background-color: var(--quote-bg); border: 1px solid var(--quote-border); } @@ -315,7 +315,7 @@ kbd { font-size: 1.4rem; margin: 0.5em 0; border-bottom: 1px solid; - border-color: var(--border-light); + border-color: var(--divider); } .footnote-definition p { display: inline; @@ -356,6 +356,6 @@ kbd { } code.hljs { - color: hsl(221, 13%, 10%) !important; - background-color: hsla(221, 93%, 42%, 0.1); + color: var(--code-text) !important; + background-color: var(--code-bg); } diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index 481cb5a6c1..59f2398264 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -1,6 +1,10 @@ /* Globals */ :root { + --color-scheme: light; + + --logo-brightness: brightness(1); + --sidebar-width: 300px; --sidebar-resize-indicator-width: 0px; --sidebar-resize-indicator-space: 2px; @@ -24,18 +28,30 @@ --sidebar-fg: hsl(0, 0%, 0%); --sidebar-non-existant: #aaaaaa; - --sidebar-active: hsl(219, 93%, 42%); - --sidebar-active-bg: hsl(219, 93%, 42%, 0.1); - --sidebar-spacer: #f4f4f4; + --sidebar-active: hsl(220, 93%, 42%); + --sidebar-active-bg: hsl(220, 93%, 42%, 0.1); + --divider: 
hsl(220, 93%, 42%, 0.15); --scrollbar: #8f8f8f; --icons: #747474; --icons-hover: #000000; + --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.15); - --links: rgb(8, 76, 207); + --links: hsl(220, 92%, 42%); + --link-line-decoration: hsl(220, 93%, 42%, 0.2); + --link-line-decoration-hover: hsl(220, 93%, 42%, 0.5); + + --full-contrast: #000; --inline-code-color: #301900; + --code-text: hsl(220, 13%, 10%); + --code-bg: hsl(220, 93%, 42%, 0.1); + --keybinding-bg: hsl(0, 0%, 94%); + + --pre-bg: #fff; + --pre-border: hsla(220, 93%, 42%, 0.3); + --pre-shadow: hsla(220, 93%, 42%, 0.07); --theme-popup-bg: #fafafa; --theme-popup-border: #cccccc; @@ -48,9 +64,9 @@ --warning-bg: hsl(42, 100%, 60%, 0.1); --warning-icon: hsl(42, 100%, 30%); - --table-header-bg: hsl(219, 50%, 90%, 0.4); - --table-border-color: hsl(219, 93%, 42%, 0.15); - --table-alternate-bg: hsl(219, 10%, 90%, 0.4); + --table-header-bg: hsl(220, 50%, 90%, 0.4); + --table-border-color: hsl(220, 93%, 42%, 0.15); + --table-alternate-bg: hsl(220, 10%, 90%, 0.4); --searchbar-border-color: #aaa; --searchbar-bg: #fafafa; @@ -61,5 +77,78 @@ --searchresults-li-bg: #e4f2fe; --search-mark-bg: #a2cff5; - --color-scheme: light; + --download-btn-bg: hsl(220, 60%, 95%); + --download-btn-bg-hover: hsl(220, 60%, 93%); + --download-btn-color: hsl(220, 60%, 30%); + --download-btn-border: hsla(220, 60%, 40%, 0.2); + --download-btn-border-hover: hsla(220, 60%, 50%, 0.2); + --download-btn-shadow: hsla(220, 40%, 60%, 0.1); +} + +.dark { + --color-scheme: dark; + + --logo-brightness: brightness(2); + + --bg: hsl(220, 13%, 10%); + --fg: hsl(220, 14%, 70%); + --title-color: hsl(220, 92%, 80%); + + --border: hsl(220, 13%, 20%); + --border-light: hsl(220, 13%, 90%); + --border-hover: hsl(220, 13%, 40%); + + --sidebar-bg: hsl(220, 13%, 10%); + --sidebar-fg: hsl(220, 14%, 71%); + --sidebar-non-existant: #505254; + --sidebar-active: hsl(220, 92%, 75%); + --sidebar-active-bg: hsl(220, 93%, 42%, 0.25); + + --divider: hsl(220, 13%, 20%); + --scrollbar: 
hsl(220, 13%, 30%); + + --icons: hsl(220, 14%, 71%); + --icons-hover: hsl(220, 14%, 90%); + --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.4); + + --links: hsl(220, 93%, 70%); + --link-line-decoration: hsl(220, 92%, 80%, 0.4); + --link-line-decoration-hover: hsl(220, 92%, 80%, 0.8); + --full-contrast: #fff; + + --inline-code-color: hsl(40, 100%, 80%); + --code-text: hsl(220, 13%, 95%); + --code-bg: hsl(220, 93%, 50%, 0.2); + --keybinding-bg: hsl(0, 0%, 12%); + + --pre-bg: hsl(220, 13%, 5%); + --pre-border: hsla(220, 93%, 70%, 0.3); + --pre-shadow: hsla(220, 93%, 70%, 0.1); + + --theme-popup-bg: hsl(220, 13%, 15%); + --theme-popup-border: hsl(220, 13%, 20%); + --theme-hover: hsl(220, 13%, 25%); + + --quote-bg: hsl(220, 13%, 25%, 0.4); + --quote-border: hsl(220, 13%, 32%, 0.5); + + --table-border-color: hsl(220, 13%, 30%, 0.5); + --table-header-bg: hsl(220, 13%, 25%, 0.5); + --table-alternate-bg: hsl(220, 13%, 20%, 0.4); + + --searchbar-border-color: hsl(220, 13%, 30%); + --searchbar-bg: hsl(220, 13%, 22%, 0.5); + --searchbar-fg: hsl(220, 14%, 71%); + --searchbar-shadow-color: hsl(220, 13%, 15%); + --searchresults-header-fg: hsl(220, 14%, 60%); + --searchresults-border-color: hsl(220, 13%, 30%); + --searchresults-li-bg: hsl(220, 13%, 25%); + --search-mark-bg: hsl(220, 93%, 60%); + + --download-btn-bg: hsl(220, 90%, 90%, 0.1); + --download-btn-bg-hover: hsl(220, 90%, 50%, 0.2); + --download-btn-color: hsl(220, 90%, 95%); + --download-btn-border: hsla(220, 90%, 80%, 0.2); + --download-btn-border-hover: hsla(220, 90%, 80%, 0.4); + --download-btn-shadow: hsla(220, 50%, 60%, 0.15); } diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 8976b54bd9..c4154b46d3 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -1,5 +1,5 @@ - + @@ -56,13 +56,15 @@ var default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? 
"{{ preferred_dark_theme }}" : "{{ default_theme }}"; - + + + diff --git a/docs/theme/page-toc.css b/docs/theme/page-toc.css index dacd61a09b..af9b2fbbe1 100644 --- a/docs/theme/page-toc.css +++ b/docs/theme/page-toc.css @@ -74,6 +74,6 @@ margin-bottom: 12px; padding-left: 12px; font-size: 1.4rem; - color: #000; + color: var(--full-contrast); } } diff --git a/docs/theme/plugins.css b/docs/theme/plugins.css index 9deee5d5ba..9d5d09fe73 100644 --- a/docs/theme/plugins.css +++ b/docs/theme/plugins.css @@ -1,5 +1,5 @@ kbd.keybinding { - background-color: #f0f0f0; + background-color: var(--keybinding-bg); padding: 2px 4px; border-radius: 3px; font-family: monospace; diff --git a/docs/theme/plugins.js b/docs/theme/plugins.js index eee842f15a..76a295353f 100644 --- a/docs/theme/plugins.js +++ b/docs/theme/plugins.js @@ -48,3 +48,65 @@ console.log("Operating System:", os); // Start the process from the body walkDOM(document.body); })(); + +function darkModeToggle() { + var html = document.documentElement; + var themeToggleButton = document.getElementById("theme-toggle"); + var themePopup = document.getElementById("theme-list"); + var themePopupButtons = themePopup.querySelectorAll("button"); + + function setTheme(theme) { + html.setAttribute("data-theme", theme); + html.setAttribute("data-color-scheme", theme); + html.className = theme; + localStorage.setItem("mdbook-theme", theme); + + // Force a repaint to ensure the changes take effect in the client immediately + document.body.style.display = "none"; + document.body.offsetHeight; + document.body.style.display = ""; + } + + themeToggleButton.addEventListener("click", function (event) { + event.preventDefault(); + themePopup.style.display = + themePopup.style.display === "block" ? 
"none" : "block"; + }); + + themePopupButtons.forEach(function (button) { + button.addEventListener("click", function () { + setTheme(this.id); + themePopup.style.display = "none"; + }); + }); + + document.addEventListener("click", function (event) { + if ( + !themePopup.contains(event.target) && + !themeToggleButton.contains(event.target) + ) { + themePopup.style.display = "none"; + } + }); + + // Set initial theme + var currentTheme = localStorage.getItem("mdbook-theme"); + if (currentTheme) { + setTheme(currentTheme); + } else { + // If no theme is set, use the system's preference + var systemPreference = window.matchMedia("(prefers-color-scheme: dark)") + .matches + ? "dark" + : "light"; + setTheme(systemPreference); + } + + // Listen for system's preference changes + const darkModeMediaQuery = window.matchMedia("(prefers-color-scheme: dark)"); + darkModeMediaQuery.addEventListener("change", function (e) { + if (!localStorage.getItem("mdbook-theme")) { + setTheme(e.matches ? "dark" : "light"); + } + }); +} From d5003e1121045b1c8f505b3ff592630d2e277605 Mon Sep 17 00:00:00 2001 From: VacheDesNeiges <33199153+VacheDesNeiges@users.noreply.github.com> Date: Tue, 17 Sep 2024 17:40:43 +0200 Subject: [PATCH 143/270] Update C++ Tree-sitter queries (#17471) Closes #16443 Release Notes: - Fixed C++ functions being wrongly tagged as variables when called after two or more scope resolution operators. 
- Added a "namespace" tag for highlighting purposes Before : ![image](https://github.com/user-attachments/assets/743b8407-4e62-4549-9c6a-ed6608ea7e43) After : ![image](https://github.com/user-attachments/assets/de563621-e722-463c-97a1-a99b925f126e) --- crates/languages/src/cpp/highlights.scm | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index 42d05bde33..4c9db56928 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -1,10 +1,33 @@ (identifier) @variable (field_identifier) @property +(namespace_identifier) @namespace (call_expression function: (qualified_identifier name: (identifier) @function)) +(call_expression + (qualified_identifier + (identifier) @function.call)) + +(call_expression + (qualified_identifier + (qualified_identifier + (identifier) @function.call))) + +(call_expression + (qualified_identifier + (qualified_identifier + (qualified_identifier + (identifier) @function.call)))) + +((qualified_identifier + (qualified_identifier + (qualified_identifier + (qualified_identifier + (identifier) @function.call)))) @_parent + (#has-ancestor? @_parent call_expression)) + (call_expression function: (identifier) @function) From e88b48a9c7467aa69a4f7d6dd657bbc1a294e32a Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 11:42:20 -0400 Subject: [PATCH 144/270] ocaml: Fix indentation in files (#17942) This PR fixes the indentation in the Dune-related files after https://github.com/zed-industries/zed/pull/17886. 
Release Notes: - N/A --- extensions/ocaml/languages/dune/config.toml | 4 ++-- extensions/ocaml/languages/dune/injections.scm | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/extensions/ocaml/languages/dune/config.toml b/extensions/ocaml/languages/dune/config.toml index 332e3f112e..b4f79850b6 100644 --- a/extensions/ocaml/languages/dune/config.toml +++ b/extensions/ocaml/languages/dune/config.toml @@ -2,7 +2,7 @@ name = "Dune" grammar = "dune" path_suffixes = ["dune", "dune-project"] brackets = [ - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } ] tab_size = 2 diff --git a/extensions/ocaml/languages/dune/injections.scm b/extensions/ocaml/languages/dune/injections.scm index 441998290e..654b5b2c13 100644 --- a/extensions/ocaml/languages/dune/injections.scm +++ b/extensions/ocaml/languages/dune/injections.scm @@ -1,2 +1,2 @@ ((ocaml_syntax) @injection.content - (#set! injection.language "ocaml")) + (#set! injection.language "ocaml")) From 27f09957c2066f0f4131f5daafb95085e3ce6dbb Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 11:53:52 -0400 Subject: [PATCH 145/270] Improve CSS highlighting for property_name (#17324) --- crates/languages/src/css/highlights.scm | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/css/highlights.scm b/crates/languages/src/css/highlights.scm index e271d8583c..4ddfe9a418 100644 --- a/crates/languages/src/css/highlights.scm +++ b/crates/languages/src/css/highlights.scm @@ -35,10 +35,11 @@ (class_name) (id_name) (namespace_name) - (property_name) (feature_name) ] @property +(property_name) @constant + (function_name) @function ( @@ -75,4 +76,17 @@ [ "," ":" + "." 
+ "::" + ";" + "#" ] @punctuation.delimiter + +[ + "{" + ")" + "(" + "}" + "[" + "]" +] @punctuation.bracket From accff826ca842fc8c297fab317f464bc9c695d75 Mon Sep 17 00:00:00 2001 From: Albert Marashi Date: Wed, 18 Sep 2024 01:32:25 +0930 Subject: [PATCH 146/270] svelte: Migrate to `tree-sitter-grammars/tree-sitter-svelte` (#17529) > [!NOTE] > The https://github.com/tree-sitter-grammars/tree-sitter-svelte repository seems to be more well maintained, with higher quality code, and as per https://github.com/zed-extensions/svelte/issues/1 it was suggested that we swap to this repository for Svelte grammars - Closes https://github.com/zed-industries/zed/issues/17310 - Closes https://github.com/zed-industries/zed/issues/10893 - Closes https://github.com/zed-industries/zed/issues/12833 - Closes https://github.com/zed-extensions/svelte/issues/1 - Closes https://github.com/zed-industries/zed/issues/14943 - Closes https://github.com/zed-extensions/svelte/issues/2 - Added: buffer/file symbol outlines for `.svelte` (`outlines.scm`) - Improved: Attribute directives & modifiers in `.svelte` files can be styled independently. - Fixed: issue where svelte expression inside quotes failed parsing - Improved: Svelte components in Markup are styled differently from tags. 
- Added: Support for Svelte 5 syntax (`{#snippet children()}`, `{@render foo()`) - Change: Svelte now using [tree-sitter-grammars/tree-sitter-svelte](https://github.com/tree-sitter-grammars/tree-sitter-svelte) for language highlighting - Added: Support for typescript syntax in svelte expressions ![image](https://github.com/user-attachments/assets/49d199ee-7550-49a7-912d-070cf691b029) ![image](https://github.com/user-attachments/assets/848ac5b6-62da-4c42-8e24-b7023504f8af) Release Notes: - N/A --- **tree-sitter-grammar things to improve** - [ ] snippet functions aren't being treated as JS code - [ ] we should be able to detect @component comments and treat them as markdown - [x] `foo:bar` style/class/prop directives - [x] `--foo="..."` var fields - [ ] snippet/if blocks's children may need to be indented a little further Will implement some of the rest of these in a separate PR --------- Co-authored-by: Marshall Bowers --- docs/src/languages/svelte.md | 23 ++- extensions/svelte/extension.toml | 4 +- .../svelte/languages/svelte/brackets.scm | 7 + .../svelte/languages/svelte/config.toml | 12 +- .../svelte/languages/svelte/highlights.scm | 125 ++++++++++----- .../svelte/languages/svelte/injections.scm | 144 ++++++++++-------- .../svelte/languages/svelte/outline.scm | 69 +++++++++ 7 files changed, 271 insertions(+), 113 deletions(-) create mode 100644 extensions/svelte/languages/svelte/brackets.scm create mode 100644 extensions/svelte/languages/svelte/outline.scm diff --git a/docs/src/languages/svelte.md b/docs/src/languages/svelte.md index 1c6fd49b3e..157a57d43e 100644 --- a/docs/src/languages/svelte.md +++ b/docs/src/languages/svelte.md @@ -2,16 +2,29 @@ Svelte support is available through the [Svelte extension](https://github.com/zed-industries/zed/tree/main/extensions/svelte). 
-- Tree Sitter: [Himujjal/tree-sitter-svelte](https://github.com/Himujjal/tree-sitter-svelte) +- Tree Sitter: [tree-sitter-grammars/tree-sitter-svelte](https://github.com/tree-sitter-grammars/tree-sitter-svelte) - Language Server: [sveltejs/language-tools](https://github.com/sveltejs/language-tools) - +## Extra theme styling configuration + +You can modify how certain styles such as directives and modifiers appear in attributes: + +```json +"syntax": { + // Styling for directives (e.g., `class:foo` or `on:click`) (the `on` or `class` part of the attribute). + "attribute.function": { + "color": "#ff0000" + }, + // Styling for modifiers at the end of attributes, e.g. `on:` + "attribute.special": { + "color": "#00ff00" + } +} +``` ## Inlay Hints -Zed sets the following initialization options for inlay Hints: +Zed sets the following initialization options for inlay hints: ```json "inlayHints": { diff --git a/extensions/svelte/extension.toml b/extensions/svelte/extension.toml index 9ca1d6c5da..694fdec2a6 100644 --- a/extensions/svelte/extension.toml +++ b/extensions/svelte/extension.toml @@ -11,5 +11,5 @@ name = "Svelte Language Server" language = "Svelte" [grammars.svelte] -repository = "https://github.com/Himujjal/tree-sitter-svelte" -commit = "b08d070e303d2a385d6d0ab3add500f8fa514443" +repository = "https://github.com/tree-sitter-grammars/tree-sitter-svelte" +commit = "3f06f705410683adb17d146b5eca28c62fe81ba6" diff --git a/extensions/svelte/languages/svelte/brackets.scm b/extensions/svelte/languages/svelte/brackets.scm new file mode 100644 index 0000000000..deb34f80a8 --- /dev/null +++ b/extensions/svelte/languages/svelte/brackets.scm @@ -0,0 +1,7 @@ +("<" @open ">" @close) +("{" @open "}" @close) +("'" @open "'" @close) +("\"" @open "\"" @close) +("(" @open ")" @close) +; ("[" @open "]" @close) +; ("`" @open "`" @close) diff --git a/extensions/svelte/languages/svelte/config.toml b/extensions/svelte/languages/svelte/config.toml index 3bab2f2943..4db968be8c 100644 --- 
a/extensions/svelte/languages/svelte/config.toml +++ b/extensions/svelte/languages/svelte/config.toml @@ -2,16 +2,16 @@ name = "Svelte" grammar = "svelte" path_suffixes = ["svelte"] block_comment = [""] -autoclose_before = ";:.,=}])>" +autoclose_before = ":\"'}]>" brackets = [ { start = "{", end = "}", close = true, newline = true }, + { start = "<", end = ">", close = true, newline = true, not_in = ["string"] }, { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, - { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] }, - { start = "`", end = "`", close = true, newline = false, not_in = ["string"] }, - { start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] }, + { start = "!--", end = " --", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = true, not_in = ["string"] }, + { start = "'", end = "'", close = true, newline = true, not_in = ["string"] }, + { start = "`", end = "`", close = true, newline = true, not_in = ["string"] }, ] scope_opt_in_language_servers = ["tailwindcss-language-server"] prettier_parser_name = "svelte" diff --git a/extensions/svelte/languages/svelte/highlights.scm b/extensions/svelte/languages/svelte/highlights.scm index 4e317489d8..01d8bb8db6 100755 --- a/extensions/svelte/languages/svelte/highlights.scm +++ b/extensions/svelte/languages/svelte/highlights.scm @@ -1,50 +1,107 @@ -; Special identifiers -;-------------------- -; Treat capitalized tag names as constructors and types -((tag_name) @type - (#match? @type "^[A-Z]")) - -; Regular (lowercase) tag names -((tag_name) @tag - (#match? 
@tag "^[a-z]")) - -; TODO: -(attribute_name) @property -(erroneous_end_tag_name) @keyword +; comments (comment) @comment -[ - (attribute_value) - (quoted_attribute_value) -] @string +; property attribute +(attribute_directive) @attribute.function +(attribute_identifier) @attribute +(attribute_modifier) @attribute.special -[ - (text) - (raw_text_expr) - (raw_text_each) -] @none +; Style component attributes as @property +(start_tag + ( + (tag_name) @_tag_name + (#match? @_tag_name "^[A-Z]") + ) + (attribute + (attribute_name + (attribute_identifier) @tag.property + ) + ) +) -[ - (special_block_keyword) - (then) - (as) -] @keyword +(self_closing_tag + ( + (tag_name) @_tag_name + (#match? @_tag_name "^[A-Z]") + ) + (attribute + (attribute_name + (attribute_identifier) @tag.property + ) + ) +) -[ - "{" - "}" -] @punctuation.bracket -"=" @operator +; style elements starting with lowercase letters as tags +( + (tag_name) @tag + (#match? @tag "^[a-z]") +) + +; style elements starting with uppercase letters as components (types) +; Also valid might be to treat them as constructors +( + (tag_name) @tag @tag.component.type.constructor + (#match? @tag "^[A-Z]") +) [ "<" ">" "" +] @tag.punctuation.bracket + + +[ + "{" + "}" +] @punctuation.bracket + +[ + "|" +] @punctuation.delimiter + + +[ + "@" "#" ":" "/" - "@" -] @tag.delimiter +] @tag.punctuation.special + +"=" @operator + + +; Treating (if, each, ...) as a keyword inside of blocks +; like {#if ...} or {#each ...} +(block_start_tag + tag: _ @tag.keyword +) + +(block_tag + tag: _ @tag.keyword +) + +(block_end_tag + tag: _ @tag.keyword +) + +(expression_tag + tag: _ @tag.keyword +) + +; Style quoted string attribute values +(quoted_attribute_value) @string + + +; Highlight the `as` keyword in each blocks +(each_start + ("as") @tag.keyword +) + + +; Highlight the snippet name as a function +; (e.g. 
{#snippet foo(bar)} +(snippet_name) @function diff --git a/extensions/svelte/languages/svelte/injections.scm b/extensions/svelte/languages/svelte/injections.scm index 24f9425803..73d2b9abb1 100755 --- a/extensions/svelte/languages/svelte/injections.scm +++ b/extensions/svelte/languages/svelte/injections.scm @@ -1,74 +1,86 @@ -; injections.scm -; -------------- +; ; injections.scm +; ; -------------- -; match script tags without a lang tag -((script_element - (start_tag - (attribute - (attribute_name) @_name)*) - (raw_text) @content) - (#not-eq? @_name "lang") - (#set! "language" "javascript")) +; Match script tags with a lang attribute +(script_element + (start_tag + (attribute + (attribute_name) @_attr_name + (#eq? @_attr_name "lang") + (quoted_attribute_value + (attribute_value) @language + ) + ) + ) + (raw_text) @content +) -; match javascript -((script_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "js") - (#set! "language" "javascript")) +; Match script tags without a lang attribute +(script_element + (start_tag + (attribute + (attribute_name) @_attr_name + )* + ) + (raw_text) @content + (#not-any-of? @_attr_name "lang") + (#set! language "javascript") +) -; match typescript -((script_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "ts") - (#set! "language" "typescript")) +; Match the contents of the script's generics="T extends string" as typescript code +; +; Disabled for the time-being because tree-sitter is treating the generics +; attribute as a top-level typescript statement, where `T extends string` is +; not a valid top-level typescript statement. +; +; (script_element +; (start_tag +; (attribute +; (attribute_name) @_attr_name +; (#eq? 
@_attr_name "generics") +; (quoted_attribute_value +; (attribute_value) @content +; ) +; ) +; ) +; (#set! language "typescript") +; ) + +; Mark everything as typescript because it's +; a more generic superset of javascript +; Not sure if it's possible to somehow refer to the +; script's language attribute here. +((svelte_raw_text) @content + (#set! "language" "ts") +) + +; Match style tags with a lang attribute (style_element - (raw_text) @content - (#set! "language" "css")) + (start_tag + (attribute + (attribute_name) @_attr_name + (#eq? @_attr_name "lang") + (quoted_attribute_value + (attribute_value) @language + ) + ) + ) + (raw_text) @content +) -; match style tags without a lang tag -((style_element - (start_tag - (attribute - (attribute_name) @_name)*) - (raw_text) @content) - (#not-eq? @_name "lang") - (#set! "language" "css")) +; Match style tags without a lang attribute +(style_element + (start_tag + (attribute + (attribute_name) @_attr_name + )* + ) + (raw_text) @content + (#not-any-of? @_attr_name "lang") + (#set! language "css") +) -; match css -((style_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "css") - (#set! "language" "css")) -; match scss -((style_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "scss") - (#set! "language" "scss")) - -((raw_text_expr) @content - (#set! "language" "javascript")) - -((raw_text_each) @content - (#set! 
"language" "javascript")) +; Downstream TODO: Style highlighting for `style:background="red"` and `style="background: red"` strings +; Downstream TODO: Style component comments as markdown diff --git a/extensions/svelte/languages/svelte/outline.scm b/extensions/svelte/languages/svelte/outline.scm new file mode 100644 index 0000000000..8242ada243 --- /dev/null +++ b/extensions/svelte/languages/svelte/outline.scm @@ -0,0 +1,69 @@ + +(script_element + (start_tag) @name + (raw_text) @context @item +) + +(script_element + (end_tag) @name @item +) + +(style_element + (start_tag) @name + (raw_text) @context +) @item + + +(document) @item + +(comment) @annotation + +(if_statement + (if_start) @name +) @item + +(else_block + (else_start) @name +) @item + +(else_if_block + (else_if_start) @name +) @item + +(element + (start_tag) @name +) @item + +(element + (self_closing_tag) @name +) @item + + +; (if_end) @name @item + +(each_statement + (each_start) @name +) @item + + +(snippet_statement + (snippet_start) @name +) @item + +(snippet_end) @name @item + +(html_tag) @name @item + +(const_tag) @name @item + +(await_statement + (await_start) @name +) @item + +(then_block + (then_start) @name +) @item + +(catch_block + (catch_start) @name +) @item From 76603a5fc6b27e7ac86b908429134036f1c49984 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 12:13:06 -0400 Subject: [PATCH 147/270] ocaml: Bump to v0.1.0 (#17945) This PR bumps the OCaml extension to v0.1.0. 
Changes: - https://github.com/zed-industries/zed/pull/16955 - https://github.com/zed-industries/zed/pull/17886 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/ocaml/Cargo.toml | 2 +- extensions/ocaml/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d0f7f54a0..6d6d8e85e6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14481,7 +14481,7 @@ dependencies = [ [[package]] name = "zed_ocaml" -version = "0.0.2" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/ocaml/Cargo.toml b/extensions/ocaml/Cargo.toml index 6e07327036..6df98bec4c 100644 --- a/extensions/ocaml/Cargo.toml +++ b/extensions/ocaml/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_ocaml" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/ocaml/extension.toml b/extensions/ocaml/extension.toml index 0523ba83b6..bff7c380b5 100644 --- a/extensions/ocaml/extension.toml +++ b/extensions/ocaml/extension.toml @@ -1,7 +1,7 @@ id = "ocaml" name = "OCaml" description = "OCaml support." 
-version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = ["Rashid Almheiri <69181766+huwaireb@users.noreply.github.com>"] repository = "https://github.com/zed-industries/zed" From 3d69942f7141c689a8f607aa11f534303560862b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 17 Sep 2024 12:34:36 -0400 Subject: [PATCH 148/270] Use dev icons for dev bundles (#17944) Follow-up of https://github.com/zed-industries/zed/pull/17486/ actually using the dev icons for dev bundles Release Notes: - N/A --- crates/zed/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index ad02d4f388..28d2c7f825 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -134,7 +134,7 @@ tree-sitter-rust.workspace = true workspace = { workspace = true, features = ["test-support"] } [package.metadata.bundle-dev] -icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"] +icon = ["resources/app-icon-dev@2x.png", "resources/app-icon-dev.png"] identifier = "dev.zed.Zed-Dev" name = "Zed Dev" osx_minimum_system_version = "10.15.7" From 0070635b4de672af2510df73d8bbd2f1c8f92cce Mon Sep 17 00:00:00 2001 From: Albert Marashi Date: Wed, 18 Sep 2024 02:21:31 +0930 Subject: [PATCH 149/270] Styling option for separating terminal view background from background color (#17611) Closes #17313 Release Notes: - Added theme styling option to separate terminal background view from terminal background color, for transparent terminal backgrounds --- crates/repl/src/outputs/plain.rs | 2 +- crates/terminal_view/src/terminal_element.rs | 6 +++--- crates/theme/src/default_colors.rs | 2 ++ crates/theme/src/one_themes.rs | 1 + crates/theme/src/schema.rs | 8 ++++++++ crates/theme/src/styles/colors.rs | 5 ++++- 6 files changed, 19 insertions(+), 5 deletions(-) diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index 5b9960d7b8..4bf007b292 100644 --- a/crates/repl/src/outputs/plain.rs 
+++ b/crates/repl/src/outputs/plain.rs @@ -75,7 +75,7 @@ pub fn text_style(cx: &mut WindowContext) -> TextStyle { font_size: theme::get_buffer_font_size(cx).into(), font_style: FontStyle::Normal, line_height: cx.line_height().into(), - background_color: Some(theme.colors().terminal_background), + background_color: Some(theme.colors().terminal_ansi_background), white_space: WhiteSpace::Normal, truncate: None, // These are going to be overridden per-cell diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index b3d2b40e74..ce4be0e679 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -662,7 +662,7 @@ impl Element for TerminalElement { font_size: font_size.into(), font_style: FontStyle::Normal, line_height: line_height.into(), - background_color: Some(theme.colors().terminal_background), + background_color: Some(theme.colors().terminal_ansi_background), white_space: WhiteSpace::Normal, truncate: None, // These are going to be overridden per-cell @@ -778,7 +778,7 @@ impl Element for TerminalElement { &[TextRun { len, font: text_style.font(), - color: theme.colors().terminal_background, + color: theme.colors().terminal_ansi_background, background_color: None, underline: Default::default(), strikethrough: None, @@ -1158,7 +1158,7 @@ pub fn convert_color(fg: &terminal::alacritty_terminal::vte::ansi::Color, theme: NamedColor::BrightCyan => colors.terminal_ansi_bright_cyan, NamedColor::BrightWhite => colors.terminal_ansi_bright_white, NamedColor::Foreground => colors.terminal_foreground, - NamedColor::Background => colors.terminal_background, + NamedColor::Background => colors.terminal_ansi_background, NamedColor::Cursor => theme.players().local().cursor, NamedColor::DimBlack => colors.terminal_ansi_dim_black, NamedColor::DimRed => colors.terminal_ansi_dim_red, diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 
48e67a4cc0..4def0bb8d7 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -84,6 +84,7 @@ impl ThemeColors { terminal_foreground: black().light().step_12(), terminal_bright_foreground: black().light().step_11(), terminal_dim_foreground: black().light().step_10(), + terminal_ansi_background: neutral().light().step_1(), terminal_ansi_bright_black: black().light().step_11(), terminal_ansi_bright_red: red().light().step_10(), terminal_ansi_bright_green: green().light().step_10(), @@ -179,6 +180,7 @@ impl ThemeColors { editor_document_highlight_read_background: neutral().dark_alpha().step_4(), editor_document_highlight_write_background: neutral().dark_alpha().step_4(), terminal_background: neutral().dark().step_1(), + terminal_ansi_background: neutral().dark().step_1(), terminal_foreground: white().dark().step_12(), terminal_bright_foreground: white().dark().step_11(), terminal_dim_foreground: white().dark().step_10(), diff --git a/crates/theme/src/one_themes.rs b/crates/theme/src/one_themes.rs index 241344b8f3..69e69ce23d 100644 --- a/crates/theme/src/one_themes.rs +++ b/crates/theme/src/one_themes.rs @@ -105,6 +105,7 @@ pub(crate) fn one_dark() -> Theme { terminal_background: bg, // todo("Use one colors for terminal") + terminal_ansi_background: crate::black().dark().step_12(), terminal_foreground: crate::white().dark().step_12(), terminal_bright_foreground: crate::white().dark().step_11(), terminal_dim_foreground: crate::white().dark().step_10(), diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index a47dc23a7c..cc62e2976b 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -421,6 +421,10 @@ pub struct ThemeColorsContent { #[serde(rename = "terminal.foreground")] pub terminal_foreground: Option, + /// Terminal ansi background color. + #[serde(rename = "terminal.ansi.background")] + pub terminal_ansi_background: Option, + /// Bright terminal foreground color. 
#[serde(rename = "terminal.bright_foreground")] pub terminal_bright_foreground: Option, @@ -792,6 +796,10 @@ impl ThemeColorsContent { .terminal_background .as_ref() .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_background: self + .terminal_ansi_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), terminal_foreground: self .terminal_foreground .as_ref() diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 349a79ba66..04aba89a0d 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -175,7 +175,7 @@ pub struct ThemeColors { // === // Terminal // === - /// Terminal background color. + /// Terminal layout background color. pub terminal_background: Hsla, /// Terminal foreground color. pub terminal_foreground: Hsla, @@ -184,6 +184,9 @@ pub struct ThemeColors { /// Dim terminal foreground color. pub terminal_dim_foreground: Hsla, + /// Terminal ansi background color. + pub terminal_ansi_background: Hsla, + /// Black ANSI terminal color. pub terminal_ansi_black: Hsla, /// Bright black ANSI terminal color. From ac5c35b3df32a0a68e74ed6dbf288bcc7055e050 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 13:26:34 -0400 Subject: [PATCH 150/270] theme: Fix casing of "ANSI" in doc comments (#17952) This PR fixes the casing of "ANSI" in some doc comments after #17611. Release Notes: - N/A --- crates/theme/src/schema.rs | 2 +- crates/theme/src/styles/colors.rs | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index cc62e2976b..0229b1ea98 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -421,7 +421,7 @@ pub struct ThemeColorsContent { #[serde(rename = "terminal.foreground")] pub terminal_foreground: Option, - /// Terminal ansi background color. + /// Terminal ANSI background color. 
#[serde(rename = "terminal.ansi.background")] pub terminal_ansi_background: Option, diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 04aba89a0d..0b37be0992 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -183,10 +183,8 @@ pub struct ThemeColors { pub terminal_bright_foreground: Hsla, /// Dim terminal foreground color. pub terminal_dim_foreground: Hsla, - - /// Terminal ansi background color. + /// Terminal ANSI background color. pub terminal_ansi_background: Hsla, - /// Black ANSI terminal color. pub terminal_ansi_black: Hsla, /// Bright black ANSI terminal color. From ee8668ef45378bda7ca7b1f543310c8fcf226725 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 13:33:09 -0400 Subject: [PATCH 151/270] Bind `editor::Rewrap` to `alt-q` (#17953) This PR adds a keybinding for the `editor: rewrap` command. It is bound to `alt-q`, by default. In Vim mode, it is bound to `g q`. Release Notes: - N/A --- assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + assets/keymaps/vim.json | 1 + 3 files changed, 3 insertions(+) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index bb5673dde6..02fc6d8e04 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -56,6 +56,7 @@ "shift-tab": "editor::TabPrev", "ctrl-k": "editor::CutToEndOfLine", // "ctrl-t": "editor::Transpose", + "alt-q": "editor::Rewrap", "ctrl-backspace": "editor::DeleteToPreviousWordStart", "ctrl-delete": "editor::DeleteToNextWordEnd", "shift-delete": "editor::Cut", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 9a0c08c3dc..33536cc9ff 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -51,6 +51,7 @@ "shift-tab": "editor::TabPrev", "ctrl-k": "editor::CutToEndOfLine", "ctrl-t": "editor::Transpose", + "alt-q": "editor::Rewrap", "cmd-backspace": 
"editor::DeleteToBeginningOfLine", "cmd-delete": "editor::DeleteToEndOfLine", "alt-backspace": "editor::DeleteToPreviousWordStart", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 54905b2267..18b38384ef 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -124,6 +124,7 @@ "g i": "vim::InsertAtPrevious", "g ,": "vim::ChangeListNewer", "g ;": "vim::ChangeListOlder", + "g q": "editor::Rewrap", "shift-h": "vim::WindowTop", "shift-m": "vim::WindowMiddle", "shift-l": "vim::WindowBottom", From ccfd4b1887e7dc30e839334d86713b671449c55b Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 19:45:29 +0200 Subject: [PATCH 152/270] rust: Test rust-analyzer binary after finding in PATH (#17951) Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- crates/language/src/language.rs | 1 + crates/languages/src/rust.rs | 23 ++++++++++++++++++++++- crates/project/src/lsp_store.rs | 24 ++++++++++++++++++++++++ 3 files changed, 47 insertions(+), 1 deletion(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 8cd8c8079d..7901a49d00 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -285,6 +285,7 @@ pub trait LspAdapterDelegate: Send + Sync { async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; async fn read_text_file(&self, path: PathBuf) -> Result; + async fn try_exec(&self, binary: LanguageServerBinary) -> Result<()>; } #[async_trait(?Send)] diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 456ea8e449..5055bb69c1 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -61,7 +61,28 @@ impl LspAdapter for RustLspAdapter { }) => { let path = delegate.which(Self::SERVER_NAME.as_ref()).await; let env = delegate.shell_env().await; - (path, Some(env), None) + + if let Some(path) = path { + // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to + // 
/usr/bin/rust-analyzer that fails when you run it; so we need to test it. + log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`"); + match delegate + .try_exec(LanguageServerBinary { + path: path.clone(), + arguments: vec!["--help".into()], + env: Some(env.clone()), + }) + .await + { + Ok(()) => (Some(path), Some(env), None), + Err(err) => { + log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {:?}", path, err); + (None, None, None) + } + } + } else { + (None, None, None) + } } // Otherwise, we use the configured binary. Some(BinarySettings { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index e1e6001d24..2c718a42ab 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7133,6 +7133,30 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { which::which(command).ok() } + async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { + if self.fs.is_none() { + return Ok(()); + } + + let working_dir = self.worktree_root_path(); + let output = smol::process::Command::new(&command.path) + .args(command.arguments) + .envs(command.env.clone().unwrap_or_default()) + .current_dir(working_dir) + .output() + .await?; + + if output.status.success() { + return Ok(()); + } + Err(anyhow!( + "{}, stdout: {:?}, stderr: {:?}", + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + )) + } + fn update_status( &self, server_name: LanguageServerName, From 5cdca6d8dd8c2179a85293376e8db324c34b6945 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 13:49:52 -0400 Subject: [PATCH 153/270] multi_buffer: Fix a panic when expanding an excerpt with the cursor at the end (#17955) This PR fixes a panic when expanding an excerpt within a multibuffer that could occur when the cursor was at the end of the buffer. 
You can reproduce this by opening a multibuffer, putting your cursor at the very end of that buffer, and then expanding the excerpt (Shift + Enter). Release Notes: - Fixed a panic that could occur when expanding an excerpt within a multibuffer when the cursor was at the end of the excerpt. Co-authored-by: Antonio --- crates/multi_buffer/src/multi_buffer.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index f7172ccf08..5b6eddd5b1 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -3790,7 +3790,7 @@ impl MultiBufferSnapshot { } } - // Returns the locators referenced by the given excerpt ids, sorted by locator. + /// Returns the locators referenced by the given excerpt IDs, sorted by locator. fn excerpt_locators_for_ids( &self, ids: impl IntoIterator, @@ -3801,13 +3801,17 @@ impl MultiBufferSnapshot { while sorted_ids.last() == Some(&ExcerptId::max()) { sorted_ids.pop(); - locators.push(Locator::max()); + if let Some(mapping) = self.excerpt_ids.last() { + locators.push(mapping.locator.clone()); + } } let mut sorted_ids = sorted_ids.into_iter().dedup().peekable(); if sorted_ids.peek() == Some(&ExcerptId::min()) { sorted_ids.next(); - locators.push(Locator::min()); + if let Some(mapping) = self.excerpt_ids.first() { + locators.push(mapping.locator.clone()); + } } let mut cursor = self.excerpt_ids.cursor::(); From 345efa4e36dd40a3eaf50ab5d024a06ed4b1f7b9 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Wed, 18 Sep 2024 01:50:36 +0800 Subject: [PATCH 154/270] gpui: Fix img element to render correct SVG color (#15488) Release Notes: - N/A It should convert RGBA to BGRA. > I added an example color svg, that was I make based on [Lucide grip icon](https://lucide.dev/icons/grip). 
## Before image ## After image Co-authored-by: Marshall Bowers --- crates/gpui/examples/image/color.svg | 13 +++++++++++++ crates/gpui/examples/image/image.rs | 2 +- crates/gpui/src/elements/img.rs | 7 ++++++- 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 crates/gpui/examples/image/color.svg diff --git a/crates/gpui/examples/image/color.svg b/crates/gpui/examples/image/color.svg new file mode 100644 index 0000000000..84e9809d09 --- /dev/null +++ b/crates/gpui/examples/image/color.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index cc8e1a686c..ac7af186d3 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -131,7 +131,7 @@ fn main() { PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(), ), remote_resource: "https://picsum.photos/512/512".into(), - asset_resource: "image/app-icon.png".into(), + asset_resource: "image/color.svg".into(), }) }) .unwrap(); diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 07f5acc95b..f1e8bb68e3 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -408,9 +408,14 @@ impl Asset for ImageAsset { // TODO: Can we make svgs always rescale? svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?; - let buffer = + let mut buffer = ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap(); + // Convert from RGBA to BGRA. 
+ for pixel in buffer.chunks_exact_mut(4) { + pixel.swap(0, 2); + } + RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1)) }; From 7246a0f39ce1f529d4e041c3316491e96152f1fe Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 13:51:11 -0400 Subject: [PATCH 155/270] macos: Use ~/Library/Caches/Zed instead of ~/.cache/zed (#17949) --- crates/paths/src/paths.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index b80bef5f2d..7f662d0325 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -59,6 +59,12 @@ pub fn support_dir() -> &'static PathBuf { pub fn temp_dir() -> &'static PathBuf { static TEMP_DIR: OnceLock = OnceLock::new(); TEMP_DIR.get_or_init(|| { + if cfg!(target_os = "macos") { + return dirs::cache_dir() + .expect("failed to determine cachesDirectory directory") + .join("Zed"); + } + if cfg!(target_os = "windows") { return dirs::cache_dir() .expect("failed to determine LocalAppData directory") From e6c4076ef0e59d4cbbdbbecb22ac57c23e6e69c2 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 17 Sep 2024 14:07:50 -0400 Subject: [PATCH 156/270] Add cmake to dev build instructions (#17943) Release Notes: - N/A --- docs/src/development/macos.md | 6 ++++++ docs/src/development/windows.md | 1 + script/linux | 6 ++++++ 3 files changed, 13 insertions(+) diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 160ae3dd0e..1407b0f610 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -29,6 +29,12 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). 
rustup target add wasm32-wasip1 ``` +- Install `cmake` (required by [a dependency](https://docs.rs/wasmtime-c-api-impl/latest/wasmtime_c_api/)) + + ```sh + brew install cmake + ``` + ## Backend Dependencies If you are developing collaborative features of Zed, you'll need to install the dependencies of zed's `collab` server: diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 600c030931..86eb21965e 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -22,6 +22,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). - Install [Visual Studio](https://visualstudio.microsoft.com/downloads/) with the optional component `MSVC v*** - VS YYYY C++ x64/x86 build tools` (`v***` is your VS version and `YYYY` is year when your VS was released) - Install Windows 11 or 10 SDK depending on your system, but ensure that at least `Windows 10 SDK version 2104 (10.0.20348.0)` is installed on your machine. You can download it from the [Windows SDK Archive](https://developer.microsoft.com/windows/downloads/windows-sdk/) +- Install [CMake](https://cmake.org/download) ## Backend dependencies diff --git a/script/linux b/script/linux index 051748ad45..d894d33ea8 100755 --- a/script/linux +++ b/script/linux @@ -25,6 +25,7 @@ if [[ -n $apt ]]; then libvulkan1 libgit2-dev make + cmake clang mold jq @@ -44,6 +45,7 @@ if [[ -n $dnf ]]; then gcc g++ clang + cmake mold alsa-lib-devel fontconfig-devel @@ -78,6 +80,7 @@ if [[ -n $zyp ]]; then gcc-c++ clang make + cmake alsa-devel fontconfig-devel wayland-devel @@ -99,6 +102,7 @@ if [[ -n $pacman ]]; then deps=( gcc clang + cmake alsa-lib fontconfig wayland @@ -122,6 +126,7 @@ if [[ -n $xbps ]]; then deps=( gettext-devel clang + cmake jq elfutils-devel gcc @@ -148,6 +153,7 @@ if [[ -n $emerge ]]; then app-arch/zstd dev-libs/openssl dev-libs/wayland + dev-util/cmake media-libs/alsa-lib media-libs/fontconfig media-libs/vulkan-loader From 
fc43b21e787adb365c09abc68bc3c602b842e74f Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Wed, 18 Sep 2024 02:10:37 +0800 Subject: [PATCH 157/270] ruff: Fix wrong Ruff path on Windows (#17883) Log: 2024-09-16T22:32:04.7715712+08:00 [ERROR] failed to start language server "ruff": failed to spawn command. path: "...\\AppData\\Local\\Zed\\extensions\\work\\ruff\\ruff-0.6.5\\ruff-x86_64-pc-windows-msvc\\ruff" The right path: `...\\AppData\\Local\\Zed\\extensions\\work\\ruff\\ruff-0.6.5\\ruff.exe` Release Notes: - N/A --- extensions/ruff/src/ruff.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/extensions/ruff/src/ruff.rs b/extensions/ruff/src/ruff.rs index c7c51ec7b9..048c5893ca 100644 --- a/extensions/ruff/src/ruff.rs +++ b/extensions/ruff/src/ruff.rs @@ -89,7 +89,10 @@ impl RuffExtension { .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?; let version_dir = format!("ruff-{}", release.version); - let binary_path = format!("{version_dir}/{asset_stem}/ruff"); + let binary_path = match platform { + zed::Os::Windows => format!("{version_dir}/ruff.exe"), + _ => format!("{version_dir}/{asset_stem}/ruff"), + }; if !fs::metadata(&binary_path).map_or(false, |stat| stat.is_file()) { zed::set_language_server_installation_status( From ab7a7d3480c9bc553c320911867236a8554ae0ef Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 14:12:11 -0400 Subject: [PATCH 158/270] docs: Mention how to open the Prompt Library (#17957) --- docs/src/assistant/prompting.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/assistant/prompting.md b/docs/src/assistant/prompting.md index f6f68880ba..0dca671b47 100644 --- a/docs/src/assistant/prompting.md +++ b/docs/src/assistant/prompting.md @@ -52,6 +52,8 @@ More on prompt engineering: Zed allows you to customize the default prompt used in new context editors. Or to be more precise, it uses a series of prompts that are combined to form the default prompt. 
+To edit prompts, select "Prompt Library" from the menu icon (three horizontal lines) in the upper right hand corner or using the `cmd-k l` keyboard shortcut. + A default prompt might look something like: ```plaintext From 869a72bb3f8b3786c92ec89fecc2c0f7cf96baf0 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 14:21:06 -0400 Subject: [PATCH 159/270] ruff: Bump to v0.1.0 (#17960) This PR bumps the Ruff extension to v0.1.0. Changes: - https://github.com/zed-industries/zed/pull/15852 - https://github.com/zed-industries/zed/pull/16955 - https://github.com/zed-industries/zed/pull/17883 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/ruff/Cargo.toml | 2 +- extensions/ruff/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d6d8e85e6..6eeec558ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14516,7 +14516,7 @@ dependencies = [ [[package]] name = "zed_ruff" -version = "0.0.2" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/ruff/Cargo.toml b/extensions/ruff/Cargo.toml index 50e0ae3908..b6c31ebbc8 100644 --- a/extensions/ruff/Cargo.toml +++ b/extensions/ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_ruff" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/ruff/extension.toml b/extensions/ruff/extension.toml index d622b37c68..63929fc191 100644 --- a/extensions/ruff/extension.toml +++ b/extensions/ruff/extension.toml @@ -1,7 +1,7 @@ id = "ruff" name = "Ruff" description = "Support for Ruff, the Python linter and formatter" -version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = [] repository = "https://github.com/zed-industries/zed" From 447a5d6e6e95c0bbc9fb008cb27de925a1fcf778 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 17 Sep 2024 11:35:45 -0700 Subject: [PATCH 160/270] Fix the rendering of warning text in our docs (#17958) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit cc: @danilo-leal Before: Screenshot 2024-09-17 at 10 53 13 AM After: Screenshot 2024-09-17 at 10 53 35 AM Light mode: Screenshot 2024-09-17 at 10 54 17 AM Release Notes: - N/A --- docs/theme/css/general.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index e6905b47bf..6f086a1052 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -268,7 +268,7 @@ blockquote .warning:before { .warning { margin: auto; padding: 1rem 1.25rem; - color: #000; + color: var(--fg); background-color: var(--warning-bg); border: 1px solid var(--warning-border); } From c28b22d1cfa3326a0336c69236baf63670473398 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 15:08:14 -0400 Subject: [PATCH 161/270] Update typos-cli to v1.24.6. Add scripts/check-spelling. Fix typos (#17961) --- .github/workflows/ci.yml | 11 +---------- crates/collab/src/db/ids.rs | 2 +- crates/editor/src/inlay_hint_cache.rs | 2 +- crates/language/src/buffer.rs | 2 +- crates/language/src/buffer_tests.rs | 2 +- crates/picker/src/picker.rs | 2 +- crates/terminal/src/terminal.rs | 2 +- crates/ui/src/components/icon.rs | 2 +- crates/zed/src/reliability.rs | 2 +- script/check-spelling | 11 +++++++++++ 10 files changed, 20 insertions(+), 18 deletions(-) create mode 100755 script/check-spelling diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 12b7dbd806..3ee4c04580 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,16 +39,7 @@ jobs: run: git clean -df - name: Check spelling - run: | - if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then - echo "Installing typos-cli@$TYPOS_CLI_VERSION..." - cargo install "typos-cli@$TYPOS_CLI_VERSION" - else - echo "typos-cli@$TYPOS_CLI_VERSION is already installed." 
- fi - typos - env: - TYPOS_CLI_VERSION: "1.23.3" + run: script/check-spelling - name: Run style checks uses: ./.github/actions/check_style diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 82ff8a56e5..1434bc07cf 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -104,7 +104,7 @@ pub enum ChannelRole { /// Admin can read/write and change permissions. #[sea_orm(string_value = "admin")] Admin, - /// Member can read/write, but not change pemissions. + /// Member can read/write, but not change permissions. #[sea_orm(string_value = "member")] #[default] Member, diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index ce7ec5a4b0..62c5cde9d8 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -337,7 +337,7 @@ impl InlayHintCache { /// If needed, queries LSP for new inlay hints, using the invalidation strategy given. /// To reduce inlay hint jumping, attempts to query a visible range of the editor(s) first, /// followed by the delayed queries of the same range above and below the visible one. - /// This way, concequent refresh invocations are less likely to trigger LSP queries for the invisible ranges. + /// This way, subsequent refresh invocations are less likely to trigger LSP queries for the invisible ranges. pub(super) fn spawn_hint_refresh( &mut self, reason_description: &'static str, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 59b2670daf..76058ffd9b 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -3025,7 +3025,7 @@ impl BufferSnapshot { let mut start = text.len(); let end = start + buffer_range.len(); - // When multiple names are captured, then the matcheable text + // When multiple names are captured, then the matchable text // includes the whitespace in between the names. 
if !name_ranges.is_empty() { start -= 1; diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 5162269f4f..50dea8d256 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -382,7 +382,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(text, cx)); // Spawn a task to format the buffer's whitespace. - // Pause so that the foratting task starts running. + // Pause so that the formatting task starts running. let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx)); smol::future::yield_now().await; diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index dc661d91fb..8350be2b20 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -509,7 +509,7 @@ impl Picker { .on_mouse_up( MouseButton::Right, cx.listener(move |this, event: &MouseUpEvent, cx| { - // We specficially want to use the platform key here, as + // We specifically want to use the platform key here, as // ctrl will already be held down for the tab switcher. this.handle_click(ix, event.modifiers.platform, cx) }), diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 12a7349af0..8f8982f02c 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -1619,7 +1619,7 @@ fn task_summary(task: &TaskState, error_code: Option) -> (bool, String, Str /// the cursor's `point` is not updated to the new line and column values /// /// * ??? there could be more consequences, and any further "proper" streaming from the PTY might bug and/or panic. -/// Still, concequent `append_text_to_term` invocations are possible and display the contents correctly. +/// Still, subsequent `append_text_to_term` invocations are possible and display the contents correctly. 
/// /// Despite the quirks, this is the simplest approach to appending text to the terminal: its alternative, `grid_mut` manipulations, /// do not properly set the scrolling state and display odd text after appending; also those manipulations are more tedious and error-prone. diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index a71c3e9872..323181e841 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -46,7 +46,7 @@ impl RenderOnce for AnyIcon { /// The decoration for an icon. /// /// For example, this can show an indicator, an "x", -/// or a diagonal strkethrough to indicate something is disabled. +/// or a diagonal strikethrough to indicate something is disabled. #[derive(Debug, PartialEq, Copy, Clone, EnumIter)] pub enum IconDecoration { Strikethrough, diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 9731401b30..188cf417f7 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -176,7 +176,7 @@ pub fn monitor_main_thread_hangs( let background_executor = cx.background_executor(); let telemetry_settings = *client::TelemetrySettings::get_global(cx); - // Initialize SIGUSR2 handler to send a backrace to a channel. + // Initialize SIGUSR2 handler to send a backtrace to a channel. let (backtrace_tx, backtrace_rx) = mpsc::channel(); static BACKTRACE: Mutex> = Mutex::new(Vec::new()); static BACKTRACE_SENDER: OnceLock> = OnceLock::new(); diff --git a/script/check-spelling b/script/check-spelling new file mode 100755 index 0000000000..65c674057f --- /dev/null +++ b/script/check-spelling @@ -0,0 +1,11 @@ +#!/bin/sh + +TYPOS_CLI_VERSION=1.24.6 + +if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then + echo "Installing typos-cli@$TYPOS_CLI_VERSION..." + cargo install "typos-cli@$TYPOS_CLI_VERSION" +else + echo "typos-cli@$TYPOS_CLI_VERSION is already installed." 
+fi +typos From 06a13c298395f543e6e2e1dfd33c7a7ebe37f59e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 15:33:28 -0400 Subject: [PATCH 162/270] svelte: Bump to v0.2.0 (#17962) This PR bumps the Svelte extension to v0.2.0. Changes: - https://github.com/zed-industries/zed/pull/17529 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/svelte/Cargo.toml | 2 +- extensions/svelte/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6eeec558ec..3b3a370c36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14531,7 +14531,7 @@ dependencies = [ [[package]] name = "zed_svelte" -version = "0.1.1" +version = "0.2.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/svelte/Cargo.toml b/extensions/svelte/Cargo.toml index d07d517af6..09ad78aec2 100644 --- a/extensions/svelte/Cargo.toml +++ b/extensions/svelte/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_svelte" -version = "0.1.1" +version = "0.2.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/svelte/extension.toml b/extensions/svelte/extension.toml index 694fdec2a6..a4ecf460a8 100644 --- a/extensions/svelte/extension.toml +++ b/extensions/svelte/extension.toml @@ -1,7 +1,7 @@ id = "svelte" name = "Svelte" description = "Svelte support" -version = "0.1.1" +version = "0.2.0" schema_version = 1 authors = [] repository = "https://github.com/zed-extensions/svelte" From d3d3a093b4a757f1e82cd870a7f9e3cbee9ae933 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 17 Sep 2024 12:44:33 -0700 Subject: [PATCH 163/270] Add an `eval` binary that evaluates our semantic index against CodeSearchNet (#17375) This PR is the beginning of an evaluation framework for our AI features. Right now, we're evaluating our semantic search feature against the [CodeSearchNet](https://github.com/github/CodeSearchNet) code search dataset. 
This dataset is very limited (for the most part, only 1 known good search result per repo) but it has surfaced some problems with our search already. Release Notes: - N/A --------- Co-authored-by: Jason Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Nathan Co-authored-by: Richard --- .github/workflows/ci.yml | 17 +- Cargo.lock | 27 + Cargo.toml | 1 + crates/assistant/src/assistant_panel.rs | 2 +- .../src/slash_command/file_command.rs | 9 +- .../src/slash_command/search_command.rs | 54 +- crates/evals/Cargo.toml | 37 + crates/evals/LICENSE-GPL | 1 + crates/evals/build.rs | 14 + crates/evals/src/eval.rs | 631 ++++++++++++++++++ crates/http_client/src/http_client.rs | 3 +- crates/semantic_index/src/embedding_index.rs | 87 ++- crates/semantic_index/src/project_index.rs | 17 +- crates/semantic_index/src/semantic_index.rs | 125 ++-- 14 files changed, 881 insertions(+), 144 deletions(-) create mode 100644 crates/evals/Cargo.toml create mode 120000 crates/evals/LICENSE-GPL create mode 100644 crates/evals/build.rs create mode 100644 crates/evals/src/eval.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3ee4c04580..c55a3a9907 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -101,7 +101,7 @@ jobs: timeout-minutes: 60 name: (Linux) Run Clippy and tests runs-on: - - hosted-linux-x86-1 + - buildjet-16vcpu-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -111,6 +111,11 @@ jobs: with: clean: false + - name: Cache dependencies + uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: Install Linux dependencies run: ./script/linux @@ -264,7 +269,7 @@ jobs: timeout-minutes: 60 name: Create a Linux bundle runs-on: - - hosted-linux-x86-1 + - buildjet-16vcpu-ubuntu-2204 if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 
'run-bundling') }} needs: [linux_tests] env: @@ -279,9 +284,6 @@ jobs: - name: Install Linux dependencies run: ./script/linux - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 - - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | @@ -335,7 +337,7 @@ jobs: timeout-minutes: 60 name: Create arm64 Linux bundle runs-on: - - hosted-linux-arm-1 + - buildjet-16vcpu-ubuntu-2204-arm if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: @@ -350,9 +352,6 @@ jobs: - name: Install Linux dependencies run: ./script/linux - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 - - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | diff --git a/Cargo.lock b/Cargo.lock index 3b3a370c36..9cff895393 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4000,6 +4000,33 @@ dependencies = [ "num-traits", ] +[[package]] +name = "evals" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "client", + "clock", + "collections", + "env_logger", + "feature_flags", + "fs", + "git", + "gpui", + "http_client", + "language", + "languages", + "node_runtime", + "open_ai", + "project", + "semantic_index", + "serde", + "serde_json", + "settings", + "smol", +] + [[package]] name = "event-listener" version = "2.5.3" diff --git a/Cargo.toml b/Cargo.toml index e1af231c7e..eea510edf2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ members = [ "crates/diagnostics", "crates/docs_preprocessor", "crates/editor", + "crates/evals", "crates/extension", "crates/extension_api", "crates/extension_cli", diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 52838b5c77..6eaa86f4a7 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3282,7 +3282,7 @@ impl 
ContextEditor { let fence = codeblock_fence_for_path( filename.as_deref(), - Some(selection.start.row..selection.end.row), + Some(selection.start.row..=selection.end.row), ); if let Some((line_comment_prefix, outline_text)) = diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 0df8b5d4e0..260c6b0e2a 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -8,7 +8,7 @@ use project::{PathMatchCandidateSet, Project}; use serde::{Deserialize, Serialize}; use std::{ fmt::Write, - ops::Range, + ops::{Range, RangeInclusive}, path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, }; @@ -342,7 +342,10 @@ fn collect_files( }) } -pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option>) -> String { +pub fn codeblock_fence_for_path( + path: Option<&Path>, + row_range: Option>, +) -> String { let mut text = String::new(); write!(text, "```").unwrap(); @@ -357,7 +360,7 @@ pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option, + }, +} + +#[derive(Clone, Deserialize, Serialize)] +struct EvaluationProject { + repo: String, + sha: String, + queries: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +struct EvaluationQuery { + query: String, + expected_results: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] +struct EvaluationSearchResult { + file: String, + lines: RangeInclusive, +} + +#[derive(Clone, Deserialize, Serialize)] +struct EvaluationProjectOutcome { + repo: String, + sha: String, + queries: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +struct EvaluationQueryOutcome { + repo: String, + query: String, + expected_results: Vec, + actual_results: Vec, + covered_file_count: usize, + overlapped_result_count: usize, + covered_result_count: usize, + total_result_count: usize, + covered_result_indices: Vec, +} + +fn main() -> Result<()> { + let cli = Cli::parse(); + 
env_logger::init(); + + gpui::App::headless().run(move |cx| { + let executor = cx.background_executor().clone(); + + match cli.command { + Commands::Fetch {} => { + executor + .clone() + .spawn(async move { + if let Err(err) = fetch_evaluation_resources(&executor).await { + eprintln!("Error: {}", err); + exit(1); + } + exit(0); + }) + .detach(); + } + Commands::Run { repo } => { + cx.spawn(|mut cx| async move { + if let Err(err) = run_evaluation(repo, &executor, &mut cx).await { + eprintln!("Error: {}", err); + exit(1); + } + exit(0); + }) + .detach(); + } + } + }); + + Ok(()) +} + +async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> { + let http_client = http_client::HttpClientWithProxy::new(None, None); + fetch_code_search_net_resources(&http_client).await?; + fetch_eval_repos(executor, &http_client).await?; + Ok(()) +} + +async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result<()> { + eprintln!("Fetching CodeSearchNet evaluations..."); + + let annotations_url = "https://raw.githubusercontent.com/github/CodeSearchNet/master/resources/annotationStore.csv"; + + let dataset_dir = Path::new(CODESEARCH_NET_DIR); + fs::create_dir_all(&dataset_dir).expect("failed to create CodeSearchNet directory"); + + // Fetch the annotations CSV, which contains the human-annotated search relevances + let annotations_path = dataset_dir.join("annotations.csv"); + let annotations_csv_content = if annotations_path.exists() { + fs::read_to_string(&annotations_path).expect("failed to read annotations") + } else { + let response = http_client + .get(annotations_url, Default::default(), true) + .await + .expect("failed to fetch annotations csv"); + let mut body = String::new(); + response + .into_body() + .read_to_string(&mut body) + .await + .expect("failed to read annotations.csv response"); + fs::write(annotations_path, &body).expect("failed to write annotations.csv"); + body + }; + + // Parse the annotations CSV. 
Skip over queries with zero relevance. + let rows = annotations_csv_content.lines().filter_map(|line| { + let mut values = line.split(','); + let _language = values.next()?; + let query = values.next()?; + let github_url = values.next()?; + let score = values.next()?; + + if score == "0" { + return None; + } + + let url_path = github_url.strip_prefix("https://github.com/")?; + let (url_path, hash) = url_path.split_once('#')?; + let (repo_name, url_path) = url_path.split_once("/blob/")?; + let (sha, file_path) = url_path.split_once('/')?; + let line_range = if let Some((start, end)) = hash.split_once('-') { + start.strip_prefix("L")?.parse::().ok()?..=end.strip_prefix("L")?.parse().ok()? + } else { + let row = hash.strip_prefix("L")?.parse().ok()?; + row..=row + }; + Some((repo_name, sha, query, file_path, line_range)) + }); + + // Group the annotations by repo and sha. + let mut evaluations_by_repo = BTreeMap::new(); + for (repo_name, sha, query, file_path, lines) in rows { + let evaluation_project = evaluations_by_repo + .entry((repo_name, sha)) + .or_insert_with(|| EvaluationProject { + repo: repo_name.to_string(), + sha: sha.to_string(), + queries: Vec::new(), + }); + + let ix = evaluation_project + .queries + .iter() + .position(|entry| entry.query == query) + .unwrap_or_else(|| { + evaluation_project.queries.push(EvaluationQuery { + query: query.to_string(), + expected_results: Vec::new(), + }); + evaluation_project.queries.len() - 1 + }); + let results = &mut evaluation_project.queries[ix].expected_results; + let result = EvaluationSearchResult { + file: file_path.to_string(), + lines, + }; + if !results.contains(&result) { + results.push(result); + } + } + + let evaluations = evaluations_by_repo.into_values().collect::>(); + let evaluations_path = dataset_dir.join("evaluations.json"); + fs::write( + &evaluations_path, + serde_json::to_vec_pretty(&evaluations).unwrap(), + ) + .unwrap(); + + eprintln!( + "Fetched CodeSearchNet evaluations into {}", + 
evaluations_path.display() + ); + + Ok(()) +} + +async fn run_evaluation( + only_repo: Option, + executor: &BackgroundExecutor, + cx: &mut AsyncAppContext, +) -> Result<()> { + cx.update(|cx| { + let mut store = SettingsStore::new(cx); + store + .set_default_settings(settings::default_settings().as_ref(), cx) + .unwrap(); + cx.set_global(store); + client::init_settings(cx); + language::init(cx); + Project::init_settings(cx); + cx.update_flags(false, vec![]); + }) + .unwrap(); + + let dataset_dir = Path::new(CODESEARCH_NET_DIR); + let evaluations_path = dataset_dir.join("evaluations.json"); + let repos_dir = Path::new(EVAL_REPOS_DIR); + let db_path = Path::new(EVAL_DB_PATH); + let http_client = http_client::HttpClientWithProxy::new(None, None); + let api_key = std::env::var("OPENAI_API_KEY").unwrap(); + let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); + let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc; + let clock = Arc::new(RealSystemClock); + let client = cx + .update(|cx| { + Client::new( + clock, + Arc::new(http_client::HttpClientWithUrl::new( + "https://zed.dev", + None, + None, + )), + cx, + ) + }) + .unwrap(); + let user_store = cx + .new_model(|cx| UserStore::new(client.clone(), cx)) + .unwrap(); + let node_runtime = Arc::new(FakeNodeRuntime {}); + + let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json"); + let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); + + let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new( + http_client.clone(), + OpenAiEmbeddingModel::TextEmbedding3Small, + open_ai::OPEN_AI_API_URL.to_string(), + api_key, + )); + + let language_registry = Arc::new(LanguageRegistry::new(executor.clone())); + cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx)) + .unwrap(); + + let mut covered_result_count = 0; + let mut overlapped_result_count = 0; + let mut covered_file_count = 0; + let mut 
total_result_count = 0; + eprint!("Running evals."); + + for evaluation_project in evaluations { + if only_repo + .as_ref() + .map_or(false, |only_repo| only_repo != &evaluation_project.repo) + { + continue; + } + + eprint!("\r\x1B[2K"); + eprint!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. Project: {}...", + covered_result_count, + total_result_count, + overlapped_result_count, + total_result_count, + covered_file_count, + total_result_count, + evaluation_project.repo + ); + + let repo_db_path = + db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); + let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx) + .await + .unwrap(); + + let repo_dir = repos_dir.join(&evaluation_project.repo); + if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() { + eprintln!("Skipping {}: directory not found", evaluation_project.repo); + continue; + } + + let project = cx + .update(|cx| { + Project::local( + client.clone(), + node_runtime.clone(), + user_store.clone(), + language_registry.clone(), + fs.clone(), + None, + cx, + ) + }) + .unwrap(); + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(repo_dir, true, cx) + })? 
+ .await?; + + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + }) + .unwrap() + .await; + + let project_index = cx + .update(|cx| semantic_index.create_project_index(project.clone(), cx)) + .unwrap(); + wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + + for query in evaluation_project.queries { + let results = cx + .update(|cx| { + let project_index = project_index.read(cx); + project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) + }) + .unwrap() + .await + .unwrap(); + + let results = SemanticDb::load_results(results, &fs.clone(), &cx) + .await + .unwrap(); + + let mut project_covered_result_count = 0; + let mut project_overlapped_result_count = 0; + let mut project_covered_file_count = 0; + let mut covered_result_indices = Vec::new(); + for expected_result in &query.expected_results { + let mut file_matched = false; + let mut range_overlapped = false; + let mut range_covered = false; + + for (ix, result) in results.iter().enumerate() { + if result.path.as_ref() == Path::new(&expected_result.file) { + file_matched = true; + let start_matched = + result.row_range.contains(&expected_result.lines.start()); + let end_matched = result.row_range.contains(&expected_result.lines.end()); + + if start_matched || end_matched { + range_overlapped = true; + } + + if start_matched && end_matched { + range_covered = true; + covered_result_indices.push(ix); + break; + } + } + } + + if range_covered { + project_covered_result_count += 1 + }; + if range_overlapped { + project_overlapped_result_count += 1 + }; + if file_matched { + project_covered_file_count += 1 + }; + } + let outcome_repo = evaluation_project.repo.clone(); + + let query_results = EvaluationQueryOutcome { + repo: outcome_repo, + query: query.query, + total_result_count: query.expected_results.len(), + covered_result_count: project_covered_result_count, + overlapped_result_count: project_overlapped_result_count, + 
covered_file_count: project_covered_file_count, + expected_results: query.expected_results, + actual_results: results + .iter() + .map(|result| EvaluationSearchResult { + file: result.path.to_string_lossy().to_string(), + lines: result.row_range.clone(), + }) + .collect(), + covered_result_indices, + }; + + overlapped_result_count += query_results.overlapped_result_count; + covered_result_count += query_results.covered_result_count; + covered_file_count += query_results.covered_file_count; + total_result_count += query_results.total_result_count; + + println!("{}", serde_json::to_string(&query_results).unwrap()); + } + } + + eprint!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.", + covered_result_count, + total_result_count, + overlapped_result_count, + total_result_count, + covered_file_count, + total_result_count, + ); + + Ok(()) +} + +async fn wait_for_indexing_complete( + project_index: &Model, + cx: &mut AsyncAppContext, + timeout: Option, +) { + let (tx, rx) = bounded(1); + let subscription = cx.update(|cx| { + cx.subscribe(project_index, move |_, event, _| { + if let Status::Idle = event { + let _ = tx.try_send(*event); + } + }) + }); + + let result = match timeout { + Some(timeout_duration) => { + smol::future::or( + async { + rx.recv().await.map_err(|_| ())?; + Ok(()) + }, + async { + Timer::after(timeout_duration).await; + Err(()) + }, + ) + .await + } + None => rx.recv().await.map(|_| ()).map_err(|_| ()), + }; + + match result { + Ok(_) => (), + Err(_) => { + if let Some(timeout) = timeout { + eprintln!("Timeout: Indexing did not complete within {:?}", timeout); + } + } + } + + drop(subscription); +} + +async fn fetch_eval_repos( + executor: &BackgroundExecutor, + http_client: &dyn HttpClient, +) -> Result<()> { + let dataset_dir = Path::new(CODESEARCH_NET_DIR); + let evaluations_path = dataset_dir.join("evaluations.json"); + let repos_dir = Path::new(EVAL_REPOS_DIR); + + let evaluations = 
fs::read(&evaluations_path).expect("failed to read evaluations.json"); + let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); + + eprint!("Fetching evaluation repositories..."); + + executor + .scoped(move |scope| { + let done_count = Arc::new(AtomicUsize::new(0)); + let len = evaluations.len(); + for chunk in evaluations.chunks(evaluations.len() / 8) { + let chunk = chunk.to_vec(); + let done_count = done_count.clone(); + scope.spawn(async move { + for EvaluationProject { repo, sha, .. } in chunk { + eprint!( + "\rFetching evaluation repositories ({}/{})...", + done_count.load(SeqCst), + len, + ); + + fetch_eval_repo(repo, sha, repos_dir, http_client).await; + done_count.fetch_add(1, SeqCst); + } + }); + } + }) + .await; + + Ok(()) +} + +async fn fetch_eval_repo( + repo: String, + sha: String, + repos_dir: &Path, + http_client: &dyn HttpClient, +) { + let Some((owner, repo_name)) = repo.split_once('/') else { + return; + }; + let repo_dir = repos_dir.join(owner).join(repo_name); + fs::create_dir_all(&repo_dir).unwrap(); + let skip_eval_path = repo_dir.join(SKIP_EVAL_PATH); + if skip_eval_path.exists() { + return; + } + if let Ok(head_content) = fs::read_to_string(&repo_dir.join(".git").join("HEAD")) { + if head_content.trim() == sha { + return; + } + } + let repo_response = http_client + .send( + http_client::Request::builder() + .method(Method::HEAD) + .uri(format!("https://github.com/{}", repo)) + .body(Default::default()) + .expect(""), + ) + .await + .expect("failed to check github repo"); + if !repo_response.status().is_success() && !repo_response.status().is_redirection() { + fs::write(&skip_eval_path, "").unwrap(); + eprintln!( + "Repo {repo} is no longer public ({:?}). 
Skipping", + repo_response.status() + ); + return; + } + if !repo_dir.join(".git").exists() { + let init_output = Command::new("git") + .current_dir(&repo_dir) + .args(&["init"]) + .output() + .unwrap(); + if !init_output.status.success() { + eprintln!( + "Failed to initialize git repository for {}: {}", + repo, + String::from_utf8_lossy(&init_output.stderr) + ); + return; + } + } + let url = format!("https://github.com/{}.git", repo); + Command::new("git") + .current_dir(&repo_dir) + .args(&["remote", "add", "-f", "origin", &url]) + .stdin(Stdio::null()) + .output() + .unwrap(); + let fetch_output = Command::new("git") + .current_dir(&repo_dir) + .args(&["fetch", "--depth", "1", "origin", &sha]) + .stdin(Stdio::null()) + .output() + .unwrap(); + if !fetch_output.status.success() { + eprintln!( + "Failed to fetch {} for {}: {}", + sha, + repo, + String::from_utf8_lossy(&fetch_output.stderr) + ); + return; + } + let checkout_output = Command::new("git") + .current_dir(&repo_dir) + .args(&["checkout", &sha]) + .output() + .unwrap(); + + if !checkout_output.status.success() { + eprintln!( + "Failed to checkout {} for {}: {}", + sha, + repo, + String::from_utf8_lossy(&checkout_output.stderr) + ); + } +} diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 1841a1f394..7ea0029d79 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -5,6 +5,7 @@ use derive_more::Deref; use futures::future::BoxFuture; use futures_lite::FutureExt; use isahc::config::{Configurable, RedirectPolicy}; +pub use isahc::http; pub use isahc::{ http::{Method, StatusCode, Uri}, AsyncBody, Error, HttpClient as IsahcHttpClient, Request, Response, @@ -226,7 +227,7 @@ pub fn client(user_agent: Option, proxy: Option) -> Arc { - let language = language_registry - .language_for_file_path(&entry.path) - .await - .ok(); - let chunked_file = ChunkedFile { - chunks: chunking::chunk_text( - &text, - language.as_ref(), - 
&entry.path, - ), - handle, - path: entry.path, - mtime: entry.mtime, - text, - }; + if let Some(text) = fs.load(&entry_abs_path).await.ok() { + let language = language_registry + .language_for_file_path(&entry.path) + .await + .ok(); + let chunked_file = ChunkedFile { + chunks: chunking::chunk_text( + &text, + language.as_ref(), + &entry.path, + ), + handle, + path: entry.path, + mtime: entry.mtime, + text, + }; - if chunked_files_tx.send(chunked_file).await.is_err() { - return; - } - } - Err(_)=> { - log::error!("Failed to read contents into a UTF-8 string: {entry_abs_path:?}"); + if chunked_files_tx.send(chunked_file).await.is_err() { + return; } } } @@ -358,33 +353,37 @@ impl EmbeddingIndex { fn persist_embeddings( &self, mut deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, - embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, + mut embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, cx: &AppContext, ) -> Task> { let db_connection = self.db_connection.clone(); let db = self.db; + cx.background_executor().spawn(async move { - while let Some(deletion_range) = deleted_entry_ranges.next().await { - let mut txn = db_connection.write_txn()?; - let start = deletion_range.0.as_ref().map(|start| start.as_str()); - let end = deletion_range.1.as_ref().map(|end| end.as_str()); - log::debug!("deleting embeddings in range {:?}", &(start, end)); - db.delete_range(&mut txn, &(start, end))?; - txn.commit()?; - } - - let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2)); - while let Some(embedded_files) = embedded_files.next().await { - let mut txn = db_connection.write_txn()?; - for (file, _) in &embedded_files { - log::debug!("saving embedding for file {:?}", file.path); - let key = db_key_for_path(&file.path); - db.put(&mut txn, &key, file)?; + loop { + // Interleave deletions and persists of embedded files + futures::select_biased! 
{ + deletion_range = deleted_entry_ranges.next() => { + if let Some(deletion_range) = deletion_range { + let mut txn = db_connection.write_txn()?; + let start = deletion_range.0.as_ref().map(|start| start.as_str()); + let end = deletion_range.1.as_ref().map(|end| end.as_str()); + log::debug!("deleting embeddings in range {:?}", &(start, end)); + db.delete_range(&mut txn, &(start, end))?; + txn.commit()?; + } + }, + file = embedded_files.next() => { + if let Some((file, _)) = file { + let mut txn = db_connection.write_txn()?; + log::debug!("saving embedding for file {:?}", file.path); + let key = db_key_for_path(&file.path); + db.put(&mut txn, &key, &file)?; + txn.commit()?; + } + }, + complete => break, } - txn.commit()?; - - drop(embedded_files); - log::debug!("committed"); } Ok(()) diff --git a/crates/semantic_index/src/project_index.rs b/crates/semantic_index/src/project_index.rs index 84a72c1a3d..5c35c93fa9 100644 --- a/crates/semantic_index/src/project_index.rs +++ b/crates/semantic_index/src/project_index.rs @@ -15,7 +15,14 @@ use log; use project::{Project, Worktree, WorktreeId}; use serde::{Deserialize, Serialize}; use smol::channel; -use std::{cmp::Ordering, future::Future, num::NonZeroUsize, ops::Range, path::Path, sync::Arc}; +use std::{ + cmp::Ordering, + future::Future, + num::NonZeroUsize, + ops::{Range, RangeInclusive}, + path::{Path, PathBuf}, + sync::Arc, +}; use util::ResultExt; #[derive(Debug)] @@ -26,6 +33,14 @@ pub struct SearchResult { pub score: f32, } +pub struct LoadedSearchResult { + pub path: Arc, + pub range: Range, + pub full_path: PathBuf, + pub file_content: String, + pub row_range: RangeInclusive, +} + pub struct WorktreeSearchResult { pub worktree_id: WorktreeId, pub path: Arc, diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index f2b325ead6..3435d0a9ca 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -10,14 +10,16 @@ 
mod worktree_index; use anyhow::{Context as _, Result}; use collections::HashMap; +use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; use project::Project; -use project_index::ProjectIndex; use std::{path::PathBuf, sync::Arc}; use ui::ViewContext; +use util::ResultExt as _; use workspace::Workspace; pub use embedding::*; +pub use project_index::{LoadedSearchResult, ProjectIndex, SearchResult, Status}; pub use project_index_debug_view::ProjectIndexDebugView; pub use summary_index::FileSummary; @@ -56,27 +58,7 @@ impl SemanticDb { if cx.has_global::() { cx.update_global::(|this, cx| { - let project_index = cx.new_model(|cx| { - ProjectIndex::new( - project.clone(), - this.db_connection.clone(), - this.embedding_provider.clone(), - cx, - ) - }); - - let project_weak = project.downgrade(); - this.project_indices - .insert(project_weak.clone(), project_index); - - cx.on_release(move |_, _, cx| { - if cx.has_global::() { - cx.update_global::(|this, _| { - this.project_indices.remove(&project_weak); - }) - } - }) - .detach(); + this.create_project_index(project, cx); }) } else { log::info!("No SemanticDb, skipping project index") @@ -94,6 +76,50 @@ impl SemanticDb { }) } + pub async fn load_results( + results: Vec, + fs: &Arc, + cx: &AsyncAppContext, + ) -> Result> { + let mut loaded_results = Vec::new(); + for result in results { + let (full_path, file_content) = result.worktree.read_with(cx, |worktree, _cx| { + let entry_abs_path = worktree.abs_path().join(&result.path); + let mut entry_full_path = PathBuf::from(worktree.root_name()); + entry_full_path.push(&result.path); + let file_content = async { + let entry_abs_path = entry_abs_path; + fs.load(&entry_abs_path).await + }; + (entry_full_path, file_content) + })?; + if let Some(file_content) = file_content.await.log_err() { + let range_start = result.range.start.min(file_content.len()); + let range_end = result.range.end.min(file_content.len()); + + let start_row 
= file_content[0..range_start].matches('\n').count() as u32; + let end_row = file_content[0..range_end].matches('\n').count() as u32; + let start_line_byte_offset = file_content[0..range_start] + .rfind('\n') + .map(|pos| pos + 1) + .unwrap_or_default(); + let end_line_byte_offset = file_content[range_end..] + .find('\n') + .map(|pos| range_end + pos) + .unwrap_or_else(|| file_content.len()); + + loaded_results.push(LoadedSearchResult { + path: result.path, + range: start_line_byte_offset..end_line_byte_offset, + full_path, + file_content, + row_range: start_row..=end_row, + }); + } + } + Ok(loaded_results) + } + pub fn project_index( &mut self, project: Model, @@ -113,6 +139,36 @@ impl SemanticDb { }) }) } + + pub fn create_project_index( + &mut self, + project: Model, + cx: &mut AppContext, + ) -> Model { + let project_index = cx.new_model(|cx| { + ProjectIndex::new( + project.clone(), + self.db_connection.clone(), + self.embedding_provider.clone(), + cx, + ) + }); + + let project_weak = project.downgrade(); + self.project_indices + .insert(project_weak.clone(), project_index.clone()); + + cx.observe_release(&project, move |_, cx| { + if cx.has_global::() { + cx.update_global::(|this, _| { + this.project_indices.remove(&project_weak); + }) + } + }) + .detach(); + + project_index + } } #[cfg(test)] @@ -230,34 +286,13 @@ mod tests { let project = Project::test(fs, [project_path], cx).await; - cx.update(|cx| { + let project_index = cx.update(|cx| { let language_registry = project.read(cx).languages().clone(); let node_runtime = project.read(cx).node_runtime().unwrap().clone(); languages::init(language_registry, node_runtime, cx); - - // Manually create and insert the ProjectIndex - let project_index = cx.new_model(|cx| { - ProjectIndex::new( - project.clone(), - semantic_index.db_connection.clone(), - semantic_index.embedding_provider.clone(), - cx, - ) - }); - semantic_index - .project_indices - .insert(project.downgrade(), project_index); + 
semantic_index.create_project_index(project.clone(), cx) }); - let project_index = cx - .update(|_cx| { - semantic_index - .project_indices - .get(&project.downgrade()) - .cloned() - }) - .unwrap(); - cx.run_until_parked(); while cx .update(|cx| semantic_index.remaining_summaries(&project.downgrade(), cx)) From 8a6c65c63b6b2b8071ad2380ea392e4f51226c8f Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Tue, 17 Sep 2024 22:49:12 +0300 Subject: [PATCH 164/270] Allow task context providers to access project env (#17964) Closes #13106 Release Notes: - Task context providers now have access to the local shell environment, allowing local rust tool installations to work Before: Screenshot 2024-09-17 at 22 09 38 After: Screenshot 2024-09-17 at 22 09 58 --- crates/language/src/task_context.rs | 1 + crates/languages/src/go.rs | 2 ++ crates/languages/src/python.rs | 2 ++ crates/languages/src/rust.rs | 31 ++++++++++++++++++------ crates/project/src/project.rs | 36 +++++++++++++++------------- crates/project/src/task_inventory.rs | 3 ++- 6 files changed, 50 insertions(+), 25 deletions(-) diff --git a/crates/language/src/task_context.rs b/crates/language/src/task_context.rs index cc3f29558e..73150eb8e7 100644 --- a/crates/language/src/task_context.rs +++ b/crates/language/src/task_context.rs @@ -25,6 +25,7 @@ pub trait ContextProvider: Send + Sync { &self, _variables: &TaskVariables, _location: &Location, + _project_env: Option<&HashMap>, _cx: &mut AppContext, ) -> Result { Ok(TaskVariables::default()) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a83a11fd49..2ddf779681 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -1,5 +1,6 @@ use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; +use collections::HashMap; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext, Task}; use http_client::github::latest_github_release; @@ -454,6 +455,7 @@ impl 
ContextProvider for GoContextProvider { &self, variables: &TaskVariables, location: &Location, + _: Option<&HashMap>, cx: &mut gpui::AppContext, ) -> Result { let local_abs_path = location diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 99018647ea..ee127c00cc 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -1,5 +1,6 @@ use anyhow::Result; use async_trait::async_trait; +use collections::HashMap; use gpui::AppContext; use gpui::AsyncAppContext; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; @@ -215,6 +216,7 @@ impl ContextProvider for PythonContextProvider { &self, variables: &task::TaskVariables, _location: &project::Location, + _: Option<&HashMap>, _cx: &mut gpui::AppContext, ) -> Result { let python_module_name = python_module_name_from_relative_path( diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 5055bb69c1..b55f350b9d 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -1,6 +1,7 @@ use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; use async_trait::async_trait; +use collections::HashMap; use futures::{io::BufReader, StreamExt}; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; @@ -434,6 +435,7 @@ impl ContextProvider for RustContextProvider { &self, task_variables: &TaskVariables, location: &Location, + project_env: Option<&HashMap>, cx: &mut gpui::AppContext, ) -> Result { let local_abs_path = location @@ -449,8 +451,8 @@ impl ContextProvider for RustContextProvider { .is_some(); if is_main_function { - if let Some((package_name, bin_name)) = - local_abs_path.and_then(package_name_and_bin_name_from_abs_path) + if let Some((package_name, bin_name)) = local_abs_path + .and_then(|path| package_name_and_bin_name_from_abs_path(path, project_env)) { return Ok(TaskVariables::from_iter([ 
(RUST_PACKAGE_TASK_VARIABLE.clone(), package_name), @@ -461,7 +463,7 @@ impl ContextProvider for RustContextProvider { if let Some(package_name) = local_abs_path .and_then(|local_abs_path| local_abs_path.parent()) - .and_then(human_readable_package_name) + .and_then(|path| human_readable_package_name(path, project_env)) { return Ok(TaskVariables::from_iter([( RUST_PACKAGE_TASK_VARIABLE.clone(), @@ -615,8 +617,15 @@ struct CargoTarget { src_path: String, } -fn package_name_and_bin_name_from_abs_path(abs_path: &Path) -> Option<(String, String)> { - let output = std::process::Command::new("cargo") +fn package_name_and_bin_name_from_abs_path( + abs_path: &Path, + project_env: Option<&HashMap>, +) -> Option<(String, String)> { + let mut command = std::process::Command::new("cargo"); + if let Some(envs) = project_env { + command.envs(envs); + } + let output = command .current_dir(abs_path.parent()?) .arg("metadata") .arg("--no-deps") @@ -654,9 +663,17 @@ fn retrieve_package_id_and_bin_name_from_metadata( None } -fn human_readable_package_name(package_directory: &Path) -> Option { +fn human_readable_package_name( + package_directory: &Path, + project_env: Option<&HashMap>, +) -> Option { + let mut command = std::process::Command::new("cargo"); + if let Some(envs) = project_env { + command.envs(envs); + } + let pkgid = String::from_utf8( - std::process::Command::new("cargo") + command .current_dir(package_directory) .arg("pkgid") .output() diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5d9ac5e821..4318737e38 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4890,21 +4890,6 @@ impl Project { }; cx.spawn(|project, mut cx| async move { - let mut task_variables = cx - .update(|cx| { - combine_task_variables( - captured_variables, - location, - BasicContextProvider::new(project.upgrade()?), - cx, - ) - .log_err() - }) - .ok() - .flatten()?; - // Remove all custom entries starting with _, as they're not intended 
for use by the end user. - task_variables.sweep(); - let project_env = project .update(&mut cx, |project, cx| { let worktree_abs_path = worktree_abs_path.clone(); @@ -4915,6 +4900,22 @@ impl Project { .ok()? .await; + let mut task_variables = cx + .update(|cx| { + combine_task_variables( + captured_variables, + location, + project_env.as_ref(), + BasicContextProvider::new(project.upgrade()?), + cx, + ) + .log_err() + }) + .ok() + .flatten()?; + // Remove all custom entries starting with _, as they're not intended for use by the end user. + task_variables.sweep(); + Some(TaskContext { project_env: project_env.unwrap_or_default(), cwd: worktree_abs_path.map(|p| p.to_path_buf()), @@ -5111,6 +5112,7 @@ impl Project { fn combine_task_variables( mut captured_variables: TaskVariables, location: Location, + project_env: Option<&HashMap>, baseline: BasicContextProvider, cx: &mut AppContext, ) -> anyhow::Result { @@ -5120,13 +5122,13 @@ fn combine_task_variables( .language() .and_then(|language| language.context_provider()); let baseline = baseline - .build_context(&captured_variables, &location, cx) + .build_context(&captured_variables, &location, project_env, cx) .context("building basic default context")?; captured_variables.extend(baseline); if let Some(provider) = language_context_provider { captured_variables.extend( provider - .build_context(&captured_variables, &location, cx) + .build_context(&captured_variables, &location, project_env, cx) .context("building provider context")?, ); } diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 314903ec5d..83c9c1f8e5 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -8,7 +8,7 @@ use std::{ }; use anyhow::Result; -use collections::{btree_map, BTreeMap, VecDeque}; +use collections::{btree_map, BTreeMap, HashMap, VecDeque}; use futures::{ channel::mpsc::{unbounded, UnboundedSender}, StreamExt, @@ -543,6 +543,7 @@ impl ContextProvider for 
BasicContextProvider { &self, _: &TaskVariables, location: &Location, + _: Option<&HashMap>, cx: &mut AppContext, ) -> Result { let buffer = location.buffer.read(cx); From 7814dd0301c39987d36c8dbee4cd1b5802e315cc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 21:50:37 +0200 Subject: [PATCH 165/270] Update Rust crate sysinfo to 0.31.0 (#17733) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [sysinfo](https://redirect.github.com/GuillaumeGomez/sysinfo) | workspace.dependencies | minor | `0.30.7` -> `0.31.0` | --- ### Release Notes
GuillaumeGomez/sysinfo (sysinfo) ### [`v0.31.4`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0314) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.3...v0.31.4) - macOS: Force memory cleanup in disk list retrieval. ### [`v0.31.3`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0313) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.2...v0.31.3) - Raspberry Pi: Fix temperature retrieval. ### [`v0.31.2`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0312) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.1...v0.31.2) - Remove `bstr` dependency (needed for rustc development). ### [`v0.31.1`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0311) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.0...v0.31.1) - Downgrade version of `memchr` (needed for rustc development). ### [`v0.31.0`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0310) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.30.13...v0.31.0) - Split crate in features to only enable what you need. - Remove `System::refresh_process`, `System::refresh_process_specifics` and `System::refresh_pids` methods. - Add new argument of type `ProcessesToUpdate` to `System::refresh_processes` and `System::refresh_processes_specifics` methods. - Add new `NetworkData::ip_networks` method. - Add new `System::refresh_cpu_list` method. - Global CPU now only contains CPU usage. - Rename `TermalSensorType` to `ThermalSensorType`. - Process names is now an `OsString`. - Remove `System::global_cpu_info`. - Add `System::global_cpu_usage`. - macOS: Fix invalid CPU computation when single processes are refreshed one after the other. - Windows: Fix virtual memory computation. - Windows: Fix WoW64 parent process refresh. 
- Linux: Retrieve RSS (Resident Set Size) memory for cgroups.
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- Cargo.lock | 19 ++++--------------- Cargo.toml | 2 +- crates/client/src/telemetry.rs | 10 ++++++++-- crates/terminal/src/pty_info.rs | 15 ++++++++++----- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9cff895393..930415440b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11085,17 +11085,16 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.30.13" +version = "0.31.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3" +checksum = "355dbe4f8799b304b05e1b0f05fc59b2a18d36645cf169607da45bde2f69a1be" dependencies = [ - "cfg-if", "core-foundation-sys", "libc", + "memchr", "ntapi", - "once_cell", "rayon", - "windows 0.52.0", + "windows 0.54.0", ] [[package]] @@ -13443,16 +13442,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "windows" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" -dependencies = [ - "windows-core 0.52.0", - "windows-targets 0.52.6", -] - [[package]] name = "windows" version = "0.54.0" diff --git a/Cargo.toml b/Cargo.toml index eea510edf2..0b392e02eb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -416,7 +416,7 @@ strsim 
= "0.11" strum = { version = "0.25.0", features = ["derive"] } subtle = "2.5.0" sys-locale = "0.3.1" -sysinfo = "0.30.7" +sysinfo = "0.31.0" tempfile = "3.9.0" thiserror = "1.0.29" tiktoken-rs = "0.5.9" diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 860288038b..b415cae14c 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -304,7 +304,10 @@ impl Telemetry { let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory(); let current_process = Pid::from_u32(std::process::id()); - system.refresh_process_specifics(current_process, refresh_kind); + system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[current_process]), + refresh_kind, + ); // Waiting some amount of time before the first query is important to get a reasonable value // https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage @@ -314,7 +317,10 @@ impl Telemetry { smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await; let current_process = Pid::from_u32(std::process::id()); - system.refresh_process_specifics(current_process, refresh_kind); + system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[current_process]), + refresh_kind, + ); let Some(process) = system.process(current_process) else { log::error!( "Failed to find own process {current_process:?} in system process table" diff --git a/crates/terminal/src/pty_info.rs b/crates/terminal/src/pty_info.rs index 5fc3b05f78..559d022fda 100644 --- a/crates/terminal/src/pty_info.rs +++ b/crates/terminal/src/pty_info.rs @@ -98,9 +98,10 @@ impl PtyProcessInfo { fn refresh(&mut self) -> Option<&Process> { let pid = self.pid_getter.pid()?; - if self - .system - .refresh_process_specifics(pid, self.refresh_kind) + if self.system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[pid]), + self.refresh_kind, + ) == 1 { self.system.process(pid) } else { @@ -116,9 +117,13 @@ impl PtyProcessInfo { 
.map_or(PathBuf::new(), |p| p.to_owned()); let info = ProcessInfo { - name: process.name().to_owned(), + name: process.name().to_str()?.to_owned(), cwd, - argv: process.cmd().to_vec(), + argv: process + .cmd() + .iter() + .filter_map(|s| s.to_str().map(ToOwned::to_owned)) + .collect(), }; self.current = Some(info.clone()); Some(info) From 8cc6df573cb5926ce501eb58e3ca898e43142a33 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 17 Sep 2024 14:13:37 -0600 Subject: [PATCH 166/270] SshLspAdapterDelegate (#17965) Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 20 +- crates/languages/src/rust.rs | 2 +- crates/project/src/lsp_store.rs | 333 ++++++++++++------- crates/proto/proto/zed.proto | 28 +- crates/proto/src/proto.rs | 11 +- crates/remote/src/ssh_session.rs | 19 +- crates/remote_server/src/headless_project.rs | 2 + crates/remote_server/src/main.rs | 1 - 8 files changed, 268 insertions(+), 148 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 6eaa86f4a7..5d06720fe0 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -54,7 +54,7 @@ use language_model::{ use language_model::{LanguageModelImage, LanguageModelToolUse}; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; -use project::lsp_store::ProjectLspAdapterDelegate; +use project::lsp_store::LocalLspAdapterDelegate; use project::{Project, Worktree}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; @@ -5384,18 +5384,16 @@ fn make_lsp_adapter_delegate( let worktree = project .worktrees(cx) .next() - .ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?; - let fs = if project.is_local() { - Some(project.fs().clone()) - } else { - None - }; + .ok_or_else(|| anyhow!("no worktrees when constructing LocalLspAdapterDelegate"))?; let http_client = project.client().http_client().clone(); 
project.lsp_store().update(cx, |lsp_store, cx| { - Ok( - ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, None, cx) - as Arc, - ) + Ok(LocalLspAdapterDelegate::new( + lsp_store, + &worktree, + http_client, + project.fs().clone(), + cx, + ) as Arc) }) }) } diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index b55f350b9d..a32ffe50f5 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -77,7 +77,7 @@ impl LspAdapter for RustLspAdapter { { Ok(()) => (Some(path), Some(env), None), Err(err) => { - log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {:?}", path, err); + log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", path, err); (None, None, None) } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 2c718a42ab..daacf26c3a 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2305,8 +2305,7 @@ impl LspStore { .read(cx) .worktree_for_id(*worktree_id, cx)?; let state = this.as_local()?.language_servers.get(server_id)?; - let delegate = - ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); match state { LanguageServerState::Starting(_) => None, LanguageServerState::Running { @@ -4368,7 +4367,7 @@ impl LspStore { let response = this .update(&mut cx, |this, cx| { let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); anyhow::Ok( cx.spawn(|_, _| async move { delegate.which(command.as_os_str()).await }), ) @@ -4389,7 +4388,7 @@ impl LspStore { let response = this .update(&mut cx, |this, cx| { let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + let delegate = 
LocalLspAdapterDelegate::for_local(this, &worktree, cx); anyhow::Ok(cx.spawn(|_, _| async move { delegate.shell_env().await })) })?? .await; @@ -4398,6 +4397,52 @@ impl LspStore { env: response.into_iter().collect(), }) } + pub async fn handle_try_exec( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let binary = envelope + .payload + .binary + .ok_or_else(|| anyhow!("missing binary"))?; + let binary = LanguageServerBinary { + path: PathBuf::from(binary.path), + env: None, + arguments: binary.arguments.into_iter().map(Into::into).collect(), + }; + this.update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok(cx.spawn(|_, _| async move { delegate.try_exec(binary).await })) + })?? + .await?; + + Ok(proto::Ack {}) + } + + pub async fn handle_read_text_file( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let path = envelope + .payload + .path + .ok_or_else(|| anyhow!("missing path"))?; + let worktree_id = WorktreeId::from_proto(path.worktree_id); + let path = PathBuf::from(path.path); + let response = this + .update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok(cx.spawn(|_, _| async move { delegate.read_text_file(path).await })) + })?? 
+ .await?; + + Ok(proto::ReadTextFileResponse { text: response }) + } async fn handle_apply_additional_edits_for_completion( this: Model, @@ -4535,9 +4580,12 @@ impl LspStore { ) { let ssh = self.as_ssh().unwrap(); - let delegate = - ProjectLspAdapterDelegate::for_ssh(self, worktree, ssh.upstream_client.clone(), cx) - as Arc; + let delegate = Arc::new(SshLspAdapterDelegate { + lsp_store: cx.handle().downgrade(), + worktree: worktree.read(cx).snapshot(), + upstream_client: ssh.upstream_client.clone(), + language_registry: self.languages.clone(), + }) as Arc; // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. let lsp_adapter = adapter.adapter.clone(); @@ -4645,7 +4693,7 @@ impl LspStore { let local = self.as_local().unwrap(); let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let lsp_adapter_delegate = ProjectLspAdapterDelegate::for_local(self, worktree_handle, cx); + let lsp_adapter_delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx); let project_environment = local.environment.update(cx, |environment, cx| { environment.get_environment(Some(worktree_id), Some(worktree_path.clone()), cx) }); @@ -6938,18 +6986,32 @@ impl LspAdapter for SshLspAdapter { None } } +pub fn language_server_settings<'a, 'b: 'a>( + delegate: &'a dyn LspAdapterDelegate, + language: &str, + cx: &'b AppContext, +) -> Option<&'a LspSettings> { + ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: delegate.worktree_root_path(), + }), + cx, + ) + .lsp + .get(language) +} -pub struct ProjectLspAdapterDelegate { +pub struct LocalLspAdapterDelegate { lsp_store: WeakModel, worktree: worktree::Snapshot, - fs: Option>, + fs: Arc, http_client: Arc, language_registry: Arc, load_shell_env_task: Shared>>>, - upstream_client: Option, } -impl ProjectLspAdapterDelegate { +impl LocalLspAdapterDelegate { fn for_local( lsp_store: &LspStore, worktree: &Model, @@ -6957,45 +7019,37 @@ impl 
ProjectLspAdapterDelegate { ) -> Arc { let local = lsp_store .as_local() - .expect("ProjectLspAdapterDelegate cannot be constructed on a remote"); + .expect("LocalLspAdapterDelegate cannot be constructed on a remote"); let http_client = local .http_client .clone() .unwrap_or_else(|| Arc::new(BlockedHttpClient)); - Self::new( - lsp_store, - worktree, - http_client, - Some(local.fs.clone()), - None, - cx, - ) + Self::new(lsp_store, worktree, http_client, local.fs.clone(), cx) } - fn for_ssh( - lsp_store: &LspStore, - worktree: &Model, - upstream_client: AnyProtoClient, - cx: &mut ModelContext, - ) -> Arc { - Self::new( - lsp_store, - worktree, - Arc::new(BlockedHttpClient), - None, - Some(upstream_client), - cx, - ) - } + // fn for_ssh( + // lsp_store: &LspStore, + // worktree: &Model, + // upstream_client: AnyProtoClient, + // cx: &mut ModelContext, + // ) -> Arc { + // Self::new( + // lsp_store, + // worktree, + // Arc::new(BlockedHttpClient), + // None, + // Some(upstream_client), + // cx, + // ) + // } pub fn new( lsp_store: &LspStore, worktree: &Model, http_client: Arc, - fs: Option>, - upstream_client: Option, + fs: Arc, cx: &mut ModelContext, ) -> Arc { let worktree_id = worktree.read(cx).id(); @@ -7015,52 +7069,14 @@ impl ProjectLspAdapterDelegate { worktree: worktree.read(cx).snapshot(), fs, http_client, - upstream_client, language_registry: lsp_store.languages.clone(), load_shell_env_task, }) } } -struct BlockedHttpClient; - -impl HttpClient for BlockedHttpClient { - fn send( - &self, - _req: Request, - ) -> BoxFuture<'static, Result, Error>> { - Box::pin(async { - Err(std::io::Error::new( - std::io::ErrorKind::PermissionDenied, - "ssh host blocked http connection", - ) - .into()) - }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } -} - -pub fn language_server_settings<'a, 'b: 'a>( - delegate: &'a dyn LspAdapterDelegate, - language: &str, - cx: &'b AppContext, -) -> Option<&'a LspSettings> { - ProjectSettings::get( - Some(SettingsLocation { - 
worktree_id: delegate.worktree_id(), - path: delegate.worktree_root_path(), - }), - cx, - ) - .lsp - .get(language) -} - #[async_trait] -impl LspAdapterDelegate for ProjectLspAdapterDelegate { +impl LspAdapterDelegate for LocalLspAdapterDelegate { fn show_notification(&self, message: &str, cx: &mut AppContext) { self.lsp_store .update(cx, |_, cx| { @@ -7082,42 +7098,12 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { } async fn shell_env(&self) -> HashMap { - if let Some(upstream_client) = &self.upstream_client { - use rpc::proto::SSH_PROJECT_ID; - - return upstream_client - .request(proto::ShellEnv { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - }) - .await - .map(|response| response.env.into_iter().collect()) - .unwrap_or_default(); - } - let task = self.load_shell_env_task.clone(); task.await.unwrap_or_default() } #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { - if let Some(upstream_client) = &self.upstream_client { - use rpc::proto::SSH_PROJECT_ID; - - return upstream_client - .request(proto::WhichCommand { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - command: command.to_string_lossy().to_string(), - }) - .await - .log_err() - .and_then(|response| response.path) - .map(PathBuf::from); - } - - self.fs.as_ref()?; - let worktree_abs_path = self.worktree.abs_path(); let shell_path = self.shell_env().await.get("PATH").cloned(); which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() @@ -7125,8 +7111,6 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { #[cfg(target_os = "windows")] async fn which(&self, command: &OsStr) -> Option { - self.fs.as_ref()?; - // todo(windows) Getting the shell env variables in a current directory on Windows is more complicated than other platforms // there isn't a 'default shell' necessarily. 
The closest would be the default profile on the windows terminal // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup @@ -7134,10 +7118,6 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { } async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { - if self.fs.is_none() { - return Ok(()); - } - let working_dir = self.worktree_root_path(); let output = smol::process::Command::new(&command.path) .args(command.arguments) @@ -7170,12 +7150,127 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { if self.worktree.entry_for_path(&path).is_none() { return Err(anyhow!("no such path {path:?}")); }; - if let Some(fs) = &self.fs { - let content = fs.load(&path).await?; - Ok(content) - } else { - return Err(anyhow!("cannot open {path:?} on ssh host (yet!)")); - } + self.fs.load(&path).await + } +} + +struct BlockedHttpClient; + +impl HttpClient for BlockedHttpClient { + fn send( + &self, + _req: Request, + ) -> BoxFuture<'static, Result, Error>> { + Box::pin(async { + Err(std::io::Error::new( + std::io::ErrorKind::PermissionDenied, + "ssh host blocked http connection", + ) + .into()) + }) + } + + fn proxy(&self) -> Option<&Uri> { + None + } +} + +struct SshLspAdapterDelegate { + lsp_store: WeakModel, + worktree: worktree::Snapshot, + upstream_client: AnyProtoClient, + language_registry: Arc, +} + +#[async_trait] +impl LspAdapterDelegate for SshLspAdapterDelegate { + fn show_notification(&self, message: &str, cx: &mut AppContext) { + self.lsp_store + .update(cx, |_, cx| { + cx.emit(LspStoreEvent::Notification(message.to_owned())) + }) + .ok(); + } + + fn http_client(&self) -> Arc { + Arc::new(BlockedHttpClient) + } + + fn worktree_id(&self) -> WorktreeId { + self.worktree.id() + } + + fn worktree_root_path(&self) -> &Path { + self.worktree.abs_path().as_ref() + } + + async fn shell_env(&self) -> HashMap { + use rpc::proto::SSH_PROJECT_ID; + + self.upstream_client + .request(proto::ShellEnv { + project_id: 
SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + }) + .await + .map(|response| response.env.into_iter().collect()) + .unwrap_or_default() + } + + async fn which(&self, command: &OsStr) -> Option { + use rpc::proto::SSH_PROJECT_ID; + + self.upstream_client + .request(proto::WhichCommand { + project_id: SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + command: command.to_string_lossy().to_string(), + }) + .await + .log_err() + .and_then(|response| response.path) + .map(PathBuf::from) + } + + async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { + self.upstream_client + .request(proto::TryExec { + project_id: rpc::proto::SSH_PROJECT_ID, + worktree_id: self.worktree.id().to_proto(), + binary: Some(proto::LanguageServerCommand { + path: command.path.to_string_lossy().to_string(), + arguments: command + .arguments + .into_iter() + .map(|s| s.to_string_lossy().to_string()) + .collect(), + env: command.env.unwrap_or_default().into_iter().collect(), + }), + }) + .await?; + Ok(()) + } + + fn update_status( + &self, + server_name: LanguageServerName, + status: language::LanguageServerBinaryStatus, + ) { + self.language_registry + .update_lsp_status(server_name, status); + } + + async fn read_text_file(&self, path: PathBuf) -> Result { + self.upstream_client + .request(proto::ReadTextFile { + project_id: rpc::proto::SSH_PROJECT_ID, + path: Some(proto::ProjectPath { + worktree_id: self.worktree.id().to_proto(), + path: path.to_string_lossy().to_string(), + }), + }) + .await + .map(|r| r.text) } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 77942c8a94..a886b21855 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -289,7 +289,11 @@ message Envelope { WhichCommandResponse which_command_response = 249; ShellEnv shell_env = 250; - ShellEnvResponse shell_env_response = 251; // current max + ShellEnvResponse shell_env_response = 251; + + TryExec try_exec = 252; + 
ReadTextFile read_text_file = 253; + ReadTextFileResponse read_text_file_response = 254; // current max } reserved 158 to 161; @@ -2551,13 +2555,21 @@ message ShellEnvResponse { map env = 1; } -// message RestartLanguageServer { +message ReadTextFile { + uint64 project_id = 1; + ProjectPath path = 2; +} -// } -// message DestroyLanguageServer { +message ReadTextFileResponse { + string text = 1; +} -// } +message TryExec { + uint64 project_id = 1; + uint64 worktree_id = 2; + LanguageServerCommand binary = 3; +} -// message LspWorkspaceConfiguration { - -// } +message TryExecResponse { + string text = 1; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index a1853ed4a3..b5a00d1670 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -370,6 +370,9 @@ messages!( (WhichCommandResponse, Foreground), (ShellEnv, Foreground), (ShellEnvResponse, Foreground), + (TryExec, Foreground), + (ReadTextFile, Foreground), + (ReadTextFileResponse, Foreground) ); request_messages!( @@ -495,7 +498,9 @@ request_messages!( (AddWorktree, AddWorktreeResponse), (CreateLanguageServer, Ack), (WhichCommand, WhichCommandResponse), - (ShellEnv, ShellEnvResponse) + (ShellEnv, ShellEnvResponse), + (ReadTextFile, ReadTextFileResponse), + (TryExec, Ack), ); entity_messages!( @@ -571,7 +576,9 @@ entity_messages!( UpdateUserSettings, CreateLanguageServer, WhichCommand, - ShellEnv + ShellEnv, + TryExec, + ReadTextFile ); entity_messages!( diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 4762a785db..7556b38f3e 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -15,7 +15,7 @@ use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion}; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, - EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, + EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, 
RpcError, }; use smol::{ fs, @@ -157,8 +157,9 @@ impl SshSession { let mut remote_server_child = socket .ssh_command(format!( - "RUST_LOG={} {:?} run", + "RUST_LOG={} RUST_BACKTRACE={} {:?} run", std::env::var("RUST_LOG").unwrap_or_default(), + std::env::var("RUST_BACKTRACE").unwrap_or_default(), remote_binary_path, )) .spawn() @@ -349,7 +350,7 @@ impl SshSession { } Err(error) => { log::error!( - "error handling message. type:{type_name}, error:{error:?}", + "error handling message. type:{type_name}, error:{error}", ); } } @@ -371,7 +372,7 @@ impl SshSession { payload: T, ) -> impl 'static + Future> { log::debug!("ssh request start. name:{}", T::NAME); - let response = self.request_dynamic(payload.into_envelope(0, None, None), ""); + let response = self.request_dynamic(payload.into_envelope(0, None, None), T::NAME); async move { let response = response.await?; log::debug!("ssh request finish. name:{}", T::NAME); @@ -388,7 +389,7 @@ impl SshSession { pub fn request_dynamic( &self, mut envelope: proto::Envelope, - _request_type: &'static str, + type_name: &'static str, ) -> impl 'static + Future> { envelope.id = self.next_message_id.fetch_add(1, SeqCst); let (tx, rx) = oneshot::channel(); @@ -396,7 +397,13 @@ impl SshSession { response_channels_lock.insert(MessageId(envelope.id), tx); drop(response_channels_lock); self.outgoing_tx.unbounded_send(envelope).ok(); - async move { Ok(rx.await.context("connection lost")?.0) } + async move { + let response = rx.await.context("connection lost")?.0; + if let Some(proto::envelope::Payload::Error(error)) = &response.payload { + return Err(RpcError::from_proto(error, type_name)); + } + Ok(response) + } } pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 58f5cb0c20..35d6630c1e 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -107,6 
+107,8 @@ impl HeadlessProject { client.add_model_request_handler(LspStore::handle_create_language_server); client.add_model_request_handler(LspStore::handle_which_command); client.add_model_request_handler(LspStore::handle_shell_env); + client.add_model_request_handler(LspStore::handle_try_exec); + client.add_model_request_handler(LspStore::handle_read_text_file); BufferStore::init(&client); WorktreeStore::init(&client); diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 696022a456..908a0a89b6 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -24,7 +24,6 @@ fn main() { #[cfg(not(windows))] fn main() { - env::set_var("RUST_BACKTRACE", "1"); env_logger::builder() .format(|buf, record| { serde_json::to_writer(&mut *buf, &LogRecord::new(record))?; From bdca342cdc2cd4d884d13c2246f0bc89365f3a9a Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 16:28:09 -0400 Subject: [PATCH 167/270] Fix "view release notes" on dev/nightly builds (#17967) --- crates/auto_update/src/auto_update.rs | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 8063ff4c40..c0863e41d1 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -244,19 +244,22 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<( let auto_updater = AutoUpdater::get(cx)?; let release_channel = ReleaseChannel::try_global(cx)?; - if matches!( - release_channel, - ReleaseChannel::Stable | ReleaseChannel::Preview - ) { - let auto_updater = auto_updater.read(cx); - let release_channel = release_channel.dev_name(); - let current_version = auto_updater.current_version; - let url = &auto_updater - .http_client - .build_url(&format!("/releases/{release_channel}/{current_version}")); - cx.open_url(url); + match release_channel { + 
ReleaseChannel::Stable | ReleaseChannel::Preview => { + let auto_updater = auto_updater.read(cx); + let current_version = auto_updater.current_version; + let release_channel = release_channel.dev_name(); + let path = format!("/releases/{release_channel}/{current_version}"); + let url = &auto_updater.http_client.build_url(&path); + cx.open_url(url); + } + ReleaseChannel::Nightly => { + cx.open_url("https://github.com/zed-industries/zed/commits/nightly/"); + } + ReleaseChannel::Dev => { + cx.open_url("https://github.com/zed-industries/zed/commits/main/"); + } } - None } From 51faf4a1cd8965bf014660a63cc6c18e3e39c5cb Mon Sep 17 00:00:00 2001 From: Marek Fajkus Date: Tue, 17 Sep 2024 22:28:52 +0200 Subject: [PATCH 168/270] Add missing cmake dependency to Nix build (#17968) cmake is required during build of dependecies and thus needs to be supplied in nativeBuildInputs (dependecies required for build not during runtime). This fixes (sandboxed) nix builds of the project. Release Notes: - N/A --- nix/build.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nix/build.nix b/nix/build.nix index cd000e9e91..4782c9a56f 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -28,6 +28,7 @@ stdenvAdapters, nix-gitignore, withGLES ? 
false, + cmake, }: let includeFilter = path: type: let baseName = baseNameOf (toString path); @@ -58,6 +59,7 @@ pkg-config protobuf rustPlatform.bindgenHook + cmake ]; buildInputs = [ From e7912370e66a8da7b7aa5560bb616590f0a9ad91 Mon Sep 17 00:00:00 2001 From: Graham Taylor Date: Tue, 17 Sep 2024 18:31:06 -0400 Subject: [PATCH 169/270] perplexity: Remove duplicate step and fix numbering in README (#17978) Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- extensions/perplexity/README.md | 38 +++++++++++++++++---------------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md index 094a876885..405356dc53 100644 --- a/extensions/perplexity/README.md +++ b/extensions/perplexity/README.md @@ -12,30 +12,32 @@ Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter: ## Development Setup -1. Install the rust toolchain and clone the zed repo: -``` -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +1. Install the Rust toolchain and clone the zed repo: -mkdir -p ~/code -cd ~/code -git clone https://github.com/zed-industries/zed -``` + ``` + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -2. Launch Zed and Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) -3. Open Zed -4. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) -5. Click "Install Dev Extension" -6. Navigate to the "extensions/perplexity" folder inside the zed git repo. -7. Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) -```sh -env |grep PERPLEXITY_API_KEY -``` -8. Quit and relaunch Zed + mkdir -p ~/code + cd ~/code + git clone https://github.com/zed-industries/zed + ``` + +1. Open Zed +1. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +1. Click "Install Dev Extension" +1. Navigate to the "extensions/perplexity" folder inside the zed git repo. +1. 
Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) + + ```sh + env | grep PERPLEXITY_API_KEY + ``` + +1. Quit and relaunch Zed ## PERPLEXITY_API_KEY This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable. -To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. +To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Reload close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`. From db18f7a2b008452a5c64a87735ed9cdd916ebcbd Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 18:32:22 -0400 Subject: [PATCH 170/270] rust: Fix doc comment highlighting (#17976) This PR fixes an issue where `/` and `!` in Rust doc comments were being incorrectly highlighted as operators after #17734. We solve this by removing them from the operators list and using more scoped queries to highlight them. 
Release Notes: - N/A --------- Co-authored-by: Max --- crates/languages/src/rust/highlights.scm | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index 0c3aa7efc7..8d4bad06e0 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -139,7 +139,6 @@ ] @comment.doc [ - "!" "!=" "%" "%=" @@ -159,7 +158,6 @@ ".." "..=" "..." - "/" "/=" ":" ";" @@ -183,6 +181,10 @@ "?" ] @operator +; Avoid highlighting these as operators when used in doc comments. +(unary_expression "!" @operator) +operator: "/" @operator + (lifetime) @lifetime (parameter (identifier) @variable.parameter) From 8e45bf71cae37cbbc99dda67fdce57aed4d2d8fd Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 17 Sep 2024 16:37:56 -0600 Subject: [PATCH 171/270] Refactor prettier (#17977) In preparation for making formatting work on ssh remotes Release Notes: - N/A Co-authored-by: Mikayla --- crates/node_runtime/src/node_runtime.rs | 49 + crates/project/src/lsp_store.rs | 194 ++- ...{prettier_support.rs => prettier_store.rs} | 1140 +++++++++-------- crates/project/src/project.rs | 218 +--- crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 14 +- 6 files changed, 911 insertions(+), 705 deletions(-) rename crates/project/src/{prettier_support.rs => prettier_store.rs} (65%) diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 73650d73c9..4aa65ab6db 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -462,3 +462,52 @@ impl NodeRuntime for FakeNodeRuntime { unreachable!("Should not install packages {packages:?}") } } + +// TODO: Remove this when headless binary can run node +pub struct DummyNodeRuntime; + +impl DummyNodeRuntime { + pub fn new() -> Arc { + Arc::new(Self) + } +} + +#[async_trait::async_trait] +impl NodeRuntime for 
DummyNodeRuntime { + async fn binary_path(&self) -> anyhow::Result { + anyhow::bail!("Dummy Node Runtime") + } + + async fn node_environment_path(&self) -> anyhow::Result { + anyhow::bail!("Dummy node runtime") + } + + async fn run_npm_subcommand( + &self, + _: Option<&Path>, + _subcommand: &str, + _args: &[&str], + ) -> anyhow::Result { + anyhow::bail!("Dummy node runtime") + } + + async fn npm_package_latest_version(&self, _name: &str) -> anyhow::Result { + anyhow::bail!("Dummy node runtime") + } + + async fn npm_package_installed_version( + &self, + _local_package_directory: &Path, + _name: &str, + ) -> Result> { + anyhow::bail!("Dummy node runtime") + } + + async fn npm_install_packages( + &self, + _: &Path, + _packages: &[(&str, &str)], + ) -> anyhow::Result<()> { + anyhow::bail!("Dummy node runtime") + } +} diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index daacf26c3a..35eb20259c 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3,6 +3,7 @@ use crate::{ environment::ProjectEnvironment, lsp_command::{self, *}, lsp_ext_command, + prettier_store::{self, PrettierStore, PrettierStoreEvent}, project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, worktree_store::{WorktreeStore, WorktreeStoreEvent}, @@ -101,6 +102,8 @@ pub struct LocalLspStore { HashMap>>, supplementary_language_servers: HashMap)>, + prettier_store: Model, + current_lsp_settings: HashMap, LspSettings>, _subscription: gpui::Subscription, } @@ -135,6 +138,7 @@ impl RemoteLspStore {} pub struct SshLspStore { upstream_client: AnyProtoClient, + current_lsp_settings: HashMap, LspSettings>, } #[allow(clippy::large_enum_variant)] @@ -310,9 +314,32 @@ impl LspStore { } } + pub fn swap_current_lsp_settings( + &mut self, + new_settings: HashMap, LspSettings>, + ) -> Option, LspSettings>> { + match &mut self.mode { + LspStoreMode::Ssh(SshLspStore { + current_lsp_settings, + .. 
+ }) + | LspStoreMode::Local(LocalLspStore { + current_lsp_settings, + .. + }) => { + let ret = mem::take(current_lsp_settings); + *current_lsp_settings = new_settings; + Some(ret) + } + LspStoreMode::Remote(_) => None, + } + } + + #[allow(clippy::too_many_arguments)] pub fn new_local( buffer_store: Model, worktree_store: Model, + prettier_store: Model, environment: Model, languages: Arc, http_client: Option>, @@ -324,6 +351,10 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + cx.subscribe(&prettier_store, Self::on_prettier_store_event) + .detach(); + cx.observe_global::(Self::on_settings_changed) + .detach(); Self { mode: LspStoreMode::Local(LocalLspStore { @@ -332,6 +363,8 @@ impl LspStore { last_workspace_edits_by_language_server: Default::default(), language_server_watched_paths: Default::default(), language_server_watcher_registrations: Default::default(), + current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), + prettier_store, environment, http_client, fs, @@ -387,9 +420,14 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + cx.observe_global::(Self::on_settings_changed) + .detach(); Self { - mode: LspStoreMode::Ssh(SshLspStore { upstream_client }), + mode: LspStoreMode::Ssh(SshLspStore { + upstream_client, + current_lsp_settings: Default::default(), + }), downstream_client: None, project_id, buffer_store, @@ -401,6 +439,7 @@ impl LspStore { buffer_snapshots: Default::default(), next_diagnostic_group_id: Default::default(), diagnostic_summaries: Default::default(), + diagnostics: Default::default(), active_entry: None, _maintain_workspace_config: Self::maintain_workspace_config(cx), @@ -498,6 +537,36 @@ impl LspStore { } } + fn on_prettier_store_event( + &mut self, + _: Model, + event: &PrettierStoreEvent, + cx: &mut ModelContext, + ) { + match event { + PrettierStoreEvent::LanguageServerRemoved(prettier_server_id) => { + 
self.unregister_supplementary_language_server(*prettier_server_id, cx); + } + PrettierStoreEvent::LanguageServerAdded { + new_server_id, + name, + prettier_server, + } => { + self.register_supplementary_language_server( + *new_server_id, + name.clone(), + prettier_server.clone(), + cx, + ); + } + } + } + + // todo! + pub fn prettier_store(&self) -> Option> { + self.as_local().map(|local| local.prettier_store.clone()) + } + fn on_buffer_event( &mut self, buffer: Model, @@ -656,11 +725,29 @@ impl LspStore { }); let buffer_file = buffer.read(cx).file().cloned(); + let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone(); let buffer_file = File::from_dyn(buffer_file.as_ref()); - if let Some(file) = buffer_file { + let worktree_id = if let Some(file) = buffer_file { let worktree = file.worktree.clone(); - self.start_language_servers(&worktree, new_language.name(), cx) + self.start_language_servers(&worktree, new_language.name(), cx); + + Some(worktree.read(cx).id()) + } else { + None + }; + + if let Some(prettier_plugins) = prettier_store::prettier_plugins_for_language(&settings) { + let prettier_store = self.as_local().map(|s| s.prettier_store.clone()); + if let Some(prettier_store) = prettier_store { + prettier_store.update(cx, |prettier_store, cx| { + prettier_store.install_default_prettier( + worktree_id, + prettier_plugins.iter().map(|s| Arc::from(s.as_str())), + cx, + ) + }) + } } cx.emit(LspStoreEvent::LanguageDetected { @@ -799,6 +886,95 @@ impl LspStore { Task::ready(Ok(Default::default())) } + fn on_settings_changed(&mut self, cx: &mut ModelContext) { + let mut language_servers_to_start = Vec::new(); + let mut language_formatters_to_check = Vec::new(); + for buffer in self.buffer_store.read(cx).buffers() { + let buffer = buffer.read(cx); + let buffer_file = File::from_dyn(buffer.file()); + let buffer_language = buffer.language(); + let settings = language_settings(buffer_language, buffer.file(), cx); + if let Some(language) = 
buffer_language { + if settings.enable_language_server { + if let Some(file) = buffer_file { + language_servers_to_start.push((file.worktree.clone(), language.name())); + } + } + language_formatters_to_check + .push((buffer_file.map(|f| f.worktree_id(cx)), settings.clone())); + } + } + + let mut language_servers_to_stop = Vec::new(); + let mut language_servers_to_restart = Vec::new(); + let languages = self.languages.to_vec(); + + let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone(); + let Some(current_lsp_settings) = self.swap_current_lsp_settings(new_lsp_settings.clone()) + else { + return; + }; + for (worktree_id, started_lsp_name) in self.started_language_servers() { + let language = languages.iter().find_map(|l| { + let adapter = self + .languages + .lsp_adapters(&l.name()) + .iter() + .find(|adapter| adapter.name == started_lsp_name)? + .clone(); + Some((l, adapter)) + }); + if let Some((language, adapter)) = language { + let worktree = self.worktree_for_id(worktree_id, cx).ok(); + let file = worktree.as_ref().and_then(|tree| { + tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _)) + }); + if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { + language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); + } else if let Some(worktree) = worktree { + let server_name = &adapter.name.0; + match ( + current_lsp_settings.get(server_name), + new_lsp_settings.get(server_name), + ) { + (None, None) => {} + (Some(_), None) | (None, Some(_)) => { + language_servers_to_restart.push((worktree, language.name())); + } + (Some(current_lsp_settings), Some(new_lsp_settings)) => { + if current_lsp_settings != new_lsp_settings { + language_servers_to_restart.push((worktree, language.name())); + } + } + } + } + } + } + + for (worktree_id, adapter_name) in language_servers_to_stop { + self.stop_language_server(worktree_id, adapter_name, cx) + .detach(); + } + + if let Some(prettier_store) = self.as_local().map(|s| 
s.prettier_store.clone()) { + prettier_store.update(cx, |prettier_store, cx| { + prettier_store.on_settings_changed(language_formatters_to_check, cx) + }) + } + + // Start all the newly-enabled language servers. + for (worktree, language) in language_servers_to_start { + self.start_language_servers(&worktree, language, cx); + } + + // Restart all language servers with changed initialization options. + for (worktree, language) in language_servers_to_restart { + self.restart_language_servers(worktree, language, cx); + } + + cx.notify(); + } + pub async fn execute_code_actions_on_servers( this: &WeakModel, adapters_and_servers: &[(Arc, Arc)], @@ -2375,7 +2551,7 @@ impl LspStore { }) } - pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { + fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { self.diagnostics.remove(&id_to_remove); self.diagnostic_summaries.remove(&id_to_remove); @@ -2406,6 +2582,12 @@ impl LspStore { } cx.emit(LspStoreEvent::LanguageServerRemoved(server_id_to_remove)); } + + if let Some(local) = self.as_local() { + local.prettier_store.update(cx, |prettier_store, cx| { + prettier_store.remove_worktree(id_to_remove, cx); + }) + } } pub fn shared( @@ -6117,6 +6299,10 @@ impl LspStore { let Some(local) = self.as_local() else { return }; + local.prettier_store.update(cx, |prettier_store, cx| { + prettier_store.update_prettier_settings(&worktree_handle, changes, cx) + }); + let worktree_id = worktree_handle.read(cx).id(); let mut language_server_ids = self .language_server_ids diff --git a/crates/project/src/prettier_support.rs b/crates/project/src/prettier_store.rs similarity index 65% rename from crates/project/src/prettier_support.rs rename to crates/project/src/prettier_store.rs index e90a1dbdf7..29101917fb 100644 --- a/crates/project/src/prettier_support.rs +++ b/crates/project/src/prettier_store.rs @@ -5,444 +5,384 @@ use std::{ }; use anyhow::{anyhow, Context, Result}; -use 
collections::HashSet; +use collections::{HashMap, HashSet}; use fs::Fs; use futures::{ future::{self, Shared}, + stream::FuturesUnordered, FutureExt, }; -use gpui::{AsyncAppContext, Model, ModelContext, Task, WeakModel}; +use gpui::{AsyncAppContext, EventEmitter, Model, ModelContext, Task, WeakModel}; use language::{ language_settings::{Formatter, LanguageSettings, SelectedFormatter}, - Buffer, LanguageServerName, LocalFile, + Buffer, LanguageRegistry, LanguageServerName, LocalFile, }; use lsp::{LanguageServer, LanguageServerId}; use node_runtime::NodeRuntime; use paths::default_prettier_dir; use prettier::Prettier; +use smol::stream::StreamExt; use util::{ResultExt, TryFutureExt}; -use crate::{File, FormatOperation, PathChange, Project, ProjectEntryId, Worktree, WorktreeId}; +use crate::{ + worktree_store::WorktreeStore, File, FormatOperation, PathChange, ProjectEntryId, Worktree, + WorktreeId, +}; -pub fn prettier_plugins_for_language( - language_settings: &LanguageSettings, -) -> Option<&HashSet> { - match &language_settings.formatter { - SelectedFormatter::Auto => Some(&language_settings.prettier.plugins), - - SelectedFormatter::List(list) => list - .as_ref() - .contains(&Formatter::Prettier) - .then_some(&language_settings.prettier.plugins), - } +pub struct PrettierStore { + node: Arc, + fs: Arc, + languages: Arc, + worktree_store: Model, + default_prettier: DefaultPrettier, + prettiers_per_worktree: HashMap>>, + prettier_instances: HashMap, } -pub(super) async fn format_with_prettier( - project: &WeakModel, - buffer: &Model, - cx: &mut AsyncAppContext, -) -> Option> { - let prettier_instance = project - .update(cx, |project, cx| { - project.prettier_instance_for_buffer(buffer, cx) - }) - .ok()? 
- .await; +pub enum PrettierStoreEvent { + LanguageServerRemoved(LanguageServerId), + LanguageServerAdded { + new_server_id: LanguageServerId, + name: LanguageServerName, + prettier_server: Arc, + }, +} - let (prettier_path, prettier_task) = prettier_instance?; +impl EventEmitter for PrettierStore {} - let prettier_description = match prettier_path.as_ref() { - Some(path) => format!("prettier at {path:?}"), - None => "default prettier instance".to_string(), - }; - - match prettier_task.await { - Ok(prettier) => { - let buffer_path = buffer - .update(cx, |buffer, cx| { - File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) - }) - .ok() - .flatten(); - - let format_result = prettier - .format(buffer, buffer_path, cx) - .await - .map(FormatOperation::Prettier) - .with_context(|| format!("{} failed to format buffer", prettier_description)); - - Some(format_result) +impl PrettierStore { + pub fn new( + node: Arc, + fs: Arc, + languages: Arc, + worktree_store: Model, + _: &mut ModelContext, + ) -> Self { + Self { + node, + fs, + languages, + worktree_store, + default_prettier: DefaultPrettier::default(), + prettiers_per_worktree: HashMap::default(), + prettier_instances: HashMap::default(), } - Err(error) => { - project - .update(cx, |project, _| { - let instance_to_update = match prettier_path { - Some(prettier_path) => project.prettier_instances.get_mut(&prettier_path), - None => match &mut project.default_prettier.prettier { - PrettierInstallation::NotInstalled { .. 
} => None, - PrettierInstallation::Installed(instance) => Some(instance), - }, - }; + } - if let Some(instance) = instance_to_update { - instance.attempt += 1; - instance.prettier = None; + pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { + let mut prettier_instances_to_clean = FuturesUnordered::new(); + if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) { + for path in prettier_paths.iter().flatten() { + if let Some(prettier_instance) = self.prettier_instances.remove(path) { + prettier_instances_to_clean.push(async move { + prettier_instance + .server() + .await + .map(|server| server.server_id()) + }); + } + } + } + cx.spawn(|prettier_store, mut cx| async move { + while let Some(prettier_server_id) = prettier_instances_to_clean.next().await { + if let Some(prettier_server_id) = prettier_server_id { + prettier_store + .update(&mut cx, |_, cx| { + cx.emit(PrettierStoreEvent::LanguageServerRemoved( + prettier_server_id, + )); + }) + .ok(); + } + } + }) + .detach(); + } + + fn prettier_instance_for_buffer( + &mut self, + buffer: &Model, + cx: &mut ModelContext, + ) -> Task, PrettierTask)>> { + let buffer = buffer.read(cx); + let buffer_file = buffer.file(); + if buffer.language().is_none() { + return Task::ready(None); + } + + let node = self.node.clone(); + + match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) { + Some((worktree_id, buffer_path)) => { + let fs = Arc::clone(&self.fs); + let installed_prettiers = self.prettier_instances.keys().cloned().collect(); + cx.spawn(|lsp_store, mut cx| async move { + match cx + .background_executor() + .spawn(async move { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + &buffer_path, + ) + .await + }) + .await + { + Ok(ControlFlow::Break(())) => None, + Ok(ControlFlow::Continue(None)) => { + let default_instance = lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store + 
.prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(None); + lsp_store.default_prettier.prettier_task( + &node, + Some(worktree_id), + cx, + ) + }) + .ok()?; + Some((None, default_instance?.log_err().await?)) + } + Ok(ControlFlow::Continue(Some(prettier_dir))) => { + lsp_store + .update(&mut cx, |lsp_store, _| { + lsp_store + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(Some(prettier_dir.clone())) + }) + .ok()?; + if let Some(prettier_task) = lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store.prettier_instances.get_mut(&prettier_dir).map( + |existing_instance| { + existing_instance.prettier_task( + &node, + Some(&prettier_dir), + Some(worktree_id), + cx, + ) + }, + ) + }) + .ok()? + { + log::debug!("Found already started prettier in {prettier_dir:?}"); + return Some((Some(prettier_dir), prettier_task?.await.log_err()?)); + } + + log::info!("Found prettier in {prettier_dir:?}, starting."); + let new_prettier_task = lsp_store + .update(&mut cx, |lsp_store, cx| { + let new_prettier_task = Self::start_prettier( + node, + prettier_dir.clone(), + Some(worktree_id), + cx, + ); + lsp_store.prettier_instances.insert( + prettier_dir.clone(), + PrettierInstance { + attempt: 0, + prettier: Some(new_prettier_task.clone()), + }, + ); + new_prettier_task + }) + .ok()?; + Some((Some(prettier_dir), new_prettier_task)) + } + Err(e) => { + log::error!("Failed to determine prettier path for buffer: {e:#}"); + None + } } }) - .log_err(); - - Some(Err(anyhow!( - "{} failed to spawn: {error:#}", - prettier_description - ))) - } - } -} - -pub struct DefaultPrettier { - prettier: PrettierInstallation, - installed_plugins: HashSet>, -} - -#[derive(Debug)] -pub enum PrettierInstallation { - NotInstalled { - attempts: usize, - installation_task: Option>>>>, - not_installed_plugins: HashSet>, - }, - Installed(PrettierInstance), -} - -pub type PrettierTask = Shared, Arc>>>; - -#[derive(Debug, Clone)] -pub struct PrettierInstance { - 
attempt: usize, - prettier: Option, -} - -impl Default for DefaultPrettier { - fn default() -> Self { - Self { - prettier: PrettierInstallation::NotInstalled { - attempts: 0, - installation_task: None, - not_installed_plugins: HashSet::default(), - }, - installed_plugins: HashSet::default(), - } - } -} - -impl DefaultPrettier { - pub fn instance(&self) -> Option<&PrettierInstance> { - if let PrettierInstallation::Installed(instance) = &self.prettier { - Some(instance) - } else { - None + } + None => { + let new_task = self.default_prettier.prettier_task(&node, None, cx); + cx.spawn(|_, _| async move { Some((None, new_task?.log_err().await?)) }) + } } } - pub fn prettier_task( - &mut self, - node: &Arc, + fn start_prettier( + node: Arc, + prettier_dir: PathBuf, worktree_id: Option, - cx: &mut ModelContext<'_, Project>, - ) -> Option>> { - match &mut self.prettier { - PrettierInstallation::NotInstalled { .. } => { - Some(start_default_prettier(Arc::clone(node), worktree_id, cx)) - } - PrettierInstallation::Installed(existing_instance) => { - existing_instance.prettier_task(node, None, worktree_id, cx) - } - } - } -} + cx: &mut ModelContext, + ) -> PrettierTask { + cx.spawn(|prettier_store, mut cx| async move { + log::info!("Starting prettier at path {prettier_dir:?}"); + let new_server_id = prettier_store.update(&mut cx, |prettier_store, _| { + prettier_store.languages.next_language_server_id() + })?; -impl PrettierInstance { - pub fn prettier_task( - &mut self, - node: &Arc, - prettier_dir: Option<&Path>, + let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone()) + .await + .context("default prettier spawn") + .map(Arc::new) + .map_err(Arc::new)?; + Self::register_new_prettier( + &prettier_store, + &new_prettier, + worktree_id, + new_server_id, + &mut cx, + ); + Ok(new_prettier) + }) + .shared() + } + + fn start_default_prettier( + node: Arc, worktree_id: Option, - cx: &mut ModelContext<'_, Project>, - ) -> Option>> { - if self.attempt > 
prettier::FAIL_THRESHOLD { - match prettier_dir { - Some(prettier_dir) => log::warn!( - "Prettier from path {prettier_dir:?} exceeded launch threshold, not starting" - ), - None => log::warn!("Default prettier exceeded launch threshold, not starting"), + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(|prettier_store, mut cx| async move { + let installation_task = prettier_store.update(&mut cx, |prettier_store, _| { + match &prettier_store.default_prettier.prettier { + PrettierInstallation::NotInstalled { + installation_task, .. + } => ControlFlow::Continue(installation_task.clone()), + PrettierInstallation::Installed(default_prettier) => { + ControlFlow::Break(default_prettier.clone()) + } + } + })?; + match installation_task { + ControlFlow::Continue(None) => { + anyhow::bail!("Default prettier is not installed and cannot be started") + } + ControlFlow::Continue(Some(installation_task)) => { + log::info!("Waiting for default prettier to install"); + if let Err(e) = installation_task.await { + prettier_store.update(&mut cx, |project, _| { + if let PrettierInstallation::NotInstalled { + installation_task, + attempts, + .. 
+ } = &mut project.default_prettier.prettier + { + *installation_task = None; + *attempts += 1; + } + })?; + anyhow::bail!( + "Cannot start default prettier due to its installation failure: {e:#}" + ); + } + let new_default_prettier = + prettier_store.update(&mut cx, |prettier_store, cx| { + let new_default_prettier = Self::start_prettier( + node, + default_prettier_dir().clone(), + worktree_id, + cx, + ); + prettier_store.default_prettier.prettier = + PrettierInstallation::Installed(PrettierInstance { + attempt: 0, + prettier: Some(new_default_prettier.clone()), + }); + new_default_prettier + })?; + Ok(new_default_prettier) + } + ControlFlow::Break(instance) => match instance.prettier { + Some(instance) => Ok(instance), + None => { + let new_default_prettier = + prettier_store.update(&mut cx, |prettier_store, cx| { + let new_default_prettier = Self::start_prettier( + node, + default_prettier_dir().clone(), + worktree_id, + cx, + ); + prettier_store.default_prettier.prettier = + PrettierInstallation::Installed(PrettierInstance { + attempt: instance.attempt + 1, + prettier: Some(new_default_prettier.clone()), + }); + new_default_prettier + })?; + Ok(new_default_prettier) + } + }, } - return None; - } - Some(match &self.prettier { - Some(prettier_task) => Task::ready(Ok(prettier_task.clone())), - None => match prettier_dir { - Some(prettier_dir) => { - let new_task = start_prettier( - Arc::clone(node), - prettier_dir.to_path_buf(), - worktree_id, - cx, - ); - self.attempt += 1; - self.prettier = Some(new_task.clone()); - Task::ready(Ok(new_task)) - } - None => { - self.attempt += 1; - let node = Arc::clone(node); - cx.spawn(|project, mut cx| async move { - project - .update(&mut cx, |_, cx| { - start_default_prettier(node, worktree_id, cx) - })? 
- .await - }) - } - }, }) } - pub async fn server(&self) -> Option> { - self.prettier.clone()?.await.ok()?.server().cloned() - } -} - -fn start_default_prettier( - node: Arc, - worktree_id: Option, - cx: &mut ModelContext<'_, Project>, -) -> Task> { - cx.spawn(|project, mut cx| async move { - let installation_task = project.update(&mut cx, |project, _| { - match &project.default_prettier.prettier { - PrettierInstallation::NotInstalled { - installation_task, .. - } => ControlFlow::Continue(installation_task.clone()), - PrettierInstallation::Installed(default_prettier) => { - ControlFlow::Break(default_prettier.clone()) - } - } - })?; - match installation_task { - ControlFlow::Continue(None) => { - anyhow::bail!("Default prettier is not installed and cannot be started") - } - ControlFlow::Continue(Some(installation_task)) => { - log::info!("Waiting for default prettier to install"); - if let Err(e) = installation_task.await { - project.update(&mut cx, |project, _| { - if let PrettierInstallation::NotInstalled { - installation_task, - attempts, - .. 
- } = &mut project.default_prettier.prettier - { - *installation_task = None; - *attempts += 1; - } - })?; - anyhow::bail!( - "Cannot start default prettier due to its installation failure: {e:#}" - ); - } - let new_default_prettier = project.update(&mut cx, |project, cx| { - let new_default_prettier = - start_prettier(node, default_prettier_dir().clone(), worktree_id, cx); - project.default_prettier.prettier = - PrettierInstallation::Installed(PrettierInstance { - attempt: 0, - prettier: Some(new_default_prettier.clone()), - }); - new_default_prettier - })?; - Ok(new_default_prettier) - } - ControlFlow::Break(instance) => match instance.prettier { - Some(instance) => Ok(instance), - None => { - let new_default_prettier = project.update(&mut cx, |project, cx| { - let new_default_prettier = - start_prettier(node, default_prettier_dir().clone(), worktree_id, cx); - project.default_prettier.prettier = - PrettierInstallation::Installed(PrettierInstance { - attempt: instance.attempt + 1, - prettier: Some(new_default_prettier.clone()), - }); - new_default_prettier - })?; - Ok(new_default_prettier) - } - }, + fn register_new_prettier( + prettier_store: &WeakModel, + prettier: &Prettier, + worktree_id: Option, + new_server_id: LanguageServerId, + cx: &mut AsyncAppContext, + ) { + let prettier_dir = prettier.prettier_dir(); + let is_default = prettier.is_default(); + if is_default { + log::info!("Started default prettier in {prettier_dir:?}"); + } else { + log::info!("Started prettier in {prettier_dir:?}"); } - }) -} - -fn start_prettier( - node: Arc, - prettier_dir: PathBuf, - worktree_id: Option, - cx: &mut ModelContext<'_, Project>, -) -> PrettierTask { - cx.spawn(|project, mut cx| async move { - log::info!("Starting prettier at path {prettier_dir:?}"); - let new_server_id = project.update(&mut cx, |project, _| { - project.languages.next_language_server_id() - })?; - - let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone()) - .await - 
.context("default prettier spawn") - .map(Arc::new) - .map_err(Arc::new)?; - register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx); - Ok(new_prettier) - }) - .shared() -} - -fn register_new_prettier( - project: &WeakModel, - prettier: &Prettier, - worktree_id: Option, - new_server_id: LanguageServerId, - cx: &mut AsyncAppContext, -) { - let prettier_dir = prettier.prettier_dir(); - let is_default = prettier.is_default(); - if is_default { - log::info!("Started default prettier in {prettier_dir:?}"); - } else { - log::info!("Started prettier in {prettier_dir:?}"); - } - if let Some(prettier_server) = prettier.server() { - project - .update(cx, |project, cx| { - let name = if is_default { - LanguageServerName(Arc::from("prettier (default)")) - } else { - let worktree_path = worktree_id - .and_then(|id| project.worktree_for_id(id, cx)) - .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path())); - let name = match worktree_path { - Some(worktree_path) => { - if prettier_dir == worktree_path.as_ref() { - let name = prettier_dir - .file_name() - .and_then(|name| name.to_str()) - .unwrap_or_default(); - format!("prettier ({name})") - } else { - let dir_to_display = prettier_dir - .strip_prefix(worktree_path.as_ref()) - .ok() - .unwrap_or(prettier_dir); - format!("prettier ({})", dir_to_display.display()) + if let Some(prettier_server) = prettier.server() { + prettier_store + .update(cx, |prettier_store, cx| { + let name = if is_default { + LanguageServerName(Arc::from("prettier (default)")) + } else { + let worktree_path = worktree_id + .and_then(|id| { + prettier_store + .worktree_store + .read(cx) + .worktree_for_id(id, cx) + }) + .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path())); + let name = match worktree_path { + Some(worktree_path) => { + if prettier_dir == worktree_path.as_ref() { + let name = prettier_dir + .file_name() + .and_then(|name| name.to_str()) + .unwrap_or_default(); + format!("prettier 
({name})") + } else { + let dir_to_display = prettier_dir + .strip_prefix(worktree_path.as_ref()) + .ok() + .unwrap_or(prettier_dir); + format!("prettier ({})", dir_to_display.display()) + } } - } - None => format!("prettier ({})", prettier_dir.display()), + None => format!("prettier ({})", prettier_dir.display()), + }; + LanguageServerName(Arc::from(name)) }; - LanguageServerName(Arc::from(name)) - }; - project.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.register_supplementary_language_server( + cx.emit(PrettierStoreEvent::LanguageServerAdded { new_server_id, name, - Arc::clone(prettier_server), - cx, - ) - }); - }) - .ok(); - } -} - -async fn install_prettier_packages( - fs: &dyn Fs, - plugins_to_install: HashSet>, - node: Arc, -) -> anyhow::Result<()> { - let packages_to_versions = future::try_join_all( - plugins_to_install - .iter() - .chain(Some(&"prettier".into())) - .map(|package_name| async { - let returned_package_name = package_name.to_string(); - let latest_version = node - .npm_package_latest_version(package_name) - .await - .with_context(|| { - format!("fetching latest npm version for package {returned_package_name}") - })?; - anyhow::Ok((returned_package_name, latest_version)) - }), - ) - .await - .context("fetching latest npm versions")?; - - let default_prettier_dir = default_prettier_dir().as_path(); - match fs.metadata(default_prettier_dir).await.with_context(|| { - format!("fetching FS metadata for default prettier dir {default_prettier_dir:?}") - })? 
{ - Some(prettier_dir_metadata) => anyhow::ensure!( - prettier_dir_metadata.is_dir, - "default prettier dir {default_prettier_dir:?} is not a directory" - ), - None => fs - .create_dir(default_prettier_dir) - .await - .with_context(|| format!("creating default prettier dir {default_prettier_dir:?}"))?, + prettier_server: prettier_server.clone(), + }); + }) + .ok(); + } } - log::info!("Installing default prettier and plugins: {packages_to_versions:?}"); - let borrowed_packages = packages_to_versions - .iter() - .map(|(package, version)| (package.as_str(), version.as_str())) - .collect::>(); - node.npm_install_packages(default_prettier_dir, &borrowed_packages) - .await - .context("fetching formatter packages")?; - anyhow::Ok(()) -} - -async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> { - let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); - fs.save( - &prettier_wrapper_path, - &text::Rope::from(prettier::PRETTIER_SERVER_JS), - text::LineEnding::Unix, - ) - .await - .with_context(|| { - format!( - "writing {} file at {prettier_wrapper_path:?}", - prettier::PRETTIER_SERVER_FILE - ) - })?; - Ok(()) -} - -async fn should_write_prettier_server_file(fs: &dyn Fs) -> bool { - let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); - if !fs.is_file(&prettier_wrapper_path).await { - return true; - } - let Ok(prettier_server_file_contents) = fs.load(&prettier_wrapper_path).await else { - return true; - }; - prettier_server_file_contents != prettier::PRETTIER_SERVER_JS -} - -impl Project { pub fn update_prettier_settings( &self, worktree: &Model, changes: &[(Arc, ProjectEntryId, PathChange)], - cx: &mut ModelContext<'_, Project>, + cx: &mut ModelContext, ) { let prettier_config_files = Prettier::CONFIG_FILE_NAMES .iter() @@ -510,122 +450,6 @@ impl Project { } } - fn prettier_instance_for_buffer( - &mut self, - buffer: &Model, - cx: &mut ModelContext, - ) -> Task, PrettierTask)>> { - // 
todo(ssh remote): prettier support - if self.is_via_collab() || self.ssh_session.is_some() { - return Task::ready(None); - } - let buffer = buffer.read(cx); - let buffer_file = buffer.file(); - if buffer.language().is_none() { - return Task::ready(None); - } - let Some(node) = self.node.clone() else { - return Task::ready(None); - }; - match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) { - Some((worktree_id, buffer_path)) => { - let fs = Arc::clone(&self.fs); - let installed_prettiers = self.prettier_instances.keys().cloned().collect(); - cx.spawn(|project, mut cx| async move { - match cx - .background_executor() - .spawn(async move { - Prettier::locate_prettier_installation( - fs.as_ref(), - &installed_prettiers, - &buffer_path, - ) - .await - }) - .await - { - Ok(ControlFlow::Break(())) => None, - Ok(ControlFlow::Continue(None)) => { - let default_instance = project - .update(&mut cx, |project, cx| { - project - .prettiers_per_worktree - .entry(worktree_id) - .or_default() - .insert(None); - project.default_prettier.prettier_task( - &node, - Some(worktree_id), - cx, - ) - }) - .ok()?; - Some((None, default_instance?.log_err().await?)) - } - Ok(ControlFlow::Continue(Some(prettier_dir))) => { - project - .update(&mut cx, |project, _| { - project - .prettiers_per_worktree - .entry(worktree_id) - .or_default() - .insert(Some(prettier_dir.clone())) - }) - .ok()?; - if let Some(prettier_task) = project - .update(&mut cx, |project, cx| { - project.prettier_instances.get_mut(&prettier_dir).map( - |existing_instance| { - existing_instance.prettier_task( - &node, - Some(&prettier_dir), - Some(worktree_id), - cx, - ) - }, - ) - }) - .ok()? 
- { - log::debug!("Found already started prettier in {prettier_dir:?}"); - return Some((Some(prettier_dir), prettier_task?.await.log_err()?)); - } - - log::info!("Found prettier in {prettier_dir:?}, starting."); - let new_prettier_task = project - .update(&mut cx, |project, cx| { - let new_prettier_task = start_prettier( - node, - prettier_dir.clone(), - Some(worktree_id), - cx, - ); - project.prettier_instances.insert( - prettier_dir.clone(), - PrettierInstance { - attempt: 0, - prettier: Some(new_prettier_task.clone()), - }, - ); - new_prettier_task - }) - .ok()?; - Some((Some(prettier_dir), new_prettier_task)) - } - Err(e) => { - log::error!("Failed to determine prettier path for buffer: {e:#}"); - None - } - } - }) - } - None => { - let new_task = self.default_prettier.prettier_task(&node, None, cx); - cx.spawn(|_, _| async move { Some((None, new_task?.log_err().await?)) }) - } - } - } - pub fn install_default_prettier( &mut self, worktree: Option, @@ -642,12 +466,13 @@ impl Project { } let mut new_plugins = plugins.collect::>(); - let Some(node) = self.node.as_ref().cloned() else { - return; - }; + let node = self.node.clone(); + let fs = Arc::clone(&self.fs); let locate_prettier_installation = match worktree.and_then(|worktree_id| { - self.worktree_for_id(worktree_id, cx) + self.worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) .map(|worktree| worktree.read(cx).abs_path()) }) { Some(locate_from) => { @@ -777,4 +602,291 @@ impl Project { not_installed_plugins: plugins_to_install, }; } + + pub fn on_settings_changed( + &mut self, + language_formatters_to_check: Vec<(Option, LanguageSettings)>, + cx: &mut ModelContext, + ) { + let mut prettier_plugins_by_worktree = HashMap::default(); + for (worktree, language_settings) in language_formatters_to_check { + if let Some(plugins) = prettier_plugins_for_language(&language_settings) { + prettier_plugins_by_worktree + .entry(worktree) + .or_insert_with(HashSet::default) + .extend(plugins.iter().cloned()); 
+ } + } + for (worktree, prettier_plugins) in prettier_plugins_by_worktree { + self.install_default_prettier( + worktree, + prettier_plugins.into_iter().map(Arc::from), + cx, + ); + } + } +} + +pub fn prettier_plugins_for_language( + language_settings: &LanguageSettings, +) -> Option<&HashSet> { + match &language_settings.formatter { + SelectedFormatter::Auto => Some(&language_settings.prettier.plugins), + + SelectedFormatter::List(list) => list + .as_ref() + .contains(&Formatter::Prettier) + .then_some(&language_settings.prettier.plugins), + } +} + +pub(super) async fn format_with_prettier( + prettier_store: &WeakModel, + buffer: &Model, + cx: &mut AsyncAppContext, +) -> Option> { + let prettier_instance = prettier_store + .update(cx, |prettier_store, cx| { + prettier_store.prettier_instance_for_buffer(buffer, cx) + }) + .ok()? + .await; + + let (prettier_path, prettier_task) = prettier_instance?; + + let prettier_description = match prettier_path.as_ref() { + Some(path) => format!("prettier at {path:?}"), + None => "default prettier instance".to_string(), + }; + + match prettier_task.await { + Ok(prettier) => { + let buffer_path = buffer + .update(cx, |buffer, cx| { + File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) + }) + .ok() + .flatten(); + + let format_result = prettier + .format(buffer, buffer_path, cx) + .await + .map(FormatOperation::Prettier) + .with_context(|| format!("{} failed to format buffer", prettier_description)); + + Some(format_result) + } + Err(error) => { + prettier_store + .update(cx, |project, _| { + let instance_to_update = match prettier_path { + Some(prettier_path) => project.prettier_instances.get_mut(&prettier_path), + None => match &mut project.default_prettier.prettier { + PrettierInstallation::NotInstalled { .. 
} => None, + PrettierInstallation::Installed(instance) => Some(instance), + }, + }; + + if let Some(instance) = instance_to_update { + instance.attempt += 1; + instance.prettier = None; + } + }) + .log_err(); + + Some(Err(anyhow!( + "{} failed to spawn: {error:#}", + prettier_description + ))) + } + } +} + +pub struct DefaultPrettier { + prettier: PrettierInstallation, + installed_plugins: HashSet>, +} + +#[derive(Debug)] +pub enum PrettierInstallation { + NotInstalled { + attempts: usize, + installation_task: Option>>>>, + not_installed_plugins: HashSet>, + }, + Installed(PrettierInstance), +} + +pub type PrettierTask = Shared, Arc>>>; + +#[derive(Debug, Clone)] +pub struct PrettierInstance { + attempt: usize, + prettier: Option, +} + +impl Default for DefaultPrettier { + fn default() -> Self { + Self { + prettier: PrettierInstallation::NotInstalled { + attempts: 0, + installation_task: None, + not_installed_plugins: HashSet::default(), + }, + installed_plugins: HashSet::default(), + } + } +} + +impl DefaultPrettier { + pub fn instance(&self) -> Option<&PrettierInstance> { + if let PrettierInstallation::Installed(instance) = &self.prettier { + Some(instance) + } else { + None + } + } + + pub fn prettier_task( + &mut self, + node: &Arc, + worktree_id: Option, + cx: &mut ModelContext, + ) -> Option>> { + match &mut self.prettier { + PrettierInstallation::NotInstalled { .. 
} => Some( + PrettierStore::start_default_prettier(node.clone(), worktree_id, cx), + ), + PrettierInstallation::Installed(existing_instance) => { + existing_instance.prettier_task(node, None, worktree_id, cx) + } + } + } +} + +impl PrettierInstance { + pub fn prettier_task( + &mut self, + node: &Arc, + prettier_dir: Option<&Path>, + worktree_id: Option, + cx: &mut ModelContext, + ) -> Option>> { + if self.attempt > prettier::FAIL_THRESHOLD { + match prettier_dir { + Some(prettier_dir) => log::warn!( + "Prettier from path {prettier_dir:?} exceeded launch threshold, not starting" + ), + None => log::warn!("Default prettier exceeded launch threshold, not starting"), + } + return None; + } + Some(match &self.prettier { + Some(prettier_task) => Task::ready(Ok(prettier_task.clone())), + None => match prettier_dir { + Some(prettier_dir) => { + let new_task = PrettierStore::start_prettier( + Arc::clone(node), + prettier_dir.to_path_buf(), + worktree_id, + cx, + ); + self.attempt += 1; + self.prettier = Some(new_task.clone()); + Task::ready(Ok(new_task)) + } + None => { + self.attempt += 1; + let node = Arc::clone(node); + cx.spawn(|prettier_store, mut cx| async move { + prettier_store + .update(&mut cx, |_, cx| { + PrettierStore::start_default_prettier(node, worktree_id, cx) + })? 
+ .await + }) + } + }, + }) + } + + pub async fn server(&self) -> Option> { + self.prettier.clone()?.await.ok()?.server().cloned() + } +} + +async fn install_prettier_packages( + fs: &dyn Fs, + plugins_to_install: HashSet>, + node: Arc, +) -> anyhow::Result<()> { + let packages_to_versions = future::try_join_all( + plugins_to_install + .iter() + .chain(Some(&"prettier".into())) + .map(|package_name| async { + let returned_package_name = package_name.to_string(); + let latest_version = node + .npm_package_latest_version(package_name) + .await + .with_context(|| { + format!("fetching latest npm version for package {returned_package_name}") + })?; + anyhow::Ok((returned_package_name, latest_version)) + }), + ) + .await + .context("fetching latest npm versions")?; + + let default_prettier_dir = default_prettier_dir().as_path(); + match fs.metadata(default_prettier_dir).await.with_context(|| { + format!("fetching FS metadata for default prettier dir {default_prettier_dir:?}") + })? { + Some(prettier_dir_metadata) => anyhow::ensure!( + prettier_dir_metadata.is_dir, + "default prettier dir {default_prettier_dir:?} is not a directory" + ), + None => fs + .create_dir(default_prettier_dir) + .await + .with_context(|| format!("creating default prettier dir {default_prettier_dir:?}"))?, + } + + log::info!("Installing default prettier and plugins: {packages_to_versions:?}"); + let borrowed_packages = packages_to_versions + .iter() + .map(|(package, version)| (package.as_str(), version.as_str())) + .collect::>(); + node.npm_install_packages(default_prettier_dir, &borrowed_packages) + .await + .context("fetching formatter packages")?; + anyhow::Ok(()) +} + +async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> { + let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); + fs.save( + &prettier_wrapper_path, + &text::Rope::from(prettier::PRETTIER_SERVER_JS), + text::LineEnding::Unix, + ) + .await + .with_context(|| { + format!( + 
"writing {} file at {prettier_wrapper_path:?}", + prettier::PRETTIER_SERVER_FILE + ) + })?; + Ok(()) +} + +async fn should_write_prettier_server_file(fs: &dyn Fs) -> bool { + let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); + if !fs.is_file(&prettier_wrapper_path).await { + return true; + } + let Ok(prettier_server_file_contents) = fs.load(&prettier_wrapper_path).await else { + return true; + }; + prettier_server_file_contents != prettier::PRETTIER_SERVER_JS } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 4318737e38..f4816cf0cd 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4,7 +4,7 @@ pub mod debounced_delay; pub mod lsp_command; pub mod lsp_ext_command; pub mod lsp_store; -mod prettier_support; +pub mod prettier_store; pub mod project_settings; pub mod search; mod task_inventory; @@ -31,7 +31,6 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - stream::FuturesUnordered, AsyncWriteExt, FutureExt, StreamExt, }; @@ -59,8 +58,8 @@ use lsp_command::*; use node_runtime::NodeRuntime; use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; -use prettier_support::{DefaultPrettier, PrettierInstance}; -use project_settings::{LspSettings, ProjectSettings, SettingsObserver}; +pub use prettier_store::PrettierStore; +use project_settings::{ProjectSettings, SettingsObserver}; use remote::SshSession; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; @@ -140,7 +139,6 @@ pub struct Project { buffer_ordered_messages_tx: mpsc::UnboundedSender, languages: Arc, client: Arc, - current_lsp_settings: HashMap, LspSettings>, join_project_response_message_id: u32, user_store: Model, fs: Arc, @@ -157,9 +155,6 @@ pub struct Project { remotely_created_buffers: Arc>, terminals: 
Terminals, node: Option>, - default_prettier: DefaultPrettier, - prettiers_per_worktree: HashMap>>, - prettier_instances: HashMap, tasks: Model, hosted_project_id: Option, dev_server_project_id: Option, @@ -634,6 +629,16 @@ impl Project { cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); + let prettier_store = cx.new_model(|cx| { + PrettierStore::new( + node.clone(), + fs.clone(), + languages.clone(), + worktree_store.clone(), + cx, + ) + }); + let settings_observer = cx.new_model(|cx| { SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx) }); @@ -643,6 +648,7 @@ impl Project { LspStore::new_local( buffer_store.clone(), worktree_store.clone(), + prettier_store.clone(), environment.clone(), languages.clone(), Some(client.http_client()), @@ -658,14 +664,10 @@ impl Project { worktree_store, buffer_store, lsp_store, - current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), join_project_response_message_id: 0, client_state: ProjectClientState::Local, client_subscriptions: Vec::new(), - _subscriptions: vec![ - cx.observe_global::(Self::on_settings_changed), - cx.on_release(Self::release), - ], + _subscriptions: vec![cx.on_release(Self::release)], active_entry: None, snippets, languages, @@ -680,9 +682,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - default_prettier: DefaultPrettier::default(), - prettiers_per_worktree: HashMap::default(), - prettier_instances: HashMap::default(), tasks, hosted_project_id: None, dev_server_project_id: None, @@ -751,14 +750,10 @@ impl Project { worktree_store, buffer_store, lsp_store, - current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), join_project_response_message_id: 0, client_state: ProjectClientState::Local, client_subscriptions: Vec::new(), - _subscriptions: vec![ - cx.observe_global::(Self::on_settings_changed), - cx.on_release(Self::release), - ], + _subscriptions: vec![cx.on_release(Self::release)], active_entry: None, snippets, languages, @@ -773,9 
+768,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - default_prettier: DefaultPrettier::default(), - prettiers_per_worktree: HashMap::default(), - prettier_instances: HashMap::default(), tasks, hosted_project_id: None, dev_server_project_id: None, @@ -928,7 +920,6 @@ impl Project { buffer_store: buffer_store.clone(), worktree_store: worktree_store.clone(), lsp_store: lsp_store.clone(), - current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), active_entry: None, collaborators: Default::default(), join_project_response_message_id: response.message_id, @@ -954,9 +945,6 @@ impl Project { local_handles: Vec::new(), }, node: None, - default_prettier: DefaultPrettier::default(), - prettiers_per_worktree: HashMap::default(), - prettier_instances: HashMap::default(), tasks, hosted_project_id: None, dev_server_project_id: response @@ -1176,112 +1164,6 @@ impl Project { self.worktree_store.clone() } - fn on_settings_changed(&mut self, cx: &mut ModelContext) { - let mut language_servers_to_start = Vec::new(); - let mut language_formatters_to_check = Vec::new(); - for buffer in self.buffer_store.read(cx).buffers() { - let buffer = buffer.read(cx); - let buffer_file = File::from_dyn(buffer.file()); - let buffer_language = buffer.language(); - let settings = language_settings(buffer_language, buffer.file(), cx); - if let Some(language) = buffer_language { - if settings.enable_language_server { - if let Some(file) = buffer_file { - language_servers_to_start.push((file.worktree.clone(), language.name())); - } - } - language_formatters_to_check - .push((buffer_file.map(|f| f.worktree_id(cx)), settings.clone())); - } - } - - let mut language_servers_to_stop = Vec::new(); - let mut language_servers_to_restart = Vec::new(); - let languages = self.languages.to_vec(); - - let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone(); - let current_lsp_settings = &self.current_lsp_settings; - for (worktree_id, started_lsp_name) in 
self.lsp_store.read(cx).started_language_servers() { - let language = languages.iter().find_map(|l| { - let adapter = self - .languages - .lsp_adapters(&l.name()) - .iter() - .find(|adapter| adapter.name == started_lsp_name)? - .clone(); - Some((l, adapter)) - }); - if let Some((language, adapter)) = language { - let worktree = self.worktree_for_id(worktree_id, cx); - let file = worktree.as_ref().and_then(|tree| { - tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _)) - }); - if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { - language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); - } else if let Some(worktree) = worktree { - let server_name = &adapter.name.0; - match ( - current_lsp_settings.get(server_name), - new_lsp_settings.get(server_name), - ) { - (None, None) => {} - (Some(_), None) | (None, Some(_)) => { - language_servers_to_restart.push((worktree, language.name())); - } - (Some(current_lsp_settings), Some(new_lsp_settings)) => { - if current_lsp_settings != new_lsp_settings { - language_servers_to_restart.push((worktree, language.name())); - } - } - } - } - } - } - self.current_lsp_settings = new_lsp_settings; - - // Stop all newly-disabled language servers. 
- self.lsp_store.update(cx, |lsp_store, cx| { - for (worktree_id, adapter_name) in language_servers_to_stop { - lsp_store - .stop_language_server(worktree_id, adapter_name, cx) - .detach(); - } - }); - - let mut prettier_plugins_by_worktree = HashMap::default(); - for (worktree, language_settings) in language_formatters_to_check { - if let Some(plugins) = - prettier_support::prettier_plugins_for_language(&language_settings) - { - prettier_plugins_by_worktree - .entry(worktree) - .or_insert_with(HashSet::default) - .extend(plugins.iter().cloned()); - } - } - for (worktree, prettier_plugins) in prettier_plugins_by_worktree { - self.install_default_prettier( - worktree, - prettier_plugins.into_iter().map(Arc::from), - cx, - ); - } - - // Start all the newly-enabled language servers. - self.lsp_store.update(cx, |lsp_store, cx| { - for (worktree, language) in language_servers_to_start { - lsp_store.start_language_servers(&worktree, language, cx); - } - - // Restart all language servers with changed initialization options. 
- for (worktree, language) in language_servers_to_restart { - lsp_store.restart_language_servers(worktree, language, cx); - } - }); - - cx.notify(); - } - pub fn buffer_for_id(&self, remote_id: BufferId, cx: &AppContext) -> Option> { self.buffer_store.read(cx).get(remote_id) } @@ -2160,24 +2042,10 @@ impl Project { buffer, new_language, } => { - let Some(new_language) = new_language else { + let Some(_) = new_language else { cx.emit(Event::LanguageNotFound(buffer.clone())); return; }; - let buffer_file = buffer.read(cx).file().cloned(); - let settings = - language_settings(Some(new_language), buffer_file.as_ref(), cx).clone(); - let buffer_file = File::from_dyn(buffer_file.as_ref()); - let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); - if let Some(prettier_plugins) = - prettier_support::prettier_plugins_for_language(&settings) - { - self.install_default_prettier( - worktree, - prettier_plugins.iter().map(|s| Arc::from(s.as_str())), - cx, - ); - }; } LspStoreEvent::RefreshInlayHints => cx.emit(Event::RefreshInlayHints), LspStoreEvent::LanguageServerPrompt(prompt) => { @@ -2253,7 +2121,6 @@ impl Project { worktree::Event::UpdatedEntries(changes) => { if is_local { this.update_local_worktree_settings(&worktree, changes, cx); - this.update_prettier_settings(&worktree, changes, cx); } cx.emit(Event::WorktreeUpdatedEntries( @@ -2300,37 +2167,6 @@ impl Project { return; } - let mut prettier_instances_to_clean = FuturesUnordered::new(); - if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) { - for path in prettier_paths.iter().flatten() { - if let Some(prettier_instance) = self.prettier_instances.remove(path) { - prettier_instances_to_clean.push(async move { - prettier_instance - .server() - .await - .map(|server| server.server_id()) - }); - } - } - } - cx.spawn(|project, mut cx| async move { - while let Some(prettier_server_id) = prettier_instances_to_clean.next().await { - if let Some(prettier_server_id) = prettier_server_id { - 
project - .update(&mut cx, |project, cx| { - project.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.unregister_supplementary_language_server( - prettier_server_id, - cx, - ); - }); - }) - .ok(); - } - } - }) - .detach(); - self.task_inventory().update(cx, |inventory, _| { inventory.remove_worktree_sources(id_to_remove); }); @@ -3059,11 +2895,21 @@ impl Project { None } } - Formatter::Prettier => prettier_support::format_with_prettier(&project, buffer, cx) - .await - .transpose() - .ok() - .flatten(), + Formatter::Prettier => { + let prettier = project.update(cx, |project, cx| { + project + .lsp_store + .read(cx) + .prettier_store() + .unwrap() + .downgrade() + })?; + prettier_store::format_with_prettier(&prettier, buffer, cx) + .await + .transpose() + .ok() + .flatten() + } Formatter::External { command, arguments } => { let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index f5efa21bd0..ed12b41167 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -26,6 +26,7 @@ env_logger.workspace = true fs.workspace = true futures.workspace = true gpui.workspace = true +node_runtime.workspace = true log.workspace = true project.workspace = true remote.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 35d6630c1e..ec26bddfc3 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -2,12 +2,13 @@ use anyhow::{anyhow, Result}; use fs::Fs; use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext}; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; +use node_runtime::DummyNodeRuntime; use project::{ buffer_store::{BufferStore, BufferStoreEvent}, project_settings::SettingsObserver, 
search::SearchQuery, worktree_store::WorktreeStore, - LspStore, LspStoreEvent, ProjectPath, WorktreeId, + LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, }; use remote::SshSession; use rpc::{ @@ -54,6 +55,16 @@ impl HeadlessProject { buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx); buffer_store }); + let prettier_store = cx.new_model(|cx| { + PrettierStore::new( + DummyNodeRuntime::new(), + fs.clone(), + languages.clone(), + worktree_store.clone(), + cx, + ) + }); + let settings_observer = cx.new_model(|cx| { let mut observer = SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx); observer.shared(SSH_PROJECT_ID, session.clone().into(), cx); @@ -64,6 +75,7 @@ impl HeadlessProject { let mut lsp_store = LspStore::new_local( buffer_store.clone(), worktree_store.clone(), + prettier_store.clone(), environment, languages.clone(), None, From 56f9e4c7b3834826c01a75f0a883e594a8482d90 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 17 Sep 2024 15:39:44 -0700 Subject: [PATCH 172/270] Remove visible 'TBD' from docs (#17979) Release Notes: - N/A --- docs/src/extensions/languages.md | 2 ++ docs/src/key-bindings.md | 2 ++ docs/src/languages/javascript.md | 2 ++ 3 files changed, 6 insertions(+) diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index 90de3f658d..c003285303 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -284,7 +284,9 @@ The `@run` capture specifies where the run button should appear in the editor. O | @run | Captures the script name | | @script | Also captures the script name (for different purposes) | + ## Language Servers diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 989e101e7d..b35c894071 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -177,8 +177,10 @@ See the [tasks documentation](tasks.md#custom-keybindings-for-tasks) for more. 
#### Global + | **Command** | **Target** | **Default Shortcut** | | ------------------------- | ------------ | ----------------------- | diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index 0e642527e0..8fb84881ad 100644 --- a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -26,6 +26,7 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t } ``` + ## ESLint From fbb402ef12b0e61bc02db7a1715d0a36df1b94a7 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Wed, 18 Sep 2024 06:45:08 +0800 Subject: [PATCH 173/270] windows: Remove the use of `DispatcherQueue` and fix `FileSaveDialog` unresponsive issue (#17946) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #17069, closes #12410 With the help of @kennykerr (Creator of C++/WinRT and the crate `windows-rs`, Engineer on the Windows team at Microsoft) and @riverar (Windows Development expert), we discovered that this bug only occurs when an IME with a candidate window, such as Microsoft Pinyin IME, is active. In this case, the `FileSaveDialog` becomes unresponsive—while the dialog itself appears to be functioning, it doesn't accept any mouse or keyboard input. After a period of debugging and testing, I found that this issue only arises when using `DispatcherQueue` to dispatch runnables on the UI thread. After @kennykerr’s further investigation, Kenny identified that this is a bug with `DispatcherQueue`, and he recommended to avoid using `DispatcherQueue`. Given the uncertainty about whether Microsoft will address this bug in the foreseeable future, I have removed the use of `DispatcherQueue`. 
Co-authored-by: Kenny Release Notes: - N/A --------- Co-authored-by: Kenny --- Cargo.toml | 2 +- crates/gpui/Cargo.toml | 4 +- .../gpui/src/platform/windows/dispatcher.rs | 57 ++++-------- crates/gpui/src/platform/windows/events.rs | 3 + crates/gpui/src/platform/windows/platform.rs | 90 ++++++++++++++----- crates/gpui/src/platform/windows/window.rs | 19 ++-- 6 files changed, 104 insertions(+), 71 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0b392e02eb..ec3138179b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -490,7 +490,6 @@ features = [ "implement", "Foundation_Numerics", "Storage", - "System", "System_Threading", "UI_ViewManagement", "Wdk_System_SystemServices", @@ -521,6 +520,7 @@ features = [ "Win32_UI_Input_Ime", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", + "Win32_UI_Shell_Common", "Win32_UI_WindowsAndMessaging", ] diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 09b546fc32..d0d75b73e9 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -50,7 +50,7 @@ parking = "2.0.0" parking_lot.workspace = true postage.workspace = true profiling.workspace = true -rand = { optional = true, workspace = true} +rand = { optional = true, workspace = true } raw-window-handle = "0.6" refineable.workspace = true resvg = { version = "0.41.0", default-features = false } @@ -110,6 +110,7 @@ blade-graphics.workspace = true blade-macros.workspace = true blade-util.workspace = true bytemuck = "1" +flume = "0.11" [target.'cfg(target_os = "linux")'.dependencies] as-raw-xcb-connection = "1" @@ -117,7 +118,6 @@ ashpd.workspace = true calloop = "0.13.0" calloop-wayland-source = "0.3.0" cosmic-text = { git = "https://github.com/pop-os/cosmic-text", rev = "542b20c" } -flume = "0.11" wayland-backend = { version = "0.3.3", features = ["client_system", "dlopen"] } wayland-client = { version = "0.31.2" } wayland-cursor = "0.31.1" diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index 
abe40d2c2e..575e844051 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -3,51 +3,39 @@ use std::{ time::Duration, }; +use anyhow::Context; use async_task::Runnable; +use flume::Sender; use parking::Parker; use parking_lot::Mutex; use util::ResultExt; use windows::{ Foundation::TimeSpan, - System::{ - DispatcherQueue, DispatcherQueueController, DispatcherQueueHandler, - Threading::{ - ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemOptions, - WorkItemPriority, - }, - }, - Win32::System::WinRT::{ - CreateDispatcherQueueController, DispatcherQueueOptions, DQTAT_COM_NONE, - DQTYPE_THREAD_CURRENT, + System::Threading::{ + ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemOptions, + WorkItemPriority, }, + Win32::{Foundation::HANDLE, System::Threading::SetEvent}, }; -use crate::{PlatformDispatcher, TaskLabel}; +use crate::{PlatformDispatcher, SafeHandle, TaskLabel}; pub(crate) struct WindowsDispatcher { - controller: DispatcherQueueController, - main_queue: DispatcherQueue, + main_sender: Sender, + dispatch_event: SafeHandle, parker: Mutex, main_thread_id: ThreadId, } impl WindowsDispatcher { - pub(crate) fn new() -> Self { - let controller = unsafe { - let options = DispatcherQueueOptions { - dwSize: std::mem::size_of::() as u32, - threadType: DQTYPE_THREAD_CURRENT, - apartmentType: DQTAT_COM_NONE, - }; - CreateDispatcherQueueController(options).unwrap() - }; - let main_queue = controller.DispatcherQueue().unwrap(); + pub(crate) fn new(main_sender: Sender, dispatch_event: HANDLE) -> Self { + let dispatch_event = dispatch_event.into(); let parker = Mutex::new(Parker::new()); let main_thread_id = current().id(); WindowsDispatcher { - controller, - main_queue, + main_sender, + dispatch_event, parker, main_thread_id, } @@ -86,12 +74,6 @@ impl WindowsDispatcher { } } -impl Drop for WindowsDispatcher { - fn drop(&mut self) { - 
self.controller.ShutdownQueueAsync().log_err(); - } -} - impl PlatformDispatcher for WindowsDispatcher { fn is_main_thread(&self) -> bool { current().id() == self.main_thread_id @@ -105,14 +87,11 @@ impl PlatformDispatcher for WindowsDispatcher { } fn dispatch_on_main_thread(&self, runnable: Runnable) { - let handler = { - let mut task_wrapper = Some(runnable); - DispatcherQueueHandler::new(move || { - task_wrapper.take().unwrap().run(); - Ok(()) - }) - }; - self.main_queue.TryEnqueue(&handler).log_err(); + self.main_sender + .send(runnable) + .context("Dispatch on main thread failed") + .log_err(); + unsafe { SetEvent(*self.dispatch_event).log_err() }; } fn dispatch_after(&self, duration: Duration, runnable: Runnable) { diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 0d55142ae9..b62f51f6d9 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -177,6 +177,9 @@ fn handle_timer_msg( state_ptr: Rc, ) -> Option { if wparam.0 == SIZE_MOVE_LOOP_TIMER_ID { + for runnable in state_ptr.main_receiver.drain() { + runnable.run(); + } handle_paint_msg(handle, state_ptr) } else { None diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 934d9336d2..d9f08c2247 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -8,6 +8,7 @@ use std::{ use ::util::ResultExt; use anyhow::{anyhow, Context, Result}; +use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; use itertools::Itertools; use parking_lot::RwLock; @@ -46,6 +47,8 @@ pub(crate) struct WindowsPlatform { raw_window_handles: RwLock>, // The below members will never change throughout the entire lifecycle of the app. 
icon: HICON, + main_receiver: flume::Receiver, + dispatch_event: HANDLE, background_executor: BackgroundExecutor, foreground_executor: ForegroundExecutor, text_system: Arc, @@ -89,7 +92,9 @@ impl WindowsPlatform { unsafe { OleInitialize(None).expect("unable to initialize Windows OLE"); } - let dispatcher = Arc::new(WindowsDispatcher::new()); + let (main_sender, main_receiver) = flume::unbounded::(); + let dispatch_event = unsafe { CreateEventW(None, false, false, None) }.unwrap(); + let dispatcher = Arc::new(WindowsDispatcher::new(main_sender, dispatch_event)); let background_executor = BackgroundExecutor::new(dispatcher.clone()); let foreground_executor = ForegroundExecutor::new(dispatcher); let bitmap_factory = ManuallyDrop::new(unsafe { @@ -113,6 +118,8 @@ impl WindowsPlatform { state, raw_window_handles, icon, + main_receiver, + dispatch_event, background_executor, foreground_executor, text_system, @@ -176,6 +183,24 @@ impl WindowsPlatform { lock.is_empty() } + + #[inline] + fn run_foreground_tasks(&self) { + for runnable in self.main_receiver.drain() { + runnable.run(); + } + } + + fn generate_creation_info(&self) -> WindowCreationInfo { + WindowCreationInfo { + icon: self.icon, + executor: self.foreground_executor.clone(), + current_cursor: self.state.borrow().current_cursor, + windows_version: self.windows_version, + validation_number: self.validation_number, + main_receiver: self.main_receiver.clone(), + } + } } impl Platform for WindowsPlatform { @@ -197,16 +222,21 @@ impl Platform for WindowsPlatform { begin_vsync(*vsync_event); 'a: loop { let wait_result = unsafe { - MsgWaitForMultipleObjects(Some(&[*vsync_event]), false, INFINITE, QS_ALLINPUT) + MsgWaitForMultipleObjects( + Some(&[*vsync_event, self.dispatch_event]), + false, + INFINITE, + QS_ALLINPUT, + ) }; match wait_result { // compositor clock ticked so we should draw a frame - WAIT_EVENT(0) => { - self.redraw_all(); - } + WAIT_EVENT(0) => self.redraw_all(), + // foreground tasks are dispatched + 
WAIT_EVENT(1) => self.run_foreground_tasks(), // Windows thread messages are posted - WAIT_EVENT(1) => { + WAIT_EVENT(2) => { let mut msg = MSG::default(); unsafe { while PeekMessageW(&mut msg, None, 0, 0, PM_REMOVE).as_bool() { @@ -230,6 +260,8 @@ impl Platform for WindowsPlatform { } } } + // foreground tasks may have been queued in the message handlers + self.run_foreground_tasks(); } _ => { log::error!("Something went wrong while waiting {:?}", wait_result); @@ -319,17 +351,7 @@ impl Platform for WindowsPlatform { handle: AnyWindowHandle, options: WindowParams, ) -> Result> { - let lock = self.state.borrow(); - let window = WindowsWindow::new( - handle, - options, - self.icon, - self.foreground_executor.clone(), - lock.current_cursor, - self.windows_version, - self.validation_number, - )?; - drop(lock); + let window = WindowsWindow::new(handle, options, self.generate_creation_info())?; let handle = window.get_raw_handle(); self.raw_window_handles.write().push(handle); @@ -558,6 +580,15 @@ impl Drop for WindowsPlatform { } } +pub(crate) struct WindowCreationInfo { + pub(crate) icon: HICON, + pub(crate) executor: ForegroundExecutor, + pub(crate) current_cursor: HCURSOR, + pub(crate) windows_version: WindowsVersion, + pub(crate) validation_number: usize, + pub(crate) main_receiver: flume::Receiver, +} + fn open_target(target: &str) { unsafe { let ret = ShellExecuteW( @@ -631,22 +662,33 @@ fn file_open_dialog(options: PathPromptOptions) -> Result>> fn file_save_dialog(directory: PathBuf) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? }; - if let Some(full_path) = directory.canonicalize().log_err() { - let full_path = full_path.to_string_lossy().to_string(); - if !full_path.is_empty() { - let path_item: IShellItem = - unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? 
}; - unsafe { dialog.SetFolder(&path_item).log_err() }; + if !directory.to_string_lossy().is_empty() { + if let Some(full_path) = directory.canonicalize().log_err() { + let full_path = full_path.to_string_lossy().to_string(); + if !full_path.is_empty() { + let path_item: IShellItem = + unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? }; + unsafe { dialog.SetFolder(&path_item).log_err() }; + } } } unsafe { + dialog.SetFileTypes(&[Common::COMDLG_FILTERSPEC { + pszName: windows::core::w!("All files"), + pszSpec: windows::core::w!("*.*"), + }])?; if dialog.Show(None).is_err() { // User cancelled return Ok(None); } } let shell_item = unsafe { dialog.GetResult()? }; - let file_path_string = unsafe { shell_item.GetDisplayName(SIGDN_FILESYSPATH)?.to_string()? }; + let file_path_string = unsafe { + let pwstr = shell_item.GetDisplayName(SIGDN_FILESYSPATH)?; + let string = pwstr.to_string()?; + CoTaskMemFree(Some(pwstr.0 as _)); + string + }; Ok(Some(PathBuf::from(file_path_string))) } diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 1a059491a2..e2cfb38afd 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -12,6 +12,7 @@ use std::{ use ::util::ResultExt; use anyhow::{Context, Result}; +use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; use itertools::Itertools; use raw_window_handle as rwh; @@ -63,6 +64,7 @@ pub(crate) struct WindowsWindowStatePtr { pub(crate) executor: ForegroundExecutor, pub(crate) windows_version: WindowsVersion, pub(crate) validation_number: usize, + pub(crate) main_receiver: flume::Receiver, } impl WindowsWindowState { @@ -226,6 +228,7 @@ impl WindowsWindowStatePtr { executor: context.executor.clone(), windows_version: context.windows_version, validation_number: context.validation_number, + main_receiver: context.main_receiver.clone(), })) } } @@ -253,18 +256,23 @@ struct WindowCreateContext { 
current_cursor: HCURSOR, windows_version: WindowsVersion, validation_number: usize, + main_receiver: flume::Receiver, } impl WindowsWindow { pub(crate) fn new( handle: AnyWindowHandle, params: WindowParams, - icon: HICON, - executor: ForegroundExecutor, - current_cursor: HCURSOR, - windows_version: WindowsVersion, - validation_number: usize, + creation_info: WindowCreationInfo, ) -> Result { + let WindowCreationInfo { + icon, + executor, + current_cursor, + windows_version, + validation_number, + main_receiver, + } = creation_info; let classname = register_wnd_class(icon); let hide_title_bar = params .titlebar @@ -305,6 +313,7 @@ impl WindowsWindow { current_cursor, windows_version, validation_number, + main_receiver, }; let lpparam = Some(&context as *const _ as *const _); let creation_result = unsafe { From 4d074fc737db71ff10b4cf1e68f4b70e092124b6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 19:20:45 -0400 Subject: [PATCH 174/270] editor: Fix rewrap with a non-empty selection (#17980) This PR fixes an issue where rewrapping would not occur with a non-empty selection. It is only the expansion to neighboring lines that needs to be gated by an empty selection. 
Release Notes: - N/A --- crates/editor/src/editor.rs | 30 ++++++++++++++-------------- crates/editor/src/editor_tests.rs | 33 +++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 15 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c3c54e4951..61a59665c1 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6736,22 +6736,22 @@ impl Editor { let mut line_prefix = indent_size.chars().collect::(); - if selection.is_empty() { - if let Some(comment_prefix) = - buffer - .language_scope_at(selection.head()) - .and_then(|language| { - language - .line_comment_prefixes() - .iter() - .find(|prefix| buffer.contains_str_at(indent_end, prefix)) - .cloned() - }) - { - line_prefix.push_str(&comment_prefix); - should_rewrap = true; - } + if let Some(comment_prefix) = + buffer + .language_scope_at(selection.head()) + .and_then(|language| { + language + .line_comment_prefixes() + .iter() + .find(|prefix| buffer.contains_str_at(indent_end, prefix)) + .cloned() + }) + { + line_prefix.push_str(&comment_prefix); + should_rewrap = true; + } + if selection.is_empty() { 'expand_upwards: while start_row > 0 { let prev_row = start_row - 1; if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 6a8efc5905..e11b38ba59 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4017,6 +4017,39 @@ async fn test_rewrap(cx: &mut TestAppContext) { cx.assert_editor_state(wrapped_text); } + // Test that rewrapping works inside of a selection + { + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into()], + ..LanguageConfig::default() + }, + None, + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + let unwrapped_text = indoc! {" + «// Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Vivamus mollis elit purus, a ornare lacus gravida vitae. Proin consectetur felis vel purus auctor, eu lacinia sapien scelerisque. Vivamus sit amet neque et quam tincidunt hendrerit. Praesent semper egestas tellus id dignissim. Pellentesque odio lectus, iaculis ac volutpat et, blandit quis urna. Sed vestibulum nisi sit amet nisl venenatis tempus. Donec molestie blandit quam, et porta nunc laoreet in. Integer sit amet scelerisque nisi. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras egestas porta metus, eu viverra ipsum efficitur quis. Donec luctus eros turpis, id vulputate turpis porttitor id. Aliquam id accumsan eros.ˇ» + "}; + + let wrapped_text = indoc! {" + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit + // purus, a ornare lacus gravida vitae. Proin consectetur felis vel purus + // auctor, eu lacinia sapien scelerisque. Vivamus sit amet neque et quam + // tincidunt hendrerit. Praesent semper egestas tellus id dignissim. + // Pellentesque odio lectus, iaculis ac volutpat et, blandit quis urna. Sed + // vestibulum nisi sit amet nisl venenatis tempus. Donec molestie blandit quam, + // et porta nunc laoreet in. Integer sit amet scelerisque nisi. Lorem ipsum + // dolor sit amet, consectetur adipiscing elit. Cras egestas porta metus, eu + // viverra ipsum efficitur quis. Donec luctus eros turpis, id vulputate turpis + // porttitor id. Aliquam id accumsan eros.ˇ + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + } + // Test that cursors that expand to the same region are collapsed. { let language = Arc::new(Language::new( From 2e72fd210a93aaaf5a4a1ad9fd04a046fd1f394e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 17 Sep 2024 19:43:59 -0600 Subject: [PATCH 175/270] Replace Default trait bound with a zero function on Summary/Dimension (#17975) This lets us provide a context when constructing the zero value. 
We need it so we can require anchors to be associated with a buffer id, which we're doing as part of simplifying the multibuffer API. Release Notes: - N/A Co-authored-by: Nathan --- crates/channel/src/channel_chat.rs | 30 ++-- crates/editor/src/display_map.rs | 5 +- crates/editor/src/display_map/block_map.rs | 38 +++-- crates/editor/src/display_map/crease_map.rs | 41 +++-- crates/editor/src/display_map/fold_map.rs | 79 +++++++--- crates/editor/src/display_map/inlay_map.rs | 47 ++++-- crates/editor/src/display_map/wrap_map.rs | 44 ++++-- crates/editor/src/git/blame.rs | 14 +- crates/git/src/diff.rs | 46 +++--- crates/gpui/src/elements/list.rs | 34 +++-- crates/language/src/buffer.rs | 10 +- crates/language/src/diagnostic_set.rs | 8 +- crates/language/src/syntax_map.rs | 39 +++-- .../src/syntax_map/syntax_map_tests.rs | 16 +- crates/multi_buffer/src/multi_buffer.rs | 142 +++++++++++------- .../notifications/src/notification_store.rs | 21 ++- crates/project/src/lsp_store.rs | 8 +- crates/rope/src/rope.rs | 66 +++++--- crates/rope/src/unclipped.rs | 4 + crates/sum_tree/src/cursor.rs | 36 +++-- crates/sum_tree/src/sum_tree.rs | 125 +++++++++------ crates/sum_tree/src/tree_map.rs | 22 ++- crates/text/src/anchor.rs | 2 +- crates/text/src/locator.rs | 4 + crates/text/src/operation_queue.rs | 12 +- crates/text/src/text.rs | 94 ++++++++---- crates/text/src/undo_map.rs | 9 +- crates/worktree/src/worktree.rs | 59 ++++++-- 28 files changed, 706 insertions(+), 349 deletions(-) diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 286eb46a91..1a9e46db04 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -332,7 +332,7 @@ impl ChannelChat { .update(&mut cx, |chat, cx| { if let Some(first_id) = chat.first_loaded_message_id() { if first_id <= message_id { - let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(); + let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&()); let 
message_id = ChannelMessageId::Saved(message_id); cursor.seek(&message_id, Bias::Left, &()); return ControlFlow::Break( @@ -498,7 +498,7 @@ impl ChannelChat { } pub fn message(&self, ix: usize) -> &ChannelMessage { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&Count(ix), Bias::Right, &()); cursor.item().unwrap() } @@ -515,13 +515,13 @@ impl ChannelChat { } pub fn messages_in_range(&self, range: Range) -> impl Iterator { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&Count(range.start), Bias::Right, &()); cursor.take(range.len()) } pub fn pending_messages(&self) -> impl Iterator { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &()); cursor } @@ -589,11 +589,11 @@ impl ChannelChat { fn insert_messages(&mut self, messages: SumTree, cx: &mut ModelContext) { if let Some((first_message, last_message)) = messages.first().zip(messages.last()) { let nonces = messages - .cursor::<()>() + .cursor::<()>(&()) .map(|m| m.nonce) .collect::>(); - let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(); + let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&()); let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &()); let start_ix = old_cursor.start().1 .0; let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &()); @@ -646,7 +646,7 @@ impl ChannelChat { } fn message_removed(&mut self, id: u64, cx: &mut ModelContext) { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &()); if let Some(item) = cursor.item() { if item.id == ChannelMessageId::Saved(id) { @@ -685,7 +685,7 @@ impl ChannelChat { edited_at: Option, cx: &mut ModelContext, ) { - let mut cursor = self.messages.cursor::(); 
+ let mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&id, Bias::Left, &()); let ix = messages.summary().count; @@ -716,7 +716,7 @@ async fn messages_from_proto( cx: &mut AsyncAppContext, ) -> Result> { let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?; - let mut result = SumTree::new(); + let mut result = SumTree::default(); result.extend(messages, &()); Ok(result) } @@ -825,6 +825,10 @@ impl Default for ChannelMessageId { impl sum_tree::Summary for ChannelMessageSummary { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.max_id = summary.max_id; self.count += summary.count; @@ -832,6 +836,10 @@ impl sum_tree::Summary for ChannelMessageSummary { } impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { debug_assert!(summary.max_id > *self); *self = summary.max_id; @@ -839,6 +847,10 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { } impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { self.0 += summary.count; } diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 86ea7ee3fa..790a0a6a1e 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -127,7 +127,9 @@ impl DisplayMap { let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let tab_size = Self::tab_size(&buffer, cx); - let (inlay_map, snapshot) = InlayMap::new(buffer.read(cx).snapshot(cx)); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let crease_map = CreaseMap::new(&buffer_snapshot); + let (inlay_map, snapshot) = 
InlayMap::new(buffer_snapshot); let (fold_map, snapshot) = FoldMap::new(snapshot); let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx); @@ -138,7 +140,6 @@ impl DisplayMap { excerpt_header_height, excerpt_footer_height, ); - let crease_map = CreaseMap::default(); cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach(); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 28e0b9d7af..3a298832de 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -389,10 +389,10 @@ impl BlockMap { } let mut transforms = self.transforms.borrow_mut(); - let mut new_transforms = SumTree::new(); + let mut new_transforms = SumTree::default(); let old_row_count = transforms.summary().input_rows; let new_row_count = wrap_snapshot.max_point().row() + 1; - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(&()); let mut last_block_ix = 0; let mut blocks_in_edit = Vec::new(); let mut edits = edits.into_iter().peekable(); @@ -757,7 +757,7 @@ impl<'a> BlockMapReader<'a> { .unwrap_or(self.wrap_snapshot.max_point().row() + 1), ); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&start_wrap_row, Bias::Left, &()); while let Some(transform) = cursor.item() { if cursor.start().0 > end_wrap_row { @@ -950,7 +950,7 @@ impl BlockSnapshot { highlights: Highlights<'a>, ) -> BlockChunks<'a> { let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let input_end = { cursor.seek(&BlockRow(rows.end), Bias::Right, &()); let overshoot = if cursor @@ -990,7 +990,7 @@ impl BlockSnapshot { } pub(super) fn buffer_rows(&self, 
start_row: BlockRow) -> BlockBufferRows { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&start_row, Bias::Right, &()); let (output_start, input_start) = cursor.start(); let overshoot = if cursor.item().map_or(false, |t| t.is_isomorphic()) { @@ -1008,7 +1008,7 @@ impl BlockSnapshot { } pub fn blocks_in_range(&self, rows: Range) -> impl Iterator { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&BlockRow(rows.start), Bias::Left, &()); while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start { cursor.next(&()); @@ -1050,7 +1050,7 @@ impl BlockSnapshot { let wrap_point = self .wrap_snapshot .make_wrap_point(excerpt_range.start, Bias::Left); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Left, &()); while let Some(transform) = cursor.item() { if let Some(block) = transform.block.as_ref() { @@ -1072,7 +1072,7 @@ impl BlockSnapshot { .wrap_snapshot .make_wrap_point(excerpt_range.end, Bias::Left); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Left, &()); while let Some(transform) = cursor.item() { if let Some(block) = transform.block.as_ref() { @@ -1102,7 +1102,7 @@ impl BlockSnapshot { } pub(super) fn line_len(&self, row: BlockRow) -> u32 { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(row.0), Bias::Right, &()); if let Some(transform) = cursor.item() { let (output_start, input_start) = cursor.start(); @@ -1118,13 +1118,13 @@ impl BlockSnapshot { } pub(super) fn is_block_line(&self, row: BlockRow) -> bool { - 
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&row, Bias::Right, &()); cursor.item().map_or(false, |t| t.block.is_some()) } pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(point.row), Bias::Right, &()); let max_input_row = WrapRow(self.transforms.summary().input_rows); @@ -1172,7 +1172,7 @@ impl BlockSnapshot { } pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); if let Some(transform) = cursor.item() { debug_assert!(transform.is_isomorphic()); @@ -1188,7 +1188,7 @@ impl BlockSnapshot { } pub fn to_wrap_point(&self, block_point: BlockPoint) -> WrapPoint { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); if let Some(transform) = cursor.item() { match transform.block.as_ref().map(|b| b.disposition()) { @@ -1368,6 +1368,10 @@ impl sum_tree::Item for Transform { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.input_rows += summary.input_rows; self.output_rows += summary.output_rows; @@ -1375,12 +1379,20 @@ impl sum_tree::Summary for TransformSummary { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapRow { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.input_rows; } } impl<'a> 
sum_tree::Dimension<'a, TransformSummary> for BlockRow { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.output_rows; } diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 10ee125b32..bfc9c7d1a4 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -12,19 +12,34 @@ use crate::FoldPlaceholder; #[derive(Copy, Clone, Default, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] pub struct CreaseId(usize); -#[derive(Default)] pub struct CreaseMap { snapshot: CreaseSnapshot, next_id: CreaseId, id_to_range: HashMap>, } -#[derive(Clone, Default)] +impl CreaseMap { + pub fn new(snapshot: &MultiBufferSnapshot) -> Self { + CreaseMap { + snapshot: CreaseSnapshot::new(snapshot), + next_id: CreaseId::default(), + id_to_range: HashMap::default(), + } + } +} + +#[derive(Clone)] pub struct CreaseSnapshot { creases: SumTree, } impl CreaseSnapshot { + pub fn new(snapshot: &MultiBufferSnapshot) -> Self { + CreaseSnapshot { + creases: SumTree::new(snapshot), + } + } + /// Returns the first Crease starting on the specified buffer row. 
pub fn query_row<'a>( &'a self, @@ -32,7 +47,7 @@ impl CreaseSnapshot { snapshot: &'a MultiBufferSnapshot, ) -> Option<&'a Crease> { let start = snapshot.anchor_before(Point::new(row.0, 0)); - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); while let Some(item) = cursor.item() { match Ord::cmp(&item.crease.range.start.to_point(snapshot).row, &row.0) { @@ -56,7 +71,7 @@ impl CreaseSnapshot { snapshot: &'a MultiBufferSnapshot, ) -> impl '_ + Iterator { let start = snapshot.anchor_before(Point::new(range.start.0, 0)); - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); std::iter::from_fn(move || { @@ -79,7 +94,7 @@ impl CreaseSnapshot { &self, snapshot: &MultiBufferSnapshot, ) -> Vec<(CreaseId, Range)> { - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); let mut results = Vec::new(); cursor.next(snapshot); @@ -194,8 +209,8 @@ impl CreaseMap { ) -> Vec { let mut new_ids = Vec::new(); self.snapshot.creases = { - let mut new_creases = SumTree::new(); - let mut cursor = self.snapshot.creases.cursor::(); + let mut new_creases = SumTree::new(snapshot); + let mut cursor = self.snapshot.creases.cursor::(snapshot); for crease in creases { new_creases.append(cursor.slice(&crease.range, Bias::Left, snapshot), snapshot); @@ -227,8 +242,8 @@ impl CreaseMap { }); self.snapshot.creases = { - let mut new_creases = SumTree::new(); - let mut cursor = self.snapshot.creases.cursor::(); + let mut new_creases = SumTree::new(snapshot); + let mut cursor = self.snapshot.creases.cursor::(snapshot); for (id, range) in removals { new_creases.append(cursor.slice(&range, Bias::Left, snapshot), snapshot); @@ -264,6 +279,10 @@ impl Default for ItemSummary { impl sum_tree::Summary for ItemSummary { type Context = MultiBufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + 
Default::default() + } + fn add_summary(&mut self, other: &Self, _snapshot: &MultiBufferSnapshot) { self.range = other.range.clone(); } @@ -303,7 +322,7 @@ mod test { let text = "line1\nline2\nline3\nline4\nline5"; let buffer = MultiBuffer::build_simple(text, cx); let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); - let mut crease_map = CreaseMap::default(); + let mut crease_map = CreaseMap::new(&buffer.read(cx).read(cx)); // Insert creases let creases = [ @@ -350,7 +369,7 @@ mod test { let text = "line1\nline2\nline3\nline4\nline5\nline6\nline7"; let buffer = MultiBuffer::build_simple(text, cx); let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); - let mut crease_map = CreaseMap::default(); + let mut crease_map = CreaseMap::new(&snapshot); let creases = [ Crease::new( diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 486fe4b2e5..37983030b8 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -79,7 +79,7 @@ impl FoldPoint { } pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { - let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; InlayPoint(cursor.start().1 .0 + overshoot) @@ -88,7 +88,7 @@ impl FoldPoint { pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset { let mut cursor = snapshot .transforms - .cursor::<(FoldPoint, TransformSummary)>(); + .cursor::<(FoldPoint, TransformSummary)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().1.output.lines; let mut offset = cursor.start().1.output.len; @@ -105,6 +105,10 @@ impl FoldPoint { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut 
self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.lines; } @@ -154,8 +158,8 @@ impl<'a> FoldMapWriter<'a> { folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(&a.range, &b.range, buffer)); self.0.snapshot.folds = { - let mut new_tree = SumTree::new(); - let mut cursor = self.0.snapshot.folds.cursor::(); + let mut new_tree = SumTree::new(buffer); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); for fold in folds { new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer); new_tree.push(fold, buffer); @@ -202,8 +206,8 @@ impl<'a> FoldMapWriter<'a> { fold_ixs_to_delete.dedup(); self.0.snapshot.folds = { - let mut cursor = self.0.snapshot.folds.cursor::(); - let mut folds = SumTree::new(); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); + let mut folds = SumTree::new(buffer); for fold_ix in fold_ixs_to_delete { folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); cursor.next(buffer); @@ -230,7 +234,7 @@ impl FoldMap { pub(crate) fn new(inlay_snapshot: InlaySnapshot) -> (Self, FoldSnapshot) { let this = Self { snapshot: FoldSnapshot { - folds: Default::default(), + folds: SumTree::new(&inlay_snapshot.buffer), transforms: SumTree::from_item( Transform { summary: TransformSummary { @@ -314,8 +318,8 @@ impl FoldMap { } else { let mut inlay_edits_iter = inlay_edits.iter().cloned().peekable(); - let mut new_transforms = SumTree::::new(); - let mut cursor = self.snapshot.transforms.cursor::(); + let mut new_transforms = SumTree::::default(); + let mut cursor = self.snapshot.transforms.cursor::(&()); cursor.seek(&InlayOffset(0), Bias::Right, &()); while let Some(mut edit) = inlay_edits_iter.next() { @@ -367,7 +371,10 @@ impl FoldMap { let anchor = inlay_snapshot .buffer .anchor_before(inlay_snapshot.to_buffer_offset(edit.new.start)); - let mut folds_cursor = self.snapshot.folds.cursor::(); + let mut folds_cursor = self + .snapshot + .folds + .cursor::(&inlay_snapshot.buffer); folds_cursor.seek( 
&FoldRange(anchor..Anchor::max()), Bias::Left, @@ -470,8 +477,8 @@ impl FoldMap { let mut old_transforms = self .snapshot .transforms - .cursor::<(InlayOffset, FoldOffset)>(); - let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(); + .cursor::<(InlayOffset, FoldOffset)>(&()); + let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&()); for mut edit in inlay_edits { old_transforms.seek(&edit.old.start, Bias::Left, &()); @@ -545,7 +552,7 @@ impl FoldSnapshot { pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let mut summary = TextSummary::default(); - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&range.start, Bias::Right, &()); if let Some(transform) = cursor.item() { let start_in_transform = range.start.0 - cursor.start().0 .0; @@ -594,7 +601,7 @@ impl FoldSnapshot { } pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { - let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().map_or(false, |t| t.is_fold()) { if bias == Bias::Left || point == cursor.start().0 { @@ -631,7 +638,7 @@ impl FoldSnapshot { } let fold_point = FoldPoint::new(start_row, 0); - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&fold_point, Bias::Left, &()); let overshoot = fold_point.0 - cursor.start().0 .0; @@ -672,7 +679,7 @@ impl FoldSnapshot { { let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_offset, Bias::Right, &()); 
cursor.item().map_or(false, |t| t.placeholder.is_some()) } @@ -681,7 +688,7 @@ impl FoldSnapshot { let mut inlay_point = self .inlay_snapshot .to_inlay_point(Point::new(buffer_row.0, 0)); - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_point, Bias::Right, &()); loop { match cursor.item() { @@ -711,7 +718,7 @@ impl FoldSnapshot { language_aware: bool, highlights: Highlights<'a>, ) -> FoldChunks<'a> { - let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(); + let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); transform_cursor.seek(&range.start, Bias::Right, &()); let inlay_start = { @@ -766,7 +773,7 @@ impl FoldSnapshot { } pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(transform) = cursor.item() { let transform_start = cursor.start().0 .0; @@ -826,7 +833,7 @@ where let buffer = &inlay_snapshot.buffer; let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); - let mut cursor = folds.filter::<_, usize>(move |summary| { + let mut cursor = folds.filter::<_, usize>(buffer, move |summary| { let start_cmp = start.cmp(&summary.max_end, buffer); let end_cmp = end.cmp(&summary.min_start, buffer); @@ -945,6 +952,10 @@ impl sum_tree::Item for Transform { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -1028,6 +1039,10 @@ impl Default for FoldSummary { impl sum_tree::Summary for FoldSummary { type Context = MultiBufferSnapshot; + fn zero(_cx: &MultiBufferSnapshot) -> Self { + 
Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.min_start.cmp(&self.min_start, buffer) == Ordering::Less { self.min_start = other.min_start; @@ -1052,6 +1067,10 @@ impl sum_tree::Summary for FoldSummary { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for FoldRange { + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { self.0.start = summary.start; self.0.end = summary.end; @@ -1065,6 +1084,10 @@ impl<'a> sum_tree::SeekTarget<'a, FoldSummary, FoldRange> for FoldRange { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { *self += summary.count; } @@ -1196,7 +1219,7 @@ impl FoldOffset { pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint { let mut cursor = snapshot .transforms - .cursor::<(FoldOffset, TransformSummary)>(); + .cursor::<(FoldOffset, TransformSummary)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { Point::new(0, (self.0 - cursor.start().0 .0) as u32) @@ -1210,7 +1233,7 @@ impl FoldOffset { #[cfg(test)] pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { - let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(); + let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; InlayOffset(cursor.start().1 .0 + overshoot) @@ -1240,18 +1263,30 @@ impl Sub for FoldOffset { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.len; } } impl<'a> sum_tree::Dimension<'a, 
TransformSummary> for InlayPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.input.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.input.len; } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index b6ab2cdd28..712db45e3f 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -97,6 +97,10 @@ struct TransformSummary { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -137,6 +141,10 @@ impl SubAssign for InlayOffset { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.len; } @@ -162,18 +170,30 @@ impl Sub for InlayPoint { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.input.len; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.input.lines; } @@ -475,8 +495,8 @@ impl InlayMap { (snapshot.clone(), Vec::new()) } 
else { let mut inlay_edits = Patch::default(); - let mut new_transforms = SumTree::new(); - let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(); + let mut new_transforms = SumTree::default(); + let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&()); let mut buffer_edits_iter = buffer_edits.iter().peekable(); while let Some(buffer_edit) = buffer_edits_iter.next() { new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); @@ -693,7 +713,7 @@ impl InlaySnapshot { pub fn to_point(&self, offset: InlayOffset) -> InlayPoint { let mut cursor = self .transforms - .cursor::<(InlayOffset, (InlayPoint, usize))>(); + .cursor::<(InlayOffset, (InlayPoint, usize))>(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = offset.0 - cursor.start().0 .0; match cursor.item() { @@ -723,7 +743,7 @@ impl InlaySnapshot { pub fn to_offset(&self, point: InlayPoint) -> InlayOffset { let mut cursor = self .transforms - .cursor::<(InlayPoint, (InlayOffset, Point))>(); + .cursor::<(InlayPoint, (InlayOffset, Point))>(&()); cursor.seek(&point, Bias::Right, &()); let overshoot = point.0 - cursor.start().0 .0; match cursor.item() { @@ -741,9 +761,8 @@ impl InlaySnapshot { None => self.len(), } } - pub fn to_buffer_point(&self, point: InlayPoint) -> Point { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); cursor.seek(&point, Bias::Right, &()); match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -754,9 +773,8 @@ impl InlaySnapshot { None => self.buffer.max_point(), } } - pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&offset, Bias::Right, &()); match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -769,7 +787,7 @@ impl InlaySnapshot { } pub fn 
to_inlay_offset(&self, offset: usize) -> InlayOffset { - let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(); + let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&()); cursor.seek(&offset, Bias::Left, &()); loop { match cursor.item() { @@ -801,9 +819,8 @@ impl InlaySnapshot { } } } - pub fn to_inlay_point(&self, point: Point) -> InlayPoint { - let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&()); cursor.seek(&point, Bias::Left, &()); loop { match cursor.item() { @@ -837,7 +854,7 @@ impl InlaySnapshot { } pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); cursor.seek(&point, Bias::Left, &()); loop { match cursor.item() { @@ -934,7 +951,7 @@ impl InlaySnapshot { pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let mut summary = TextSummary::default(); - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&range.start, Bias::Right, &()); let overshoot = range.start.0 - cursor.start().0 .0; @@ -982,7 +999,7 @@ impl InlaySnapshot { } pub fn buffer_rows(&self, row: u32) -> InlayBufferRows<'_> { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let inlay_point = InlayPoint::new(row, 0); cursor.seek(&inlay_point, Bias::Left, &()); @@ -1024,7 +1041,7 @@ impl InlaySnapshot { language_aware: bool, highlights: Highlights<'a>, ) -> InlayChunks<'a> { - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&range.start, Bias::Right, &()); let mut highlight_endpoints = Vec::new(); diff --git 
a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 08b2ae0c64..564bba2158 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -204,7 +204,7 @@ impl WrapMap { } } else { let old_rows = self.snapshot.transforms.summary().output.lines.row + 1; - self.snapshot.transforms = SumTree::new(); + self.snapshot.transforms = SumTree::default(); let summary = self.snapshot.tab_snapshot.text_summary(); if !summary.lines.is_zero() { self.snapshot @@ -303,7 +303,7 @@ impl WrapMap { impl WrapSnapshot { fn new(tab_snapshot: TabSnapshot) -> Self { - let mut transforms = SumTree::new(); + let mut transforms = SumTree::default(); let extent = tab_snapshot.text_summary(); if !extent.lines.is_zero() { transforms.push(Transform::isomorphic(extent), &()); @@ -324,7 +324,7 @@ impl WrapSnapshot { if tab_edits.is_empty() { new_transforms = self.transforms.clone(); } else { - let mut old_cursor = self.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); let mut tab_edits_iter = tab_edits.iter().peekable(); new_transforms = @@ -424,7 +424,7 @@ impl WrapSnapshot { new_transforms = self.transforms.clone(); } else { let mut row_edits = row_edits.into_iter().peekable(); - let mut old_cursor = self.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); new_transforms = old_cursor.slice( &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), @@ -537,8 +537,8 @@ impl WrapSnapshot { fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch { let mut wrap_edits = Vec::new(); - let mut old_cursor = self.transforms.cursor::(); - let mut new_cursor = new_snapshot.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); + let mut new_cursor = new_snapshot.transforms.cursor::(&()); for mut tab_edit in tab_edits.iter().cloned() { tab_edit.old.start.0.column = 0; tab_edit.old.end.0 += Point::new(1, 0); @@ -579,7 
+579,7 @@ impl WrapSnapshot { ) -> WrapChunks<'a> { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); - let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&output_start, Bias::Right, &()); let mut input_start = TabPoint(transforms.start().1 .0); if transforms.item().map_or(false, |t| t.is_isomorphic()) { @@ -606,7 +606,7 @@ impl WrapSnapshot { } pub fn line_len(&self, row: u32) -> u32 { - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); if cursor .item() @@ -626,7 +626,7 @@ impl WrapSnapshot { } pub fn soft_wrap_indent(&self, row: u32) -> Option { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); cursor.item().and_then(|transform| { if transform.is_isomorphic() { @@ -642,7 +642,7 @@ impl WrapSnapshot { } pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { - let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); let mut input_row = transforms.start().1.row(); if transforms.item().map_or(false, |t| t.is_isomorphic()) { @@ -662,7 +662,7 @@ impl WrapSnapshot { } pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); let mut tab_point = cursor.start().1 .0; if cursor.item().map_or(false, |t| t.is_isomorphic()) { @@ -680,14 +680,14 @@ impl WrapSnapshot { } pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> 
WrapPoint { - let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(); + let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&()); cursor.seek(&point, Bias::Right, &()); WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)) } pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().map_or(false, |t| !t.is_isomorphic()) { point = *cursor.start(); @@ -705,7 +705,7 @@ impl WrapSnapshot { *point.column_mut() = 0; - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -725,7 +725,7 @@ impl WrapSnapshot { pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { point.0 += Point::new(1, 0); - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); while let Some(transform) = cursor.item() { if transform.is_isomorphic() && cursor.start().1.column() == 0 { @@ -747,7 +747,7 @@ impl WrapSnapshot { ); { - let mut transforms = self.transforms.cursor::<()>().peekable(); + let mut transforms = self.transforms.cursor::<()>(&()).peekable(); while let Some(transform) = transforms.next() { if let Some(next_transform) = transforms.peek() { assert!(transform.is_isomorphic() != next_transform.is_isomorphic()); @@ -982,6 +982,10 @@ impl WrapPoint { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -989,6 +993,10 @@ impl sum_tree::Summary for TransformSummary { } impl<'a> 
sum_tree::Dimension<'a, TransformSummary> for TabPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.input.lines; } @@ -1001,6 +1009,10 @@ impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoi } impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.output.lines; } diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 00531ee886..775cbcc379 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -37,12 +37,20 @@ impl sum_tree::Item for GitBlameEntry { impl sum_tree::Summary for GitBlameEntrySummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _cx: &()) { self.rows += summary.rows; } } impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a GitBlameEntrySummary, _cx: &()) { *self += summary.rows; } @@ -191,7 +199,7 @@ impl GitBlame { ) -> impl 'a + Iterator> { self.sync(cx); - let mut cursor = self.entries.cursor::(); + let mut cursor = self.entries.cursor::(&()); rows.into_iter().map(move |row| { let row = row?; cursor.seek_forward(&row.0, Bias::Right, &()); @@ -249,8 +257,8 @@ impl GitBlame { }) .peekable(); - let mut new_entries = SumTree::new(); - let mut cursor = self.entries.cursor::(); + let mut new_entries = SumTree::default(); + let mut cursor = self.entries.cursor::(&()); while let Some(mut edit) = row_edits.next() { while let Some(next_edit) = row_edits.peek() { diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index eedef199dc..8cc7ee1863 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -48,6 +48,10 @@ pub struct 
DiffHunkSummary { impl sum_tree::Summary for DiffHunkSummary { type Context = text::BufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { self.buffer_range.start = self .buffer_range @@ -63,17 +67,11 @@ pub struct BufferDiff { tree: SumTree>, } -impl Default for BufferDiff { - fn default() -> Self { - Self::new() - } -} - impl BufferDiff { - pub fn new() -> BufferDiff { + pub fn new(buffer: &BufferSnapshot) -> BufferDiff { BufferDiff { last_buffer_version: None, - tree: SumTree::new(), + tree: SumTree::new(buffer), } } @@ -97,11 +95,13 @@ impl BufferDiff { range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { - let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); - !before_start && !after_end - }); + let mut cursor = self + .tree + .filter::<_, DiffHunkSummary>(buffer, move |summary| { + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + !before_start && !after_end + }); let anchor_iter = std::iter::from_fn(move || { cursor.next(buffer); @@ -142,11 +142,13 @@ impl BufferDiff { range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { - let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); - !before_start && !after_end - }); + let mut cursor = self + .tree + .filter::<_, DiffHunkSummary>(buffer, move |summary| { + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + !before_start && 
!after_end + }); std::iter::from_fn(move || { cursor.prev(buffer); @@ -171,11 +173,11 @@ impl BufferDiff { #[cfg(test)] fn clear(&mut self, buffer: &text::BufferSnapshot) { self.last_buffer_version = Some(buffer.version().clone()); - self.tree = SumTree::new(); + self.tree = SumTree::new(buffer); } pub async fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { - let mut tree = SumTree::new(); + let mut tree = SumTree::new(buffer); let diff_base_text = diff_base.to_string(); let buffer_text = buffer.as_rope().to_string(); @@ -351,7 +353,7 @@ mod tests { .unindent(); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let mut diff = BufferDiff::new(); + let mut diff = BufferDiff::new(&buffer); smol::block_on(diff.update(&diff_base_rope, &buffer)); assert_hunks( diff.hunks(&buffer), @@ -412,7 +414,7 @@ mod tests { .unindent(); let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let mut diff = BufferDiff::new(); + let mut diff = BufferDiff::new(&buffer); smol::block_on(diff.update(&diff_base_rope, &buffer)); assert_eq!(diff.hunks(&buffer).count(), 8); diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index bea8270fa5..d03392867b 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -181,7 +181,7 @@ impl ListState { last_layout_bounds: None, last_padding: None, render_item: Box::new(render_item), - items: SumTree::new(), + items: SumTree::default(), logical_scroll_top: None, alignment, overdraw, @@ -228,7 +228,7 @@ impl ListState { ) { let state = &mut *self.0.borrow_mut(); - let mut old_items = state.items.cursor::(); + let mut old_items = state.items.cursor::(&()); let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right, &()); old_items.seek_forward(&Count(old_range.end), Bias::Right, &()); @@ -297,7 +297,7 @@ impl ListState { scroll_top.item_ix = ix; scroll_top.offset_in_item = px(0.); } else { - let mut cursor = 
state.items.cursor::(); + let mut cursor = state.items.cursor::(&()); cursor.seek(&Count(ix + 1), Bias::Right, &()); let bottom = cursor.start().height + padding.top; let goal_top = px(0.).max(bottom - height + padding.bottom); @@ -326,7 +326,7 @@ impl ListState { return None; } - let mut cursor = state.items.cursor::<(Count, Height)>(); + let mut cursor = state.items.cursor::<(Count, Height)>(&()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); let scroll_top = cursor.start().1 .0 + scroll_top.offset_in_item; @@ -348,7 +348,7 @@ impl ListState { impl StateInner { fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); let start_y = cursor.start().height + scroll_top.offset_in_item; cursor.seek_forward(&Height(start_y + height), Bias::Left, &()); @@ -378,7 +378,7 @@ impl StateInner { if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; } else { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Height(new_scroll_top), Bias::Right, &()); let item_ix = cursor.start().count; let offset_in_item = new_scroll_top - cursor.start().height; @@ -418,7 +418,7 @@ impl StateInner { } fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &()); cursor.start().height + logical_scroll_top.offset_in_item } @@ -445,7 +445,7 @@ impl StateInner { AvailableSpace::MinContent, ); - let mut cursor = old_items.cursor::(); + let mut cursor = old_items.cursor::(&()); // Render items after the scroll top, including those in the trailing overdraw cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); @@ -560,7 +560,7 @@ impl StateInner { } let 
measured_range = cursor.start().0..(cursor.start().0 + measured_items.len()); - let mut cursor = old_items.cursor::(); + let mut cursor = old_items.cursor::(&()); let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right, &()); new_items.extend(measured_items, &()); cursor.seek(&Count(measured_range.end), Bias::Right, &()); @@ -573,7 +573,7 @@ impl StateInner { if !rendered_focused_item { let mut cursor = self .items - .filter::<_, Count>(|summary| summary.has_focus_handles); + .filter::<_, Count>(&(), |summary| summary.has_focus_handles); cursor.next(&()); while let Some(item) = cursor.item() { if item.contains_focused(cx) { @@ -629,7 +629,7 @@ impl StateInner { offset_in_item: autoscroll_bounds.top() - item_origin.y, }); } else if autoscroll_bounds.bottom() > bounds.bottom() { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Count(item.index), Bias::Right, &()); let mut height = bounds.size.height - padding.top - padding.bottom; @@ -883,6 +883,10 @@ impl sum_tree::Item for ListItem { impl sum_tree::Summary for ListItemSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.count += summary.count; self.rendered_count += summary.rendered_count; @@ -893,12 +897,20 @@ impl sum_tree::Summary for ListItemSummary { } impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) { self.0 += summary.count; } } impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Height { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) { self.0 += summary.height; } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 76058ffd9b..43fe1565ac 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs 
@@ -722,7 +722,9 @@ impl Buffer { capability: Capability, ) -> Self { let saved_mtime = file.as_ref().and_then(|file| file.mtime()); - + let snapshot = buffer.snapshot(); + let git_diff = git::diff::BufferDiff::new(&snapshot); + let syntax_map = Mutex::new(SyntaxMap::new(&snapshot)); Self { saved_mtime, saved_version: buffer.version(), @@ -739,10 +741,10 @@ impl Buffer { }) .map(Rope::from), diff_base_version: 0, - git_diff: git::diff::BufferDiff::new(), + git_diff, file, capability, - syntax_map: Mutex::new(SyntaxMap::new()), + syntax_map, parsing_in_background: false, non_text_state_update_count: 0, sync_parse_timeout: Duration::from_millis(1), @@ -809,7 +811,7 @@ impl Buffer { /// Assign a language to the buffer. pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { self.non_text_state_update_count += 1; - self.syntax_map.lock().clear(); + self.syntax_map.lock().clear(&self.text); self.language = language; self.reparse(cx); cx.emit(BufferEvent::LanguageChanged); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index bc53778d97..c35659d9bb 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -15,7 +15,7 @@ use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; /// The diagnostics are stored in a [`SumTree`], which allows this struct /// to be cheaply copied, and allows for efficient retrieval of the /// diagnostics that intersect a given range of the buffer. 
-#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct DiagnosticSet { diagnostics: SumTree>, } @@ -135,7 +135,7 @@ impl DiagnosticSet { { let end_bias = if inclusive { Bias::Right } else { Bias::Left }; let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); - let mut cursor = self.diagnostics.filter::<_, ()>({ + let mut cursor = self.diagnostics.filter::<_, ()>(buffer, { move |summary: &Summary| { let start_cmp = range.start.cmp(&summary.max_end, buffer); let end_cmp = range.end.cmp(&summary.min_start, buffer); @@ -261,6 +261,10 @@ impl Default for Summary { impl sum_tree::Summary for Summary { type Context = text::BufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.min_start.cmp(&self.min_start, buffer).is_lt() { self.min_start = other.min_start; diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 0cdc166570..daae54fb4d 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -18,13 +18,12 @@ use sum_tree::{Bias, SeekTarget, SumTree}; use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree}; -#[derive(Default)] pub struct SyntaxMap { snapshot: SyntaxSnapshot, language_registry: Option>, } -#[derive(Clone, Default)] +#[derive(Clone)] pub struct SyntaxSnapshot { layers: SumTree, parsed_version: clock::Global, @@ -212,8 +211,11 @@ struct ByteChunks<'a>(text::Chunks<'a>); pub(crate) struct QueryCursorHandle(Option); impl SyntaxMap { - pub fn new() -> Self { - Self::default() + pub fn new(text: &BufferSnapshot) -> Self { + Self { + snapshot: SyntaxSnapshot::new(text), + language_registry: None, + } } pub fn set_language_registry(&mut self, registry: Arc) { @@ -242,12 +244,21 @@ impl SyntaxMap { self.snapshot = snapshot; } - pub fn 
clear(&mut self) { - self.snapshot = SyntaxSnapshot::default(); + pub fn clear(&mut self, text: &BufferSnapshot) { + self.snapshot = SyntaxSnapshot::new(text); } } impl SyntaxSnapshot { + fn new(text: &BufferSnapshot) -> Self { + Self { + layers: SumTree::new(text), + parsed_version: clock::Global::default(), + interpolated_version: clock::Global::default(), + language_registry_version: 0, + } + } + pub fn is_empty(&self) -> bool { self.layers.is_empty() } @@ -262,10 +273,10 @@ impl SyntaxSnapshot { return; } - let mut layers = SumTree::new(); + let mut layers = SumTree::new(text); let mut first_edit_ix_for_depth = 0; let mut prev_depth = 0; - let mut cursor = self.layers.cursor::(); + let mut cursor = self.layers.cursor::(text); cursor.next(text); 'outer: loop { @@ -388,7 +399,7 @@ impl SyntaxSnapshot { let mut resolved_injection_ranges = Vec::new(); let mut cursor = self .layers - .filter::<_, ()>(|summary| summary.contains_unknown_injections); + .filter::<_, ()>(text, |summary| summary.contains_unknown_injections); cursor.next(text); while let Some(layer) = cursor.item() { let SyntaxLayerContent::Pending { language_name } = &layer.content else { @@ -430,9 +441,9 @@ impl SyntaxSnapshot { log::trace!("reparse. 
invalidated ranges:{:?}", invalidated_ranges); let max_depth = self.layers.summary().max_depth; - let mut cursor = self.layers.cursor::(); + let mut cursor = self.layers.cursor::(text); cursor.next(text); - let mut layers = SumTree::new(); + let mut layers = SumTree::new(text); let mut changed_regions = ChangeRegionSet::default(); let mut queue = BinaryHeap::new(); @@ -823,7 +834,7 @@ impl SyntaxSnapshot { let start = buffer.anchor_before(start_offset); let end = buffer.anchor_after(end_offset); - let mut cursor = self.layers.filter::<_, ()>(move |summary| { + let mut cursor = self.layers.filter::<_, ()>(buffer, move |summary| { if summary.max_depth > summary.min_depth { true } else { @@ -1666,6 +1677,10 @@ impl Default for SyntaxLayerSummary { impl sum_tree::Summary for SyntaxLayerSummary { type Context = BufferSnapshot; + fn zero(_cx: &BufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.max_depth > self.max_depth { self.max_depth = other.max_depth; diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index 6f42252da5..f6d27bcbd2 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -103,7 +103,7 @@ fn test_syntax_map_layers_for_range(cx: &mut AppContext) { .unindent(), ); - let mut syntax_map = SyntaxMap::new(); + let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); syntax_map.reparse(language.clone(), &buffer); @@ -202,7 +202,7 @@ fn test_dynamic_language_injection(cx: &mut AppContext) { .unindent(), ); - let mut syntax_map = SyntaxMap::new(); + let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); syntax_map.reparse(markdown.clone(), &buffer); syntax_map.reparse(markdown_inline.clone(), &buffer); @@ -897,11 +897,11 @@ fn test_random_edits( let mut buffer = 
Buffer::new(0, BufferId::new(1).unwrap(), text); - let mut syntax_map = SyntaxMap::new(); + let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); syntax_map.reparse(language.clone(), &buffer); - let mut reference_syntax_map = SyntaxMap::new(); + let mut reference_syntax_map = SyntaxMap::new(&buffer); reference_syntax_map.set_language_registry(registry.clone()); log::info!("initial text:\n{}", buffer.text()); @@ -918,7 +918,7 @@ fn test_random_edits( syntax_map.reparse(language.clone(), &buffer); - reference_syntax_map.clear(); + reference_syntax_map.clear(&buffer); reference_syntax_map.reparse(language.clone(), &buffer); } @@ -931,7 +931,7 @@ fn test_random_edits( syntax_map.interpolate(&buffer); syntax_map.reparse(language.clone(), &buffer); - reference_syntax_map.clear(); + reference_syntax_map.clear(&buffer); reference_syntax_map.reparse(language.clone(), &buffer); assert_eq!( syntax_map.layers(&buffer).len(), @@ -1082,7 +1082,7 @@ fn test_edit_sequence( .unwrap(); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), Default::default()); - let mut mutated_syntax_map = SyntaxMap::new(); + let mut mutated_syntax_map = SyntaxMap::new(&buffer); mutated_syntax_map.set_language_registry(registry.clone()); mutated_syntax_map.reparse(language.clone(), &buffer); @@ -1097,7 +1097,7 @@ fn test_edit_sequence( // Create a second syntax map from scratch log::info!("fresh parse {i}: {marked_string:?}"); - let mut reference_syntax_map = SyntaxMap::new(); + let mut reference_syntax_map = SyntaxMap::new(&buffer); reference_syntax_map.set_language_registry(registry.clone()); reference_syntax_map.reparse(language.clone(), &buffer); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 5b6eddd5b1..9dee092dea 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -561,7 +561,7 @@ impl MultiBuffer { } let mut buffer_edits: HashMap> = 
Default::default(); let mut edited_excerpt_ids = Vec::new(); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); for (ix, (range, new_text)) in edits.enumerate() { let new_text: Arc = new_text.into(); let original_indent_column = original_indent_columns.get(ix).copied().unwrap_or(0); @@ -841,7 +841,7 @@ impl MultiBuffer { let mut ranges = Vec::new(); let snapshot = self.read(cx); let buffers = self.buffers.borrow(); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { let Some(buffer_state) = buffers.get(buffer_id) else { @@ -957,7 +957,7 @@ impl MultiBuffer { let mut selections_by_buffer: HashMap>> = Default::default(); let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(); + let mut cursor = snapshot.excerpts.cursor::>(&()); for selection in selections { let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id); @@ -1281,7 +1281,7 @@ impl MultiBuffer { let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); - let mut cursor = snapshot.excerpts.cursor::>(); + let mut cursor = snapshot.excerpts.cursor::>(&()); let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &()); prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); @@ -1388,7 +1388,7 @@ impl MultiBuffer { let mut excerpts = Vec::new(); let snapshot = self.read(cx); let buffers = self.buffers.borrow(); - let mut cursor = snapshot.excerpts.cursor::>(); + let mut cursor = snapshot.excerpts.cursor::>(&()); for locator in buffers .get(&buffer.read(cx).remote_id()) .map(|state| &state.excerpts) @@ -1432,7 +1432,7 @@ impl MultiBuffer { let snapshot = self.read(cx); let position = position.to_offset(&snapshot); - 
let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&position, Bias::Right, &()); cursor .item() @@ -1459,7 +1459,7 @@ impl MultiBuffer { ) -> Option<(Model, usize, ExcerptId)> { let snapshot = self.read(cx); let offset = point.to_offset(&snapshot); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -1482,7 +1482,7 @@ impl MultiBuffer { ) -> Option<(Model, Point, ExcerptId)> { let snapshot = self.read(cx); let point = point.to_point(&snapshot); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -1507,7 +1507,7 @@ impl MultiBuffer { let end = range.end.to_offset(&snapshot); let mut result = Vec::new(); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&start, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -1546,8 +1546,8 @@ impl MultiBuffer { let mut buffers = self.buffers.borrow_mut(); let mut snapshot = self.snapshot.borrow_mut(); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); let mut edits = Vec::new(); let mut excerpt_ids = ids.iter().copied().peekable(); @@ -1801,8 +1801,8 @@ impl MultiBuffer { let ids = ids.into_iter().collect::>(); let snapshot = self.snapshot(cx); let locators = snapshot.excerpt_locators_for_ids(ids.iter().copied()); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); + let mut new_excerpts = SumTree::default(); + let mut cursor = 
snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); let mut edits = Vec::>::new(); for locator in &locators { @@ -1927,8 +1927,8 @@ impl MultiBuffer { excerpts_to_edit.sort_unstable_by_key(|(locator, _, _)| *locator); let mut edits = Vec::new(); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); for (locator, buffer, buffer_edited) in excerpts_to_edit { new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); @@ -2230,7 +2230,7 @@ impl MultiBufferSnapshot { pub fn reversed_chars_at(&self, position: T) -> impl Iterator + '_ { let mut offset = position.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Left, &()); let mut excerpt_chunks = cursor.item().map(|excerpt| { let end_before_footer = cursor.start() + excerpt.text_summary.len; @@ -2357,7 +2357,7 @@ impl MultiBufferSnapshot { return buffer.clip_offset(offset, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = if let Some(excerpt) = cursor.item() { let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); @@ -2376,7 +2376,7 @@ impl MultiBufferSnapshot { return buffer.clip_point(point, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&point, Bias::Right, &()); let overshoot = if let Some(excerpt) = cursor.item() { let excerpt_start = excerpt.range.context.start.to_point(&excerpt.buffer); @@ -2395,7 +2395,7 @@ impl MultiBufferSnapshot { return buffer.clip_offset_utf16(offset, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = if 
let Some(excerpt) = cursor.item() { let excerpt_start = excerpt.range.context.start.to_offset_utf16(&excerpt.buffer); @@ -2414,7 +2414,7 @@ impl MultiBufferSnapshot { return buffer.clip_point_utf16(point, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&point.0, Bias::Right, &()); let overshoot = if let Some(excerpt) = cursor.item() { let excerpt_start = excerpt @@ -2432,7 +2432,7 @@ impl MultiBufferSnapshot { pub fn bytes_in_range(&self, range: Range) -> MultiBufferBytes { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpts = self.excerpts.cursor::(); + let mut excerpts = self.excerpts.cursor::(&()); excerpts.seek(&range.start, Bias::Right, &()); let mut chunk = &[][..]; @@ -2457,7 +2457,7 @@ impl MultiBufferSnapshot { range: Range, ) -> ReversedMultiBufferBytes { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpts = self.excerpts.cursor::(); + let mut excerpts = self.excerpts.cursor::(&()); excerpts.seek(&range.end, Bias::Left, &()); let mut chunk = &[][..]; @@ -2482,7 +2482,7 @@ impl MultiBufferSnapshot { pub fn buffer_rows(&self, start_row: MultiBufferRow) -> MultiBufferRows { let mut result = MultiBufferRows { buffer_row_range: 0..0, - excerpts: self.excerpts.cursor(), + excerpts: self.excerpts.cursor(&()), }; result.seek(start_row); result @@ -2492,7 +2492,7 @@ impl MultiBufferSnapshot { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut chunks = MultiBufferChunks { range: range.clone(), - excerpts: self.excerpts.cursor(), + excerpts: self.excerpts.cursor(&()), excerpt_chunks: None, language_aware, }; @@ -2505,7 +2505,7 @@ impl MultiBufferSnapshot { return buffer.offset_to_point(offset); } - let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let 
(start_offset, start_point) = cursor.start(); @@ -2526,7 +2526,7 @@ impl MultiBufferSnapshot { return buffer.offset_to_point_utf16(offset); } - let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>(); + let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset, start_point) = cursor.start(); @@ -2547,7 +2547,7 @@ impl MultiBufferSnapshot { return buffer.point_to_point_utf16(point); } - let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>(); + let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset, start_point) = cursor.start(); @@ -2569,7 +2569,7 @@ impl MultiBufferSnapshot { return buffer.point_to_offset(point); } - let mut cursor = self.excerpts.cursor::<(Point, usize)>(); + let mut cursor = self.excerpts.cursor::<(Point, usize)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_point, start_offset) = cursor.start(); @@ -2590,7 +2590,7 @@ impl MultiBufferSnapshot { return buffer.offset_utf16_to_offset(offset_utf16); } - let mut cursor = self.excerpts.cursor::<(OffsetUtf16, usize)>(); + let mut cursor = self.excerpts.cursor::<(OffsetUtf16, usize)>(&()); cursor.seek(&offset_utf16, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset_utf16, start_offset) = cursor.start(); @@ -2612,7 +2612,7 @@ impl MultiBufferSnapshot { return buffer.offset_to_offset_utf16(offset); } - let mut cursor = self.excerpts.cursor::<(usize, OffsetUtf16)>(); + let mut cursor = self.excerpts.cursor::<(usize, OffsetUtf16)>(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset, start_offset_utf16) = cursor.start(); @@ -2636,7 +2636,7 @@ impl MultiBufferSnapshot { return buffer.point_utf16_to_offset(point); } - let mut cursor = 
self.excerpts.cursor::<(PointUtf16, usize)>(); + let mut cursor = self.excerpts.cursor::<(PointUtf16, usize)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_point, start_offset) = cursor.start(); @@ -2659,7 +2659,7 @@ impl MultiBufferSnapshot { point: T, ) -> Option<(&BufferSnapshot, usize)> { let offset = point.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -2680,7 +2680,7 @@ impl MultiBufferSnapshot { let mut result = BTreeMap::new(); let mut rows_for_excerpt = Vec::new(); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let mut rows = rows.into_iter().peekable(); let mut prev_row = u32::MAX; let mut prev_language_indent_size = IndentSize::default(); @@ -2769,7 +2769,7 @@ impl MultiBufferSnapshot { &self, row: MultiBufferRow, ) -> Option<(&BufferSnapshot, Range)> { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let point = Point::new(row.0, 0); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() && *cursor.start() == point { @@ -2803,9 +2803,9 @@ impl MultiBufferSnapshot { D: TextDimension, O: ToOffset, { - let mut summary = D::default(); + let mut summary = D::zero(&()); let mut range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&range.start, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let mut end_before_newline = cursor.end(&()); @@ -2856,7 +2856,7 @@ impl MultiBufferSnapshot { where D: TextDimension + Ord + Sub, { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let locator = self.excerpt_locator_for_id(anchor.excerpt_id); cursor.seek(locator, Bias::Left, &()); @@ -2894,7 +2894,7 @@ impl 
MultiBufferSnapshot { } let mut anchors = anchors.into_iter().peekable(); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { let excerpt_id = anchor.excerpt_id; @@ -2949,7 +2949,7 @@ impl MultiBufferSnapshot { I: 'a + IntoIterator, { let mut anchors = anchors.into_iter().enumerate().peekable(); - let mut cursor = self.excerpts.cursor::>(); + let mut cursor = self.excerpts.cursor::>(&()); cursor.next(&()); let mut result = Vec::new(); @@ -3064,7 +3064,7 @@ impl MultiBufferSnapshot { }; } - let mut cursor = self.excerpts.cursor::<(usize, Option)>(); + let mut cursor = self.excerpts.cursor::<(usize, Option)>(&()); cursor.seek(&offset, Bias::Right, &()); if cursor.item().is_none() && offset == cursor.start().0 && bias == Bias::Left { cursor.prev(&()); @@ -3099,7 +3099,7 @@ impl MultiBufferSnapshot { text_anchor: text::Anchor, ) -> Option { let locator = self.excerpt_locator_for_id(excerpt_id); - let mut cursor = self.excerpts.cursor::>(); + let mut cursor = self.excerpts.cursor::>(&()); cursor.seek(locator, Bias::Left, &()); if let Some(excerpt) = cursor.item() { if excerpt.id == excerpt_id { @@ -3139,7 +3139,7 @@ impl MultiBufferSnapshot { ) -> impl Iterator + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&range.start, Bias::Right, &()); cursor.prev(&()); @@ -3183,7 +3183,7 @@ impl MultiBufferSnapshot { }; let bounds = (start, end); - let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.seek(&start_offset, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -3550,7 +3550,7 @@ impl MultiBufferSnapshot { &self, row_range: Range, ) -> impl Iterator> + '_ { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = 
self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &()); if cursor.item().is_none() { @@ -3617,7 +3617,7 @@ impl MultiBufferSnapshot { &self, row_range: Range, ) -> impl Iterator> + '_ { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &()); @@ -3779,7 +3779,7 @@ impl MultiBufferSnapshot { } else if id == ExcerptId::max() { Locator::max_ref() } else { - let mut cursor = self.excerpt_ids.cursor::(); + let mut cursor = self.excerpt_ids.cursor::(&()); cursor.seek(&id, Bias::Left, &()); if let Some(entry) = cursor.item() { if entry.id == id { @@ -3814,7 +3814,7 @@ impl MultiBufferSnapshot { } } - let mut cursor = self.excerpt_ids.cursor::(); + let mut cursor = self.excerpt_ids.cursor::(&()); for id in sorted_ids { if cursor.seek_forward(&id, Bias::Left, &()) { locators.push(cursor.item().unwrap().locator.clone()); @@ -3839,7 +3839,7 @@ impl MultiBufferSnapshot { &'a self, excerpt_id: ExcerptId, ) -> Option> { - let mut cursor = self.excerpts.cursor::<(Option<&Locator>, T)>(); + let mut cursor = self.excerpts.cursor::<(Option<&Locator>, T)>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); if cursor.seek(&Some(locator), Bias::Left, &()) { let start = cursor.start().1.clone(); @@ -3851,7 +3851,7 @@ impl MultiBufferSnapshot { } fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { - let mut cursor = self.excerpts.cursor::>(); + let mut cursor = self.excerpts.cursor::>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); cursor.seek(&Some(locator), Bias::Left, &()); if let Some(excerpt) = cursor.item() { @@ -3866,7 +3866,7 @@ impl MultiBufferSnapshot { pub fn excerpt_containing(&self, range: Range) -> Option { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&range.start, Bias::Right, 
&()); let start_excerpt = cursor.item()?; @@ -3891,7 +3891,7 @@ impl MultiBufferSnapshot { I: IntoIterator> + 'a, { let mut ranges = ranges.into_iter().map(|range| range.to_offset(self)); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.next(&()); let mut current_range = ranges.next(); iter::from_fn(move || { @@ -3943,7 +3943,7 @@ impl MultiBufferSnapshot { ranges: impl IntoIterator>, ) -> impl Iterator)> { let mut ranges = ranges.into_iter().map(|range| range.to_offset(self)); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.next(&()); let mut current_range = ranges.next(); iter::from_fn(move || { @@ -3980,7 +3980,7 @@ impl MultiBufferSnapshot { range: &'a Range, include_local: bool, ) -> impl 'a + Iterator)> { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); cursor.seek(start_locator, Bias::Left, &()); @@ -4519,6 +4519,10 @@ impl sum_tree::KeyedItem for ExcerptIdMapping { impl sum_tree::Summary for ExcerptId { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { *self = *other; } @@ -4527,6 +4531,10 @@ impl sum_tree::Summary for ExcerptId { impl sum_tree::Summary for ExcerptSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { debug_assert!(summary.excerpt_locator > self.excerpt_locator); self.excerpt_locator = summary.excerpt_locator.clone(); @@ -4536,12 +4544,20 @@ impl sum_tree::Summary for ExcerptSummary { } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for TextSummary { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += &summary.text; } } 
impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.len; } @@ -4566,30 +4582,50 @@ impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Locator { } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for OffsetUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.len_utf16; } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines; } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines_utf16() } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self = Some(&summary.excerpt_locator); } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self = Some(summary.excerpt_id); } diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index e01b99d472..48fcb5dfbb 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -137,13 +137,12 @@ impl NotificationStore { return None; } let ix = count - 1 - ix; - let mut cursor = self.notifications.cursor::(); + let mut cursor = self.notifications.cursor::(&()); cursor.seek(&Count(ix), Bias::Right, &()); cursor.item() } - pub fn notification_for_id(&self, id: u64) -> 
Option<&NotificationEntry> { - let mut cursor = self.notifications.cursor::(); + let mut cursor = self.notifications.cursor::(&()); cursor.seek(&NotificationId(id), Bias::Left, &()); if let Some(item) = cursor.item() { if item.id == id { @@ -372,8 +371,8 @@ impl NotificationStore { is_new: bool, cx: &mut ModelContext<'_, NotificationStore>, ) { - let mut cursor = self.notifications.cursor::<(NotificationId, Count)>(); - let mut new_notifications = SumTree::new(); + let mut cursor = self.notifications.cursor::<(NotificationId, Count)>(&()); + let mut new_notifications = SumTree::default(); let mut old_range = 0..0; for (i, (id, new_notification)) in notifications.into_iter().enumerate() { @@ -468,6 +467,10 @@ impl sum_tree::Item for NotificationEntry { impl sum_tree::Summary for NotificationSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.max_id = self.max_id.max(summary.max_id); self.count += summary.count; @@ -476,6 +479,10 @@ impl sum_tree::Summary for NotificationSummary { } impl<'a> sum_tree::Dimension<'a, NotificationSummary> for NotificationId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &NotificationSummary, _: &()) { debug_assert!(summary.max_id > self.0); self.0 = summary.max_id; @@ -483,6 +490,10 @@ impl<'a> sum_tree::Dimension<'a, NotificationSummary> for NotificationId { } impl<'a> sum_tree::Dimension<'a, NotificationSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &NotificationSummary, _: &()) { self.0 += summary.count; } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 35eb20259c..24852afd70 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2753,7 +2753,7 @@ impl LspStore { if let Some(language) = buffer.language().cloned() { for adapter in 
self.languages.lsp_adapters(&language.name()) { if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) { - buffer.update_diagnostics(*server_id, Default::default(), cx); + buffer.update_diagnostics(*server_id, DiagnosticSet::new([], buffer), cx); } } } @@ -5149,7 +5149,11 @@ impl LspStore { self.buffer_store.update(cx, |buffer_store, cx| { for buffer in buffer_store.buffers() { buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(server_id, Default::default(), cx); + buffer.update_diagnostics( + server_id, + DiagnosticSet::new([], buffer), + cx, + ); }); } }); diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 00ffbfa9eb..56fe7fc054 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -36,7 +36,7 @@ impl Rope { } pub fn append(&mut self, rope: Rope) { - let mut chunks = rope.chunks.cursor::<()>(); + let mut chunks = rope.chunks.cursor::<()>(&()); chunks.next(&()); if let Some(chunk) = chunks.item() { if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE) @@ -175,7 +175,7 @@ impl Rope { { // Ensure all chunks except maybe the last one are not underflowing. // Allow some wiggle room for multibyte characters at chunk boundaries. 
- let mut chunks = self.chunks.cursor::<()>().peekable(); + let mut chunks = self.chunks.cursor::<()>(&()).peekable(); while let Some(chunk) = chunks.next() { if chunks.peek().is_some() { assert!(chunk.0.len() + 3 >= CHUNK_BASE); @@ -245,7 +245,7 @@ impl Rope { if offset >= self.summary().len { return self.summary().len_utf16; } - let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(); + let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -258,7 +258,7 @@ impl Rope { if offset >= self.summary().len_utf16 { return self.summary().len; } - let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(); + let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -271,7 +271,7 @@ impl Rope { if offset >= self.summary().len { return self.summary().lines; } - let mut cursor = self.chunks.cursor::<(usize, Point)>(); + let mut cursor = self.chunks.cursor::<(usize, Point)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -284,7 +284,7 @@ impl Rope { if offset >= self.summary().len { return self.summary().lines_utf16(); } - let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(); + let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -297,7 +297,7 @@ impl Rope { if point >= self.summary().lines { return self.summary().lines_utf16(); } - let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(); + let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&()); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 @@ -310,7 +310,7 @@ impl Rope { if point >= self.summary().lines { return self.summary().len; } - let mut cursor = 
self.chunks.cursor::<(Point, usize)>(); + let mut cursor = self.chunks.cursor::<(Point, usize)>(&()); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 @@ -331,7 +331,7 @@ impl Rope { if point >= self.summary().lines_utf16() { return self.summary().len; } - let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(); + let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&()); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 @@ -344,7 +344,7 @@ impl Rope { if point.0 >= self.summary().lines_utf16() { return self.summary().lines; } - let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(); + let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&()); cursor.seek(&point.0, Bias::Left, &()); let overshoot = Unclipped(point.0 - cursor.start().0); cursor.start().1 @@ -354,7 +354,7 @@ impl Rope { } pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(&()); cursor.seek(&offset, Bias::Left, &()); if let Some(chunk) = cursor.item() { let mut ix = offset - cursor.start(); @@ -377,7 +377,7 @@ impl Rope { } pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = offset - cursor.start(); @@ -388,7 +388,7 @@ impl Rope { } pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = point - cursor.start(); @@ -399,7 +399,7 @@ impl Rope { } pub fn clip_point_utf16(&self, point: Unclipped, bias: Bias) -> PointUtf16 { - let mut cursor = self.chunks.cursor::(); + let mut cursor = 
self.chunks.cursor::(&()); cursor.seek(&point.0, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = Unclipped(point.0 - cursor.start()); @@ -472,7 +472,7 @@ pub struct Cursor<'a> { impl<'a> Cursor<'a> { pub fn new(rope: &'a Rope, offset: usize) -> Self { - let mut chunks = rope.chunks.cursor(); + let mut chunks = rope.chunks.cursor(&()); chunks.seek(&offset, Bias::Right, &()); Self { rope, @@ -521,7 +521,7 @@ impl<'a> Cursor<'a> { pub fn summary(&mut self, end_offset: usize) -> D { debug_assert!(end_offset >= self.offset); - let mut summary = D::default(); + let mut summary = D::zero(&()); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); @@ -563,7 +563,7 @@ pub struct Chunks<'a> { impl<'a> Chunks<'a> { pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { - let mut chunks = rope.chunks.cursor(); + let mut chunks = rope.chunks.cursor(&()); let offset = if reversed { chunks.seek(&range.end, Bias::Left, &()); range.end @@ -774,7 +774,7 @@ pub struct Bytes<'a> { impl<'a> Bytes<'a> { pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { - let mut chunks = rope.chunks.cursor(); + let mut chunks = rope.chunks.cursor(&()); if reversed { chunks.seek(&range.end, Bias::Left, &()); } else { @@ -1180,6 +1180,10 @@ impl<'a> From<&'a str> for ChunkSummary { impl sum_tree::Summary for ChunkSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.text += &summary.text; } @@ -1263,6 +1267,10 @@ impl<'a> From<&'a str> for TextSummary { impl sum_tree::Summary for TextSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &Self::Context) { *self += summary; } @@ -1333,6 +1341,10 @@ impl TextDimension for (D1, D2) { } impl<'a> sum_tree::Dimension<'a, 
ChunkSummary> for TextSummary { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += &summary.text; } @@ -1349,6 +1361,10 @@ impl TextDimension for TextSummary { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.len; } @@ -1365,6 +1381,10 @@ impl TextDimension for usize { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for OffsetUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.len_utf16; } @@ -1381,6 +1401,10 @@ impl TextDimension for OffsetUtf16 { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.lines; } @@ -1397,6 +1421,10 @@ impl TextDimension for Point { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for PointUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.lines_utf16(); } @@ -1890,7 +1918,7 @@ mod tests { impl Rope { fn text(&self) -> String { let mut text = String::new(); - for chunk in self.chunks.cursor::<()>() { + for chunk in self.chunks.cursor::<()>(&()) { text.push_str(&chunk.0); } text diff --git a/crates/rope/src/unclipped.rs b/crates/rope/src/unclipped.rs index 937cbca053..b3427e2cb9 100644 --- a/crates/rope/src/unclipped.rs +++ b/crates/rope/src/unclipped.rs @@ -13,6 +13,10 @@ impl From for Unclipped { impl<'a, T: sum_tree::Dimension<'a, ChunkSummary>> sum_tree::Dimension<'a, ChunkSummary> for Unclipped { + fn zero(_: &()) -> Self { + Self(T::zero(&())) + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { self.0.add_summary(summary, &()); } diff --git 
a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 452930f942..6da43a8de5 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -28,21 +28,21 @@ where T: Item, D: Dimension<'a, T::Summary>, { - pub fn new(tree: &'a SumTree) -> Self { + pub fn new(tree: &'a SumTree, cx: &::Context) -> Self { Self { tree, stack: ArrayVec::new(), - position: D::default(), + position: D::zero(cx), did_seek: false, at_end: tree.is_empty(), } } - fn reset(&mut self) { + fn reset(&mut self, cx: &::Context) { self.did_seek = false; self.at_end = self.tree.is_empty(); self.stack.truncate(0); - self.position = D::default(); + self.position = D::zero(cx); } pub fn start(&self) -> &D { @@ -192,7 +192,7 @@ where } if self.at_end { - self.position = D::default(); + self.position = D::zero(cx); self.at_end = self.tree.is_empty(); if !self.tree.is_empty() { self.stack.push(StackEntry { @@ -208,7 +208,7 @@ where if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) { self.position = position.clone(); } else { - self.position = D::default(); + self.position = D::zero(cx); } let entry = self.stack.last_mut().unwrap(); @@ -232,7 +232,7 @@ where if descending { let tree = &child_trees[entry.index]; self.stack.push(StackEntry { - position: D::default(), + position: D::zero(cx), tree, index: tree.0.child_summaries().len() - 1, }) @@ -264,7 +264,7 @@ where self.stack.push(StackEntry { tree: self.tree, index: 0, - position: D::default(), + position: D::zero(cx), }); descend = true; } @@ -364,7 +364,7 @@ where where Target: SeekTarget<'a, T::Summary, D>, { - self.reset(); + self.reset(cx); self.seek_internal(pos, bias, &mut (), cx) } @@ -392,10 +392,10 @@ where Target: SeekTarget<'a, T::Summary, D>, { let mut slice = SliceSeekAggregate { - tree: SumTree::new(), + tree: SumTree::new(cx), leaf_items: ArrayVec::new(), leaf_item_summaries: ArrayVec::new(), - leaf_summary: T::Summary::default(), + leaf_summary: ::zero(cx), }; 
self.seek_internal(end, bias, &mut slice, cx); slice.tree @@ -417,7 +417,7 @@ where Target: SeekTarget<'a, T::Summary, D>, Output: Dimension<'a, T::Summary>, { - let mut summary = SummarySeekAggregate(Output::default()); + let mut summary = SummarySeekAggregate(Output::zero(cx)); self.seek_internal(end, bias, &mut summary, cx); summary.0 } @@ -443,7 +443,7 @@ where self.stack.push(StackEntry { tree: self.tree, index: 0, - position: Default::default(), + position: D::zero(cx), }); } @@ -633,8 +633,12 @@ where T: Item, D: Dimension<'a, T::Summary>, { - pub fn new(tree: &'a SumTree, filter_node: F) -> Self { - let cursor = tree.cursor::(); + pub fn new( + tree: &'a SumTree, + cx: &::Context, + filter_node: F, + ) -> Self { + let cursor = tree.cursor::(cx); Self { cursor, filter_node, @@ -727,7 +731,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate { fn end_leaf(&mut self, cx: &::Context) { self.tree.append( SumTree(Arc::new(Node::Leaf { - summary: mem::take(&mut self.leaf_summary), + summary: mem::replace(&mut self.leaf_summary, ::zero(cx)), items: mem::take(&mut self.leaf_items), item_summaries: mem::take(&mut self.leaf_item_summaries), })), diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8a8027408f..ca351d67ce 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -34,9 +34,11 @@ pub trait KeyedItem: Item { /// /// Each Summary type can have multiple [`Dimensions`] that it measures, /// which can be used to navigate the tree -pub trait Summary: Default + Clone + fmt::Debug { +pub trait Summary: Clone + fmt::Debug { type Context; + fn zero(cx: &Self::Context) -> Self; + fn add_summary(&mut self, summary: &Self, cx: &Self::Context); } @@ -47,17 +49,23 @@ pub trait Summary: Default + Clone + fmt::Debug { /// # Example: /// Zed's rope has a `TextSummary` type that summarizes lines, characters, and bytes. 
/// Each of these are different dimensions we may want to seek to -pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug + Default { - fn add_summary(&mut self, _summary: &'a S, _: &S::Context); +pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug { + fn zero(cx: &S::Context) -> Self; + + fn add_summary(&mut self, summary: &'a S, cx: &S::Context); fn from_summary(summary: &'a S, cx: &S::Context) -> Self { - let mut dimension = Self::default(); + let mut dimension = Self::zero(cx); dimension.add_summary(summary, cx); dimension } } impl<'a, T: Summary> Dimension<'a, T> for T { + fn zero(cx: &T::Context) -> Self { + Summary::zero(cx) + } + fn add_summary(&mut self, summary: &'a T, cx: &T::Context) { Summary::add_summary(self, summary, cx); } @@ -74,10 +82,18 @@ impl<'a, S: Summary, D: Dimension<'a, S> + Ord> SeekTarget<'a, S, D> for D { } impl<'a, T: Summary> Dimension<'a, T> for () { + fn zero(_: &T::Context) -> Self { + () + } + fn add_summary(&mut self, _: &'a T, _: &T::Context) {} } impl<'a, T: Summary, D1: Dimension<'a, T>, D2: Dimension<'a, T>> Dimension<'a, T> for (D1, D2) { + fn zero(cx: &T::Context) -> Self { + (D1::zero(cx), D2::zero(cx)) + } + fn add_summary(&mut self, summary: &'a T, cx: &T::Context) { self.0.add_summary(summary, cx); self.1.add_summary(summary, cx); @@ -161,16 +177,16 @@ impl Bias { pub struct SumTree(Arc>); impl SumTree { - pub fn new() -> Self { + pub fn new(cx: &::Context) -> Self { SumTree(Arc::new(Node::Leaf { - summary: T::Summary::default(), + summary: ::zero(cx), items: ArrayVec::new(), item_summaries: ArrayVec::new(), })) } pub fn from_item(item: T, cx: &::Context) -> Self { - let mut tree = Self::new(); + let mut tree = Self::new(cx); tree.push(item, cx); tree } @@ -206,7 +222,7 @@ impl SumTree { let mut current_parent_node = None; for child_node in nodes.drain(..) 
{ let parent_node = current_parent_node.get_or_insert_with(|| Node::Internal { - summary: T::Summary::default(), + summary: ::zero(cx), height, child_summaries: ArrayVec::new(), child_trees: ArrayVec::new(), @@ -234,7 +250,7 @@ impl SumTree { } if nodes.is_empty() { - Self::new() + Self::new(cx) } else { debug_assert_eq!(nodes.len(), 1); Self(Arc::new(nodes.pop().unwrap())) @@ -296,7 +312,7 @@ impl SumTree { } if nodes.is_empty() { - Self::new() + Self::new(cx) } else { debug_assert_eq!(nodes.len(), 1); nodes.pop().unwrap() @@ -306,7 +322,7 @@ impl SumTree { #[allow(unused)] pub fn items(&self, cx: &::Context) -> Vec { let mut items = Vec::new(); - let mut cursor = self.cursor::<()>(); + let mut cursor = self.cursor::<()>(cx); cursor.next(cx); while let Some(item) = cursor.item() { items.push(item.clone()); @@ -319,21 +335,25 @@ impl SumTree { Iter::new(self) } - pub fn cursor<'a, S>(&'a self) -> Cursor + pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor where S: Dimension<'a, T::Summary>, { - Cursor::new(self) + Cursor::new(self, cx) } /// Note: If the summary type requires a non `()` context, then the filter cursor /// that is returned cannot be used with Rust's iterators. - pub fn filter<'a, F, U>(&'a self, filter_node: F) -> FilterCursor + pub fn filter<'a, F, U>( + &'a self, + cx: &::Context, + filter_node: F, + ) -> FilterCursor where F: FnMut(&T::Summary) -> bool, U: Dimension<'a, T::Summary>, { - FilterCursor::new(self, filter_node) + FilterCursor::new(self, cx, filter_node) } #[allow(dead_code)] @@ -389,7 +409,7 @@ impl SumTree { &'a self, cx: &::Context, ) -> D { - let mut extent = D::default(); + let mut extent = D::zero(cx); match self.0.as_ref() { Node::Internal { summary, .. } | Node::Leaf { summary, .. 
} => { extent.add_summary(summary, cx); @@ -636,7 +656,7 @@ impl SumTree { ) -> Option { let mut replaced = None; *self = { - let mut cursor = self.cursor::(); + let mut cursor = self.cursor::(cx); let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx); if let Some(cursor_item) = cursor.item() { if cursor_item.key() == item.key() { @@ -654,7 +674,7 @@ impl SumTree { pub fn remove(&mut self, key: &T::Key, cx: &::Context) -> Option { let mut removed = None; *self = { - let mut cursor = self.cursor::(); + let mut cursor = self.cursor::(cx); let mut new_tree = cursor.slice(key, Bias::Left, cx); if let Some(item) = cursor.item() { if item.key() == *key { @@ -681,11 +701,11 @@ impl SumTree { edits.sort_unstable_by_key(|item| item.key()); *self = { - let mut cursor = self.cursor::(); - let mut new_tree = SumTree::new(); + let mut cursor = self.cursor::(cx); + let mut new_tree = SumTree::new(cx); let mut buffered_items = Vec::new(); - cursor.seek(&T::Key::default(), Bias::Left, cx); + cursor.seek(&T::Key::zero(cx), Bias::Left, cx); for edit in edits { let new_key = edit.key(); let mut old_item = cursor.item(); @@ -724,7 +744,7 @@ impl SumTree { } pub fn get(&self, key: &T::Key, cx: &::Context) -> Option<&T> { - let mut cursor = self.cursor::(); + let mut cursor = self.cursor::(cx); if cursor.seek(key, Bias::Left, cx) { cursor.item() } else { @@ -733,9 +753,13 @@ impl SumTree { } } -impl Default for SumTree { +impl Default for SumTree +where + T: Item, + S: Summary, +{ fn default() -> Self { - Self::new() + Self::new(&()) } } @@ -824,7 +848,7 @@ where T: 'a + Summary, I: Iterator, { - let mut sum = T::default(); + let mut sum = T::zero(cx); for value in iter { sum.add_summary(value, cx); } @@ -846,10 +870,10 @@ mod tests { #[test] fn test_extend_and_push_tree() { - let mut tree1 = SumTree::new(); + let mut tree1 = SumTree::default(); tree1.extend(0..20, &()); - let mut tree2 = SumTree::new(); + let mut tree2 = SumTree::default(); tree2.extend(50..100, &()); 
tree1.append(tree2, &()); @@ -877,7 +901,7 @@ mod tests { let mut rng = StdRng::seed_from_u64(seed); let rng = &mut rng; - let mut tree = SumTree::::new(); + let mut tree = SumTree::::default(); let count = rng.gen_range(0..10); if rng.gen() { tree.extend(rng.sample_iter(distributions::Standard).take(count), &()); @@ -903,7 +927,7 @@ mod tests { reference_items.splice(splice_start..splice_end, new_items.clone()); tree = { - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); if rng.gen() { new_tree.extend(new_items, &()); @@ -918,12 +942,13 @@ mod tests { assert_eq!(tree.items(&()), reference_items); assert_eq!( tree.iter().collect::>(), - tree.cursor::<()>().collect::>() + tree.cursor::<()>(&()).collect::>() ); log::info!("tree items: {:?}", tree.items(&())); - let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even); + let mut filter_cursor = + tree.filter::<_, Count>(&(), |summary| summary.contains_even); let expected_filtered_items = tree .items(&()) .into_iter() @@ -964,7 +989,7 @@ mod tests { assert_eq!(filter_cursor.item(), None); let mut before_start = false; - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); let start_pos = rng.gen_range(0..=reference_items.len()); cursor.seek(&Count(start_pos), Bias::Right, &()); let mut pos = rng.gen_range(start_pos..=reference_items.len()); @@ -1015,7 +1040,7 @@ mod tests { let start_bias = if rng.gen() { Bias::Left } else { Bias::Right }; let end_bias = if rng.gen() { Bias::Left } else { Bias::Right }; - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); cursor.seek(&Count(start), start_bias, &()); let slice = cursor.slice(&Count(end), end_bias, &()); @@ -1030,8 +1055,8 @@ mod tests { #[test] fn test_cursor() { // Empty tree - let tree = SumTree::::new(); - let mut cursor = tree.cursor::(); + let tree = SumTree::::default(); + let mut cursor = 
tree.cursor::(&()); assert_eq!( cursor.slice(&Count(0), Bias::Right, &()).items(&()), Vec::::new() @@ -1052,9 +1077,9 @@ mod tests { assert_eq!(cursor.start().sum, 0); // Single-element tree - let mut tree = SumTree::::new(); + let mut tree = SumTree::::default(); tree.extend(vec![1], &()); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!( cursor.slice(&Count(0), Bias::Right, &()).items(&()), Vec::::new() @@ -1076,7 +1101,7 @@ mod tests { assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 0); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); @@ -1096,9 +1121,9 @@ mod tests { assert_eq!(cursor.start().sum, 1); // Multiple-element tree - let mut tree = SumTree::new(); + let mut tree = SumTree::default(); tree.extend(vec![1, 2, 3, 4, 5, 6], &()); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]); assert_eq!(cursor.item(), Some(&3)); @@ -1179,7 +1204,7 @@ mod tests { assert_eq!(cursor.next_item(), Some(&2)); assert_eq!(cursor.start().sum, 0); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!( cursor .slice(&tree.extent::(&()), Bias::Right, &()) @@ -1227,7 +1252,7 @@ mod tests { #[test] fn test_edit() { - let mut tree = SumTree::::new(); + let mut tree = SumTree::::default(); let removed = tree.edit(vec![Edit::Insert(1), Edit::Insert(2), Edit::Insert(0)], &()); assert_eq!(tree.items(&()), vec![0, 1, 2]); @@ -1305,6 +1330,10 @@ mod tests { impl Summary for IntegersSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.count += other.count; self.sum += other.sum; @@ -1314,12 +1343,20 @@ mod tests { } impl<'a> Dimension<'a, 
IntegersSummary> for u8 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { *self = summary.max; } } impl<'a> Dimension<'a, IntegersSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { self.0 += summary.count; } @@ -1332,6 +1369,10 @@ mod tests { } impl<'a> Dimension<'a, IntegersSummary> for Sum { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { self.0 += summary.sum; } diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 53bb0a807c..72465b1a99 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -53,7 +53,7 @@ impl TreeMap { } pub fn get(&self, key: &K) -> Option<&V> { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &()); if let Some(item) = cursor.item() { if Some(key) == item.key().0.as_ref() { @@ -72,7 +72,7 @@ impl TreeMap { pub fn remove(&mut self, key: &K) -> Option { let mut removed = None; - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); let mut new_tree = cursor.slice(&key, Bias::Left, &()); if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { @@ -88,7 +88,7 @@ impl TreeMap { pub fn remove_range(&mut self, start: &impl MapSeekTarget, end: &impl MapSeekTarget) { let start = MapSeekTargetAdaptor(start); let end = MapSeekTargetAdaptor(end); - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let mut new_tree = cursor.slice(&start, Bias::Left, &()); cursor.seek(&end, Bias::Left, &()); new_tree.append(cursor.suffix(&()), &()); @@ -98,7 +98,7 @@ impl TreeMap { /// Returns the key-value pair with the greatest key less than or equal to the given key. 
pub fn closest(&self, key: &K) -> Option<(&K, &V)> { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); cursor.seek(&key, Bias::Right, &()); cursor.prev(&()); @@ -106,7 +106,7 @@ impl TreeMap { } pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + '_ { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let from_key = MapKeyRef(Some(from)); cursor.seek(&from_key, Bias::Left, &()); @@ -117,7 +117,7 @@ impl TreeMap { where F: FnOnce(&mut V) -> T, { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); let mut new_tree = cursor.slice(&key, Bias::Left, &()); let mut result = None; @@ -136,7 +136,7 @@ impl TreeMap { pub fn retain bool>(&mut self, mut predicate: F) { let mut new_map = SumTree::>::default(); - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); cursor.next(&()); while let Some(item) = cursor.item() { if predicate(&item.key, &item.value) { @@ -247,6 +247,10 @@ where { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { *self = summary.clone() } @@ -256,6 +260,10 @@ impl<'a, K> Dimension<'a, MapKey> for MapKeyRef<'a, K> where K: Clone + Debug + Ord, { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a MapKey, _: &()) { self.0 = summary.0.as_ref(); } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index ee833326f5..3bc5889cae 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -100,7 +100,7 @@ impl Anchor { false } else { let fragment_id = buffer.fragment_id_for_anchor(self); - let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None); fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None); 
fragment_cursor .item() diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 83d57016c5..7afc16f581 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -85,6 +85,10 @@ impl sum_tree::KeyedItem for Locator { impl sum_tree::Summary for Locator { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.assign(summary); } diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index 063f050665..c7964f6267 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -34,7 +34,7 @@ impl Default for OperationQueue { impl OperationQueue { pub fn new() -> Self { - OperationQueue(SumTree::new()) + OperationQueue(SumTree::default()) } pub fn len(&self) -> usize { @@ -58,7 +58,7 @@ impl OperationQueue { pub fn drain(&mut self) -> Self { let clone = self.clone(); - self.0 = SumTree::new(); + self.0 = SumTree::default(); clone } @@ -70,6 +70,10 @@ impl OperationQueue { impl Summary for OperationSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { assert!(self.key < other.key); self.key = other.key; @@ -90,6 +94,10 @@ impl<'a> Add<&'a Self> for OperationSummary { } impl<'a> Dimension<'a, OperationSummary> for OperationKey { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &OperationSummary, _: &()) { assert!(*self <= summary.key); *self = summary.key; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b17748c6d0..9630ec5b80 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -492,6 +492,10 @@ struct FragmentTextSummary { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { self.visible += 
summary.text.visible; self.deleted += summary.text.deleted; @@ -654,8 +658,8 @@ impl Buffer { normalized: Rope, ) -> Buffer { let history = History::new(normalized); - let mut fragments = SumTree::new(); - let mut insertions = SumTree::new(); + let mut fragments = SumTree::new(&None); + let mut insertions = SumTree::default(); let mut lamport_clock = clock::Lamport::new(replica_id); let mut version = clock::Global::new(); @@ -772,7 +776,7 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::(); + let mut old_fragments = self.fragments.cursor::(&None); let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None); new_ropes.append(new_fragments.summary().text); @@ -992,7 +996,7 @@ impl Buffer { let mut insertion_offset = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(); + let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx); let mut new_fragments = old_fragments.slice( &VersionedFullOffset::Offset(ranges[0].start), Bias::Left, @@ -1185,7 +1189,7 @@ impl Buffer { // Get all of the fragments corresponding to these insertion slices. 
let mut fragment_ids = Vec::new(); - let mut insertions_cursor = self.insertions.cursor::(); + let mut insertions_cursor = self.insertions.cursor::(&()); for insertion_slice in &insertion_slices { if insertion_slice.insertion_id != insertions_cursor.start().timestamp || insertion_slice.range.start > insertions_cursor.start().split_offset @@ -1217,8 +1221,8 @@ impl Buffer { self.snapshot.undo_map.insert(undo); let mut edits = Patch::default(); - let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>(); - let mut new_fragments = SumTree::new(); + let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); + let mut new_fragments = SumTree::new(&None); let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); @@ -1455,7 +1459,7 @@ impl Buffer { D: TextDimension, { // get fragment ranges - let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); let offset_ranges = self .fragment_ids_for_edits(transaction.edit_ids.iter()) .into_iter() @@ -1485,7 +1489,7 @@ impl Buffer { }); // convert to the desired text dimension. 
- let mut position = D::default(); + let mut position = D::zero(&()); let mut rope_cursor = self.visible_text.cursor(0); disjoint_ranges.map(move |range| { position.add_assign(&rope_cursor.summary(range.start)); @@ -1665,8 +1669,8 @@ impl Buffer { ); } - let mut cursor = self.snapshot.fragments.cursor::>(); - for insertion_fragment in self.snapshot.insertions.cursor::<()>() { + let mut cursor = self.snapshot.fragments.cursor::>(&None); + for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) { cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); let fragment = cursor.item().unwrap(); assert_eq!(insertion_fragment.fragment_id, fragment.id); @@ -1783,7 +1787,7 @@ impl BufferSnapshot { let mut cursor = self .fragments - .filter::<_, FragmentTextSummary>(move |summary| { + .filter::<_, FragmentTextSummary>(&None, move |summary| { !version.observed_all(&summary.max_version) }); cursor.next(&None); @@ -2110,14 +2114,14 @@ impl BufferSnapshot { A: 'a + IntoIterator, { let anchors = anchors.into_iter(); - let mut insertion_cursor = self.insertions.cursor::(); - let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut insertion_cursor = self.insertions.cursor::(&()); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); let mut text_cursor = self.visible_text.cursor(0); - let mut position = D::default(); + let mut position = D::zero(&()); anchors.map(move |(anchor, payload)| { if *anchor == Anchor::MIN { - return (D::default(), payload); + return (D::zero(&()), payload); } else if *anchor == Anchor::MAX { return (D::from_text_summary(&self.visible_text.summary()), payload); } @@ -2159,7 +2163,7 @@ impl BufferSnapshot { D: TextDimension, { if *anchor == Anchor::MIN { - D::default() + D::zero(&()) } else if *anchor == Anchor::MAX { D::from_text_summary(&self.visible_text.summary()) } else { @@ -2167,7 +2171,7 @@ impl BufferSnapshot { timestamp: anchor.timestamp, split_offset: 
anchor.offset, }; - let mut insertion_cursor = self.insertions.cursor::(); + let mut insertion_cursor = self.insertions.cursor::(&()); insertion_cursor.seek(&anchor_key, anchor.bias, &()); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); @@ -2192,7 +2196,7 @@ impl BufferSnapshot { ); }; - let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; @@ -2213,7 +2217,7 @@ impl BufferSnapshot { timestamp: anchor.timestamp, split_offset: anchor.offset, }; - let mut insertion_cursor = self.insertions.cursor::(); + let mut insertion_cursor = self.insertions.cursor::(&()); insertion_cursor.seek(&anchor_key, anchor.bias, &()); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); @@ -2263,7 +2267,7 @@ impl BufferSnapshot { } else if bias == Bias::Right && offset == self.len() { Anchor::MAX } else { - let mut fragment_cursor = self.fragments.cursor::(); + let mut fragment_cursor = self.fragments.cursor::(&None); fragment_cursor.seek(&offset, bias, &None); let fragment = fragment_cursor.item().unwrap(); let overshoot = offset - *fragment_cursor.start(); @@ -2341,15 +2345,15 @@ impl BufferSnapshot { let fragments_cursor = if *since == self.version { None } else { - let mut cursor = self - .fragments - .filter(move |summary| !since.observed_all(&summary.max_version)); + let mut cursor = self.fragments.filter(&None, move |summary| { + !since.observed_all(&summary.max_version) + }); cursor.next(&None); Some(cursor) }; let mut cursor = self .fragments - .cursor::<(Option<&Locator>, FragmentTextSummary)>(); + .cursor::<(Option<&Locator>, FragmentTextSummary)>(&None); 
let start_fragment_id = self.fragment_id_for_anchor(&range.start); cursor.seek(&Some(start_fragment_id), Bias::Left, &None); @@ -2371,8 +2375,8 @@ impl BufferSnapshot { fragments_cursor, undos: &self.undo_map, since, - old_end: Default::default(), - new_end: Default::default(), + old_end: D::zero(&()), + new_end: D::zero(&()), range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset), buffer_id: self.remote_id, } @@ -2382,9 +2386,9 @@ impl BufferSnapshot { if *since != self.version { let start_fragment_id = self.fragment_id_for_anchor(&range.start); let end_fragment_id = self.fragment_id_for_anchor(&range.end); - let mut cursor = self - .fragments - .filter::<_, usize>(move |summary| !since.observed_all(&summary.max_version)); + let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { + !since.observed_all(&summary.max_version) + }); cursor.next(&None); while let Some(fragment) = cursor.item() { if fragment.id > *end_fragment_id { @@ -2405,9 +2409,9 @@ impl BufferSnapshot { pub fn has_edits_since(&self, since: &clock::Global) -> bool { if *since != self.version { - let mut cursor = self - .fragments - .filter::<_, usize>(move |summary| !since.observed_all(&summary.max_version)); + let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { + !since.observed_all(&summary.max_version) + }); cursor.next(&None); while let Some(fragment) = cursor.item() { let was_visible = fragment.was_visible(since, &self.undo_map); @@ -2644,6 +2648,10 @@ impl sum_tree::Item for Fragment { impl sum_tree::Summary for FragmentSummary { type Context = Option; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &Self::Context) { self.max_id.assign(&other.max_id); self.text.visible += &other.text.visible; @@ -2704,6 +2712,10 @@ impl InsertionFragment { impl sum_tree::Summary for InsertionFragmentKey { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } 
+ fn add_summary(&mut self, summary: &Self, _: &()) { *self = *summary; } @@ -2736,18 +2748,30 @@ impl ops::Sub for FullOffset { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { *self += summary.text.visible; } } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { self.0 += summary.text.visible + summary.text.deleted; } } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { *self = Some(&summary.max_id); } @@ -2786,6 +2810,10 @@ impl Default for VersionedFullOffset { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset { + fn zero(_cx: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { if let Self::Offset(offset) = self { let version = cx.as_ref().unwrap(); diff --git a/crates/text/src/undo_map.rs b/crates/text/src/undo_map.rs index f95809c02e..4e670fd456 100644 --- a/crates/text/src/undo_map.rs +++ b/crates/text/src/undo_map.rs @@ -33,6 +33,10 @@ struct UndoMapKey { impl sum_tree::Summary for UndoMapKey { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &Self::Context) { *self = cmp::max(*self, *summary); } @@ -62,9 +66,8 @@ impl UndoMap { pub fn is_undone(&self, edit_id: clock::Lamport) -> bool { self.undo_count(edit_id) % 2 == 1 } - pub fn was_undone(&self, edit_id: clock::Lamport, version: &clock::Global) -> bool { - let mut cursor = self.0.cursor::(); + let mut cursor = self.0.cursor::(&()); cursor.seek( &UndoMapKey { edit_id, @@ -89,7 +92,7 @@ impl UndoMap { } pub 
fn undo_count(&self, edit_id: clock::Lamport) -> u32 { - let mut cursor = self.0.cursor::(); + let mut cursor = self.0.cursor::(&()); cursor.seek( &UndoMapKey { edit_id, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 776c01c49c..d8555b71a4 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2049,7 +2049,7 @@ impl Snapshot { fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option> { let removed_entry = self.entries_by_id.remove(&entry_id, &())?; self.entries_by_path = { - let mut cursor = self.entries_by_path.cursor::(); + let mut cursor = self.entries_by_path.cursor::(&()); let mut new_entries_by_path = cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &()); while let Some(entry) = cursor.item() { @@ -2192,7 +2192,7 @@ impl Snapshot { include_ignored: bool, start_offset: usize, ) -> Traversal { - let mut cursor = self.entries_by_path.cursor(); + let mut cursor = self.entries_by_path.cursor(&()); cursor.seek( &TraversalTarget::Count { count: start_offset, @@ -2302,7 +2302,7 @@ impl Snapshot { pub fn propagate_git_statuses(&self, result: &mut [Entry]) { let mut cursor = self .entries_by_path - .cursor::<(TraversalProgress, GitStatuses)>(); + .cursor::<(TraversalProgress, GitStatuses)>(&()); let mut entry_stack = Vec::<(usize, GitStatuses)>::new(); let mut result_ix = 0; @@ -2358,13 +2358,13 @@ impl Snapshot { pub fn paths(&self) -> impl Iterator> { let empty_path = Path::new(""); self.entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .filter(move |entry| entry.path.as_ref() != empty_path) .map(|entry| &entry.path) } pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> { - let mut cursor = self.entries_by_path.cursor(); + let mut cursor = self.entries_by_path.cursor(&()); cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &()); let traversal = Traversal { cursor, @@ -2581,7 +2581,7 @@ impl LocalSnapshot { #[cfg(test)] 
pub(crate) fn expanded_entries(&self) -> impl Iterator { self.entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored)) } @@ -2591,11 +2591,11 @@ impl LocalSnapshot { assert_eq!( self.entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .map(|e| (&e.path, e.id)) .collect::>(), self.entries_by_id - .cursor::<()>() + .cursor::<()>(&()) .map(|e| (&e.path, e.id)) .collect::>() .into_iter() @@ -2605,7 +2605,7 @@ impl LocalSnapshot { let mut files = self.files(true, 0); let mut visible_files = self.files(false, 0); - for entry in self.entries_by_path.cursor::<()>() { + for entry in self.entries_by_path.cursor::<()>(&()) { if entry.is_file() { assert_eq!(files.next().unwrap().inode, entry.inode); if !entry.is_ignored && !entry.is_external { @@ -2633,7 +2633,7 @@ impl LocalSnapshot { let dfs_paths_via_iter = self .entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .map(|e| e.path.as_ref()) .collect::>(); assert_eq!(bfs_paths, dfs_paths_via_iter); @@ -2679,7 +2679,7 @@ impl LocalSnapshot { #[cfg(test)] pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> { let mut paths = Vec::new(); - for entry in self.entries_by_path.cursor::<()>() { + for entry in self.entries_by_path.cursor::<()>(&()) { if include_ignored || !entry.is_ignored { paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored)); } @@ -2839,7 +2839,10 @@ impl BackgroundScannerState { let mut new_entries; let removed_entries; { - let mut cursor = self.snapshot.entries_by_path.cursor::(); + let mut cursor = self + .snapshot + .entries_by_path + .cursor::(&()); new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &()); new_entries.append(cursor.suffix(&()), &()); @@ -2847,7 +2850,7 @@ impl BackgroundScannerState { self.snapshot.entries_by_path = new_entries; let mut removed_ids = 
Vec::with_capacity(removed_entries.summary().count); - for entry in removed_entries.cursor::<()>() { + for entry in removed_entries.cursor::<()>(&()) { match self.removed_entries.entry(entry.inode) { hash_map::Entry::Occupied(mut e) => { let prev_removed_entry = e.get_mut(); @@ -3403,6 +3406,10 @@ impl Default for EntrySummary { impl sum_tree::Summary for EntrySummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, rhs: &Self, _: &()) { self.max_path = rhs.max_path.clone(); self.count += rhs.count; @@ -3445,12 +3452,20 @@ struct PathEntrySummary { impl sum_tree::Summary for PathEntrySummary { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &Self::Context) { self.max_id = summary.max_id; } } impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) { *self = summary.max_id; } @@ -3466,6 +3481,10 @@ impl Default for PathKey { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.0 = summary.max_path.clone(); } @@ -4629,8 +4648,8 @@ impl BackgroundScanner { // Identify which paths have changed. Use the known set of changed // parent paths to optimize the search. 
let mut changes = Vec::new(); - let mut old_paths = old_snapshot.entries_by_path.cursor::(); - let mut new_paths = new_snapshot.entries_by_path.cursor::(); + let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); + let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); let mut last_newly_loaded_dir_path = None; old_paths.next(&()); new_paths.next(&()); @@ -4981,6 +5000,10 @@ impl<'a> TraversalProgress<'a> { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.max_path = summary.max_path.as_ref(); self.count += summary.count; @@ -5030,6 +5053,10 @@ impl Sub for GitStatuses { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { *self += summary.statuses } @@ -5050,7 +5077,7 @@ impl<'a> Traversal<'a> { include_ignored: bool, start_path: &Path, ) -> Self { - let mut cursor = entries.cursor(); + let mut cursor = entries.cursor(&()); cursor.seek(&TraversalTarget::Path(start_path), Bias::Left, &()); let mut traversal = Self { cursor, From 8e30229ec974722fd08daac9509eca27a93a00b4 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Tue, 17 Sep 2024 22:09:59 -0400 Subject: [PATCH 176/270] Fix nix shell (#17982) Recently `cmake` was added as a build-time dependency to the wasm runtime. This adds that dependency to our nix shell env. 
Release Notes: - N/A --- flake.lock | 29 ++++++++++++----------------- flake.nix | 5 +---- nix/shell.nix | 1 + 3 files changed, 14 insertions(+), 21 deletions(-) diff --git a/flake.lock b/flake.lock index f996e1e577..2b421a9efb 100644 --- a/flake.lock +++ b/flake.lock @@ -1,17 +1,12 @@ { "nodes": { "crane": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, "locked": { - "lastModified": 1724537630, - "narHash": "sha256-gpqINM71zp3kw5XYwUXa84ZtPnCmLLnByuFoYesT1bY=", + "lastModified": 1725409566, + "narHash": "sha256-PrtLmqhM6UtJP7v7IGyzjBFhbG4eOAHT6LPYOFmYfbk=", "owner": "ipetkov", "repo": "crane", - "rev": "3e08f4b1fc9aaede5dd511d8f5f4ef27501e49b0", + "rev": "7e4586bad4e3f8f97a9271def747cf58c4b68f3c", "type": "github" }, "original": { @@ -28,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1724740262, - "narHash": "sha256-cpFasbzOTlwLi4fNas6hDznVUdCJn/lMLxi7MAMG6hg=", + "lastModified": 1726554553, + "narHash": "sha256-xakDhIS1c1VgJc/NMOLj05yBsTdlXKMEYz6wC8Hdshc=", "owner": "nix-community", "repo": "fenix", - "rev": "703efdd9b5c6a7d5824afa348a24fbbf8ff226be", + "rev": "1f59d7585aa06d2c327960d397bea4067d8fee98", "type": "github" }, "original": { @@ -58,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1724479785, - "narHash": "sha256-pP3Azj5d6M5nmG68Fu4JqZmdGt4S4vqI5f8te+E/FTw=", + "lastModified": 1726463316, + "narHash": "sha256-gI9kkaH0ZjakJOKrdjaI/VbaMEo9qBbSUl93DnU7f4c=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "d0e1602ddde669d5beb01aec49d71a51937ed7be", + "rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172", "type": "github" }, "original": { @@ -83,11 +78,11 @@ "rust-analyzer-src": { "flake": false, "locked": { - "lastModified": 1724666781, - "narHash": "sha256-nOQDgjTDlWe0/+Ptf3o2p6UrznQFHnXBHRV1ZAsSpe8=", + "lastModified": 1726443025, + "narHash": "sha256-nCmG4NJpwI0IoIlYlwtDwVA49yuspA2E6OhfCOmiArQ=", "owner": "rust-lang", "repo": "rust-analyzer", - "rev": "095926ea6f008477a15a2ec6b0b8797e2e5be0e5", + "rev": 
"94b526fc86eaa0e90fb4d54a5ba6313aa1e9b269", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 23631996ab..7d1410ac7c 100644 --- a/flake.nix +++ b/flake.nix @@ -7,10 +7,7 @@ url = "github:nix-community/fenix"; inputs.nixpkgs.follows = "nixpkgs"; }; - crane = { - url = "github:ipetkov/crane"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + crane.url = "github:ipetkov/crane"; flake-compat.url = "github:edolstra/flake-compat"; }; diff --git a/nix/shell.nix b/nix/shell.nix index 34682ab1d5..03e298e132 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -26,6 +26,7 @@ in nativeBuildInputs = with pkgs; [ clang curl + cmake perl pkg-config protobuf From 2699fa8d4a10eb38445e0c5979dfb56a2193f949 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Wed, 18 Sep 2024 11:59:19 +0800 Subject: [PATCH 177/270] windows: Fix `tailwind-language-server` (#17778) Closes #17741 I'm not sure why, but ever since `tailwind` was upgraded to `0.24`, there have been occasional errors indicating that the `.ps1` file could not be found. After reviewing the `.ps1` script, it appears that it simply starts the server using `node`. This PR directly using the method from the script to start the server with `node`. 
Co-authored-by: Anay Release Notes: - N/A --------- Co-authored-by: Anay --- crates/languages/src/tailwind.rs | 34 +++++++------------------------- 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 524e4ce846..9a053dbd87 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -18,20 +18,15 @@ use std::{ use util::{maybe, ResultExt}; #[cfg(target_os = "windows")] -const SERVER_PATH: &str = "node_modules/.bin/tailwindcss-language-server.ps1"; +const SERVER_PATH: &str = + "node_modules/@tailwindcss/language-server/bin/tailwindcss-language-server"; #[cfg(not(target_os = "windows"))] const SERVER_PATH: &str = "node_modules/.bin/tailwindcss-language-server"; -#[cfg(not(target_os = "windows"))] fn server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] } -#[cfg(target_os = "windows")] -fn server_binary_arguments(server_path: &Path) -> Vec { - vec!["-File".into(), server_path.into(), "--stdio".into()] -} - pub struct TailwindLspAdapter { node: Arc, } @@ -114,26 +109,11 @@ impl LspAdapter for TailwindLspAdapter { .await?; } - #[cfg(target_os = "windows")] - { - let env_path = self.node.node_environment_path().await?; - let mut env = HashMap::default(); - env.insert("PATH".to_string(), env_path.to_string_lossy().to_string()); - - Ok(LanguageServerBinary { - path: "powershell.exe".into(), - env: Some(env), - arguments: server_binary_arguments(&server_path), - }) - } - #[cfg(not(target_os = "windows"))] - { - Ok(LanguageServerBinary { - path: self.node.binary_path().await?, - env: None, - arguments: server_binary_arguments(&server_path), - }) - } + Ok(LanguageServerBinary { + path: self.node.binary_path().await?, + env: None, + arguments: server_binary_arguments(&server_path), + }) } async fn cached_server_binary( From d4e10dfba3bc88c6476605a9efbd7f5c5be45a8a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 18 Sep 
2024 10:04:02 +0200 Subject: [PATCH 178/270] docs: Update rust-analyzer docs (#17988) Release Notes: - N/A --- docs/src/languages/rust.md | 200 ++++++++++++++++++------------------- 1 file changed, 100 insertions(+), 100 deletions(-) diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 026b522898..233c378dae 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -13,37 +13,29 @@ TBD: Provide explicit examples not just `....` ## Inlay Hints -The following configuration can be used to enable inlay hints for rust: +The following configuration can be used to change the inlay hint settings for `rust-analyzer` in Rust: ```json -"inlayHints": { - "maxLength": null, - "lifetimeElisionHints": { - "useParameterNames": true, - "enable": "skip_trivial" - }, - "closureReturnTypeHints": { - "enable": "always" - } -} -``` - -to make the language server send back inlay hints when Zed has them enabled in the settings. - -Use - -```json -"lsp": { - "rust-analyzer": { - "initialization_options": { - .... +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "inlayHints": { + "maxLength": null, + "lifetimeElisionHints": { + "enable": "skip_trivial" + "useParameterNames": true, + }, + "closureReturnTypeHints": { + "enable": "always" + } + } + } } } } ``` -to override these settings. - See [Inlay Hints](https://rust-analyzer.github.io/manual.html#inlay-hints) in the Rust Analyzer Manual for more information. ## Target directory @@ -70,7 +62,23 @@ A `true` setting will set the target directory to `target/rust-analyzer`. You ca You can configure which `rust-analyzer` binary Zed should use. -To use a binary in a custom location, add the following to your `settings.json`: +By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to use that. If that binary successfully executes `rust-analyzer --help`, it's used. Otherwise, Zed will fall back to installing its own `rust-analyzer` version and using that. 
+ +If you want to disable Zed looking for a `rust-analyzer` binary, you can set `path_lookup` to `false` in your `settings.json`: + +```json +{ + "lsp": { + "rust-analyzer": { + "binary": { + "path_lookup": false + } + } + } +} +``` + +If you want to use a binary in a custom location, you can specify a `path` and optional `args`: ```json { @@ -85,19 +93,7 @@ To use a binary in a custom location, add the following to your `settings.json`: } ``` -To use a binary that is on your `$PATH`, add the following to your `settings.json`: - -```json -{ - "lsp": { - "rust-analyzer": { - "binary": { - "path_lookup": true - } - } - } -} -``` +This `"path"` has to be an absolute path. ## More server configuration @@ -138,30 +134,32 @@ Check on save feature is responsible for returning part of the diagnostics based Consider more `rust-analyzer.cargo.` and `rust-analyzer.check.` and `rust-analyzer.diagnostics.` settings from the manual for more fine-grained configuration. Here's a snippet for Zed settings.json (the language server will restart automatically after the `lsp.rust-analyzer` section is edited and saved): -```json5 -"lsp": { +```json +{ + "lsp": { "rust-analyzer": { - "initialization_options": { - // get more cargo-less diagnostics from rust-analyzer, - // which might include false-positives (those can be turned off by their names) - "diagnostics": { - "experimental": { - "enable": true - } - }, - // To disable the checking entirely - // (ignores all cargo and check settings below) - "checkOnSave": false, - // To check the `lib` target only. 
- "cargo": { - "allTargets": false - }, - // Use `-p` instead of `--workspace` for cargo check - "check": { - "workspace": false - } + "initialization_options": { + // get more cargo-less diagnostics from rust-analyzer, + // which might include false-positives (those can be turned off by their names) + "diagnostics": { + "experimental": { + "enable": true + } + }, + // To disable the checking entirely + // (ignores all cargo and check settings below) + "checkOnSave": false, + // To check the `lib` target only. + "cargo": { + "allTargets": false + }, + // Use `-p` instead of `--workspace` for cargo check + "check": { + "workspace": false } + } } + } } ``` @@ -170,50 +168,52 @@ Here's a snippet for Zed settings.json (the language server will restart automat There's a way get custom completion items from rust-analyzer, that will transform the code according to the snippet body: ```json -"lsp": { +{ + "lsp": { "rust-analyzer": { - "initialization_options": { - "completion": { - "snippets": { - "custom": { - "Arc::new": { - "postfix": "arc", - "body": ["Arc::new(${receiver})"], - "requires": "std::sync::Arc", - "scope": "expr" - }, - "Some": { - "postfix": "some", - "body": ["Some(${receiver})"], - "scope": "expr" - }, - "Ok": { - "postfix": "ok", - "body": ["Ok(${receiver})"], - "scope": "expr" - }, - "Rc::new": { - "postfix": "rc", - "body": ["Rc::new(${receiver})"], - "requires": "std::rc::Rc", - "scope": "expr" - }, - "Box::pin": { - "postfix": "boxpin", - "body": ["Box::pin(${receiver})"], - "requires": "std::boxed::Box", - "scope": "expr" - }, - "vec!": { - "postfix": "vec", - "body": ["vec![${receiver}]"], - "description": "vec![]", - "scope": "expr" - } - } - } + "initialization_options": { + "completion": { + "snippets": { + "custom": { + "Arc::new": { + "postfix": "arc", + "body": ["Arc::new(${receiver})"], + "requires": "std::sync::Arc", + "scope": "expr" + }, + "Some": { + "postfix": "some", + "body": ["Some(${receiver})"], + "scope": "expr" + }, + "Ok": { + 
"postfix": "ok", + "body": ["Ok(${receiver})"], + "scope": "expr" + }, + "Rc::new": { + "postfix": "rc", + "body": ["Rc::new(${receiver})"], + "requires": "std::rc::Rc", + "scope": "expr" + }, + "Box::pin": { + "postfix": "boxpin", + "body": ["Box::pin(${receiver})"], + "requires": "std::boxed::Box", + "scope": "expr" + }, + "vec!": { + "postfix": "vec", + "body": ["vec![${receiver}]"], + "description": "vec![]", + "scope": "expr" + } } + } } + } } + } } ``` From 550ceec549519e564e52c3d4f02f88ebad531236 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 18 Sep 2024 10:04:13 +0200 Subject: [PATCH 179/270] docs: Update Ruby docs to provide more complete examples (#17987) Closes #17917 Release Notes: - N/A --- docs/src/languages/ruby.md | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/docs/src/languages/ruby.md b/docs/src/languages/ruby.md index 6385de9c96..47466a35cd 100644 --- a/docs/src/languages/ruby.md +++ b/docs/src/languages/ruby.md @@ -138,15 +138,21 @@ Ruby LSP uses pull-based diagnostics which Zed doesn't support yet. We can tell ```json { + "languages": { + "Ruby": { + "language_servers": ["ruby-lsp", "!solargraph", "..."], + }, + }, "lsp": { "ruby-lsp": { "initialization_options": { "enabledFeatures": { - "diagnostics": false - } - } - } - } + // This disables diagnostics + "diagnostics": false, + }, + }, + }, + }, } ``` @@ -164,11 +170,24 @@ Rubocop has unsafe autocorrection disabled by default. We can tell Zed to enable ```json { + "languages": { + "Ruby": { + // Use ruby-lsp as the primary language server and rubocop as the secondary. 
+ "language_servers": ["ruby-lsp", "rubocop", "!solargraph", "..."] + } + }, "lsp": { "rubocop": { "initialization_options": { "safeAutocorrect": false } + }, + "ruby-lsp": { + "initialization_options": { + "enabledFeatures": { + "diagnostics": false + } + } } } } From aae26ee33de912aa0a5193a3d9d86ae5d4856723 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 18 Sep 2024 12:34:10 +0200 Subject: [PATCH 180/270] go: Fix tasks when running tests/benchs in packages (#17998) Turns out that #17645 reintroduced another regression and didn't catch all the regressions in #17108. Releases Notes: - Fixed Go tasks not working properly when running tests or benchmarks in subfolders/packages. Co-authored-by: Piotr --- crates/languages/src/go.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 2ddf779681..a103c4783c 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -519,7 +519,6 @@ impl ContextProvider for GoContextProvider { command: "go".into(), args: vec![ "test".into(), - GO_PACKAGE_TASK_VARIABLE.template_value(), "-run".into(), format!("^{}\\$", VariableName::Symbol.template_value(),), ], @@ -530,7 +529,7 @@ impl ContextProvider for GoContextProvider { TaskTemplate { label: format!("go test {}", GO_PACKAGE_TASK_VARIABLE.template_value()), command: "go".into(), - args: vec!["test".into(), GO_PACKAGE_TASK_VARIABLE.template_value()], + args: vec!["test".into()], cwd: package_cwd.clone(), ..TaskTemplate::default() }, @@ -572,7 +571,6 @@ impl ContextProvider for GoContextProvider { command: "go".into(), args: vec![ "test".into(), - GO_PACKAGE_TASK_VARIABLE.template_value(), "-benchmem".into(), "-run=^$".into(), "-bench".into(), From f68f4ab9825d6a3f22b9fd05e648af1a311bd7d6 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:34:39 +0200 Subject: [PATCH 181/270] docs: Add tweaks to the REPL page (#18000) 
Just capitalizing some things, making sure URLs are clickable links, and using the note blockquote callout when appropriate. Release Notes: - N/A --- docs/src/languages/ruby.md | 14 +++++++------- docs/src/repl.md | 10 +++++----- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/src/languages/ruby.md b/docs/src/languages/ruby.md index 47466a35cd..e8bec34a71 100644 --- a/docs/src/languages/ruby.md +++ b/docs/src/languages/ruby.md @@ -140,19 +140,19 @@ Ruby LSP uses pull-based diagnostics which Zed doesn't support yet. We can tell { "languages": { "Ruby": { - "language_servers": ["ruby-lsp", "!solargraph", "..."], - }, + "language_servers": ["ruby-lsp", "!solargraph", "..."] + } }, "lsp": { "ruby-lsp": { "initialization_options": { "enabledFeatures": { // This disables diagnostics - "diagnostics": false, - }, - }, - }, - }, + "diagnostics": false + } + } + } + } } ``` diff --git a/docs/src/repl.md b/docs/src/repl.md index 3d5d441e79..9c14ff925f 100644 --- a/docs/src/repl.md +++ b/docs/src/repl.md @@ -74,7 +74,7 @@ On macOS, your system Python will _not_ work. Either set up [pyenv](https://gith -To setup your current python to have an available kernel, run: +To setup your current Python to have an available kernel, run: ```sh pip install ipykernel @@ -99,7 +99,7 @@ python -m ipykernel install --user --name myenv --display-name "Python (myenv)" ### R (Ark Kernel) {#r-ark} -Install [Ark](https://github.com/posit-dev/ark/releases) by downloading the release for your operating system. E.g. for macOS just unpack `ark` binary and put it into `/usr/local/bin`. Then run: +Install [Ark](https://github.com/posit-dev/ark/releases) by downloading the release for your operating system. For example, for macOS just unpack `ark` binary and put it into `/usr/local/bin`. 
Then run: ```sh ark --install @@ -137,9 +137,9 @@ TBD: Improve Julia REPL instructions ### Scala -- Install Scala with `cs setup` (Coursier): https://www.scala-lang.org/download/ +- [Install Scala](https://www.scala-lang.org/download/) with `cs setup` (Coursier): - `brew install coursier/formulas/coursier && cs setup` -- REPL (Almond) Setup Instructions https://almond.sh/docs/quick-start-install +- REPL (Almond) [setup instructions](https://almond.sh/docs/quick-start-install): - `brew install --cask temurin` (Eclipse foundation official OpenJDK binaries) - `brew install coursier/formulas/coursier && cs setup` - `coursier launch --use-bootstrap almond -- --install` @@ -180,4 +180,4 @@ Available kernels: rust /Users/z/Library/Jupyter/kernels/rust ``` -Note: Zed makes best effort usage of `sys.prefix` and `CONDA_PREFIX` to find kernels in Python environments. If you want explicitly control run `python -m ipykernel install --user --name myenv --display-name "Python (myenv)"` to install the kernel directly while in the environment. +> Note: Zed makes best effort usage of `sys.prefix` and `CONDA_PREFIX` to find kernels in Python environments. If you want explicitly control run `python -m ipykernel install --user --name myenv --display-name "Python (myenv)"` to install the kernel directly while in the environment. From a149a50946d352be1cceccccfcee2505ac3c2641 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:34:51 +0200 Subject: [PATCH 182/270] docs: Fix links on the Telemetry page (#17995) This PR tweaks some broken links in the Telemetry page as well as capitalizing instances of "Zed". 
Release Notes: - N/A --- docs/src/telemetry.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/telemetry.md b/docs/src/telemetry.md index cdb44979ea..dd6556c24f 100644 --- a/docs/src/telemetry.md +++ b/docs/src/telemetry.md @@ -31,7 +31,7 @@ Telemetry is sent from the application to our servers. Data is proxied through o Diagnostic events include debug information (stack traces) from crash reports. Reports are sent on the first application launch after the crash occurred. We've built dashboards that allow us to visualize the frequency and severity of issues experienced by users. Having these reports sent automatically allows us to begin implementing fixes without the user needing to file a report in our issue tracker. The plots in the dashboards also give us an informal measurement of the stability of Zed. -You can see what data is sent when a panic occurs by inspecting the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L184) in the zed repo. You can find additional information in the [Debugging Crashes](./development/debugging-crashes.md) documentation. +You can see what data is sent when a panic occurs by inspecting the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L184) in the Zed repo. You can find additional information in the [Debugging Crashes](./development/debugging-crashes.md) documentation. ### Usage Data (Metrics) {#metrics} @@ -48,8 +48,8 @@ Usage Data is associated with a secure random telemetry ID which may be linked t You can audit the metrics data that Zed has reported by running the command {#action zed::OpenTelemetryLog} from the command palette, or clicking `Help > View Telemetry Log` in the application menu. 
-You can see the full list of the event types and exactly the data sent for each by inspecting the `Event` enum and the associated structs in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L63] in the zed repo. +You can see the full list of the event types and exactly the data sent for each by inspecting the `Event` enum and the associated structs in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L63) in the Zed repository. ## Concerns and Questions -If you have concerns about telemetry, please feel free to open issues in our [Zed repository](https://github.com/zed-industries/zed/issues/new/choose). +If you have concerns about telemetry, please feel free to [open an issue](https://github.com/zed-industries/zed/issues/new/choose). From 430ce073d2e00fed8e7fbbe8dae56d4e5338bd9f Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:36:02 +0200 Subject: [PATCH 183/270] docs: Improve warning callout docs (#17997) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR is a quick follow-up to https://github.com/zed-industries/zed/pull/1795. 
😊 Release Notes: - N/A --- docs/theme/css/general.css | 2 +- docs/theme/css/variables.css | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index 6f086a1052..5567ae7fb0 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -268,7 +268,7 @@ blockquote .warning:before { .warning { margin: auto; padding: 1rem 1.25rem; - color: var(--fg); + color: var(--full-contrast); background-color: var(--warning-bg); border: 1px solid var(--warning-border); } diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index 59f2398264..a7c0ed7114 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -111,7 +111,7 @@ --icons-hover: hsl(220, 14%, 90%); --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.4); - --links: hsl(220, 93%, 70%); + --links: hsl(220, 93%, 75%); --link-line-decoration: hsl(220, 92%, 80%, 0.4); --link-line-decoration-hover: hsl(220, 92%, 80%, 0.8); --full-contrast: #fff; @@ -136,6 +136,10 @@ --table-header-bg: hsl(220, 13%, 25%, 0.5); --table-alternate-bg: hsl(220, 13%, 20%, 0.4); + --warning-border: hsl(25, 100%, 85%, 0.2); + --warning-bg: hsl(42, 100%, 40%, 0.1); + --warning-icon: hsl(42, 100%, 80%); + --searchbar-border-color: hsl(220, 13%, 30%); --searchbar-bg: hsl(220, 13%, 22%, 0.5); --searchbar-fg: hsl(220, 14%, 71%); From 3b153a54c271c21faf5ce53778e404cb0b6f8449 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 14:14:03 +0200 Subject: [PATCH 184/270] docs: Improve dark mode syntax highlighting (#18002) This PR introduces [GitHub Light](https://github.com/highlightjs/highlight.js/blob/main/src/styles/github.css) and [GitHub Dark](https://github.com/highlightjs/highlight.js/blob/main/src/styles/github-dark.css) as the syntax highlighting themes for the corresponding modes. 
Release Notes: - N/A --- docs/book.toml | 4 +- docs/src/completions.md | 2 +- docs/theme/css/general.css | 4 +- docs/theme/highlight.css | 251 +++++++++++++++++++++++++++++++++++++ 4 files changed, 256 insertions(+), 5 deletions(-) create mode 100644 docs/theme/highlight.css diff --git a/docs/book.toml b/docs/book.toml index 6696d0bb12..fbf55ef90a 100644 --- a/docs/book.toml +++ b/docs/book.toml @@ -8,8 +8,8 @@ site-url = "/docs/" [output.html] no-section-label = true -preferred-dark-theme = "light" -additional-css = ["theme/page-toc.css", "theme/plugins.css"] +preferred-dark-theme = "dark" +additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css"] additional-js = ["theme/page-toc.js", "theme/plugins.js"] [output.html.print] diff --git a/docs/src/completions.md b/docs/src/completions.md index 167c7c48e6..814bf051e1 100644 --- a/docs/src/completions.md +++ b/docs/src/completions.md @@ -10,7 +10,7 @@ Zed supports supports two sources for completions: When there is an appropriate language server available, Zed will by-default provide completions of variable names, functions, and other symbols in the current file. You can disable these by adding the following to your zed settings.json file: ```json - "show_completions_on_input": false +"show_completions_on_input": false ``` You can manually trigger completions with `ctrl-space` or by triggering the `editor::ShowCompletions` action from the command palette. 
diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index 5567ae7fb0..b422890751 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -355,7 +355,7 @@ kbd { font-style: italic; } -code.hljs { +code:not(pre code).hljs { color: var(--code-text) !important; - background-color: var(--code-bg); + background-color: var(--code-bg) !important; } diff --git a/docs/theme/highlight.css b/docs/theme/highlight.css new file mode 100644 index 0000000000..9d8f39d903 --- /dev/null +++ b/docs/theme/highlight.css @@ -0,0 +1,251 @@ +/*! + Theme: GitHub + Description: Light theme as seen on github.com + Author: github.com + Maintainer: @Hirse + Updated: 2021-05-15 + + Outdated base version: https://github.com/primer/github-syntax-light + Current colors taken from GitHub's CSS +*/ + +.hljs { + color: #24292e; + background: #ffffff; +} + +.hljs-doctag, +.hljs-keyword, +.hljs-meta .hljs-keyword, +.hljs-template-tag, +.hljs-template-variable, +.hljs-type, +.hljs-variable.language_ { + /* prettylights-syntax-keyword */ + color: #d73a49; +} + +.hljs-title, +.hljs-title.class_, +.hljs-title.class_.inherited__, +.hljs-title.function_ { + /* prettylights-syntax-entity */ + color: #6f42c1; +} + +.hljs-attr, +.hljs-attribute, +.hljs-literal, +.hljs-meta, +.hljs-number, +.hljs-operator, +.hljs-variable, +.hljs-selector-attr, +.hljs-selector-class, +.hljs-selector-id { + /* prettylights-syntax-constant */ + color: #005cc5; +} + +.hljs-regexp, +.hljs-string, +.hljs-meta .hljs-string { + /* prettylights-syntax-string */ + color: #032f62; +} + +.hljs-built_in, +.hljs-symbol { + /* prettylights-syntax-variable */ + color: #e36209; +} + +.hljs-comment, +.hljs-code, +.hljs-formula { + /* prettylights-syntax-comment */ + color: #6a737d; +} + +.hljs-name, +.hljs-quote, +.hljs-selector-tag, +.hljs-selector-pseudo { + /* prettylights-syntax-entity-tag */ + color: #22863a; +} + +.hljs-subst { + /* prettylights-syntax-storage-modifier-import */ + color: #24292e; +} + 
+.hljs-section { + /* prettylights-syntax-markup-heading */ + color: #005cc5; + font-weight: bold; +} + +.hljs-bullet { + /* prettylights-syntax-markup-list */ + color: #735c0f; +} + +.hljs-emphasis { + /* prettylights-syntax-markup-italic */ + color: #24292e; + font-style: italic; +} + +.hljs-strong { + /* prettylights-syntax-markup-bold */ + color: #24292e; + font-weight: bold; +} + +.hljs-addition { + /* prettylights-syntax-markup-inserted */ + color: #22863a; + background-color: #f0fff4; +} + +.hljs-deletion { + /* prettylights-syntax-markup-deleted */ + color: #b31d28; + background-color: #ffeef0; +} + +.hljs-char.escape_, +.hljs-link, +.hljs-params, +.hljs-property, +.hljs-punctuation, +.hljs-tag { + /* purposely ignored */ +} + +/*! + Theme: GitHub Dark + Description: Dark theme as seen on github.com + Author: github.com + Maintainer: @Hirse + Updated: 2021-05-15 + + Outdated base version: https://github.com/primer/github-syntax-dark + Current colors taken from GitHub's CSS +*/ + +.dark .hljs { + color: #c9d1d9; + background: #0d1117; +} + +.dark .hljs-doctag, +.dark .hljs-keyword, +.dark .hljs-meta .hljs-keyword, +.dark .hljs-template-tag, +.dark .hljs-template-variable, +.dark .hljs-type, +.dark .hljs-variable.language_ { + /* prettylights-syntax-keyword */ + color: #ff7b72; +} + +.dark .hljs-title, +.dark .hljs-title.class_, +.dark .hljs-title.class_.inherited__, +.dark .hljs-title.function_ { + /* prettylights-syntax-entity */ + color: #d2a8ff; +} + +.dark .hljs-attr, +.dark .hljs-attribute, +.dark .hljs-literal, +.dark .hljs-meta, +.dark .hljs-number, +.dark .hljs-operator, +.dark .hljs-variable, +.dark .hljs-selector-attr, +.dark .hljs-selector-class, +.dark .hljs-selector-id { + /* prettylights-syntax-constant */ + color: #79c0ff; +} + +.dark .hljs-regexp, +.dark .hljs-string, +.dark .hljs-meta .hljs-string { + /* prettylights-syntax-string */ + color: #a5d6ff; +} + +.dark .hljs-built_in, +.dark .hljs-symbol { + /* prettylights-syntax-variable */ + 
color: #ffa657; +} + +.dark .hljs-comment, +.dark .hljs-code, +.dark .hljs-formula { + /* prettylights-syntax-comment */ + color: #8b949e; +} + +.dark .hljs-name, +.dark .hljs-quote, +.dark .hljs-selector-tag, +.dark .hljs-selector-pseudo { + /* prettylights-syntax-entity-tag */ + color: #7ee787; +} + +.dark .hljs-subst { + /* prettylights-syntax-storage-modifier-import */ + color: #c9d1d9; +} + +.dark .hljs-section { + /* prettylights-syntax-markup-heading */ + color: #1f6feb; + font-weight: bold; +} + +.dark .hljs-bullet { + /* prettylights-syntax-markup-list */ + color: #f2cc60; +} + +.dark .hljs-emphasis { + /* prettylights-syntax-markup-italic */ + color: #c9d1d9; + font-style: italic; +} + +.dark .hljs-strong { + /* prettylights-syntax-markup-bold */ + color: #c9d1d9; + font-weight: bold; +} + +.dark .hljs-addition { + /* prettylights-syntax-markup-inserted */ + color: #aff5b4; + background-color: #033a16; +} + +.dark .hljs-deletion { + /* prettylights-syntax-markup-deleted */ + color: #ffdcd7; + background-color: #67060c; +} + +.dark .hljs-char.escape_, +.dark .hljs-link, +.dark .hljs-params, +.dark .hljs-property, +.dark .hljs-punctuation, +.dark .hljs-tag { + /* purposely ignored */ +} From 3ac201e448fd85b818d3d803615a36c1791d49bf Mon Sep 17 00:00:00 2001 From: ensi <66754841+ncor@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:32:37 +0300 Subject: [PATCH 185/270] gpui: Improve underline appearance (#17586) --- crates/gpui/src/platform/blade/shaders.wgsl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/gpui/src/platform/blade/shaders.wgsl b/crates/gpui/src/platform/blade/shaders.wgsl index c3983c7acc..6099cbd93a 100644 --- a/crates/gpui/src/platform/blade/shaders.wgsl +++ b/crates/gpui/src/platform/blade/shaders.wgsl @@ -488,8 +488,8 @@ fn fs_underline(input: UnderlineVarying) -> @location(0) vec4 { let half_thickness = underline.thickness * 0.5; let st = (input.position.xy - underline.bounds.origin) / 
underline.bounds.size.y - vec2(0.0, 0.5); - let frequency = M_PI_F * 3.0 * underline.thickness / 8.0; - let amplitude = 1.0 / (2.0 * underline.thickness); + let frequency = M_PI_F * 3.0 * underline.thickness / 3.0; + let amplitude = 1.0 / (4.0 * underline.thickness); let sine = sin(st.x * frequency) * amplitude; let dSine = cos(st.x * frequency) * amplitude * frequency; let distance = (st.y - sine) / sqrt(1.0 + dSine * dSine); From 1a62396b1ee9cd7c8beed803539728ed8a8c784f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 08:19:06 -0600 Subject: [PATCH 186/270] vim: Fix gv after indent/toggle comments (#17986) Release Notes: - vim: Fixed `gv` after > and < in visual mode --- crates/vim/src/{normal => }/indent.rs | 62 +++++++++++++- crates/vim/src/normal.rs | 104 ++++++++--------------- crates/vim/src/normal/mark.rs | 2 +- crates/vim/src/vim.rs | 2 + crates/vim/test_data/test_indent_gv.json | 8 ++ 5 files changed, 107 insertions(+), 71 deletions(-) rename crates/vim/src/{normal => }/indent.rs (58%) create mode 100644 crates/vim/test_data/test_indent_gv.json diff --git a/crates/vim/src/normal/indent.rs b/crates/vim/src/indent.rs similarity index 58% rename from crates/vim/src/normal/indent.rs rename to crates/vim/src/indent.rs index 4b4d5e7e80..676713c816 100644 --- a/crates/vim/src/normal/indent.rs +++ b/crates/vim/src/indent.rs @@ -1,6 +1,7 @@ -use crate::{motion::Motion, object::Object, Vim}; +use crate::{motion::Motion, object::Object, state::Mode, Vim}; use collections::HashMap; -use editor::{display_map::ToDisplayPoint, Bias}; +use editor::{display_map::ToDisplayPoint, Bias, Editor}; +use gpui::actions; use language::SelectionGoal; use ui::ViewContext; @@ -10,6 +11,46 @@ pub(crate) enum IndentDirection { Out, } +actions!(vim, [Indent, Outdent,]); + +pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { + Vim::action(editor, cx, |vim, _: &Indent, cx| { + vim.record_current_action(cx); + let count = vim.take_count(cx).unwrap_or(1); + 
vim.store_visual_marks(cx); + vim.update_editor(cx, |vim, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut original_positions = vim.save_selection_starts(editor, cx); + for _ in 0..count { + editor.indent(&Default::default(), cx); + } + vim.restore_selection_cursors(editor, cx, &mut original_positions); + }); + }); + if vim.mode.is_visual() { + vim.switch_mode(Mode::Normal, true, cx) + } + }); + + Vim::action(editor, cx, |vim, _: &Outdent, cx| { + vim.record_current_action(cx); + let count = vim.take_count(cx).unwrap_or(1); + vim.store_visual_marks(cx); + vim.update_editor(cx, |vim, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut original_positions = vim.save_selection_starts(editor, cx); + for _ in 0..count { + editor.outdent(&Default::default(), cx); + } + vim.restore_selection_cursors(editor, cx, &mut original_positions); + }); + }); + if vim.mode.is_visual() { + vim.switch_mode(Mode::Normal, true, cx) + } + }); +} + impl Vim { pub(crate) fn indent_motion( &mut self, @@ -78,3 +119,20 @@ impl Vim { }); } } + +#[cfg(test)] +mod test { + use crate::test::NeovimBackedTestContext; + + #[gpui::test] + async fn test_indent_gv(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("shiftwidth=4").await; + + cx.set_shared_state("ˇhello\nworld\n").await; + cx.simulate_shared_keystrokes("v j > g v").await; + cx.shared_state() + .await + .assert_eq("« hello\n ˇ» world\n"); + } +} diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 8198c0da53..741e09f178 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -2,7 +2,6 @@ mod case; mod change; mod delete; mod increment; -mod indent; pub(crate) mod mark; mod paste; pub(crate) mod repeat; @@ -16,6 +15,7 @@ use std::collections::HashMap; use std::sync::Arc; use crate::{ + indent::IndentDirection, motion::{self, first_non_whitespace, next_line_end, right, Motion}, object::Object, state::{Mode, Operator}, @@ -34,8 
+34,6 @@ use language::{Point, SelectionGoal}; use log::error; use multi_buffer::MultiBufferRow; -use self::indent::IndentDirection; - actions!( vim, [ @@ -56,8 +54,6 @@ actions!( ConvertToUpperCase, ConvertToLowerCase, JoinLines, - Indent, - Outdent, ToggleComments, Undo, Redo, @@ -129,41 +125,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { }) }); if vim.mode.is_visual() { - vim.switch_mode(Mode::Normal, false, cx) - } - }); - - Vim::action(editor, cx, |vim, _: &Indent, cx| { - vim.record_current_action(cx); - let count = vim.take_count(cx).unwrap_or(1); - vim.update_editor(cx, |_, editor, cx| { - editor.transact(cx, |editor, cx| { - let mut original_positions = save_selection_starts(editor, cx); - for _ in 0..count { - editor.indent(&Default::default(), cx); - } - restore_selection_cursors(editor, cx, &mut original_positions); - }); - }); - if vim.mode.is_visual() { - vim.switch_mode(Mode::Normal, false, cx) - } - }); - - Vim::action(editor, cx, |vim, _: &Outdent, cx| { - vim.record_current_action(cx); - let count = vim.take_count(cx).unwrap_or(1); - vim.update_editor(cx, |_, editor, cx| { - editor.transact(cx, |editor, cx| { - let mut original_positions = save_selection_starts(editor, cx); - for _ in 0..count { - editor.outdent(&Default::default(), cx); - } - restore_selection_cursors(editor, cx, &mut original_positions); - }); - }); - if vim.mode.is_visual() { - vim.switch_mode(Mode::Normal, false, cx) + vim.switch_mode(Mode::Normal, true, cx) } }); @@ -428,15 +390,16 @@ impl Vim { fn toggle_comments(&mut self, _: &ToggleComments, cx: &mut ViewContext) { self.record_current_action(cx); - self.update_editor(cx, |_, editor, cx| { + self.store_visual_marks(cx); + self.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = save_selection_starts(editor, cx); + let mut original_positions = vim.save_selection_starts(editor, cx); editor.toggle_comments(&Default::default(), cx); - 
restore_selection_cursors(editor, cx, &mut original_positions); + vim.restore_selection_cursors(editor, cx, &mut original_positions); }); }); if self.mode.is_visual() { - self.switch_mode(Mode::Normal, false, cx) + self.switch_mode(Mode::Normal, true, cx) } } @@ -480,33 +443,38 @@ impl Vim { }); self.pop_operator(cx); } -} -fn save_selection_starts(editor: &Editor, cx: &mut ViewContext) -> HashMap { - let (map, selections) = editor.selections.all_display(cx); - selections - .iter() - .map(|selection| { - ( - selection.id, - map.display_point_to_anchor(selection.start, Bias::Right), - ) - }) - .collect::>() -} + pub fn save_selection_starts( + &self, + editor: &Editor, + cx: &mut ViewContext, + ) -> HashMap { + let (map, selections) = editor.selections.all_display(cx); + selections + .iter() + .map(|selection| { + ( + selection.id, + map.display_point_to_anchor(selection.start, Bias::Right), + ) + }) + .collect::>() + } -fn restore_selection_cursors( - editor: &mut Editor, - cx: &mut ViewContext, - positions: &mut HashMap, -) { - editor.change_selections(Some(Autoscroll::fit()), cx, |s| { - s.move_with(|map, selection| { - if let Some(anchor) = positions.remove(&selection.id) { - selection.collapse_to(anchor.to_display_point(map), SelectionGoal::None); - } + pub fn restore_selection_cursors( + &self, + editor: &mut Editor, + cx: &mut ViewContext, + positions: &mut HashMap, + ) { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + if let Some(anchor) = positions.remove(&selection.id) { + selection.collapse_to(anchor.to_display_point(map), SelectionGoal::None); + } + }); }); - }); + } } #[cfg(test)] mod test { diff --git a/crates/vim/src/normal/mark.rs b/crates/vim/src/normal/mark.rs index ae6dd3eed7..787430e747 100644 --- a/crates/vim/src/normal/mark.rs +++ b/crates/vim/src/normal/mark.rs @@ -54,7 +54,7 @@ impl Vim { ); starts.push( map.buffer_snapshot - .anchor_after(selection.start.to_offset(&map, Bias::Right)), + 
.anchor_before(selection.start.to_offset(&map, Bias::Left)), ); reversed.push(selection.reversed) } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index fc5097d845..a4b77b1a7a 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -6,6 +6,7 @@ mod test; mod change_list; mod command; mod digraph; +mod indent; mod insert; mod mode_indicator; mod motion; @@ -289,6 +290,7 @@ impl Vim { motion::register(editor, cx); command::register(editor, cx); replace::register(editor, cx); + indent::register(editor, cx); object::register(editor, cx); visual::register(editor, cx); change_list::register(editor, cx); diff --git a/crates/vim/test_data/test_indent_gv.json b/crates/vim/test_data/test_indent_gv.json new file mode 100644 index 0000000000..2c24406aee --- /dev/null +++ b/crates/vim/test_data/test_indent_gv.json @@ -0,0 +1,8 @@ +{"SetOption":{"value":"shiftwidth=4"}} +{"Put":{"state":"ˇhello\nworld\n"}} +{"Key":"v"} +{"Key":"j"} +{"Key":">"} +{"Key":"g"} +{"Key":"v"} +{"Get":{"state":"« hello\n ˇ» world\n","mode":"Visual"}} From 84f2e0ee375a0cd006061a735c75bc90979bc944 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:36:32 +0200 Subject: [PATCH 187/270] Use buffer font in the terminal inline assistant (#18009) This PR is a follow up to https://github.com/zed-industries/zed/pull/17875. 
Release Notes: - N/A --- crates/assistant/src/terminal_inline_assistant.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index d5c085b646..06661944d9 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -570,7 +570,7 @@ impl Render for PromptEditor { .bg(cx.theme().colors().editor_background) .border_y_1() .border_color(cx.theme().status().info_border) - .py_1p5() + .py_2() .h_full() .w_full() .on_action(cx.listener(Self::confirm)) @@ -949,12 +949,11 @@ impl PromptEditor { } else { cx.theme().colors().text }, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), - font_weight: settings.ui_font.weight, - line_height: relative(1.3), + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size.into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), ..Default::default() }; EditorElement::new( From 425c8f8c3e8a9cc0cb9985e403ed26a1286c4a10 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 18 Sep 2024 10:42:17 -0400 Subject: [PATCH 188/270] Alphabetize actions (#18007) Drive-by maintenance PR while working on another PR. 
Release Notes: - N/A --- crates/editor/src/actions.rs | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 5811ec7b92..93c83af195 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -156,14 +156,14 @@ pub struct DeleteToPreviousWordStart { impl_actions!( editor, [ + ComposeCompletion, ConfirmCodeAction, ConfirmCompletion, - ComposeCompletion, DeleteToNextWordEnd, DeleteToPreviousWordStart, ExpandExcerpts, - ExpandExcerptsUp, ExpandExcerptsDown, + ExpandExcerptsUp, FoldAt, HandleInput, MoveDownByLines, @@ -188,8 +188,8 @@ impl_actions!( gpui::actions!( editor, [ - AcceptPartialCopilotSuggestion, AcceptInlineCompletion, + AcceptPartialCopilotSuggestion, AcceptPartialInlineCompletion, AddSelectionAbove, AddSelectionBelow, @@ -210,10 +210,10 @@ gpui::actions!( ConvertToUpperCamelCase, ConvertToUpperCase, Copy, + CopyFileLocation, CopyHighlightJson, CopyPath, CopyPermalinkToLine, - CopyFileLocation, CopyRelativePath, Cut, CutToEndOfLine, @@ -232,10 +232,10 @@ gpui::actions!( Fold, FoldSelectedRanges, Format, - GoToDefinition, - GoToDefinitionSplit, GoToDeclaration, GoToDeclarationSplit, + GoToDefinition, + GoToDefinitionSplit, GoToDiagnostic, GoToHunk, GoToImplementation, @@ -273,9 +273,9 @@ gpui::actions!( NextScreen, OpenExcerpts, OpenExcerptsSplit, + OpenFile, OpenPermalinkToLine, OpenUrl, - OpenFile, Outdent, PageDown, PageUp, @@ -284,23 +284,25 @@ gpui::actions!( Redo, RedoSelection, Rename, - Rewrap, RestartLanguageServer, RevealInFileManager, ReverseLines, RevertFile, RevertSelectedHunks, + Rewrap, ScrollCursorBottom, ScrollCursorCenter, - ScrollCursorTop, ScrollCursorCenterTopBottom, + ScrollCursorTop, SelectAll, SelectAllMatches, SelectDown, - SelectLargerSyntaxNode, SelectEnclosingSymbol, + SelectLargerSyntaxNode, SelectLeft, SelectLine, + SelectPageDown, + SelectPageUp, SelectRight, SelectSmallerSyntaxNode, 
SelectToBeginning, @@ -312,8 +314,6 @@ gpui::actions!( SelectToPreviousWordStart, SelectToStartOfParagraph, SelectUp, - SelectPageDown, - SelectPageUp, ShowCharacterPalette, ShowInlineCompletion, ShowSignatureHelp, @@ -327,13 +327,13 @@ gpui::actions!( ToggleAutoSignatureHelp, ToggleGitBlame, ToggleGitBlameInline, - ToggleSelectionMenu, ToggleHunkDiff, + ToggleIndentGuides, ToggleInlayHints, ToggleInlineCompletions, ToggleLineNumbers, ToggleRelativeLineNumbers, - ToggleIndentGuides, + ToggleSelectionMenu, ToggleSoftWrap, ToggleTabBar, Transpose, From 373a17acf413c293e445bbb7b266dc970079a7ba Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 11:11:38 -0400 Subject: [PATCH 189/270] Add ability to display backgrounds for inlay hints (#18010) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds the ability to display backgrounds for inlay hints within the editor. This is controlled by the new `inlay_hints.show_background` setting. This setting defaults to `false`. To enable the setting, add the following to your `settings.json`: ```json { "inlay_hints": { "enabled": true, "show_background": true } } ``` When enabled, the inlay hint backgrounds will use the `hint.background` color from the theme. | Disabled | Enabled | | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | | Screenshot 2024-09-17 at 4 21 53 PM | Screenshot 2024-09-17 at 4 21 43 PM | Related issues: - #12485 - #17392 Release Notes: - Added an `inlay_hints.show_background` setting to allow displaying backgrounds for inlay hints in the editor. - This setting defaults to `false`. - If enabled, the inlay hint backgrounds will use the `hint.background` color from the theme. 
--- assets/settings/default.json | 4 ++++ crates/assistant/src/prompt_library.rs | 6 ++---- crates/collab/src/tests/editor_tests.rs | 4 ++++ crates/editor/src/editor.rs | 21 +++++++++++++++------ crates/editor/src/hover_links.rs | 1 + crates/editor/src/hover_popover.rs | 1 + crates/editor/src/inlay_hint_cache.rs | 15 +++++++++++++++ crates/language/src/language_settings.rs | 8 ++++++++ docs/src/configuring-zed.md | 1 + 9 files changed, 51 insertions(+), 10 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 35b2ca20f2..a9e1865258 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -318,6 +318,10 @@ "show_parameter_hints": true, // Corresponds to null/None LSP hint type value. "show_other_hints": true, + // Whether to show a background for inlay hints. + // + // If set to `true`, the background will use the `hint.background` color from the current theme. + "show_background": false, // Time to wait after editing the buffer, before requesting the hints, // set to 0 to disable debouncing. 
"edit_debounce_ms": 700, diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs index c99a7c1521..76ee95d507 100644 --- a/crates/assistant/src/prompt_library.rs +++ b/crates/assistant/src/prompt_library.rs @@ -921,10 +921,8 @@ impl PromptLibrary { scrollbar_width: Pixels::ZERO, syntax: cx.theme().syntax().clone(), status: cx.theme().status().clone(), - inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), - ..HighlightStyle::default() - }, + inlay_hints_style: + editor::make_inlay_hints_style(cx), suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), ..HighlightStyle::default() diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index de03144774..7fb1a49f87 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -1524,6 +1524,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( show_type_hints: true, show_parameter_hints: false, show_other_hints: true, + show_background: false, }) }); }); @@ -1538,6 +1539,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( show_type_hints: true, show_parameter_hints: false, show_other_hints: true, + show_background: false, }) }); }); @@ -1786,6 +1788,7 @@ async fn test_inlay_hint_refresh_is_forwarded( show_type_hints: false, show_parameter_hints: false, show_other_hints: false, + show_background: false, }) }); }); @@ -1800,6 +1803,7 @@ async fn test_inlay_hint_refresh_is_forwarded( show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); }); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 61a59665c1..f797f82832 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -412,6 +412,19 @@ impl Default for EditorStyle { } } +pub fn make_inlay_hints_style(cx: &WindowContext) -> HighlightStyle { + let show_background = all_language_settings(None, cx) + 
.language(None) + .inlay_hints + .show_background; + + HighlightStyle { + color: Some(cx.theme().status().hint), + background_color: show_background.then(|| cx.theme().status().hint_background), + ..HighlightStyle::default() + } +} + type CompletionId = usize; #[derive(Clone, Debug)] @@ -10034,9 +10047,8 @@ impl Editor { syntax: cx.editor_style.syntax.clone(), status: cx.editor_style.status.clone(), inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), font_weight: Some(FontWeight::BOLD), - ..HighlightStyle::default() + ..make_inlay_hints_style(cx) }, suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), @@ -12992,10 +13004,7 @@ impl Render for Editor { scrollbar_width: EditorElement::SCROLLBAR_WIDTH, syntax: cx.theme().syntax().clone(), status: cx.theme().status().clone(), - inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), - ..HighlightStyle::default() - }, + inlay_hints_style: make_inlay_hints_style(cx), suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), ..HighlightStyle::default() diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 3f590273df..ac30b91996 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -1205,6 +1205,7 @@ mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index adbb5899ff..f6eb837ae8 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1337,6 +1337,7 @@ mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 62c5cde9d8..24ccf64c4c 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ 
b/crates/editor/src/inlay_hint_cache.rs @@ -1296,6 +1296,7 @@ pub mod tests { show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)), show_other_hints: allowed_hint_kinds.contains(&None), + show_background: false, }) }); @@ -1428,6 +1429,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -1547,6 +1549,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -1777,6 +1780,7 @@ pub mod tests { show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)), show_other_hints: allowed_hint_kinds.contains(&None), + show_background: false, }) }); @@ -1941,6 +1945,7 @@ pub mod tests { show_parameter_hints: new_allowed_hint_kinds .contains(&Some(InlayHintKind::Parameter)), show_other_hints: new_allowed_hint_kinds.contains(&None), + show_background: false, }) }); cx.executor().run_until_parked(); @@ -1987,6 +1992,7 @@ pub mod tests { show_parameter_hints: another_allowed_hint_kinds .contains(&Some(InlayHintKind::Parameter)), show_other_hints: another_allowed_hint_kinds.contains(&None), + show_background: false, }) }); cx.executor().run_until_parked(); @@ -2047,6 +2053,7 @@ pub mod tests { show_parameter_hints: final_allowed_hint_kinds .contains(&Some(InlayHintKind::Parameter)), show_other_hints: final_allowed_hint_kinds.contains(&None), + show_background: false, }) }); cx.executor().run_until_parked(); @@ -2122,6 +2129,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -2256,6 +2264,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -2551,6 +2560,7 @@ pub mod tests { 
show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -2902,6 +2912,7 @@ pub mod tests { show_type_hints: false, show_parameter_hints: false, show_other_hints: false, + show_background: false, }) }); @@ -3096,6 +3107,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); cx.executor().run_until_parked(); @@ -3131,6 +3143,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -3225,6 +3238,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -3305,6 +3319,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); cx.executor().run_until_parked(); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index b465173cee..77c9a1d18c 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -741,6 +741,14 @@ pub struct InlayHintSettings { /// Default: true #[serde(default = "default_true")] pub show_other_hints: bool, + /// Whether to show a background for inlay hints. + /// + /// If set to `true`, the background will use the `hint.background` color + /// from the current theme. + /// + /// Default: false + #[serde(default)] + pub show_background: bool, /// Whether or not to debounce inlay hints updates after buffer edits. /// /// Set to 0 to disable debouncing. 
diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 87cd053f1a..382c33c216 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -982,6 +982,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files "show_type_hints": true, "show_parameter_hints": true, "show_other_hints": true, + "show_background": false, "edit_debounce_ms": 700, "scroll_debounce_ms": 50 } From a7977aa64dc8a4c600eb167d645de880e9964d68 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 17:18:56 +0200 Subject: [PATCH 190/270] Tweak multibuffer header padding (#18011) --- crates/editor/src/element.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 7e2b3cc63f..47107b9754 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2079,13 +2079,13 @@ impl EditorElement { .id(("path excerpt header", EntityId::from(block_id))) .w_full() .px(header_padding) + .pt(header_padding) .child( h_flex() .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) .id("path header block") .h(2. * cx.line_height()) - .pl(gpui::px(12.)) - .pr(gpui::px(8.)) + .px(gpui::px(12.)) .rounded_md() .shadow_md() .border_1() From eda7e88fd4a9e500c951344f6ddd659ee13ca0fc Mon Sep 17 00:00:00 2001 From: Marek Fajkus Date: Wed, 18 Sep 2024 18:51:11 +0200 Subject: [PATCH 191/270] nix: Fix (potential) glibc errors in dev shell (#17974) Previously the rustc and cargo did were not declared dependencies supplied to devshell. This means that shell relied some impure cargo and rustc version found in the system. This lead to issues with GLIBC version on systems which have different GLIBC version globally. This package exposes nixpkgs rustc and cargo version into the shell preventing issues with incompatibility. 
Release Notes: - N/A --- nix/shell.nix | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nix/shell.nix b/nix/shell.nix index 03e298e132..476374b67e 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -20,6 +20,8 @@ in wayland xorg.libxcb vulkan-loader + rustc + cargo ]; in pkgs.mkShell.override {inherit stdenv;} { @@ -36,10 +38,7 @@ in inherit buildInputs; shellHook = '' - export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath ([ - pkgs.vulkan-loader - ] - ++ buildInputs)}:$LD_LIBRARY_PATH" + export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath buildInputs}:$LD_LIBRARY_PATH" export PROTOC="${pkgs.protobuf}/bin/protoc" ''; From 826777a257b8e43900fd1d8cbf7cee3757bbe765 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 11:15:46 -0600 Subject: [PATCH 192/270] Tidy up LSP (#17973) Release Notes: - N/A --- crates/extension/src/extension_lsp_adapter.rs | 2 +- crates/language/src/language.rs | 10 +++++++--- crates/language/src/language_registry.rs | 12 +++++------- crates/project/src/lsp_store.rs | 11 ----------- crates/remote_server/src/headless_project.rs | 6 +----- 5 files changed, 14 insertions(+), 27 deletions(-) diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index f82b6c9e0e..d6125241f1 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -38,7 +38,7 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, + _: Option>, delegate: Arc, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7901a49d00..309a67a1a9 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -208,7 +208,7 @@ impl CachedLspAdapter { pub async fn get_language_server_command( self: Arc, - container_dir: Arc, + container_dir: Option>, delegate: Arc, cx: &mut AsyncAppContext, 
) -> Result { @@ -294,7 +294,7 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, - container_dir: Arc, + container_dir: Option>, delegate: Arc, mut cached_binary: futures::lock::MutexGuard<'a, Option>, cx: &'a mut AsyncAppContext, @@ -325,6 +325,10 @@ pub trait LspAdapter: 'static + Send + Sync { return Ok(cached_binary.clone()); } + let Some(container_dir) = container_dir else { + anyhow::bail!("cannot download language servers for remotes (yet)") + }; + if !container_dir.exists() { smol::fs::create_dir_all(&container_dir) .await @@ -1664,7 +1668,7 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, + _: Option>, _: Arc, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 918da4873f..17ebef50e8 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -869,12 +869,10 @@ impl LanguageRegistry { adapter.name.0 ); - let download_dir = &self - .language_server_download_dir - .clone() - .ok_or_else(|| anyhow!("language server download directory has not been assigned before starting server")) - .log_err()?; - let container_dir: Arc = Arc::from(download_dir.join(adapter.name.0.as_ref())); + let container_dir: Option> = self + .language_server_download_dir + .as_ref() + .map(|dir| Arc::from(dir.join(adapter.name.0.as_ref()))); let root_path = root_path.clone(); let this = Arc::downgrade(self); @@ -969,7 +967,7 @@ impl LanguageRegistry { Some(PendingLanguageServer { server_id, task, - container_dir: Some(container_dir), + container_dir, }) } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 24852afd70..6dd528147b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4742,17 +4742,6 @@ impl LspStore { .reorder_language_servers(&language, 
enabled_lsp_adapters); } - /* - ssh client owns the lifecycle of the language servers - ssh host actually runs the binaries - - in the future: ssh client will use the local extensions to get the downloads etc. - and send them up over the ssh connection (but today) we'll just the static config - - languages::() <-- registers lsp adapters - on the ssh host we won't have adapters for the LSPs - */ - fn start_language_server_on_ssh_host( &mut self, worktree: &Model, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ec26bddfc3..bbd82281d8 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -42,11 +42,7 @@ impl HeadlessProject { } pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { - let mut languages = LanguageRegistry::new(cx.background_executor().clone()); - languages - .set_language_server_download_dir(PathBuf::from("/Users/conrad/what-could-go-wrong")); - - let languages = Arc::new(languages); + let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); let buffer_store = cx.new_model(|cx| { From fb7a7a564a54aa12f0f97a9dd36bf9bf30c16807 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 11:15:54 -0600 Subject: [PATCH 193/270] ssh remoting: open settings locally (#18020) Release Notes: - ssh remoting: Open settings files in a non-remote window. 
--- crates/workspace/src/workspace.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a7c63c57f6..98ac49992d 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1578,7 +1578,7 @@ impl Workspace { T: 'static, F: 'static + FnOnce(&mut Workspace, &mut ViewContext) -> T, { - if self.project.read(cx).is_local_or_ssh() { + if self.project.read(cx).is_local() { Task::Ready(Some(Ok(callback(self, cx)))) } else { let env = self.project.read(cx).cli_environment(cx); From 772bda54a21079be9c6019f8a8836576add71d8c Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 13:35:55 -0400 Subject: [PATCH 194/270] Move remaining self-hosted jobs to BuildJet (#18018) --- .github/workflows/bump_patch_version.yml | 3 +-- .github/workflows/deploy_collab.yml | 6 ++---- .github/workflows/randomized_tests.yml | 3 +-- .github/workflows/release_nightly.yml | 3 +-- 4 files changed, 5 insertions(+), 10 deletions(-) diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index e2789a7da7..d05da31e6a 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -15,8 +15,7 @@ concurrency: jobs: bump_patch_version: runs-on: - - self-hosted - - test + - buildjet-16vcpu-ubuntu-2204 steps: - name: Checkout code uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 6801be2a54..7abd52e5a6 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -61,8 +61,7 @@ jobs: - style - tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -89,8 +88,7 @@ jobs: needs: - publish runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 
steps: - name: Sign into Kubernetes diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index 8b628fe5a2..57f43d4961 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -19,8 +19,7 @@ jobs: tests: name: Run randomized tests runs-on: - - self-hosted - - randomized-tests + - buildjet-16vcpu-ubuntu-2204 steps: - name: Install Node uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index d8e6b6d919..450c63b82f 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -97,8 +97,7 @@ jobs: name: Create a Linux *.tar.gz bundle for x86 if: github.repository_owner == 'zed-industries' runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 needs: tests env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} From 97dc1d193f9a4b7cc90a63c7cc01870dbf05db79 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 14:24:09 -0400 Subject: [PATCH 195/270] Use `@tag.doctype` for HTML doctype highlights (#18024) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the following extensions to use the `@tag.doctype` selector for highlighting HTML doctypes: - Astro - Elixir (HEEx) - HTML Additionally, it also changes the base selector for HTML tags from `@keyword` to `@tag`. | Before | After | | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | | Screenshot 2024-09-18 at 2 04 41 PM | Screenshot 2024-09-18 at 2 05 00 PM | Extracted this from https://github.com/zed-industries/zed/pull/16723. 
Release Notes: - N/A --------- Co-authored-by: 狐狸 <134658521+Huliiiiii@users.noreply.github.com> --- docs/src/extensions/languages.md | 1 + extensions/astro/languages/astro/highlights.scm | 2 +- extensions/elixir/languages/heex/highlights.scm | 2 +- extensions/html/languages/html/highlights.scm | 4 ++-- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index c003285303..174a27c6e6 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -123,6 +123,7 @@ This query marks strings, object keys, and numbers for highlighting. The followi | @string.special | Captures special strings | | @string.special.symbol | Captures special symbols | | @tag | Captures tags | +| @tag.doctype | Captures doctypes (e.g., in HTML) | | @text.literal | Captures literal text | | @title | Captures titles | | @type | Captures types | diff --git a/extensions/astro/languages/astro/highlights.scm b/extensions/astro/languages/astro/highlights.scm index 491e8cc337..a565e22b6e 100644 --- a/extensions/astro/languages/astro/highlights.scm +++ b/extensions/astro/languages/astro/highlights.scm @@ -1,6 +1,6 @@ (tag_name) @tag (erroneous_end_tag_name) @keyword -(doctype) @constant +(doctype) @tag.doctype (attribute_name) @property (attribute_value) @string (comment) @comment diff --git a/extensions/elixir/languages/heex/highlights.scm b/extensions/elixir/languages/heex/highlights.scm index 5252b71fac..9662c95524 100644 --- a/extensions/elixir/languages/heex/highlights.scm +++ b/extensions/elixir/languages/heex/highlights.scm @@ -27,7 +27,7 @@ "=" @operator ; HEEx inherits the DOCTYPE tag from HTML -(doctype) @constant +(doctype) @tag.doctype (comment) @comment diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index e2b8e35bf4..6bb0c23374 100644 --- a/extensions/html/languages/html/highlights.scm +++ 
b/extensions/html/languages/html/highlights.scm @@ -1,6 +1,6 @@ -(tag_name) @keyword +(tag_name) @tag (erroneous_end_tag_name) @keyword -(doctype) @constant +(doctype) @tag.doctype (attribute_name) @property (attribute_value) @string (comment) @comment From 30ef7e62bfff3b2e4c5cdbeead502d5a0814c83e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 14:28:00 -0400 Subject: [PATCH 196/270] Fix arm buildjet (#18023) Run `apt-get update` before `apt-get install` on Linux. Hopefully will fix building on Linux Arm. --- script/linux | 1 + 1 file changed, 1 insertion(+) diff --git a/script/linux b/script/linux index d894d33ea8..eca3bf7f7d 100755 --- a/script/linux +++ b/script/linux @@ -33,6 +33,7 @@ if [[ -n $apt ]]; then elfutils libsqlite3-dev ) + $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" exit 0 fi From 71b6f739cdce3303ba129e41673cb6e38044f279 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:08:19 -0400 Subject: [PATCH 197/270] Pin actions/checkout action to 692973e (#18030) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/checkout](https://redirect.github.com/actions/checkout) | action | pinDigest | -> `692973e` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/bump_nightly_tag.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/bump_nightly_tag.yml b/.github/workflows/bump_nightly_tag.yml index 54a3970a1c..0959ae9677 100644 --- a/.github/workflows/bump_nightly_tag.yml +++ b/.github/workflows/bump_nightly_tag.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 with: fetch-depth: 0 From 97f5fcf8e6a42c07d0b12982030b701246ac3d65 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 15:18:29 -0400 Subject: [PATCH 198/270] Fix nightly linux x86 build (#18029) Makes our nightly script for Linux x86 (broken) match the steps for Linux ARM (working). --- .github/workflows/release_nightly.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 450c63b82f..17db66a264 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -113,6 +113,12 @@ jobs: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install Linux dependencies + run: ./script/linux + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 100 + - name: Set release channel to nightly run: | set -euo pipefail From 9016de5d6350e0a9bbf6a51076c04acd9b7fba96 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:56:40 -0400 Subject: [PATCH 199/270] Update Rust crate anyhow to v1.0.89 (#18031) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | 
|---|---|---|---| | [anyhow](https://redirect.github.com/dtolnay/anyhow) | workspace.dependencies | patch | `1.0.86` -> `1.0.89` | --- ### Release Notes
dtolnay/anyhow (anyhow) ### [`v1.0.89`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.89) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.88...1.0.89) - Make anyhow::Error's `UnwindSafe` and `RefUnwindSafe` impl consistently available between versions of Rust newer and older than 1.72 ([#​386](https://redirect.github.com/dtolnay/anyhow/issues/386)) ### [`v1.0.88`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.88) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.87...1.0.88) - Documentation improvements ### [`v1.0.87`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.87) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.86...1.0.87) - Support more APIs, including `Error::new` and `Error::chain`, in no-std mode on Rust 1.81+ ([#​383](https://redirect.github.com/dtolnay/anyhow/issues/383))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 930415440b..0640aff19c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -263,9 +263,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "approx" From 2c8a6ee7cc18cb8b3e29fa4c7efa74dde8458f4f Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 18 Sep 2024 23:29:34 +0200 Subject: [PATCH 200/270] remote_server: Remove dependency on libssl and libcrypto (#15446) Fixes: #15599 Release Notes: - N/A --------- Co-authored-by: Mikayla Co-authored-by: Conrad --- Cargo.lock | 176 ++++++++++++++--- Cargo.toml | 7 + crates/auto_update/Cargo.toml | 1 - crates/auto_update/src/auto_update.rs | 3 +- crates/client/Cargo.toml | 4 +- crates/client/src/client.rs | 42 ++-- crates/collab/Cargo.toml | 1 + crates/collab/src/llm.rs | 3 +- crates/collab/src/rpc.rs | 8 +- crates/evals/Cargo.toml | 1 + crates/evals/src/eval.rs | 22 ++- crates/extension/Cargo.toml | 1 + crates/extension/src/extension_builder.rs | 2 + 
crates/extension/src/extension_store.rs | 13 +- crates/extension/src/extension_store_test.rs | 24 ++- crates/extension_cli/Cargo.toml | 2 +- crates/extension_cli/src/main.rs | 10 +- crates/git_hosting_providers/Cargo.toml | 1 - .../src/providers/codeberg.rs | 10 +- .../src/providers/github.rs | 10 +- crates/gpui/Cargo.toml | 6 +- crates/gpui/examples/image/image.rs | 1 + crates/gpui/src/app.rs | 31 ++- crates/gpui/src/elements/img.rs | 7 +- crates/gpui/src/gpui.rs | 1 + crates/http_client/Cargo.toml | 5 +- crates/http_client/src/async_body.rs | 109 +++++++++++ crates/http_client/src/github.rs | 5 +- crates/http_client/src/http_client.rs | 179 +++++++++--------- crates/isahc_http_client/Cargo.toml | 22 +++ crates/isahc_http_client/LICENSE-APACHE | 1 + .../src/isahc_http_client.rs | 93 +++++++++ crates/ollama/Cargo.toml | 1 - crates/ollama/src/ollama.rs | 31 ++- crates/project/src/lsp_store.rs | 12 +- crates/semantic_index/Cargo.toml | 1 + crates/semantic_index/examples/index.rs | 7 +- crates/semantic_index/src/embedding/ollama.rs | 2 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 31 ++- script/bundle-linux | 9 +- 41 files changed, 670 insertions(+), 226 deletions(-) create mode 100644 crates/http_client/src/async_body.rs create mode 100644 crates/isahc_http_client/Cargo.toml create mode 120000 crates/isahc_http_client/LICENSE-APACHE create mode 100644 crates/isahc_http_client/src/isahc_http_client.rs diff --git a/Cargo.lock b/Cargo.lock index 0640aff19c..652c584fd5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -876,6 +876,20 @@ version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" +[[package]] +name = "async-tls" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" +dependencies = [ + "futures-core", + "futures-io", + "rustls 0.20.9", + 
"rustls-pemfile 1.0.4", + "webpki", + "webpki-roots 0.22.6", +] + [[package]] name = "async-trait" version = "0.1.81" @@ -893,8 +907,8 @@ version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" dependencies = [ - "async-native-tls", "async-std", + "async-tls", "futures-io", "futures-util", "log", @@ -981,7 +995,6 @@ dependencies = [ "editor", "gpui", "http_client", - "isahc", "log", "markdown_preview", "menu", @@ -1049,7 +1062,7 @@ dependencies = [ "fastrand 2.1.1", "hex", "http 0.2.12", - "ring", + "ring 0.17.8", "time", "tokio", "tracing", @@ -1218,7 +1231,7 @@ dependencies = [ "once_cell", "p256", "percent-encoding", - "ring", + "ring 0.17.8", "sha2", "subtle", "time", @@ -1331,7 +1344,7 @@ dependencies = [ "once_cell", "pin-project-lite", "pin-utils", - "rustls", + "rustls 0.21.12", "tokio", "tracing", ] @@ -2405,6 +2418,8 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", + "rustls 0.20.9", + "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2553,6 +2568,7 @@ dependencies = [ "http_client", "hyper", "indoc", + "isahc_http_client", "jsonwebtoken", "language", "language_model", @@ -4015,6 +4031,7 @@ dependencies = [ "git", "gpui", "http_client", + "isahc_http_client", "language", "languages", "node_runtime", @@ -4110,6 +4127,7 @@ dependencies = [ "http_client", "indexed_docs", "isahc", + "isahc_http_client", "language", "log", "lsp", @@ -4148,7 +4166,7 @@ dependencies = [ "env_logger", "extension", "fs", - "http_client", + "isahc_http_client", "language", "log", "rpc", @@ -4395,7 +4413,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin", + "spin 0.9.8", ] [[package]] @@ -4904,7 +4922,6 @@ dependencies = [ "git", "gpui", "http_client", - "isahc", "pretty_assertions", "regex", "serde", @@ -5537,12 +5554,11 @@ dependencies = [ "anyhow", "derive_more", "futures 0.3.30", - "futures-lite 1.13.0", - "http 1.1.0", - 
"isahc", + "http 0.2.12", "log", "serde", "serde_json", + "smol", "url", ] @@ -5604,8 +5620,8 @@ dependencies = [ "http 0.2.12", "hyper", "log", - "rustls", - "rustls-native-certs", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", "tokio", "tokio-rustls", ] @@ -6017,6 +6033,17 @@ dependencies = [ "waker-fn", ] +[[package]] +name = "isahc_http_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.30", + "http_client", + "isahc", + "util", +] + [[package]] name = "itertools" version = "0.10.5" @@ -6121,7 +6148,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring", + "ring 0.17.8", "serde", "serde_json", "simple_asn1", @@ -6372,7 +6399,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -7483,7 +7510,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -9175,7 +9201,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", @@ -9239,6 +9265,21 @@ dependencies = [ "util", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.8" @@ -9249,8 +9290,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin", - "untrusted", + "spin 0.9.8", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -9406,7 +9447,7 @@ dependencies = [ "futures 0.3.30", "glob", "rand 0.8.5", - "ring", + "ring 0.17.8", "serde", "serde_json", "shellexpand 3.1.0", @@ -9527,6 +9568,18 @@ dependencies = [ "rustix 0.38.35", ] +[[package]] 
+name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + [[package]] name = "rustls" version = "0.21.12" @@ -9534,7 +9587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring", + "ring 0.17.8", "rustls-webpki", "sct", ] @@ -9546,7 +9599,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.3", + "rustls-pki-types", "schannel", "security-framework", ] @@ -9560,14 +9626,30 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + [[package]] name = "rustls-webpki" version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9681,8 
+9763,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9878,6 +9960,7 @@ dependencies = [ "gpui", "heed", "http_client", + "isahc_http_client", "language", "language_model", "languages", @@ -10437,6 +10520,12 @@ dependencies = [ "smallvec", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -10559,8 +10648,8 @@ dependencies = [ "paste", "percent-encoding", "rust_decimal", - "rustls", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "sha2", @@ -10573,7 +10662,7 @@ dependencies = [ "tracing", "url", "uuid", - "webpki-roots", + "webpki-roots 0.25.4", ] [[package]] @@ -11705,7 +11794,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", "tokio", ] @@ -12232,7 +12321,6 @@ dependencies = [ "http 0.2.12", "httparse", "log", - "native-tls", "rand 0.8.5", "sha1", "thiserror", @@ -12417,6 +12505,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -13271,6 +13365,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + [[package]] name = "webpki-roots" version = "0.25.4" @@ -14305,6 +14418,7 @@ dependencies = [ "inline_completion_button", "install_cli", "isahc", + "isahc_http_client", "journal", "language", "language_model", diff --git a/Cargo.toml b/Cargo.toml index ec3138179b..2071fdcb6f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,6 +52,7 @@ members = [ "crates/indexed_docs", "crates/inline_completion_button", "crates/install_cli", + "crates/isahc_http_client", "crates/journal", "crates/language", "crates/language_model", @@ -173,6 +174,9 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] + + + # # Workspace member crates # @@ -212,6 +216,7 @@ file_icons = { path = "crates/file_icons" } fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } +isahc_http_client = { path = "crates/isahc_http_client" } git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } @@ -394,6 +399,8 @@ runtimelib = { version = "0.15", default-features = false, features = [ ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } +rustls = "0.20.3" +rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 12e669780d..1e08c9a768 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -19,7 +19,6 @@ db.workspace = true editor.workspace = 
true gpui.workspace = true http_client.workspace = true -isahc.workspace = true log.workspace = true markdown_preview.workspace = true menu.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index c0863e41d1..cfda6d6e58 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -9,7 +9,6 @@ use gpui::{ actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext, SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext, }; -use isahc::AsyncBody; use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; use schemars::JsonSchema; @@ -20,7 +19,7 @@ use smol::{fs, io::AsyncReadExt}; use settings::{Settings, SettingsSources, SettingsStore}; use smol::{fs::File, process::Command}; -use http_client::{HttpClient, HttpClientWithUrl}; +use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use std::{ env::{ diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 82237ebaa5..8ae4f15c97 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,7 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = "0.3" -async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] } +async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true collections.workspace = true @@ -35,6 +35,8 @@ postage.workspace = true rand.workspace = true release_channel.workspace = true rpc = { workspace = true, features = ["gpui"] } +rustls.workspace = true +rustls-native-certs.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/client/src/client.rs 
b/crates/client/src/client.rs index 6e1362c43e..09286300d9 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -240,8 +240,6 @@ pub enum EstablishConnectionError { #[error("{0}")] Other(#[from] anyhow::Error), #[error("{0}")] - Http(#[from] http_client::Error), - #[error("{0}")] InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue), #[error("{0}")] Io(#[from] std::io::Error), @@ -529,19 +527,13 @@ impl Client { } pub fn production(cx: &mut AppContext) -> Arc { - let user_agent = format!( - "Zed/{} ({}; {})", - AppVersion::global(cx), - std::env::consts::OS, - std::env::consts::ARCH - ); let clock = Arc::new(clock::RealSystemClock); - let http = Arc::new(HttpClientWithUrl::new( + let http = Arc::new(HttpClientWithUrl::new_uri( + cx.http_client(), &ClientSettings::get_global(cx).server_url, - Some(user_agent), - ProxySettings::get_global(cx).proxy.clone(), + cx.http_client().proxy().cloned(), )); - Self::new(clock, http.clone(), cx) + Self::new(clock, http, cx) } pub fn id(&self) -> u64 { @@ -1145,8 +1137,32 @@ impl Client { match url_scheme { Https => { + let client_config = { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates( + &root_certs + .certs + .into_iter() + .map(|cert| cert.as_ref().to_owned()) + .collect::>(), + ); + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth() + }; let (stream, _) = - async_tungstenite::async_std::client_async_tls(request, stream).await?; + async_tungstenite::async_tls::client_async_tls_with_connector( + request, + stream, + Some(client_config.into()), + ) + .await?; Ok(Connection::new( stream .map_err(|error| anyhow!(error)) diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 
f8ba847ab2..296809158d 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -36,6 +36,7 @@ envy = "0.4.2" futures.workspace = true google_ai.workspace = true hex.workspace = true +isahc_http_client.workspace = true http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index def4499ae4..53f0bfdfd0 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -22,7 +22,7 @@ use chrono::{DateTime, Duration, Utc}; use collections::HashMap; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; -use http_client::IsahcHttpClient; +use isahc_http_client::IsahcHttpClient; use rpc::ListModelsResponse; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, @@ -72,6 +72,7 @@ impl LlmState { let http_client = IsahcHttpClient::builder() .default_header("User-Agent", user_agent) .build() + .map(IsahcHttpClient::from) .context("failed to construct http client")?; let this = Self { diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 4146eafb87..b2a694027a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -35,6 +35,8 @@ use chrono::Utc; use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; +use http_client::HttpClient; +use isahc_http_client::IsahcHttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -45,7 +47,6 @@ use futures::{ stream::FuturesUnordered, FutureExt, SinkExt, StreamExt, TryStreamExt, }; -use http_client::IsahcHttpClient; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ @@ -139,7 +140,7 @@ struct Session { connection_pool: Arc>, app_state: Arc, supermaven_client: Option>, - http_client: Arc, + 
http_client: Arc, /// The GeoIP country code for the user. #[allow(unused)] geoip_country_code: Option, @@ -955,9 +956,10 @@ impl Server { tracing::info!("connection opened"); + let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { - Ok(http_client) => Arc::new(http_client), + Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); return; diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index e680e4f504..400ab139aa 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -24,6 +24,7 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true +isahc_http_client.workspace = true language.workspace = true languages.workspace = true http_client.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index d7e63fafbf..751dcd09aa 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -97,13 +97,14 @@ fn main() -> Result<()> { gpui::App::headless().run(move |cx| { let executor = cx.background_executor().clone(); - + let client = isahc_http_client::IsahcHttpClient::new(None, None); + cx.set_http_client(client.clone()); match cli.command { Commands::Fetch {} => { executor .clone() .spawn(async move { - if let Err(err) = fetch_evaluation_resources(&executor).await { + if let Err(err) = fetch_evaluation_resources(client, &executor).await { eprintln!("Error: {}", err); exit(1); } @@ -127,10 +128,12 @@ fn main() -> Result<()> { Ok(()) } -async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> { - let http_client = http_client::HttpClientWithProxy::new(None, None); - fetch_code_search_net_resources(&http_client).await?; - fetch_eval_repos(executor, &http_client).await?; +async fn fetch_evaluation_resources( + http_client: Arc, + executor: 
&BackgroundExecutor, +) -> Result<()> { + fetch_code_search_net_resources(&*http_client).await?; + fetch_eval_repos(executor, &*http_client).await?; Ok(()) } @@ -239,6 +242,7 @@ async fn run_evaluation( executor: &BackgroundExecutor, cx: &mut AsyncAppContext, ) -> Result<()> { + let mut http_client = None; cx.update(|cx| { let mut store = SettingsStore::new(cx); store @@ -248,15 +252,15 @@ async fn run_evaluation( client::init_settings(cx); language::init(cx); Project::init_settings(cx); + http_client = Some(cx.http_client()); cx.update_flags(false, vec![]); }) .unwrap(); - + let http_client = http_client.unwrap(); let dataset_dir = Path::new(CODESEARCH_NET_DIR); let evaluations_path = dataset_dir.join("evaluations.json"); let repos_dir = Path::new(EVAL_REPOS_DIR); let db_path = Path::new(EVAL_DB_PATH); - let http_client = http_client::HttpClientWithProxy::new(None, None); let api_key = std::env::var("OPENAI_API_KEY").unwrap(); let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc; @@ -266,9 +270,9 @@ async fn run_evaluation( Client::new( clock, Arc::new(http_client::HttpClientWithUrl::new( + http_client.clone(), "https://zed.dev", None, - None, )), cx, ) diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 0371b1866d..edf6184d38 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -57,6 +57,7 @@ task.workspace = true serde_json_lenient.workspace = true [dev-dependencies] +isahc_http_client.workspace = true ctor.workspace = true env_logger.workspace = true parking_lot.workspace = true diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index e42929f78e..7380e699f9 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -246,6 +246,7 @@ impl ExtensionBuilder { .args(scanner_path.exists().then_some(scanner_path)) .output() 
.context("failed to run clang")?; + if !clang_output.status.success() { bail!( "failed to compile {} parser with clang: {}", @@ -431,6 +432,7 @@ impl ExtensionBuilder { let body = BufReader::new(response.body_mut()); let body = GzipDecoder::new(body); let tar = Archive::new(body); + tar.unpack(&tar_out_dir) .await .context("failed to unpack wasi-sdk archive")?; diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 3ebc4f20d3..bd416f4029 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -190,6 +190,7 @@ pub fn init( None, fs, client.http_client().clone(), + client.http_client().clone(), Some(client.telemetry().clone()), node_runtime, language_registry, @@ -225,6 +226,7 @@ impl ExtensionStore { build_dir: Option, fs: Arc, http_client: Arc, + builder_client: Arc, telemetry: Option>, node_runtime: Arc, language_registry: Arc, @@ -244,12 +246,7 @@ impl ExtensionStore { extension_index: Default::default(), installed_dir, index_path, - builder: Arc::new(ExtensionBuilder::new( - // Construct a real HTTP client for the extension builder, as we - // don't want to use a fake one in the tests. 
- ::http_client::client(None, http_client.proxy().cloned()), - build_dir, - )), + builder: Arc::new(ExtensionBuilder::new(builder_client, build_dir)), outstanding_operations: Default::default(), modified_extensions: Default::default(), reload_complete_senders: Vec::new(), @@ -830,7 +827,6 @@ impl ExtensionStore { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_source_path).await?; let extension_id = extension_manifest.id.clone(); - if !this.update(&mut cx, |this, cx| { match this.outstanding_operations.entry(extension_id.clone()) { btree_map::Entry::Occupied(_) => return false, @@ -854,7 +850,6 @@ impl ExtensionStore { .ok(); } }); - cx.background_executor() .spawn({ let extension_source_path = extension_source_path.clone(); @@ -885,10 +880,8 @@ impl ExtensionStore { bail!("extension {extension_id} is already installed"); } } - fs.create_symlink(output_path, extension_source_path) .await?; - this.update(&mut cx, |this, cx| this.reload(None, cx))? .await; Ok(()) diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 326c713bd5..0fbd00e0b4 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -13,10 +13,12 @@ use futures::{io::BufReader, AsyncReadExt, StreamExt}; use gpui::{Context, SemanticVersion, TestAppContext}; use http_client::{FakeHttpClient, Response}; use indexed_docs::IndexedDocsRegistry; +use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; use node_runtime::FakeNodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; +use release_channel::AppVersion; use serde_json::json; use settings::{Settings as _, SettingsStore}; use snippet_provider::SnippetRegistry; @@ -270,6 +272,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { None, fs.clone(), http_client.clone(), + http_client.clone(), None, 
node_runtime.clone(), language_registry.clone(), @@ -397,6 +400,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { None, fs.clone(), http_client.clone(), + http_client.clone(), None, node_runtime.clone(), language_registry.clone(), @@ -453,6 +457,8 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } +// TODO remove +#[ignore] #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); @@ -502,7 +508,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { http_request_count: 0, })); - let http_client = FakeHttpClient::create({ + let extension_client = FakeHttpClient::create({ let language_server_version = language_server_version.clone(); move |request| { let language_server_version = language_server_version.clone(); @@ -558,19 +564,33 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); Ok(Response::new(gzipped_bytes.into())) + // } else if uri == WASI_ADAPTER_URL { + // let binary_contents = + // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice(); + // Ok(Response::new(binary_contents.into())) } else { Ok(Response::builder().status(404).body("not found".into())?) 
} } } }); + let user_agent = cx.update(|cx| { + format!( + "Zed/{} ({}; {})", + AppVersion::global(cx), + std::env::consts::OS, + std::env::consts::ARCH + ) + }); + let builder_client = IsahcHttpClient::new(None, Some(user_agent)); let extension_store = cx.new_model(|cx| { ExtensionStore::new( extensions_dir.clone(), Some(cache_dir), fs.clone(), - http_client.clone(), + extension_client.clone(), + builder_client, None, node_runtime, language_registry.clone(), diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 54c47f4a82..bc649d8e04 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -http_client.workspace = true +isahc_http_client.workspace = true language.workspace = true log.workspace = true rpc.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 029c560e57..6eaebca2f0 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -7,13 +7,13 @@ use std::{ }; use ::fs::{copy_recursive, CopyOptions, Fs, RealFs}; -use ::http_client::HttpClientWithProxy; use anyhow::{anyhow, bail, Context, Result}; use clap::Parser; use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, ExtensionManifest, }; +use isahc_http_client::IsahcHttpClient; use language::LanguageConfig; use theme::ThemeRegistry; use tree_sitter::{Language, Query, WasmStore}; @@ -66,7 +66,13 @@ async fn main() -> Result<()> { std::env::consts::OS, std::env::consts::ARCH ); - let http_client = Arc::new(HttpClientWithProxy::new(Some(user_agent), None)); + let http_client = Arc::new( + IsahcHttpClient::builder() + .default_header("User-Agent", user_agent) + .build() + .map(IsahcHttpClient::from)?, + ); + let builder = ExtensionBuilder::new(http_client, scratch_dir); builder 
.compile_extension( diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index caca91c1ab..b8ad1ed05d 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -18,7 +18,6 @@ futures.workspace = true git.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true regex.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index b34d809100..eaadca1ecf 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -3,9 +3,7 @@ use std::sync::Arc; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::HttpClient; -use isahc::config::Configurable; -use isahc::{AsyncBody, Request}; +use http_client::{AsyncBody, HttpClient, Request}; use serde::Deserialize; use url::Url; @@ -51,16 +49,14 @@ impl Codeberg { let url = format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}"); - let mut request = Request::get(&url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json"); + let mut request = Request::get(&url).header("Content-Type", "application/json"); if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") { request = request.header("Authorization", format!("Bearer {}", codeberg_token)); } let mut response = client - .send(request.body(AsyncBody::default())?) 
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true) .await .with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?; diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 103f6ae1ce..be46b51ddf 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -3,9 +3,7 @@ use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::HttpClient; -use isahc::config::Configurable; -use isahc::{AsyncBody, Request}; +use http_client::{AsyncBody, HttpClient, Request}; use regex::Regex; use serde::Deserialize; use url::Url; @@ -55,16 +53,14 @@ impl Github { ) -> Result> { let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}"); - let mut request = Request::get(&url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json"); + let mut request = Request::get(&url).header("Content-Type", "application/json"); if let Ok(github_token) = std::env::var("GITHUB_TOKEN") { request = request.header("Authorization", format!("Bearer {}", github_token)); } let mut response = client - .send(request.body(AsyncBody::default())?) 
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true) .await .with_context(|| format!("error fetching GitHub commit details at {:?}", url))?; diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index d0d75b73e9..e2339a38ed 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -11,13 +11,13 @@ license = "Apache-2.0" workspace = true [features] -default = [] +default = ["http_client"] test-support = [ "backtrace", "collections/test-support", "rand", "util/test-support", - "http_client/test-support", + "http_client?/test-support", ] runtime_shaders = [] macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck"] @@ -40,7 +40,7 @@ derive_more.workspace = true etagere = "0.2" futures.workspace = true gpui_macros.workspace = true -http_client.workspace = true +http_client = { optional = true, workspace = true } image = "0.25.1" itertools.workspace = true linkme = "0.3" diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index ac7af186d3..157dbdf70f 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -131,6 +131,7 @@ fn main() { PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(), ), remote_resource: "https://picsum.photos/512/512".into(), + asset_resource: "image/color.svg".into(), }) }) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ee7a6ef191..6cb491b100 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -117,7 +117,7 @@ impl App { Self(AppContext::new( current_platform(false), Arc::new(()), - http_client::client(None, None), + Arc::new(NullHttpClient), )) } @@ -128,7 +128,7 @@ impl App { Self(AppContext::new( current_platform(true), Arc::new(()), - http_client::client(None, None), + Arc::new(NullHttpClient), )) } @@ -142,6 +142,14 @@ impl App { self } + /// Set the http client for the application + pub fn with_http_client(self, http_client: Arc) -> Self { + let mut context_lock = 
self.0.borrow_mut(); + context_lock.http_client = http_client; + drop(context_lock); + self + } + /// Start the application. The provided callback will be called once the /// app is fully launched. pub fn run(self, on_finish_launching: F) @@ -1512,3 +1520,22 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } + +struct NullHttpClient; + +impl HttpClient for NullHttpClient { + fn send_with_redirect_policy( + &self, + _req: http_client::Request, + _follow_redirects: bool, + ) -> futures::future::BoxFuture< + 'static, + Result, anyhow::Error>, + > { + async move { Err(anyhow!("No HttpClient available")) }.boxed() + } + + fn proxy(&self) -> Option<&http_client::Uri> { + None + } +} diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index f1e8bb68e3..63236d5309 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -345,7 +345,10 @@ impl Asset for ImageAsset { let bytes = match source.clone() { UriOrPath::Path(uri) => fs::read(uri.as_ref())?, UriOrPath::Uri(uri) => { - let mut response = client.get(uri.as_ref(), ().into(), true).await?; + let mut response = client + .get(uri.as_ref(), ().into(), true) + .await + .map_err(|e| ImageCacheError::Client(Arc::new(e)))?; let mut body = Vec::new(); response.body_mut().read_to_end(&mut body).await?; if !response.status().is_success() { @@ -429,7 +432,7 @@ impl Asset for ImageAsset { pub enum ImageCacheError { /// An error that occurred while fetching an image from a remote source. #[error("http error: {0}")] - Client(#[from] http_client::Error), + Client(#[from] Arc), /// An error that occurred while reading the image from disk. 
#[error("IO error: {0}")] Io(Arc), diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index a447478a9b..7ba3ce055e 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -128,6 +128,7 @@ pub use executor::*; pub use geometry::*; pub use global::*; pub use gpui_macros::{register_action, test, IntoElement, Render}; +pub use http_client; pub use input::*; pub use interactive::*; use key_dispatch::*; diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index ae017685a9..0244ac4104 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,13 +16,12 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "1.0.0" +http = "0.2" anyhow.workspace = true derive_more.workspace = true futures.workspace = true -isahc.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true -futures-lite.workspace = true +smol.workspace = true url.workspace = true diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs new file mode 100644 index 0000000000..e2544f60fe --- /dev/null +++ b/crates/http_client/src/async_body.rs @@ -0,0 +1,109 @@ +use std::{borrow::Cow, io::Read, pin::Pin, task::Poll}; + +use futures::{AsyncRead, AsyncReadExt}; + +/// Based on the implementation of AsyncBody in +/// https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs +pub struct AsyncBody(pub Inner); + +pub enum Inner { + /// An empty body. + Empty, + + /// A body stored in memory. + SyncReader(std::io::Cursor>), + + /// An asynchronous reader. + AsyncReader(Pin>), +} + +impl AsyncBody { + /// Create a new empty body. + /// + /// An empty body represents the *absence* of a body, which is semantically + /// different than the presence of a body of zero length. + pub fn empty() -> Self { + Self(Inner::Empty) + } + /// Create a streaming body that reads from the given reader. 
+ pub fn from_reader(read: R) -> Self + where + R: AsyncRead + Send + Sync + 'static, + { + Self(Inner::AsyncReader(Box::pin(read))) + } +} + +impl Default for AsyncBody { + fn default() -> Self { + Self(Inner::Empty) + } +} + +impl From<()> for AsyncBody { + fn from(_: ()) -> Self { + Self(Inner::Empty) + } +} + +impl From> for AsyncBody { + fn from(body: Vec) -> Self { + Self(Inner::SyncReader(std::io::Cursor::new(Cow::Owned(body)))) + } +} + +impl From<&'_ [u8]> for AsyncBody { + fn from(body: &[u8]) -> Self { + body.to_vec().into() + } +} + +impl From for AsyncBody { + fn from(body: String) -> Self { + body.into_bytes().into() + } +} + +impl From<&'_ str> for AsyncBody { + fn from(body: &str) -> Self { + body.as_bytes().into() + } +} + +impl> From> for AsyncBody { + fn from(body: Option) -> Self { + match body { + Some(body) => body.into(), + None => Self(Inner::Empty), + } + } +} + +impl std::io::Read for AsyncBody { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + match &mut self.0 { + Inner::Empty => Ok(0), + Inner::SyncReader(cursor) => cursor.read(buf), + Inner::AsyncReader(async_reader) => smol::block_on(async_reader.read(buf)), + } + } +} + +impl futures::AsyncRead for AsyncBody { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut [u8], + ) -> std::task::Poll> { + // SAFETY: Standard Enum pin projection + let inner = unsafe { &mut self.get_unchecked_mut().0 }; + match inner { + Inner::Empty => Poll::Ready(Ok(0)), + // Blocking call is over an in-memory buffer + Inner::SyncReader(cursor) => Poll::Ready(cursor.read(buf)), + Inner::AsyncReader(async_reader) => { + AsyncRead::poll_read(async_reader.as_mut(), cx, buf) + } + } + } +} diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs index a64a5bae5c..70587fa3ce 100644 --- a/crates/http_client/src/github.rs +++ b/crates/http_client/src/github.rs @@ -34,7 +34,7 @@ pub async fn latest_github_release( ) -> Result { let mut response = 
http .get( - &format!("https://api.github.com/repos/{repo_name_with_owner}/releases"), + format!("https://api.github.com/repos/{repo_name_with_owner}/releases").as_str(), Default::default(), true, ) @@ -91,13 +91,14 @@ pub async fn get_release_by_tag_name( .context("error fetching latest release")?; let mut body = Vec::new(); + let status = response.status(); response .body_mut() .read_to_end(&mut body) .await .context("error reading latest release")?; - if response.status().is_client_error() { + if status.is_client_error() { let text = String::from_utf8_lossy(body.as_slice()); bail!( "status error {}, response: {text:?}", diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 7ea0029d79..d78b2dd23c 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -1,47 +1,48 @@ +mod async_body; pub mod github; pub use anyhow::{anyhow, Result}; +pub use async_body::{AsyncBody, Inner}; use derive_more::Deref; +pub use http::{self, Method, Request, Response, StatusCode, Uri}; + use futures::future::BoxFuture; -use futures_lite::FutureExt; -use isahc::config::{Configurable, RedirectPolicy}; -pub use isahc::http; -pub use isahc::{ - http::{Method, StatusCode, Uri}, - AsyncBody, Error, HttpClient as IsahcHttpClient, Request, Response, -}; +use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; -use std::{ - sync::{Arc, Mutex}, - time::Duration, -}; +use std::sync::{Arc, Mutex}; pub use url::Url; -pub trait HttpClient: Send + Sync { +pub trait HttpClient: 'static + Send + Sync { fn send( + &self, + req: http::Request, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send_with_redirect_policy(req, false) + } + + // TODO: Make a better API for this + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>>; + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>>; fn get<'a>( &'a self, uri: &str, body: AsyncBody, 
follow_redirects: bool, - ) -> BoxFuture<'a, Result, Error>> { - let request = isahc::Request::builder() - .redirect_policy(if follow_redirects { - RedirectPolicy::Follow - } else { - RedirectPolicy::None - }) - .method(Method::GET) - .uri(uri) - .body(body); + ) -> BoxFuture<'a, Result, anyhow::Error>> { + let request = Builder::new().uri(uri).body(body); + match request { - Ok(request) => self.send(request), - Err(error) => async move { Err(error.into()) }.boxed(), + Ok(request) => Box::pin(async move { + self.send_with_redirect_policy(request, follow_redirects) + .await + .map_err(Into::into) + }), + Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -49,15 +50,16 @@ pub trait HttpClient: Send + Sync { &'a self, uri: &str, body: AsyncBody, - ) -> BoxFuture<'a, Result, Error>> { - let request = isahc::Request::builder() - .method(Method::POST) + ) -> BoxFuture<'a, Result, anyhow::Error>> { + let request = Builder::new() .uri(uri) + .method(Method::POST) .header("Content-Type", "application/json") .body(body); + match request { - Ok(request) => self.send(request), - Err(error) => async move { Err(error.into()) }.boxed(), + Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }), + Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -74,29 +76,28 @@ pub struct HttpClientWithProxy { impl HttpClientWithProxy { /// Returns a new [`HttpClientWithProxy`] with the given proxy URL. 
- pub fn new(user_agent: Option, proxy_url: Option) -> Self { - let proxy_url = proxy_url - .and_then(|input| { - input - .parse::() - .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e)) - .ok() - }) + pub fn new(client: Arc, proxy_url: Option) -> Self { + let proxy_uri = proxy_url + .and_then(|proxy| proxy.parse().ok()) .or_else(read_proxy_from_env); + Self::new_uri(client, proxy_uri) + } + pub fn new_uri(client: Arc, proxy_uri: Option) -> Self { Self { - client: client(user_agent, proxy_url.clone()), - proxy: proxy_url, + client, + proxy: proxy_uri, } } } impl HttpClient for HttpClientWithProxy { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -105,11 +106,12 @@ impl HttpClient for HttpClientWithProxy { } impl HttpClient for Arc { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -123,14 +125,35 @@ pub struct HttpClientWithUrl { client: HttpClientWithProxy, } +impl std::ops::Deref for HttpClientWithUrl { + type Target = HttpClientWithProxy; + + fn deref(&self) -> &Self::Target { + &self.client + } +} + impl HttpClientWithUrl { /// Returns a new [`HttpClientWithUrl`] with the given base URL. 
pub fn new( + client: Arc, base_url: impl Into, - user_agent: Option, proxy_url: Option, ) -> Self { - let client = HttpClientWithProxy::new(user_agent, proxy_url); + let client = HttpClientWithProxy::new(client, proxy_url); + + Self { + base_url: Mutex::new(base_url.into()), + client, + } + } + + pub fn new_uri( + client: Arc, + base_url: impl Into, + proxy_uri: Option, + ) -> Self { + let client = HttpClientWithProxy::new_uri(client, proxy_uri); Self { base_url: Mutex::new(base_url.into()), @@ -195,11 +218,12 @@ impl HttpClientWithUrl { } impl HttpClient for Arc { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -208,11 +232,12 @@ impl HttpClient for Arc { } impl HttpClient for HttpClientWithUrl { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -220,26 +245,7 @@ impl HttpClient for HttpClientWithUrl { } } -pub fn client(user_agent: Option, proxy: Option) -> Arc { - let mut builder = isahc::HttpClient::builder() - // Some requests to Qwen2 models on Runpod can take 32+ seconds, - // especially if there's a cold boot involved. We may need to have - // those requests use a different http client, because global timeouts - // of 50 and 60 seconds, respectively, would be very high! 
- .connect_timeout(Duration::from_secs(5)) - .low_speed_timeout(100, Duration::from_secs(30)) - .proxy(proxy.clone()); - if let Some(user_agent) = user_agent { - builder = builder.default_header("User-Agent", user_agent); - } - - Arc::new(HttpClientWithProxy { - client: Arc::new(builder.build().unwrap()), - proxy, - }) -} - -fn read_proxy_from_env() -> Option { +pub fn read_proxy_from_env() -> Option { const ENV_VARS: &[&str] = &[ "ALL_PROXY", "all_proxy", @@ -258,23 +264,9 @@ fn read_proxy_from_env() -> Option { None } -impl HttpClient for isahc::HttpClient { - fn send( - &self, - req: Request, - ) -> BoxFuture<'static, Result, Error>> { - let client = self.clone(); - Box::pin(async move { client.send_async(req).await }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } -} - #[cfg(feature = "test-support")] type FakeHttpHandler = Box< - dyn Fn(Request) -> BoxFuture<'static, Result, Error>> + dyn Fn(Request) -> BoxFuture<'static, Result, anyhow::Error>> + Send + Sync + 'static, @@ -289,7 +281,7 @@ pub struct FakeHttpClient { impl FakeHttpClient { pub fn create(handler: F) -> Arc where - Fut: futures::Future, Error>> + Send + 'static, + Fut: futures::Future, anyhow::Error>> + Send + 'static, F: Fn(Request) -> Fut + Send + Sync + 'static, { Arc::new(HttpClientWithUrl { @@ -331,12 +323,13 @@ impl fmt::Debug for FakeHttpClient { #[cfg(feature = "test-support")] impl HttpClient for FakeHttpClient { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { + _follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { let future = (self.handler)(req); - Box::pin(async move { future.await.map(Into::into) }) + future } fn proxy(&self) -> Option<&Uri> { diff --git a/crates/isahc_http_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml new file mode 100644 index 0000000000..b90163ef74 --- /dev/null +++ b/crates/isahc_http_client/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "isahc_http_client" +version 
= "0.1.0" +edition = "2021" +publish = false +license = "Apache-2.0" + +[lints] +workspace = true + +[features] +test-support = [] + +[lib] +path = "src/isahc_http_client.rs" + +[dependencies] +http_client.workspace = true +isahc.workspace = true +futures.workspace = true +anyhow.workspace = true +util.workspace = true diff --git a/crates/isahc_http_client/LICENSE-APACHE b/crates/isahc_http_client/LICENSE-APACHE new file mode 120000 index 0000000000..1cd601d0a3 --- /dev/null +++ b/crates/isahc_http_client/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs new file mode 100644 index 0000000000..6c40b9f53b --- /dev/null +++ b/crates/isahc_http_client/src/isahc_http_client.rs @@ -0,0 +1,93 @@ +use std::{mem, sync::Arc, time::Duration}; + +use futures::future::BoxFuture; +use isahc::config::RedirectPolicy; +use util::maybe; + +pub use isahc::config::Configurable; +pub struct IsahcHttpClient(isahc::HttpClient); + +pub use http_client::*; + +impl IsahcHttpClient { + pub fn new(proxy: Option, user_agent: Option) -> Arc { + let mut builder = isahc::HttpClient::builder() + .connect_timeout(Duration::from_secs(5)) + .low_speed_timeout(100, Duration::from_secs(5)) + .proxy(proxy.clone()); + if let Some(agent) = user_agent { + builder = builder.default_header("User-Agent", agent); + } + Arc::new(IsahcHttpClient(builder.build().unwrap())) + } + pub fn builder() -> isahc::HttpClientBuilder { + isahc::HttpClientBuilder::new() + } +} + +impl From for IsahcHttpClient { + fn from(client: isahc::HttpClient) -> Self { + Self(client) + } +} + +impl HttpClient for IsahcHttpClient { + fn proxy(&self) -> Option<&Uri> { + None + } + + fn send_with_redirect_policy( + &self, + req: http_client::http::Request, + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> + { + let req = maybe!({ + let (mut parts, body) = req.into_parts(); 
+ let mut builder = isahc::Request::builder() + .method(parts.method) + .uri(parts.uri) + .version(parts.version); + + let headers = builder.headers_mut()?; + mem::swap(headers, &mut parts.headers); + + let extensions = builder.extensions_mut()?; + mem::swap(extensions, &mut parts.extensions); + + let isahc_body = match body.0 { + http_client::Inner::Empty => isahc::AsyncBody::empty(), + http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader), + http_client::Inner::SyncReader(reader) => { + isahc::AsyncBody::from_bytes_static(reader.into_inner()) + } + }; + + builder + .redirect_policy(if follow_redirects { + RedirectPolicy::Follow + } else { + RedirectPolicy::None + }) + .body(isahc_body) + .ok() + }); + + let client = self.0.clone(); + + Box::pin(async move { + match req { + Some(req) => client + .send_async(req) + .await + .map_err(Into::into) + .map(|response| { + let (parts, body) = response.into_parts(); + let body = http_client::AsyncBody::from_reader(body); + http_client::Response::from_parts(parts, body) + }), + None => Err(anyhow::anyhow!("Request was malformed")), + } + }) + } +} diff --git a/crates/ollama/Cargo.toml b/crates/ollama/Cargo.toml index 76a8b1a8c1..34d8802b97 100644 --- a/crates/ollama/Cargo.toml +++ b/crates/ollama/Cargo.toml @@ -19,7 +19,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index a65d6eaf90..972520e61f 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -1,7 +1,6 @@ use anyhow::{anyhow, Context, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{http, 
AsyncBody, HttpClient, Method, Request as HttpRequest}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::{value::RawValue, Value}; @@ -262,18 +261,14 @@ pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, request: ChatRequest, - low_speed_timeout: Option, + _: Option, ) -> Result>> { let uri = format!("{api_url}/api/chat"); - let mut request_builder = HttpRequest::builder() + let request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); - if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); - }; - let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { @@ -305,18 +300,14 @@ pub async fn stream_chat_completion( pub async fn get_models( client: &dyn HttpClient, api_url: &str, - low_speed_timeout: Option, + _: Option, ) -> Result> { let uri = format!("{api_url}/api/tags"); - let mut request_builder = HttpRequest::builder() + let request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) .header("Accept", "application/json"); - if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); - }; - let request = request_builder.body(AsyncBody::default())?; let mut response = client.send(request).await?; @@ -354,13 +345,13 @@ pub async fn preload_model(client: Arc, api_url: &str, model: &s let mut response = match client.send(request).await { Ok(response) => response, - Err(err) => { + Err(error) => { // Be ok with a timeout during preload of the model - if err.is_timeout() { - return Ok(()); - } else { - return Err(err.into()); - } + // if err.is_timeout() { + // return Ok(()); + // } else { + return Err(error); + //} } }; diff --git a/crates/project/src/lsp_store.rs 
b/crates/project/src/lsp_store.rs index 6dd528147b..58d9ba8926 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -26,7 +26,7 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, }; -use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; +use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, @@ -7339,7 +7339,7 @@ impl HttpClient for BlockedHttpClient { fn send( &self, _req: Request, - ) -> BoxFuture<'static, Result, Error>> { + ) -> BoxFuture<'static, Result, anyhow::Error>> { Box::pin(async { Err(std::io::Error::new( std::io::ErrorKind::PermissionDenied, @@ -7352,6 +7352,14 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } + + fn send_with_redirect_policy( + &self, + req: Request, + _: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send(req) + } } struct SshLspAdapterDelegate { diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index c8dbb6a9f5..691d6e57f6 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -51,6 +51,7 @@ workspace.workspace = true worktree.workspace = true [dev-dependencies] +isahc_http_client.workspace = true env_logger.workspace = true client = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 977473d1dc..0cc3f9f317 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -2,6 +2,7 @@ use client::Client; use futures::channel::oneshot; use gpui::App; use http_client::HttpClientWithUrl; +use isahc_http_client::IsahcHttpClient; use language::language_settings::AllLanguageSettings; 
use project::Project; use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; @@ -26,8 +27,12 @@ fn main() { }); let clock = Arc::new(FakeSystemClock::default()); - let http = Arc::new(HttpClientWithUrl::new("http://localhost:11434", None, None)); + let http = Arc::new(HttpClientWithUrl::new( + IsahcHttpClient::new(None, None), + "http://localhost:11434", + None, + )); let client = client::Client::new(clock, http.clone(), cx); Client::set_global(client.clone(), cx); diff --git a/crates/semantic_index/src/embedding/ollama.rs b/crates/semantic_index/src/embedding/ollama.rs index 09d33c584a..6d3fa67902 100644 --- a/crates/semantic_index/src/embedding/ollama.rs +++ b/crates/semantic_index/src/embedding/ollama.rs @@ -1,5 +1,5 @@ use anyhow::{Context as _, Result}; -use futures::{future::BoxFuture, AsyncReadExt, FutureExt}; +use futures::{future::BoxFuture, AsyncReadExt as _, FutureExt}; use http_client::HttpClient; use serde::{Deserialize, Serialize}; use std::sync::Arc; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 28d2c7f825..7fa9602a14 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -47,6 +47,7 @@ file_finder.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true +isahc_http_client.workspace = true git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index eb6d2853fd..d3a722ec65 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -11,7 +11,7 @@ use assistant::PromptBuilder; use chrono::Offset; use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{parse_zed_link, Client, DevServerToken, UserStore}; +use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; use db::kvp::KEY_VALUE_STORE; use editor::Editor; @@ -23,6 +23,8 @@ use gpui::{ Action, App, AppContext, 
AsyncAppContext, Context, DismissEvent, Global, Task, UpdateGlobal as _, VisualContext, }; +use http_client::{read_proxy_from_env, Uri}; +use isahc_http_client::IsahcHttpClient; use language::LanguageRegistry; use log::LevelFilter; @@ -327,7 +329,10 @@ fn main() { init_logger(); log::info!("========== starting zed =========="); - let app = App::new().with_assets(Assets); + + let app = App::new() + .with_assets(Assets) + .with_http_client(IsahcHttpClient::new(None, None)); let (installation_id, existing_installation_id_found) = app .background_executor() @@ -436,6 +441,26 @@ fn main() { if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx); } + settings::init(cx); + client::init_settings(cx); + let user_agent = format!( + "Zed/{} ({}; {})", + AppVersion::global(cx), + std::env::consts::OS, + std::env::consts::ARCH + ); + let proxy_str = ProxySettings::get_global(cx).proxy.to_owned(); + let proxy_url = proxy_str + .as_ref() + .and_then(|input| { + input + .parse::() + .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e)) + .ok() + }) + .or_else(read_proxy_from_env); + let http = IsahcHttpClient::new(proxy_url, Some(user_agent)); + cx.set_http_client(http); ::set_global(fs.clone(), cx); @@ -444,11 +469,9 @@ fn main() { OpenListener::set_global(cx, open_listener.clone()); - settings::init(cx); handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); - client::init_settings(cx); let client = Client::production(cx); cx.set_http_client(client.http_client().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); diff --git a/script/bundle-linux b/script/bundle-linux index 029d748f4f..deecd0984b 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -43,7 +43,10 @@ script/generate-licenses # Build binary in release mode export RUSTFLAGS="${RUSTFLAGS:-} -C 
link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" -cargo build --release --target "${target_triple}" --package zed --package cli --package remote_server +cargo build --release --target "${target_triple}" --package zed --package cli +# Build remote_server in separate invocation to prevent feature unification from other crates +# from influencing dynamic libraries required by it. +cargo build --release --target "${target_triple}" --package remote_server # Strip the binary of all debug symbols # Later, we probably want to do something like this: https://github.com/GabrielMajeri/separate-symbols @@ -51,6 +54,10 @@ strip --strip-debug "${target_dir}/${target_triple}/release/zed" strip --strip-debug "${target_dir}/${target_triple}/release/cli" strip --strip-debug "${target_dir}/${target_triple}/release/remote_server" + +# Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. +ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' + suffix="" if [ "$channel" != "stable" ]; then suffix="-$channel" From a62e8f6396bf41176ddd00cbc705b699d71fe6cf Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 18:05:30 -0400 Subject: [PATCH 201/270] ci: Explicitly set cache-provider for swatinem/rust-cache (#18034) - Switches the Cache Dependencies step (`swatinem/rust-cache`) of Linux tests to use buildjet as `cache-provider`. Explicitly add 'github' (the default cache provider) to other uses of `swatinem/rust-cache` for consistency. 
Release Notes: - N/A --- .github/workflows/ci.yml | 2 ++ .github/workflows/publish_extension_cli.yml | 1 + 2 files changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c55a3a9907..f059b47004 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -115,6 +115,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "buildjet" - name: Install Linux dependencies run: ./script/linux @@ -143,6 +144,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: cargo clippy # Windows can't run shell scripts, so we need to use `cargo xtask`. diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index 698a09ad00..7c47ec5ded 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -24,6 +24,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: Configure linux shell: bash -euxo pipefail {0} From 2cd9a88f53954051f639b120940c06d7bebcf250 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 19:39:15 -0400 Subject: [PATCH 202/270] Clean up after `isahc_http_client` introduction (#18045) This PR does some clean up after #15446. 
Release Notes: - N/A --- Cargo.toml | 5 +---- crates/collab/Cargo.toml | 2 +- crates/collab/src/rpc.rs | 1 - crates/extension/src/extension_store.rs | 4 ++++ crates/isahc_http_client/Cargo.toml | 4 ++-- crates/zed/Cargo.toml | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2071fdcb6f..c72fec020f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -174,9 +174,6 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] - - - # # Workspace member crates # @@ -216,7 +213,6 @@ file_icons = { path = "crates/file_icons" } fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } -isahc_http_client = { path = "crates/isahc_http_client" } git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } @@ -231,6 +227,7 @@ image_viewer = { path = "crates/image_viewer" } indexed_docs = { path = "crates/indexed_docs" } inline_completion_button = { path = "crates/inline_completion_button" } install_cli = { path = "crates/install_cli" } +isahc_http_client = { path = "crates/isahc_http_client" } journal = { path = "crates/journal" } language = { path = "crates/language" } language_model = { path = "crates/language_model" } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 296809158d..ad43d2d1f0 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -36,8 +36,8 @@ envy = "0.4.2" futures.workspace = true google_ai.workspace = true hex.workspace = true -isahc_http_client.workspace = true http_client.workspace = true +isahc_http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index b2a694027a..bc0f827e78 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -956,7 +956,6 @@ impl Server { tracing::info!("connection opened"); - let 
user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index bd416f4029..8dbd618a25 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -827,6 +827,7 @@ impl ExtensionStore { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_source_path).await?; let extension_id = extension_manifest.id.clone(); + if !this.update(&mut cx, |this, cx| { match this.outstanding_operations.entry(extension_id.clone()) { btree_map::Entry::Occupied(_) => return false, @@ -850,6 +851,7 @@ impl ExtensionStore { .ok(); } }); + cx.background_executor() .spawn({ let extension_source_path = extension_source_path.clone(); @@ -880,8 +882,10 @@ impl ExtensionStore { bail!("extension {extension_id} is already installed"); } } + fs.create_symlink(output_path, extension_source_path) .await?; + this.update(&mut cx, |this, cx| this.reload(None, cx))? 
.await; Ok(()) diff --git a/crates/isahc_http_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml index b90163ef74..82f7621bf8 100644 --- a/crates/isahc_http_client/Cargo.toml +++ b/crates/isahc_http_client/Cargo.toml @@ -15,8 +15,8 @@ test-support = [] path = "src/isahc_http_client.rs" [dependencies] +anyhow.workspace = true +futures.workspace = true http_client.workspace = true isahc.workspace = true -futures.workspace = true -anyhow.workspace = true util.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 7fa9602a14..645d12fc76 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -47,7 +47,6 @@ file_finder.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true -isahc_http_client.workspace = true git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true @@ -58,6 +57,7 @@ image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true isahc.workspace = true +isahc_http_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true From 106ca5076fd8d485a9016fa202d618efb66e40dc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 18 Sep 2024 16:43:59 -0700 Subject: [PATCH 203/270] Fix leak of LMDB connection in semantic index (#17992) Apparently, to close LMDB's file descriptors when using the `heed` library, you need to explicitly call `prepare_for_closing`. 
Release Notes: - N/A --------- Co-authored-by: Richard Feldman Co-authored-by: Jason --- crates/evals/src/eval.rs | 9 +++++++++ crates/semantic_index/src/semantic_index.rs | 12 +++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 751dcd09aa..708cfa7511 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -446,6 +446,15 @@ async fn run_evaluation( println!("{}", serde_json::to_string(&query_results).unwrap()); } + + user_store + .update(cx, |_, _| { + drop(semantic_index); + drop(project); + drop(worktree); + drop(project_index); + }) + .unwrap(); } eprint!( diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 3435d0a9ca..6c97ece024 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -25,7 +25,7 @@ pub use summary_index::FileSummary; pub struct SemanticDb { embedding_provider: Arc, - db_connection: heed::Env, + db_connection: Option, project_indices: HashMap, Model>, } @@ -70,7 +70,7 @@ impl SemanticDb { .ok(); Ok(SemanticDb { - db_connection, + db_connection: Some(db_connection), embedding_provider, project_indices: HashMap::default(), }) @@ -148,7 +148,7 @@ impl SemanticDb { let project_index = cx.new_model(|cx| { ProjectIndex::new( project.clone(), - self.db_connection.clone(), + self.db_connection.clone().unwrap(), self.embedding_provider.clone(), cx, ) @@ -171,6 +171,12 @@ impl SemanticDb { } } +impl Drop for SemanticDb { + fn drop(&mut self) { + self.db_connection.take().unwrap().prepare_for_closing(); + } +} + #[cfg(test)] mod tests { use super::*; From eef44aff7f9b17f1ea38cbc64ac52bbbd435ef10 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 19:48:34 -0400 Subject: [PATCH 204/270] extension: Re-enable `test_extension_store_with_test_extension` test (#18046) The `test_extension_store_with_test_extension` test was disabled in 
#15446, which got merged before re-enabling the test. This PR re-enables that test. Release Notes: - N/A --- crates/extension/src/extension_store_test.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 0fbd00e0b4..4bdafaa32c 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -457,8 +457,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } -// TODO remove -#[ignore] #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); @@ -564,10 +562,6 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); Ok(Response::new(gzipped_bytes.into())) - // } else if uri == WASI_ADAPTER_URL { - // let binary_contents = - // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice(); - // Ok(Response::new(binary_contents.into())) } else { Ok(Response::builder().status(404).body("not found".into())?) 
} From b43b800a54919103062e8fd7f5ff82c80026f211 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 18:07:39 -0600 Subject: [PATCH 205/270] More assistant events (#18032) Release Notes: - N/A --- crates/assistant/src/context.rs | 3 +- crates/assistant/src/inline_assistant.rs | 29 +++++++++++++++++++ .../src/terminal_inline_assistant.rs | 1 + crates/client/src/telemetry.rs | 8 +++-- crates/collab/src/api/events.rs | 2 ++ .../telemetry_events/src/telemetry_events.rs | 28 +++++++++++++++++- 6 files changed, 66 insertions(+), 5 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d55b1aee08..d72b04e3cd 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -46,7 +46,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use telemetry_events::AssistantKind; +use telemetry_events::{AssistantKind, AssistantPhase}; use text::BufferSnapshot; use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; @@ -2134,6 +2134,7 @@ impl Context { telemetry.report_assistant_event( Some(this.id.0.clone()), AssistantKind::Panel, + AssistantPhase::Response, model.telemetry_id(), response_latency, error_message, diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index eb1bc1eee8..c9360213ae 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -174,6 +174,18 @@ impl InlineAssistant { initial_prompt: Option, cx: &mut WindowContext, ) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Invoked, + model.telemetry_id(), + None, + None, + ); + } + } let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); let mut selections = Vec::>::new(); @@ -708,6 +720,22 @@ impl InlineAssistant { } pub fn 
finish_assist(&mut self, assist_id: InlineAssistId, undo: bool, cx: &mut WindowContext) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + if undo { + telemetry_events::AssistantPhase::Rejected + } else { + telemetry_events::AssistantPhase::Accepted + }, + model.telemetry_id(), + None, + None, + ); + } + } if let Some(assist) = self.assists.get(&assist_id) { let assist_group_id = assist.group_id; if self.assist_groups[&assist_group_id].linked { @@ -2558,6 +2586,7 @@ impl Codegen { telemetry.report_assistant_event( None, telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Response, model_telemetry_id, response_latency, error_message, diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 06661944d9..caf819bae5 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -1066,6 +1066,7 @@ impl Codegen { telemetry.report_assistant_event( None, telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Response, model_telemetry_id, response_latency, error_message, diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index b415cae14c..46304819a4 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -16,9 +16,9 @@ use std::io::Write; use std::{env, mem, path::PathBuf, sync::Arc, time::Duration}; use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System}; use telemetry_events::{ - ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent, - EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, - MemoryEvent, ReplEvent, SettingEvent, + ActionEvent, AppEvent, AssistantEvent, AssistantKind, AssistantPhase, 
CallEvent, CpuEvent, + EditEvent, EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, + InlineCompletionEvent, MemoryEvent, ReplEvent, SettingEvent, }; use tempfile::NamedTempFile; #[cfg(not(debug_assertions))] @@ -391,6 +391,7 @@ impl Telemetry { self: &Arc, conversation_id: Option, kind: AssistantKind, + phase: AssistantPhase, model: String, response_latency: Option, error_message: Option, @@ -398,6 +399,7 @@ impl Telemetry { let event = Event::Assistant(AssistantEvent { conversation_id, kind, + phase, model: model.to_string(), response_latency, error_message, diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 30ed10a76f..45c25d261e 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -834,6 +834,7 @@ pub struct AssistantEventRow { // AssistantEventRow conversation_id: String, kind: String, + phase: String, model: String, response_latency_in_ms: Option, error_message: Option, @@ -866,6 +867,7 @@ impl AssistantEventRow { time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), + phase: event.phase.to_string(), model: event.model, response_latency_in_ms: event .response_latency diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 87ecfb76b6..eb84322e83 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -44,7 +44,6 @@ pub enum AssistantKind { Panel, Inline, } - impl Display for AssistantKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( @@ -58,6 +57,31 @@ impl Display for AssistantKind { } } +#[derive(Default, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AssistantPhase { + #[default] + Response, + Invoked, + Accepted, + Rejected, +} + +impl Display for AssistantPhase { + fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + Self::Response => "response", + Self::Invoked => "invoked", + Self::Accepted => "accepted", + Self::Rejected => "rejected", + } + ) + } +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(tag = "type")] pub enum Event { @@ -121,6 +145,8 @@ pub struct AssistantEvent { pub conversation_id: Option, /// The kind of assistant (Panel, Inline) pub kind: AssistantKind, + #[serde(default)] + pub phase: AssistantPhase, /// Name of the AI model used (gpt-4o, claude-3-5-sonnet, etc) pub model: String, pub response_latency: Option, From 43e005e936e13947ed99799375bcbfa35703b8cd Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 02:19:58 +0200 Subject: [PATCH 206/270] chore: Remove commented out code following 15446 (#18047) Closes #ISSUE Release Notes: - N/A --- crates/ollama/src/ollama.rs | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 972520e61f..51c4829048 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -343,17 +343,7 @@ pub async fn preload_model(client: Arc, api_url: &str, model: &s }), )?))?; - let mut response = match client.send(request).await { - Ok(response) => response, - Err(error) => { - // Be ok with a timeout during preload of the model - // if err.is_timeout() { - // return Ok(()); - // } else { - return Err(error); - //} - } - }; + let mut response = client.send(request).await?; if response.status().is_success() { Ok(()) From c3f47b8040a83b6414b8a28399628370fb7224f4 Mon Sep 17 00:00:00 2001 From: hekmyr <163496286+hekmyr@users.noreply.github.com> Date: Thu, 19 Sep 2024 02:28:31 +0200 Subject: [PATCH 207/270] vim: Fix increment/decrement command (#17644) Improving vim increment and decrement command. 
Closes: #16672 ## Release Notes: - vim: Improved edge-case handling for ctrl-a/ctrl-x --------- Co-authored-by: Conrad Irwin --- crates/vim/src/normal/increment.rs | 241 ++++++++++++++++-- ...st_increment_bin_wrapping_and_padding.json | 10 + .../test_data/test_increment_hex_casing.json | 5 + ...st_increment_hex_wrapping_and_padding.json | 10 + .../vim/test_data/test_increment_inline.json | 10 + .../test_data/test_increment_sign_change.json | 6 + .../test_data/test_increment_wrapping.json | 13 + 7 files changed, 273 insertions(+), 22 deletions(-) create mode 100644 crates/vim/test_data/test_increment_bin_wrapping_and_padding.json create mode 100644 crates/vim/test_data/test_increment_hex_casing.json create mode 100644 crates/vim/test_data/test_increment_hex_wrapping_and_padding.json create mode 100644 crates/vim/test_data/test_increment_inline.json create mode 100644 crates/vim/test_data/test_increment_sign_change.json create mode 100644 crates/vim/test_data/test_increment_wrapping.json diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 8786eae872..6d66e380c3 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -28,18 +28,18 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.record_current_action(cx); let count = vim.take_count(cx).unwrap_or(1); let step = if action.step { 1 } else { 0 }; - vim.increment(count as i32, step, cx) + vim.increment(count as i64, step, cx) }); Vim::action(editor, cx, |vim, action: &Decrement, cx| { vim.record_current_action(cx); let count = vim.take_count(cx).unwrap_or(1); let step = if action.step { -1 } else { 0 }; - vim.increment(-(count as i32), step, cx) + vim.increment(-(count as i64), step, cx) }); } impl Vim { - fn increment(&mut self, mut delta: i32, step: i32, cx: &mut ViewContext) { + fn increment(&mut self, mut delta: i64, step: i32, cx: &mut ViewContext) { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { let 
mut edits = Vec::new(); @@ -60,23 +60,14 @@ impl Vim { }; if let Some((range, num, radix)) = find_number(&snapshot, start) { - if let Ok(val) = i32::from_str_radix(&num, radix) { - let result = val + delta; - delta += step; - let replace = match radix { - 10 => format!("{}", result), - 16 => { - if num.to_ascii_lowercase() == num { - format!("{:x}", result) - } else { - format!("{:X}", result) - } - } - 2 => format!("{:b}", result), - _ => unreachable!(), - }; - edits.push((range.clone(), replace)); - } + let replace = match radix { + 10 => increment_decimal_string(&num, delta), + 16 => increment_hex_string(&num, delta), + 2 => increment_binary_string(&num, delta), + _ => unreachable!(), + }; + delta += step as i64; + edits.push((range.clone(), replace)); if selection.is_empty() { new_anchors.push((false, snapshot.anchor_after(range.end))) } @@ -107,6 +98,70 @@ impl Vim { } } +fn increment_decimal_string(mut num: &str, mut delta: i64) -> String { + let mut negative = false; + if num.chars().next() == Some('-') { + negative = true; + delta = 0 - delta; + num = &num[1..]; + } + let result = if let Ok(value) = u64::from_str_radix(num, 10) { + let wrapped = value.wrapping_add_signed(delta); + if delta < 0 && wrapped > value { + negative = !negative; + (u64::MAX - wrapped).wrapping_add(1) + } else if delta > 0 && wrapped < value { + negative = !negative; + u64::MAX - wrapped + } else { + wrapped + } + } else { + u64::MAX + }; + + if result == 0 || !negative { + format!("{}", result) + } else { + format!("-{}", result) + } +} + +fn increment_hex_string(num: &str, delta: i64) -> String { + let result = if let Ok(val) = u64::from_str_radix(&num, 16) { + val.wrapping_add_signed(delta) + } else { + u64::MAX + }; + if should_use_lowercase(num) { + format!("{:0width$x}", result, width = num.len()) + } else { + format!("{:0width$X}", result, width = num.len()) + } +} + +fn should_use_lowercase(num: &str) -> bool { + let mut use_uppercase = false; + for ch in num.chars() { + if 
ch.is_ascii_lowercase() { + return true; + } + if ch.is_ascii_uppercase() { + use_uppercase = true; + } + } + !use_uppercase +} + +fn increment_binary_string(num: &str, delta: i64) -> String { + let result = if let Ok(val) = u64::from_str_radix(&num, 2) { + val.wrapping_add_signed(delta) + } else { + u64::MAX + }; + format!("{:0width$b}", result, width = num.len()) +} + fn find_number( snapshot: &MultiBufferSnapshot, start: Point, @@ -114,10 +169,10 @@ fn find_number( let mut offset = start.to_offset(snapshot); let ch0 = snapshot.chars_at(offset).next(); - if ch0.as_ref().is_some_and(char::is_ascii_digit) || matches!(ch0, Some('-' | 'b' | 'x')) { + if ch0.as_ref().is_some_and(char::is_ascii_hexdigit) || matches!(ch0, Some('-' | 'b' | 'x')) { // go backwards to the start of any number the selection is within for ch in snapshot.reversed_chars_at(offset) { - if ch.is_ascii_digit() || ch == '-' || ch == 'b' || ch == 'x' { + if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' { offset -= ch.len_utf8(); continue; } @@ -158,6 +213,8 @@ fn find_number( begin = Some(offset); } num.push(ch); + println!("pushing {}", ch); + println!(); } else if begin.is_some() { end = Some(offset); break; @@ -250,6 +307,146 @@ mod test { "}); } + #[gpui::test] + async fn test_increment_sign_change(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + ˇ0 + "}) + .await; + cx.simulate_shared_keystrokes("ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + -ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + ˇ1 + "}); + } + + #[gpui::test] + async fn test_increment_bin_wrapping_and_padding(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! 
{" + 0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0 + "}); + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}); + } + + #[gpui::test] + async fn test_increment_hex_wrapping_and_padding(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 0xfffffffffffffffffffˇf + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000fffffffffffffffˇf + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000000000000000000ˇ0 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000000000000000000ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000fffffffffffffffˇf + "}); + } + + #[gpui::test] + async fn test_increment_wrapping(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 1844674407370955161ˇ9 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! 
{" + 1844674407370955161ˇ5 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ5 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ4 + "}); + cx.simulate_shared_keystrokes("3 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 1844674407370955161ˇ4 + "}); + cx.simulate_shared_keystrokes("2 ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ5 + "}); + } + + #[gpui::test] + async fn test_increment_inline(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + inline0x3ˇ9u32 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3ˇau32 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3ˇbu32 + "}); + cx.simulate_shared_keystrokes("l l l ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3bu3ˇ3 + "}); + } + + #[gpui::test] + async fn test_increment_hex_casing(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 0xFˇa + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0xfˇb + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! 
{" + 0xfˇc + "}); + } + #[gpui::test] async fn test_increment_radix(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json new file mode 100644 index 0000000000..4f1a6aa1d3 --- /dev/null +++ b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json @@ -0,0 +1,10 @@ +{"Put":{"state":"0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-x"} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_casing.json b/crates/vim/test_data/test_increment_hex_casing.json new file mode 100644 index 0000000000..951906fa25 --- /dev/null +++ b/crates/vim/test_data/test_increment_hex_casing.json @@ -0,0 +1,5 @@ +{"Put":{"state":"0xFˇa\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0xfˇb\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0xfˇc\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json new file mode 100644 index 0000000000..23a5611264 --- /dev/null +++ b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json @@ -0,0 +1,10 @@ +{"Put":{"state":"0xfffffffffffffffffffˇf\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} +{"Key":"ctrl-a"} 
+{"Get":{"state":"0x0000000000000000000ˇ0\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0x0000000000000000000ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-x"} +{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_inline.json b/crates/vim/test_data/test_increment_inline.json new file mode 100644 index 0000000000..98c4fc2805 --- /dev/null +++ b/crates/vim/test_data/test_increment_inline.json @@ -0,0 +1,10 @@ +{"Put":{"state":"inline0x3ˇ9u32\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3ˇau32\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3ˇbu32\n", "mode":"Normal"}} +{"Key":"l"} +{"Key":"l"} +{"Key":"l"} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3bu3ˇ3\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_sign_change.json b/crates/vim/test_data/test_increment_sign_change.json new file mode 100644 index 0000000000..1f4edd57b4 --- /dev/null +++ b/crates/vim/test_data/test_increment_sign_change.json @@ -0,0 +1,6 @@ +{"Put":{"state":"ˇ0\n"}} +{"Key":"ctrl-x"} +{"Get":{"state":"-ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-a"} +{"Get":{"state":"ˇ1\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_wrapping.json b/crates/vim/test_data/test_increment_wrapping.json new file mode 100644 index 0000000000..9f84c8cb11 --- /dev/null +++ b/crates/vim/test_data/test_increment_wrapping.json @@ -0,0 +1,13 @@ +{"Put":{"state":"1844674407370955161ˇ9\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"1844674407370955161ˇ5\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Key":"3"} +{"Key":"ctrl-x"} +{"Get":{"state":"1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} From 1b612108bae7e4c7ac194e5803b4144fbc218df6 Mon Sep 17 00:00:00 2001 
From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:40:01 +0200 Subject: [PATCH 208/270] linux: Fix invalid check for denylisted dependencies (#18050) Closes #ISSUE Release Notes: - N/A --- script/bundle-linux | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/bundle-linux b/script/bundle-linux index deecd0984b..c519f3b9ab 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -56,7 +56,7 @@ strip --strip-debug "${target_dir}/${target_triple}/release/remote_server" # Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. -ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' +! ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' suffix="" if [ "$channel" != "stable" ]; then From 5e6d1814e5c69d4e2e50d01744f5effe2b92ce70 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 19 Sep 2024 12:22:10 +0200 Subject: [PATCH 209/270] Add stray UI tweaks on the task picker (#18059) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds tiny UI tweaks to the task picker. Just making sure it is consistent with other pickers throughout Zed. 
| Before | After | |--------|--------| | Screenshot 2024-09-19 at 12 07 44 PM | Screenshot 2024-09-19 at 12 07 09 PM | Release Notes: - N/A --- crates/tasks_ui/src/modal.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 1255d3a94e..931a0b09c3 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -410,7 +410,7 @@ impl PickerDelegate for TasksModalDelegate { Some( ListItem::new(SharedString::from(format!("tasks-modal-{ix}"))) - .inset(false) + .inset(true) .start_slot::(icon) .end_slot::(history_run_icon) .spacing(ListItemSpacing::Sparse) @@ -448,7 +448,7 @@ impl PickerDelegate for TasksModalDelegate { picker.refresh(cx); })) .tooltip(|cx| { - Tooltip::text("Delete previously scheduled task", cx) + Tooltip::text("Delete Previously Scheduled Task", cx) }), ); item.end_hover_slot(delete_button) @@ -499,7 +499,7 @@ impl PickerDelegate for TasksModalDelegate { .last_scheduled_task(None) .is_some() { - Some(("Rerun last task", Rerun::default().boxed_clone())) + Some(("Rerun Last Task", Rerun::default().boxed_clone())) } else { None }; @@ -511,6 +511,8 @@ impl PickerDelegate for TasksModalDelegate { .justify_between() .rounded_b_md() .bg(cx.theme().colors().ghost_element_selected) + .border_t_1() + .border_color(cx.theme().colors().border_variant) .child( left_button .map(|(label, action)| { @@ -535,9 +537,9 @@ impl PickerDelegate for TasksModalDelegate { .boxed_clone(); this.children(KeyBinding::for_action(&*action, cx).map(|keybind| { let spawn_oneshot_label = if current_modifiers.secondary() { - "Spawn oneshot without history" + "Spawn Oneshot Without History" } else { - "Spawn oneshot" + "Spawn Oneshot" }; Button::new("spawn-onehshot", spawn_oneshot_label) @@ -549,9 +551,9 @@ impl PickerDelegate for TasksModalDelegate { this.children(KeyBinding::for_action(&menu::SecondaryConfirm, cx).map( |keybind| { let label = if is_recent_selected { - 
"Rerun without history" + "Rerun Without History" } else { - "Spawn without history" + "Spawn Without History" }; Button::new("spawn", label) .label_size(LabelSize::Small) From ca4980df02aa2618ebcb3969963c6fdc8ac23fd7 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 19 Sep 2024 07:20:27 -0400 Subject: [PATCH 210/270] Add system_id (#18040) This PR adds `system_id` to telemetry, which is contained within a new `global` database (accessible by any release channel of Zed on a single system). This will help us get a more accurate understanding of user count, instead of relying on `installationd_id`, which is different per release channel. This doesn't solve the problem of a user with multiple machines, but it gets us closer. Release Notes: - N/A --- crates/client/src/telemetry.rs | 17 ++-- crates/collab/src/api/events.rs | 25 ++++-- crates/db/src/db.rs | 68 ++++++++------ crates/db/src/kvp.rs | 30 +++++++ crates/feedback/src/feedback_modal.rs | 6 +- .../telemetry_events/src/telemetry_events.rs | 8 +- crates/zed/src/main.rs | 88 ++++++++++++++----- crates/zed/src/reliability.rs | 4 +- 8 files changed, 184 insertions(+), 62 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 46304819a4..6c1803df3d 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -37,9 +37,10 @@ pub struct Telemetry { struct TelemetryState { settings: TelemetrySettings, - metrics_id: Option>, // Per logged-in user + system_id: Option>, // Per system installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) session_id: Option, // Per app launch + metrics_id: Option>, // Per logged-in user release_channel: Option<&'static str>, architecture: &'static str, events_queue: Vec, @@ -191,9 +192,10 @@ impl Telemetry { settings: *TelemetrySettings::get_global(cx), architecture: env::consts::ARCH, release_channel, + system_id: None, installation_id: None, - metrics_id: None, session_id: 
None, + metrics_id: None, events_queue: Vec::new(), flush_events_task: None, log_file: None, @@ -283,11 +285,13 @@ impl Telemetry { pub fn start( self: &Arc, + system_id: Option, installation_id: Option, session_id: String, cx: &mut AppContext, ) { let mut state = self.state.lock(); + state.system_id = system_id.map(|id| id.into()); state.installation_id = installation_id.map(|id| id.into()); state.session_id = Some(session_id); state.app_version = release_channel::AppVersion::global(cx).to_string(); @@ -637,9 +641,10 @@ impl Telemetry { let state = this.state.lock(); let request_body = EventRequestBody { + system_id: state.system_id.as_deref().map(Into::into), installation_id: state.installation_id.as_deref().map(Into::into), - metrics_id: state.metrics_id.as_deref().map(Into::into), session_id: state.session_id.clone(), + metrics_id: state.metrics_id.as_deref().map(Into::into), is_staff: state.is_staff, app_version: state.app_version.clone(), os_name: state.os_name.clone(), @@ -711,6 +716,7 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); @@ -718,7 +724,7 @@ mod tests { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); @@ -796,13 +802,14 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); cx.update(|cx| { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - 
telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 45c25d261e..1be8f9c37b 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -149,7 +149,8 @@ pub async fn post_crash( installation_id = %installation_id, description = %description, backtrace = %summary, - "crash report"); + "crash report" + ); if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { let payload = slack::WebhookBody::new(|w| { @@ -627,7 +628,9 @@ where #[derive(Serialize, Debug, clickhouse::Row)] pub struct EditorEventRow { + system_id: String, installation_id: String, + session_id: Option, metrics_id: String, operation: String, app_version: String, @@ -647,7 +650,6 @@ pub struct EditorEventRow { historical_event: bool, architecture: String, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -677,9 +679,10 @@ impl EditorEventRow { os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), architecture: body.architecture.clone(), + system_id: body.system_id.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), - metrics_id: body.metrics_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), + metrics_id: body.metrics_id.clone().unwrap_or_default(), is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, @@ -699,6 +702,7 @@ impl EditorEventRow { #[derive(Serialize, Debug, clickhouse::Row)] pub struct InlineCompletionEventRow { installation_id: String, + session_id: Option, provider: String, suggestion_accepted: bool, app_version: String, @@ -713,7 +717,6 @@ pub struct InlineCompletionEventRow { city: String, time: i64, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -879,7 +882,9 
@@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { + system_id: Option, installation_id: Option, + session_id: Option, is_staff: Option, usage_as_percentage: f32, core_count: u32, @@ -888,7 +893,6 @@ pub struct CpuEventRow { os_name: String, os_version: String, time: i64, - session_id: Option, // pub normalized_cpu_usage: f64, MATERIALIZED major: Option, minor: Option, @@ -917,6 +921,7 @@ impl CpuEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -940,6 +945,7 @@ pub struct MemoryEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -971,6 +977,7 @@ impl MemoryEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -994,6 +1001,7 @@ pub struct AppEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1024,6 +1032,7 @@ impl AppEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1046,6 +1055,7 @@ pub struct SettingEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1076,6 +1086,7 @@ impl 
SettingEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1099,6 +1110,7 @@ pub struct ExtensionEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1134,6 +1146,7 @@ impl ExtensionEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1224,6 +1237,7 @@ pub struct EditEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, // Note: This column name has a typo in the ClickHouse table. 
#[serde(rename = "sesssion_id")] @@ -1261,6 +1275,7 @@ impl EditEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 768f382203..4d87222c77 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -11,16 +11,14 @@ pub use smol; pub use sqlez; pub use sqlez_macros; -use release_channel::ReleaseChannel; pub use release_channel::RELEASE_CHANNEL; use sqlez::domain::Migrator; use sqlez::thread_safe_connection::ThreadSafeConnection; use sqlez_macros::sql; -use std::env; use std::future::Future; use std::path::Path; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::LazyLock; +use std::sync::{atomic::Ordering, LazyLock}; +use std::{env, sync::atomic::AtomicBool}; use util::{maybe, ResultExt}; const CONNECTION_INITIALIZE_QUERY: &str = sql!( @@ -47,16 +45,12 @@ pub static ALL_FILE_DB_FAILED: LazyLock = LazyLock::new(|| AtomicBoo /// This will retry a couple times if there are failures. If opening fails once, the db directory /// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. /// In either case, static variables are set so that the user can be notified. 
-pub async fn open_db( - db_dir: &Path, - release_channel: &ReleaseChannel, -) -> ThreadSafeConnection { +pub async fn open_db(db_dir: &Path, scope: &str) -> ThreadSafeConnection { if *ZED_STATELESS { return open_fallback_db().await; } - let release_channel_name = release_channel.dev_name(); - let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); + let main_db_dir = db_dir.join(format!("0-{}", scope)); let connection = maybe!(async { smol::fs::create_dir_all(&main_db_dir) @@ -118,7 +112,7 @@ pub async fn open_test_db(db_name: &str) -> ThreadSafeConnection /// Implements a basic DB wrapper for a given domain #[macro_export] macro_rules! define_connection { - (pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>); impl ::std::ops::Deref for $t { @@ -139,18 +133,23 @@ macro_rules! define_connection { } } - use std::sync::LazyLock; #[cfg(any(test, feature = "test-support"))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { $t($crate::smol::block_on($crate::open_test_db(stringify!($id)))) }); #[cfg(not(any(test, feature = "test-support")))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; - (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) 
=> { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>); impl ::std::ops::Deref for $t { @@ -178,7 +177,13 @@ macro_rules! define_connection { #[cfg(not(any(test, feature = "test-support")))] pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; } @@ -225,7 +230,11 @@ mod tests { .prefix("DbTests") .tempdir() .unwrap(); - let _bad_db = open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let _bad_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; } /// Test that DB exists but corrupted (causing recreate) @@ -262,13 +271,19 @@ mod tests { .tempdir() .unwrap(); { - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } - let good_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let good_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() .unwrap() @@ -311,8 +326,11 @@ mod tests { .unwrap(); { // Setup the bad database - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } @@ -323,7 +341,7 @@ mod tests { let guard = thread::spawn(move || { let good_db = smol::block_on(open_db::( tmp_path.as_path(), - 
&release_channel::ReleaseChannel::Dev, + &release_channel::ReleaseChannel::Dev.dev_name(), )); assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index 0b0cdd9aa1..c9d994d34d 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -60,3 +60,33 @@ mod tests { assert_eq!(db.read_kvp("key-1").unwrap(), None); } } + +define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> = + &[sql!( + CREATE TABLE IF NOT EXISTS kv_store( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ) STRICT; + )]; + global +); + +impl GlobalKeyValueStore { + query! { + pub fn read_kvp(key: &str) -> Result> { + SELECT value FROM kv_store WHERE key = (?) + } + } + + query! { + pub async fn write_kvp(key: String, value: String) -> Result<()> { + INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?)) + } + } + + query! { + pub async fn delete_kvp(key: String) -> Result<()> { + DELETE FROM kv_store WHERE key = (?) + } + } +} diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index 7369bcd853..a4a07ad2ad 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -44,8 +44,8 @@ const FEEDBACK_SUBMISSION_ERROR_TEXT: &str = struct FeedbackRequestBody<'a> { feedback_text: &'a str, email: Option, - metrics_id: Option>, installation_id: Option>, + metrics_id: Option>, system_specs: SystemSpecs, is_staff: bool, } @@ -296,16 +296,16 @@ impl FeedbackModal { } let telemetry = zed_client.telemetry(); - let metrics_id = telemetry.metrics_id(); let installation_id = telemetry.installation_id(); + let metrics_id = telemetry.metrics_id(); let is_staff = telemetry.is_staff(); let http_client = zed_client.http_client(); let feedback_endpoint = http_client.build_url("/api/feedback"); let request = FeedbackRequestBody { feedback_text, email, - metrics_id, installation_id, + metrics_id, system_specs, is_staff: is_staff.unwrap_or(false), 
}; diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index eb84322e83..d6e737b929 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -5,12 +5,14 @@ use std::{fmt::Display, sync::Arc, time::Duration}; #[derive(Serialize, Deserialize, Debug)] pub struct EventRequestBody { + /// Identifier unique to each system Zed is installed on + pub system_id: Option, /// Identifier unique to each Zed installation (differs for stable, preview, dev) pub installation_id: Option, /// Identifier unique to each logged in Zed user (randomly generated on first sign in) - pub metrics_id: Option, /// Identifier unique to each Zed session (differs for each time you open Zed) pub session_id: Option, + pub metrics_id: Option, /// True for Zed staff, otherwise false pub is_staff: Option, /// Zed version number @@ -34,6 +36,7 @@ pub struct EventWrapper { pub signed_in: bool, /// Duration between this event's timestamp and the timestamp of the first event in the current batch pub milliseconds_since_first_event: i64, + /// The event itself #[serde(flatten)] pub event: Event, } @@ -245,8 +248,11 @@ pub struct Panic { pub architecture: String, /// The time the panic occurred (UNIX millisecond timestamp) pub panicked_on: i64, + /// Identifier unique to each system Zed is installed on #[serde(skip_serializing_if = "Option::is_none")] + pub system_id: Option, /// Identifier unique to each Zed installation (differs for stable, preview, dev) + #[serde(skip_serializing_if = "Option::is_none")] pub installation_id: Option, /// Identifier unique to each Zed session (differs for each time you open Zed) pub session_id: String, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d3a722ec65..c127a975a9 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -13,7 +13,7 @@ use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; use 
client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; use editor::Editor; use env_logger::Builder; use fs::{Fs, RealFs}; @@ -334,19 +334,17 @@ fn main() { .with_assets(Assets) .with_http_client(IsahcHttpClient::new(None, None)); - let (installation_id, existing_installation_id_found) = app - .background_executor() - .block(installation_id()) - .ok() - .unzip(); - + let system_id = app.background_executor().block(system_id()).ok(); + let installation_id = app.background_executor().block(installation_id()).ok(); + let session_id = Uuid::new_v4().to_string(); let session = app.background_executor().block(Session::new()); - let app_version = AppVersion::init(env!("CARGO_PKG_VERSION")); + reliability::init_panic_hook( - installation_id.clone(), app_version, - session.id().to_owned(), + system_id.as_ref().map(|id| id.to_string()), + installation_id.as_ref().map(|id| id.to_string()), + session_id.clone(), ); let (open_listener, mut open_rx) = OpenListener::new(); @@ -491,14 +489,26 @@ fn main() { client::init(&client, cx); language::init(cx); let telemetry = client.telemetry(); - telemetry.start(installation_id.clone(), session.id().to_owned(), cx); - telemetry.report_app_event( - match existing_installation_id_found { - Some(false) => "first open", - _ => "open", - } - .to_string(), + telemetry.start( + system_id.as_ref().map(|id| id.to_string()), + installation_id.as_ref().map(|id| id.to_string()), + session_id, + cx, ); + if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { + match (&system_id, &installation_id) { + (IdType::New(_), IdType::New(_)) => { + telemetry.report_app_event("first open".to_string()); + telemetry.report_app_event("first open for release channel".to_string()); + } + (IdType::Existing(_), IdType::New(_)) => { + telemetry.report_app_event("first open for release 
channel".to_string()); + } + (_, IdType::Existing(_)) => { + telemetry.report_app_event("open".to_string()); + } + } + } let app_session = cx.new_model(|cx| AppSession::new(session, cx)); let app_state = Arc::new(AppState { @@ -514,7 +524,11 @@ fn main() { AppState::set_global(Arc::downgrade(&app_state), cx); auto_update::init(client.http_client(), cx); - reliability::init(client.http_client(), installation_id, cx); + reliability::init( + client.http_client(), + installation_id.clone().map(|id| id.to_string()), + cx, + ); let prompt_builder = init_common(app_state.clone(), cx); let args = Args::parse(); @@ -755,7 +769,23 @@ async fn authenticate(client: Arc, cx: &AsyncAppContext) -> Result<()> { Ok::<_, anyhow::Error>(()) } -async fn installation_id() -> Result<(String, bool)> { +async fn system_id() -> Result { + let key_name = "system_id".to_string(); + + if let Ok(Some(system_id)) = GLOBAL_KEY_VALUE_STORE.read_kvp(&key_name) { + return Ok(IdType::Existing(system_id)); + } + + let system_id = Uuid::new_v4().to_string(); + + GLOBAL_KEY_VALUE_STORE + .write_kvp(key_name, system_id.clone()) + .await?; + + Ok(IdType::New(system_id)) +} + +async fn installation_id() -> Result { let legacy_key_name = "device_id".to_string(); let key_name = "installation_id".to_string(); @@ -765,11 +795,11 @@ async fn installation_id() -> Result<(String, bool)> { .write_kvp(key_name, installation_id.clone()) .await?; KEY_VALUE_STORE.delete_kvp(legacy_key_name).await?; - return Ok((installation_id, true)); + return Ok(IdType::Existing(installation_id)); } if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&key_name) { - return Ok((installation_id, true)); + return Ok(IdType::Existing(installation_id)); } let installation_id = Uuid::new_v4().to_string(); @@ -778,7 +808,7 @@ async fn installation_id() -> Result<(String, bool)> { .write_kvp(key_name, installation_id.clone()) .await?; - Ok((installation_id, false)) + Ok(IdType::New(installation_id)) } async fn 
restore_or_create_workspace( @@ -1087,6 +1117,20 @@ struct Args { dev_server_token: Option, } +#[derive(Clone, Debug)] +enum IdType { + New(String), + Existing(String), +} + +impl ToString for IdType { + fn to_string(&self) -> String { + match self { + IdType::New(id) | IdType::Existing(id) => id.clone(), + } + } +} + fn parse_url_arg(arg: &str, cx: &AppContext) -> Result { match std::fs::canonicalize(Path::new(&arg)) { Ok(path) => Ok(format!( diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 188cf417f7..9e811d7c9a 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -28,8 +28,9 @@ use crate::stdout_is_a_pty; static PANIC_COUNT: AtomicU32 = AtomicU32::new(0); pub fn init_panic_hook( - installation_id: Option, app_version: SemanticVersion, + system_id: Option, + installation_id: Option, session_id: String, ) { let is_pty = stdout_is_a_pty(); @@ -102,6 +103,7 @@ pub fn init_panic_hook( architecture: env::consts::ARCH.into(), panicked_on: Utc::now().timestamp_millis(), backtrace, + system_id: system_id.clone(), installation_id: installation_id.clone(), session_id: session_id.clone(), }; From 1723713dc292074e558935b9f5c81eac7938c396 Mon Sep 17 00:00:00 2001 From: thataboy Date: Thu, 19 Sep 2024 04:43:49 -0700 Subject: [PATCH 211/270] Add ability to copy assistant code block to clipboard or insert into editor, without manual selection (#17853) Some notes: - You can put the cursor on the start or end line with triple backticks, it doesn't actually have to be inside the block. - Placing the cursor outside of a code block does nothing. - Code blocks are determined by counting triple backticks pairs from either start or end of buffer, and nothing else. - If you manually select something, the selection takes precedence over any code blocks. 
Release Notes: - Added the ability to copy surrounding code blocks in the assistant panel into the clipboard, or inserting them directly into the editor, without manually selecting. Place cursor anywhere in a code block (marked by triple backticks) and use the `assistant::CopyCode` action (`cmd-k c` / `ctrl-k c`) to copy to the clipboard, or the `assistant::InsertIntoEditor` action (`cmd-<` / `ctrl-<`) to insert into editor. --------- Co-authored-by: Thorsten Ball Co-authored-by: Bennet --- Cargo.lock | 2 + assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + crates/assistant/Cargo.toml | 2 + crates/assistant/src/assistant.rs | 1 + crates/assistant/src/assistant_panel.rs | 218 ++++++++++++++++++++++-- 6 files changed, 207 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 652c584fd5..a37a5350f5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -402,6 +402,7 @@ dependencies = [ "indoc", "language", "language_model", + "languages", "log", "markdown", "menu", @@ -436,6 +437,7 @@ dependencies = [ "text", "theme", "toml 0.8.19", + "tree-sitter-md", "ui", "unindent", "util", diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 02fc6d8e04..542f6c2df4 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -166,6 +166,7 @@ { "context": "AssistantPanel", "bindings": { + "ctrl-k c": "assistant::CopyCode", "ctrl-g": "search::SelectNextMatch", "ctrl-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 33536cc9ff..77fac3254b 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -188,6 +188,7 @@ { "context": "AssistantPanel", "bindings": { + "cmd-k c": "assistant::CopyCode", "cmd-g": "search::SelectNextMatch", "cmd-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", diff --git 
a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index b700702062..9f715d8224 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -94,9 +94,11 @@ editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true language = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } +languages = { workspace = true, features = ["test-support"] } log.workspace = true project = { workspace = true, features = ["test-support"] } rand.workspace = true serde_json_lenient.workspace = true text = { workspace = true, features = ["test-support"] } +tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index af7f03ebb3..d7466878c9 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -58,6 +58,7 @@ actions!( [ Assist, Split, + CopyCode, CycleMessageRole, QuoteSelection, InsertIntoEditor, diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 5d06720fe0..094d187df2 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -12,11 +12,11 @@ use crate::{ slash_command_picker, terminal_inline_assistant::TerminalInlineAssistant, Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, - ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, - InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata, - MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand, - PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split, - ToggleFocus, ToggleModelSelector, WorkflowStepResolution, + ContextStoreEvent, CopyCode, CycleMessageRole, DeployHistory, DeployPromptLibrary, + InlineAssistId, InlineAssistant, 
InsertDraggedFiles, InsertIntoEditor, Message, MessageId, + MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, + PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, + SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, WorkflowStepResolution, }; use anyhow::{anyhow, Result}; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection}; @@ -45,7 +45,8 @@ use gpui::{ }; use indexed_docs::IndexedDocsStore; use language::{ - language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, + language_settings::SoftWrap, BufferSnapshot, Capability, LanguageRegistry, LspAdapterDelegate, + ToOffset, }; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, @@ -56,6 +57,7 @@ use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::lsp_store::LocalLspAdapterDelegate; use project::{Project, Worktree}; +use rope::Point; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; @@ -81,9 +83,10 @@ use util::{maybe, ResultExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, item::{self, FollowableItem, Item, ItemHandle}, + notifications::NotificationId, pane::{self, SaveIntent}, searchable::{SearchEvent, SearchableItem}, - DraggedSelection, Pane, Save, ShowConfiguration, ToggleZoom, ToolbarItemEvent, + DraggedSelection, Pane, Save, ShowConfiguration, Toast, ToggleZoom, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, }; use workspace::{searchable::SearchableItemHandle, DraggedTab}; @@ -105,6 +108,7 @@ pub fn init(cx: &mut AppContext) { .register_action(AssistantPanel::inline_assist) .register_action(ContextEditor::quote_selection) .register_action(ContextEditor::insert_selection) + .register_action(ContextEditor::copy_code) .register_action(ContextEditor::insert_dragged_files) 
.register_action(AssistantPanel::show_configuration) .register_action(AssistantPanel::create_new_context); @@ -3100,6 +3104,40 @@ impl ContextEditor { }); } + /// Returns either the selected text, or the content of the Markdown code + /// block surrounding the cursor. + fn get_selection_or_code_block( + context_editor_view: &View, + cx: &mut ViewContext, + ) -> Option<(String, bool)> { + let context_editor = context_editor_view.read(cx).editor.read(cx); + + if context_editor.selections.newest::(cx).is_empty() { + let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let (_, _, snapshot) = snapshot.as_singleton()?; + + let head = context_editor.selections.newest::(cx).head(); + let offset = snapshot.point_to_offset(head); + + let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; + let text = snapshot + .text_for_range(surrounding_code_block_range) + .collect::(); + + (!text.is_empty()).then_some((text, true)) + } else { + let anchor = context_editor.selections.newest_anchor(); + let text = context_editor + .buffer() + .read(cx) + .read(cx) + .text_for_range(anchor.range()) + .collect::(); + + (!text.is_empty()).then_some((text, false)) + } + } + fn insert_selection( workspace: &mut Workspace, _: &InsertIntoEditor, @@ -3118,17 +3156,7 @@ impl ContextEditor { return; }; - let context_editor = context_editor_view.read(cx).editor.read(cx); - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); - - // If nothing is selected, don't delete the current selection; instead, be a no-op. 
- if !text.is_empty() { + if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) { active_editor_view.update(cx, |editor, cx| { editor.insert(&text, cx); editor.focus(cx); @@ -3136,6 +3164,36 @@ impl ContextEditor { } } + fn copy_code(workspace: &mut Workspace, _: &CopyCode, cx: &mut ViewContext) { + let result = maybe!({ + let panel = workspace.panel::(cx)?; + let context_editor_view = panel.read(cx).active_context_editor(cx)?; + Self::get_selection_or_code_block(&context_editor_view, cx) + }); + let Some((text, is_code_block)) = result else { + return; + }; + + cx.write_to_clipboard(ClipboardItem::new_string(text)); + + struct CopyToClipboardToast; + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + format!( + "{} copied to clipboard.", + if is_code_block { + "Code block" + } else { + "Selection" + } + ), + ) + .autohide(), + cx, + ); + } + fn insert_dragged_files( workspace: &mut Workspace, action: &InsertDraggedFiles, @@ -4215,6 +4273,48 @@ impl ContextEditor { } } +/// Returns the contents of the *outermost* fenced code block that contains the given offset. +fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option> { + const CODE_BLOCK_NODE: &'static str = "fenced_code_block"; + const CODE_BLOCK_CONTENT: &'static str = "code_fence_content"; + + let layer = snapshot.syntax_layers().next()?; + + let root_node = layer.node(); + let mut cursor = root_node.walk(); + + // Go to the first child for the given offset + while cursor.goto_first_child_for_byte(offset).is_some() { + // If we're at the end of the node, go to the next one. + // Example: if you have a fenced-code-block, and you're on the start of the line + // right after the closing ```, you want to skip the fenced-code-block and + // go to the next sibling. + if cursor.node().end_byte() == offset { + cursor.goto_next_sibling(); + } + + if cursor.node().start_byte() > offset { + break; + } + + // We found the fenced code block. 
+ if cursor.node().kind() == CODE_BLOCK_NODE { + // Now we need to find the child node that contains the code. + cursor.goto_first_child(); + loop { + if cursor.node().kind() == CODE_BLOCK_CONTENT { + return Some(cursor.node().byte_range()); + } + if !cursor.goto_next_sibling() { + break; + } + } + } + } + + None +} + fn render_fold_icon_button( editor: WeakView, icon: IconName, @@ -5497,3 +5597,85 @@ fn configuration_error(cx: &AppContext) -> Option { None } + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{AppContext, Context}; + use language::Buffer; + use unindent::Unindent; + + #[gpui::test] + fn test_find_code_blocks(cx: &mut AppContext) { + let markdown = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + + let buffer = cx.new_model(|cx| { + let text = r#" + line 0 + line 1 + ```rust + fn main() {} + ``` + line 5 + line 6 + line 7 + ```go + func main() {} + ``` + line 11 + ``` + this is plain text code block + ``` + + ```go + func another() {} + ``` + line 19 + "# + .unindent(); + let mut buffer = Buffer::local(text, cx); + buffer.set_language(Some(markdown.clone()), cx); + buffer + }); + let snapshot = buffer.read(cx).snapshot(); + + let code_blocks = vec![ + Point::new(3, 0)..Point::new(4, 0), + Point::new(9, 0)..Point::new(10, 0), + Point::new(13, 0)..Point::new(14, 0), + Point::new(17, 0)..Point::new(18, 0), + ] + .into_iter() + .map(|range| snapshot.point_to_offset(range.start)..snapshot.point_to_offset(range.end)) + .collect::>(); + + let expected_results = vec![ + (0, None), + (1, None), + (2, Some(code_blocks[0].clone())), + (3, Some(code_blocks[0].clone())), + (4, Some(code_blocks[0].clone())), + (5, None), + (6, None), + (7, None), + (8, Some(code_blocks[1].clone())), + (9, Some(code_blocks[1].clone())), + (10, Some(code_blocks[1].clone())), + (11, None), + (12, Some(code_blocks[2].clone())), + (13, Some(code_blocks[2].clone())), + (14, Some(code_blocks[2].clone())), + (15, None), + (16, Some(code_blocks[3].clone())), + 
(17, Some(code_blocks[3].clone())), + (18, Some(code_blocks[3].clone())), + (19, None), + ]; + + for (row, expected) in expected_results { + let offset = snapshot.point_to_offset(Point::new(row, 0)); + let range = find_surrounding_code_block(&snapshot, offset); + assert_eq!(range, expected, "unexpected result on row {:?}", row); + } + } +} From 23e1faa48524f55c5a9a2c7be084d730667abecb Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 19 Sep 2024 14:43:56 +0200 Subject: [PATCH 212/270] assistant panel: Fix copying code when trailing newline is missing (#18067) Follow-up to #17853. Apparently tree-sitter-md extends the range of the content node to include the backticks when there is no newline. Release Notes: - N/A Co-authored-by: Bennet --- crates/assistant/src/assistant_panel.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 094d187df2..da176ebeee 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3110,6 +3110,8 @@ impl ContextEditor { context_editor_view: &View, cx: &mut ViewContext, ) -> Option<(String, bool)> { + const CODE_FENCE_DELIMITER: &'static str = "```"; + let context_editor = context_editor_view.read(cx).editor.read(cx); if context_editor.selections.newest::(cx).is_empty() { @@ -3120,10 +3122,17 @@ impl ContextEditor { let offset = snapshot.point_to_offset(head); let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; - let text = snapshot + let mut text = snapshot .text_for_range(surrounding_code_block_range) .collect::(); + // If there is no newline trailing the closing three-backticks, then + // tree-sitter-md extends the range of the content node to include + // the backticks. 
+ if text.ends_with(CODE_FENCE_DELIMITER) { + text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + } + (!text.is_empty()).then_some((text, true)) } else { let anchor = context_editor.selections.newest_anchor(); From 4338ff6be496edcdd86d5b97284f3a5ba9e140c2 Mon Sep 17 00:00:00 2001 From: Casey Watson Date: Thu, 19 Sep 2024 07:01:28 -0600 Subject: [PATCH 213/270] terminal: Add ability to open file from Git diff (#17446) - strip "a/" and "b/" prefix for potential paths. Release Notes: - Allow clicking on filepaths when using `git diff` inside the built-in terminal --- crates/terminal_view/src/terminal_view.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index f19bfa7010..e0b92035d1 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -58,6 +58,8 @@ const REGEX_SPECIAL_CHARS: &[char] = &[ const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); +const GIT_DIFF_PATH_PREFIXES: &[char] = &['a', 'b']; + ///Event to transmit the scroll from the element to the view #[derive(Clone, Debug, PartialEq)] pub struct ScrollTerminal(pub i32); @@ -826,6 +828,19 @@ fn possible_open_targets( { potential_cwd_and_workspace_paths.insert(potential_worktree_path); } + + for prefix in GIT_DIFF_PATH_PREFIXES { + let prefix_str = &prefix.to_string(); + if maybe_path.starts_with(prefix_str) { + let stripped = maybe_path.strip_prefix(prefix_str).unwrap_or(&maybe_path); + for potential_worktree_path in workspace + .worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().join(&stripped)) + { + potential_cwd_and_workspace_paths.insert(potential_worktree_path); + } + } + } }); } From 3d5c023fdae99907cf7cf9e67f7ae20bd7bd080c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 09:55:51 -0400 Subject: [PATCH 214/270] ci: Move collab deploys back to DigitalOcean runners (#18071) This PR moves the collab 
deployment steps in CI back to the DigitalOcean runners temporarily, so that we can deploy collab. Release Notes: - N/A --- .github/workflows/deploy_collab.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 7abd52e5a6..6801be2a54 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -61,7 +61,8 @@ jobs: - style - tests runs-on: - - buildjet-16vcpu-ubuntu-2204 + - self-hosted + - deploy steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -88,7 +89,8 @@ jobs: needs: - publish runs-on: - - buildjet-16vcpu-ubuntu-2204 + - self-hosted + - deploy steps: - name: Sign into Kubernetes From d91e62524f7c6437349426687ded6d1182ad7346 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Thu, 19 Sep 2024 22:41:42 +0800 Subject: [PATCH 215/270] assistant: Fix offset calculation not in char boundary (#18069) Closes #17825 Release Notes: - N/A --- crates/assistant/src/prompts.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 83e894f797..ae2ab4787e 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -220,7 +220,8 @@ impl PromptBuilder { let before_range = 0..range.start; let truncated_before = if before_range.len() > MAX_CTX { is_truncated = true; - range.start - MAX_CTX..range.start + let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right); + start..range.start } else { before_range }; @@ -228,7 +229,8 @@ impl PromptBuilder { let after_range = range.end..buffer.len(); let truncated_after = if after_range.len() > MAX_CTX { is_truncated = true; - range.end..range.end + MAX_CTX + let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left); + range.end..end } else { after_range }; From d2894ce9c99586e9cd0588fa9b4db27bbd64f0ca Mon Sep 17 00:00:00 2001 From: 
Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 17:00:26 +0200 Subject: [PATCH 216/270] pane: Do not autopin new item created as a neighbour of pinned tab (#18072) When I used editor::NewFile or ProjectSearch from a pinned tab, the resulting new tab would be pinned (and the last pinned tab would be pushed off). This PR fixes it by always storing new tabs outside of the pinned area if there's no destination index for the new tab. Release Notes: - Fixed tab bar not preserving pinned tab state when an editor::NewFile action is executed. --- crates/workspace/src/pane.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 09b4683c0c..a5f83f961f 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -831,13 +831,14 @@ impl Pane { } } } - // If no destination index is specified, add or move the item after the active item. + // If no destination index is specified, add or move the item after the + // active item (or at the start of tab bar, if the active item is pinned) let mut insertion_index = { cmp::min( if let Some(destination_index) = destination_index { destination_index } else { - self.active_item_index + 1 + cmp::max(self.active_item_index + 1, self.pinned_count()) }, self.items.len(), ) From a944bb2f24bea7e492ced79fe0e92a7205d6f42e Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Thu, 19 Sep 2024 11:02:44 -0400 Subject: [PATCH 217/270] v0.155.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a37a5350f5..ca5d68881f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14375,7 +14375,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.154.0" +version = "0.155.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 645d12fc76..ad2e7cd48c 100644 --- 
a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.154.0" +version = "0.155.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 7d0a7541bfd1ca44a7511ec077067902b0e461ef Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 11:45:06 -0400 Subject: [PATCH 218/270] ci: Fix collab deploys (#18077) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes issues with deploying collab. We reverted 4882a75971abafa89467e779466749086d7d3f96—as the DigitalOcean runners are gone now—and moved back to BuildJet. We needed to make some changes to the deployment jobs to setup `doctl`. This PR also adds an automatic bump of the `collab-staging` tag on merges to `main`. This should help catch issues with collab deploys earlier. Release Notes: - N/A --------- Co-authored-by: Conrad --- .github/workflows/bump_collab_staging.yml | 23 +++++++++++++++++++++++ .github/workflows/deploy_collab.yml | 23 ++++++++++++++++------- 2 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/bump_collab_staging.yml diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml new file mode 100644 index 0000000000..89cc7c4848 --- /dev/null +++ b/.github/workflows/bump_collab_staging.yml @@ -0,0 +1,23 @@ +name: Bump collab-staging Tag + +on: + push: + branches: + - main + +jobs: + update-collab-staging-tag: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + fetch-depth: 0 + + - name: Update collab-staging tag + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f collab-staging + git push origin collab-staging --force diff --git 
a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 6801be2a54..c4193adcd2 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -8,7 +8,6 @@ on: env: DOCKER_BUILDKIT: 1 - DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} jobs: style: @@ -61,11 +60,12 @@ jobs: - style - tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} - name: Sign into DigitalOcean docker registry run: doctl registry login @@ -89,10 +89,19 @@ jobs: needs: - publish runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + clean: false + + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} + - name: Sign into Kubernetes run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} From e9f2e72ff03c60f8a9a1ce9612cc51a368813cf0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 19 Sep 2024 17:51:28 +0200 Subject: [PATCH 219/270] Workspace persistence for SSH projects (#17996) TODOs: - [x] Add tests to `workspace/src/persistence.rs` - [x] Add a icon for ssh projects - [x] Fix all `TODO` comments - [x] Use `port` if it's passed in the ssh connection options In next PRs: - Make sure unsaved buffers are persisted/restored, along with other items/layout - Handle multiple paths/worktrees correctly Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- Cargo.lock | 1 + crates/recent_projects/src/dev_servers.rs | 7 +- crates/recent_projects/src/recent_projects.rs | 108 +++-- crates/recent_projects/src/ssh_connections.rs | 67 ++-- crates/remote/src/ssh_session.rs | 
5 + crates/sqlez/src/bindable.rs | 16 + crates/sqlez/src/typed_statements.rs | 2 +- crates/workspace/Cargo.toml | 1 + crates/workspace/src/persistence.rs | 374 +++++++++++++++--- crates/workspace/src/persistence/model.rs | 66 +++- crates/workspace/src/workspace.rs | 80 +++- crates/zed/src/main.rs | 6 +- 12 files changed, 592 insertions(+), 141 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ca5d68881f..16ee627d2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14096,6 +14096,7 @@ dependencies = [ "parking_lot", "postage", "project", + "remote", "schemars", "serde", "serde_json", diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index 491f378f30..af5f51f14f 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -39,7 +39,6 @@ use ui::{ RadioWithLabel, Tooltip, }; use ui_input::{FieldLabelLayout, TextField}; -use util::paths::PathWithPosition; use util::ResultExt; use workspace::notifications::NotifyResultExt; use workspace::OpenOptions; @@ -987,11 +986,7 @@ impl DevServerProjects { cx.spawn(|_, mut cx| async move { let result = open_ssh_project( server.into(), - project - .paths - .into_iter() - .map(|path| PathWithPosition::from_path(PathBuf::from(path))) - .collect(), + project.paths.into_iter().map(PathBuf::from).collect(), app_state, OpenOptions::default(), &mut cx, diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 182cec4614..cb3d3ab659 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -2,6 +2,7 @@ mod dev_servers; pub mod disconnected_overlay; mod ssh_connections; mod ssh_remotes; +use remote::SshConnectionOptions; pub use ssh_connections::open_ssh_project; use client::{DevServerProjectId, ProjectId}; @@ -32,8 +33,8 @@ use ui::{ }; use util::{paths::PathExt, ResultExt}; use workspace::{ - AppState, CloseIntent, ModalView, 
SerializedWorkspaceLocation, Workspace, WorkspaceId, - WORKSPACE_DB, + AppState, CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace, + WorkspaceId, WORKSPACE_DB, }; #[derive(PartialEq, Clone, Deserialize, Default)] @@ -172,7 +173,7 @@ pub struct RecentProjectsDelegate { create_new_window: bool, // Flag to reset index when there is a new query vs not reset index when user delete an item reset_selected_match_index: bool, - has_any_dev_server_projects: bool, + has_any_non_local_projects: bool, } impl RecentProjectsDelegate { @@ -185,16 +186,16 @@ impl RecentProjectsDelegate { create_new_window, render_paths, reset_selected_match_index: true, - has_any_dev_server_projects: false, + has_any_non_local_projects: false, } } pub fn set_workspaces(&mut self, workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>) { self.workspaces = workspaces; - self.has_any_dev_server_projects = self + self.has_any_non_local_projects = !self .workspaces .iter() - .any(|(_, location)| matches!(location, SerializedWorkspaceLocation::DevServer(_))); + .all(|(_, location)| matches!(location, SerializedWorkspaceLocation::Local(_, _))); } } impl EventEmitter for RecentProjectsDelegate {} @@ -258,6 +259,23 @@ impl PickerDelegate for RecentProjectsDelegate { dev_server_project.paths.join("") ) } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + format!( + "{}{}{}{}", + ssh_project.host, + ssh_project + .port + .as_ref() + .map(|port| port.to_string()) + .unwrap_or_default(), + ssh_project.path, + ssh_project + .user + .as_ref() + .map(|user| user.to_string()) + .unwrap_or_default() + ) + } }; StringMatchCandidate::new(id, combined_string) @@ -364,6 +382,33 @@ impl PickerDelegate for RecentProjectsDelegate { }; open_dev_server_project(replace_current_window, dev_server_project.id, project_id, cx) } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + let app_state = workspace.app_state().clone(); + + let replace_window = if replace_current_window { + 
cx.window_handle().downcast::() + } else { + None + }; + + let open_options = OpenOptions { + replace_window, + ..Default::default() + }; + + let connection_options = SshConnectionOptions { + host: ssh_project.host.clone(), + username: ssh_project.user.clone(), + port: ssh_project.port, + password: None, + }; + + let paths = vec![PathBuf::from(ssh_project.path.clone())]; + + cx.spawn(|_, mut cx| async move { + open_ssh_project(connection_options, paths, app_state, open_options, &mut cx).await + }) + } } } }) @@ -392,7 +437,6 @@ impl PickerDelegate for RecentProjectsDelegate { let (_, location) = self.workspaces.get(hit.candidate_id)?; - let is_remote = matches!(location, SerializedWorkspaceLocation::DevServer(_)); let dev_server_status = if let SerializedWorkspaceLocation::DevServer(dev_server_project) = location { let store = dev_server_projects::Store::global(cx).read(cx); @@ -416,6 +460,9 @@ impl PickerDelegate for RecentProjectsDelegate { .filter_map(|i| paths.paths().get(*i).cloned()) .collect(), ), + SerializedWorkspaceLocation::Ssh(ssh_project) => { + Arc::new(vec![PathBuf::from(ssh_project.ssh_url())]) + } SerializedWorkspaceLocation::DevServer(dev_server_project) => { Arc::new(vec![PathBuf::from(format!( "{}:{}", @@ -457,29 +504,34 @@ impl PickerDelegate for RecentProjectsDelegate { h_flex() .flex_grow() .gap_3() - .when(self.has_any_dev_server_projects, |this| { - this.child(if is_remote { - // if disabled, Color::Disabled - let indicator_color = match dev_server_status { - Some(DevServerStatus::Online) => Color::Created, - Some(DevServerStatus::Offline) => Color::Hidden, - _ => unreachable!(), - }; - IconWithIndicator::new( - Icon::new(IconName::Server).color(Color::Muted), - Some(Indicator::dot()), - ) - .indicator_color(indicator_color) - .indicator_border_color(if selected { - Some(cx.theme().colors().element_selected) - } else { - None - }) - .into_any_element() - } else { - Icon::new(IconName::Screen) + .when(self.has_any_non_local_projects, |this| 
{ + this.child(match location { + SerializedWorkspaceLocation::Local(_, _) => { + Icon::new(IconName::Screen) + .color(Color::Muted) + .into_any_element() + } + SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Screen) .color(Color::Muted) + .into_any_element(), + SerializedWorkspaceLocation::DevServer(_) => { + let indicator_color = match dev_server_status { + Some(DevServerStatus::Online) => Color::Created, + Some(DevServerStatus::Offline) => Color::Hidden, + _ => unreachable!(), + }; + IconWithIndicator::new( + Icon::new(IconName::Server).color(Color::Muted), + Some(Indicator::dot()), + ) + .indicator_color(indicator_color) + .indicator_border_color(if selected { + Some(cx.theme().colors().element_selected) + } else { + None + }) .into_any_element() + } }) }) .child({ diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 8da4284b7f..ad23a5c896 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -19,7 +19,6 @@ use ui::{ h_flex, v_flex, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, WindowContext, }; -use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; #[derive(Deserialize)] @@ -358,24 +357,29 @@ pub fn connect_over_ssh( pub async fn open_ssh_project( connection_options: SshConnectionOptions, - paths: Vec, + paths: Vec, app_state: Arc, - _open_options: workspace::OpenOptions, + open_options: workspace::OpenOptions, cx: &mut AsyncAppContext, ) -> Result<()> { let options = cx.update(|cx| (app_state.build_window_options)(None, cx))?; - let window = cx.open_window(options, |cx| { - let project = project::Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ); - cx.new_view(|cx| 
Workspace::new(None, project, app_state.clone(), cx)) - })?; + + let window = if let Some(window) = open_options.replace_window { + window + } else { + cx.open_window(options, |cx| { + let project = project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + cx, + ); + cx.new_view(|cx| Workspace::new(None, project, app_state.clone(), cx)) + })? + }; let result = window .update(cx, |workspace, cx| { @@ -387,40 +391,17 @@ pub async fn open_ssh_project( .read(cx) .prompt .clone(); - connect_over_ssh(connection_options, ui, cx) + connect_over_ssh(connection_options.clone(), ui, cx) })? .await; if result.is_err() { window.update(cx, |_, cx| cx.remove_window()).ok(); } - let session = result?; - let project = cx.update(|cx| { - project::Project::ssh( - session, - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx, - ) - })?; - - for path in paths { - project - .update(cx, |project, cx| { - project.find_or_create_worktree(&path.path, true, cx) - })? - .await?; - } - - window.update(cx, |_, cx| { - cx.replace_root_view(|cx| Workspace::new(None, project, app_state, cx)) - })?; - window.update(cx, |_, cx| cx.activate_window())?; - - Ok(()) + cx.update(|cx| { + workspace::open_ssh_project(window, connection_options, session, app_state, paths, cx) + })? 
+ .await } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 7556b38f3e..4aab731e64 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -33,6 +33,11 @@ use std::{ }; use tempfile::TempDir; +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, +)] +pub struct SshProjectId(pub u64); + #[derive(Clone)] pub struct SshSocket { connection_options: SshConnectionOptions, diff --git a/crates/sqlez/src/bindable.rs b/crates/sqlez/src/bindable.rs index e8b9679936..8cf4329f92 100644 --- a/crates/sqlez/src/bindable.rs +++ b/crates/sqlez/src/bindable.rs @@ -196,6 +196,22 @@ impl Column for u32 { } } +impl StaticColumnCount for u16 {} +impl Bind for u16 { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + (*self as i64) + .bind(statement, start_index) + .with_context(|| format!("Failed to bind usize at index {start_index}")) + } +} + +impl Column for u16 { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let result = statement.column_int64(start_index)?; + Ok((result as u16, start_index + 1)) + } +} + impl StaticColumnCount for usize {} impl Bind for usize { fn bind(&self, statement: &Statement, start_index: i32) -> Result { diff --git a/crates/sqlez/src/typed_statements.rs b/crates/sqlez/src/typed_statements.rs index d7f25cde51..95f4f829ec 100644 --- a/crates/sqlez/src/typed_statements.rs +++ b/crates/sqlez/src/typed_statements.rs @@ -74,7 +74,7 @@ impl Connection { } /// Prepare a statement which takes a binding and selects a single row - /// from the database. WIll return none if no rows are returned and will + /// from the database. Will return none if no rows are returned and will /// error if more than 1 row is returned. 
/// /// Note: If there are multiple statements that depend upon each other diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 7f5c1ccce8..1b998eeabe 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -51,6 +51,7 @@ postage.workspace = true project.workspace = true dev_server_projects.workspace = true task.workspace = true +remote.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 88ede4228d..034328a30b 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -7,6 +7,7 @@ use client::DevServerProjectId; use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId}; +use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, @@ -20,7 +21,7 @@ use crate::WorkspaceId; use model::{ GroupId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup, - SerializedWorkspace, + SerializedSshProject, SerializedWorkspace, }; use self::model::{ @@ -354,7 +355,17 @@ define_connection! 
{ ), sql!( ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; - ) + ), + sql!( + CREATE TABLE ssh_projects ( + id INTEGER PRIMARY KEY, + host TEXT NOT NULL, + port INTEGER, + path TEXT NOT NULL, + user TEXT + ); + ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; + ), ]; } @@ -374,7 +385,6 @@ impl WorkspaceDb { workspace_id, local_paths, local_paths_order, - dev_server_project_id, window_bounds, display, centered_layout, @@ -384,7 +394,6 @@ impl WorkspaceDb { WorkspaceId, Option, Option, - Option, Option, Option, Option, @@ -396,7 +405,6 @@ impl WorkspaceDb { workspace_id, local_paths, local_paths_order, - dev_server_project_id, window_state, window_x, window_y, @@ -422,28 +430,13 @@ impl WorkspaceDb { .warn_on_err() .flatten()?; - let location = if let Some(dev_server_project_id) = dev_server_project_id { - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? 
- }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - SerializedWorkspaceLocation::DevServer(dev_server_project) - } else if let Some(local_paths) = local_paths { - match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } + let local_paths = local_paths?; + let location = match local_paths_order { + Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), + None => { + let order = LocalPathsOrder::default_for_paths(&local_paths); + SerializedWorkspaceLocation::Local(local_paths, order) } - } else { - return None; }; Some(SerializedWorkspace { @@ -470,8 +463,6 @@ impl WorkspaceDb { // and we've grabbed the most recent workspace let ( workspace_id, - local_paths, - local_paths_order, dev_server_project_id, window_bounds, display, @@ -480,8 +471,6 @@ impl WorkspaceDb { window_id, ): ( WorkspaceId, - Option, - Option, Option, Option, Option, @@ -492,8 +481,6 @@ impl WorkspaceDb { .select_row_bound(sql! { SELECT workspace_id, - local_paths, - local_paths_order, dev_server_project_id, window_state, window_x, @@ -520,29 +507,20 @@ impl WorkspaceDb { .warn_on_err() .flatten()?; - let location = if let Some(dev_server_project_id) = dev_server_project_id { - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? 
- }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - SerializedWorkspaceLocation::DevServer(dev_server_project) - } else if let Some(local_paths) = local_paths { - match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } - } - } else { - return None; - }; + let dev_server_project_id = dev_server_project_id?; + + let dev_server_project: SerializedDevServerProject = self + .select_row_bound(sql! { + SELECT id, path, dev_server_name + FROM dev_server_projects + WHERE id = ? + }) + .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) + .context("No remote project found") + .warn_on_err() + .flatten()?; + + let location = SerializedWorkspaceLocation::DevServer(dev_server_project); Some(SerializedWorkspace { id: workspace_id, @@ -560,6 +538,62 @@ impl WorkspaceDb { }) } + pub(crate) fn workspace_for_ssh_project( + &self, + ssh_project: &SerializedSshProject, + ) -> Option { + let (workspace_id, window_bounds, display, centered_layout, docks, window_id): ( + WorkspaceId, + Option, + Option, + Option, + DockStructure, + Option, + ) = self + .select_row_bound(sql! { + SELECT + workspace_id, + window_state, + window_x, + window_y, + window_width, + window_height, + display, + centered_layout, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + window_id + FROM workspaces + WHERE ssh_project_id = ? 
+ }) + .and_then(|mut prepared_statement| (prepared_statement)(ssh_project.id.0)) + .context("No workspaces found") + .warn_on_err() + .flatten()?; + + Some(SerializedWorkspace { + id: workspace_id, + location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()), + center_group: self + .get_center_pane_group(workspace_id) + .context("Getting center group") + .log_err()?, + window_bounds, + centered_layout: centered_layout.unwrap_or(false), + display, + docks, + session_id: None, + window_id, + }) + } + /// Saves a workspace using the worktree roots. Will garbage collect any workspaces /// that used this workspace previously pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) { @@ -674,6 +708,49 @@ impl WorkspaceDb { workspace.docks, )) .context("Updating workspace")?; + }, + SerializedWorkspaceLocation::Ssh(ssh_project) => { + conn.exec_bound(sql!( + DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? + ))?((ssh_project.id.0, workspace.id)) + .context("clearing out old locations")?; + + // Upsert + conn.exec_bound(sql!( + INSERT INTO workspaces( + workspace_id, + ssh_project_id, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + timestamp + ) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP) + ON CONFLICT DO + UPDATE SET + ssh_project_id = ?2, + left_dock_visible = ?3, + left_dock_active_panel = ?4, + left_dock_zoom = ?5, + right_dock_visible = ?6, + right_dock_active_panel = ?7, + right_dock_zoom = ?8, + bottom_dock_visible = ?9, + bottom_dock_active_panel = ?10, + bottom_dock_zoom = ?11, + timestamp = CURRENT_TIMESTAMP + ))?(( + workspace.id, + ssh_project.id.0, + workspace.docks, + )) + .context("Updating workspace")?; } } @@ -688,6 +765,46 @@ impl WorkspaceDb { .await; } + pub(crate) async fn get_or_create_ssh_project( + &self, + host: String, 
+ port: Option, + path: String, + user: Option, + ) -> Result { + if let Some(project) = self + .get_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await? + { + Ok(project) + } else { + self.insert_ssh_project(host, port, path, user) + .await? + .ok_or_else(|| anyhow!("failed to insert ssh project")) + } + } + + query! { + async fn get_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + SELECT id, host, port, path, user + FROM ssh_projects + WHERE host IS ? AND port IS ? AND path IS ? AND user IS ? + LIMIT 1 + } + } + + query! { + async fn insert_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + INSERT INTO ssh_projects( + host, + port, + path, + user + ) VALUES (?1, ?2, ?3, ?4) + RETURNING id, host, port, path, user + } + } + query! { pub async fn next_id() -> Result { INSERT INTO workspaces DEFAULT VALUES RETURNING workspace_id @@ -695,10 +812,12 @@ impl WorkspaceDb { } query! { - fn recent_workspaces() -> Result)>> { - SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id + fn recent_workspaces() -> Result, Option)>> { + SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id, ssh_project_id FROM workspaces - WHERE local_paths IS NOT NULL OR dev_server_project_id IS NOT NULL + WHERE local_paths IS NOT NULL + OR dev_server_project_id IS NOT NULL + OR ssh_project_id IS NOT NULL ORDER BY timestamp DESC } } @@ -719,6 +838,13 @@ impl WorkspaceDb { } } + query! { + fn ssh_projects() -> Result> { + SELECT id, host, port, path, user + FROM ssh_projects + } + } + pub(crate) fn last_window( &self, ) -> anyhow::Result<(Option, Option)> { @@ -768,8 +894,11 @@ impl WorkspaceDb { let mut result = Vec::new(); let mut delete_tasks = Vec::new(); let dev_server_projects = self.dev_server_projects()?; + let ssh_projects = self.ssh_projects()?; - for (id, location, order, dev_server_project_id) in self.recent_workspaces()? 
{ + for (id, location, order, dev_server_project_id, ssh_project_id) in + self.recent_workspaces()? + { if let Some(dev_server_project_id) = dev_server_project_id.map(DevServerProjectId) { if let Some(dev_server_project) = dev_server_projects .iter() @@ -782,6 +911,15 @@ impl WorkspaceDb { continue; } + if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) { + if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) { + result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone()))); + } else { + delete_tasks.push(self.delete_workspace_by_id(id)); + } + continue; + } + if location.paths().iter().all(|path| path.exists()) && location.paths().iter().any(|path| path.is_dir()) { @@ -802,7 +940,9 @@ impl WorkspaceDb { .into_iter() .filter_map(|(_, location)| match location { SerializedWorkspaceLocation::Local(local_paths, _) => Some(local_paths), + // Do not automatically reopen Dev Server and SSH workspaces SerializedWorkspaceLocation::DevServer(_) => None, + SerializedWorkspaceLocation::Ssh(_) => None, }) .next()) } @@ -1512,6 +1652,122 @@ mod tests { assert_eq!(have[3], LocalPaths::new([dir1.path().to_str().unwrap()])); } + #[gpui::test] + async fn test_get_or_create_ssh_project() { + let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await); + + let (host, port, path, user) = ( + "example.com".to_string(), + Some(22_u16), + "/home/user".to_string(), + Some("user".to_string()), + ); + + let project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.host, host); + assert_eq!(project.path, path); + assert_eq!(project.user, user); + + // Test that calling the function again with the same parameters returns the same project + let same_project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.id, same_project.id); + + // Test with different parameters + let 
(host2, path2, user2) = ( + "otherexample.com".to_string(), + "/home/otheruser".to_string(), + Some("otheruser".to_string()), + ); + + let different_project = db + .get_or_create_ssh_project(host2.clone(), None, path2.clone(), user2.clone()) + .await + .unwrap(); + + assert_ne!(project.id, different_project.id); + assert_eq!(different_project.host, host2); + assert_eq!(different_project.path, path2); + assert_eq!(different_project.user, user2); + } + + #[gpui::test] + async fn test_get_or_create_ssh_project_with_null_user() { + let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await); + + let (host, port, path, user) = ( + "example.com".to_string(), + None, + "/home/user".to_string(), + None, + ); + + let project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), None) + .await + .unwrap(); + + assert_eq!(project.host, host); + assert_eq!(project.path, path); + assert_eq!(project.user, None); + + // Test that calling the function again with the same parameters returns the same project + let same_project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.id, same_project.id); + } + + #[gpui::test] + async fn test_get_ssh_projects() { + let db = WorkspaceDb(open_test_db("test_get_ssh_projects").await); + + let projects = vec![ + ( + "example.com".to_string(), + None, + "/home/user".to_string(), + None, + ), + ( + "anotherexample.com".to_string(), + Some(123_u16), + "/home/user2".to_string(), + Some("user2".to_string()), + ), + ( + "yetanother.com".to_string(), + Some(345_u16), + "/home/user3".to_string(), + None, + ), + ]; + + for (host, port, path, user) in projects.iter() { + let project = db + .get_or_create_ssh_project(host.clone(), *port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(&project.host, host); + assert_eq!(&project.port, port); + assert_eq!(&project.path, path); + assert_eq!(&project.user, user); + } + + let 
stored_projects = db.ssh_projects().unwrap(); + assert_eq!(stored_projects.len(), projects.len()); + } + #[gpui::test] async fn test_simple_split() { env_logger::try_init().ok(); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index d6f8001f25..0ad3fa5e60 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -11,6 +11,7 @@ use db::sqlez::{ }; use gpui::{AsyncWindowContext, Model, View, WeakView}; use project::Project; +use remote::ssh_session::SshProjectId; use serde::{Deserialize, Serialize}; use std::{ path::{Path, PathBuf}, @@ -20,6 +21,69 @@ use ui::SharedString; use util::ResultExt; use uuid::Uuid; +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct SerializedSshProject { + pub id: SshProjectId, + pub host: String, + pub port: Option, + pub path: String, + pub user: Option, +} + +impl SerializedSshProject { + pub fn ssh_url(&self) -> String { + let mut result = String::from("ssh://"); + if let Some(user) = &self.user { + result.push_str(user); + result.push('@'); + } + result.push_str(&self.host); + if let Some(port) = &self.port { + result.push(':'); + result.push_str(&port.to_string()); + } + result.push_str(&self.path); + result + } +} + +impl StaticColumnCount for SerializedSshProject { + fn column_count() -> usize { + 5 + } +} + +impl Bind for &SerializedSshProject { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + let next_index = statement.bind(&self.id.0, start_index)?; + let next_index = statement.bind(&self.host, next_index)?; + let next_index = statement.bind(&self.port, next_index)?; + let next_index = statement.bind(&self.path, next_index)?; + statement.bind(&self.user, next_index) + } +} + +impl Column for SerializedSshProject { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let id = statement.column_int64(start_index)?; + let host = statement.column_text(start_index 
+ 1)?.to_string(); + let (port, _) = Option::::column(statement, start_index + 2)?; + let path = statement.column_text(start_index + 3)?.to_string(); + let (user, _) = Option::::column(statement, start_index + 4)?; + + Ok(( + Self { + id: SshProjectId(id as u64), + host, + port, + path, + user, + }, + start_index + 5, + )) + } +} + #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct SerializedDevServerProject { pub id: DevServerProjectId, @@ -58,7 +122,6 @@ impl Column for LocalPaths { fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { let path_blob = statement.column_blob(start_index)?; let paths: Arc> = if path_blob.is_empty() { - println!("path blog is empty"); Default::default() } else { bincode::deserialize(path_blob).context("Bincode deserialization of paths failed")? @@ -146,6 +209,7 @@ impl Column for SerializedDevServerProject { #[derive(Debug, PartialEq, Clone)] pub enum SerializedWorkspaceLocation { Local(LocalPaths, LocalPathsOrder), + Ssh(SerializedSshProject), DevServer(SerializedDevServerProject), } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 98ac49992d..5855dcce1e 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -49,15 +49,19 @@ use node_runtime::NodeRuntime; use notifications::{simple_message_notification::MessageNotification, NotificationHandle}; pub use pane::*; pub use pane_group::*; -use persistence::{model::SerializedWorkspace, SerializedWindowBounds, DB}; pub use persistence::{ model::{ItemId, LocalPaths, SerializedDevServerProject, SerializedWorkspaceLocation}, WorkspaceDb, DB as WORKSPACE_DB, }; +use persistence::{ + model::{SerializedSshProject, SerializedWorkspace}, + SerializedWindowBounds, DB, +}; use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, }; +use remote::{SshConnectionOptions, SshSession}; use serde::Deserialize; 
use session::AppSession; use settings::Settings; @@ -756,6 +760,7 @@ pub struct Workspace { render_disconnected_overlay: Option) -> AnyElement>>, serializable_items_tx: UnboundedSender>, + serialized_ssh_project: Option, _items_serializer: Task>, session_id: Option, } @@ -1054,6 +1059,7 @@ impl Workspace { serializable_items_tx, _items_serializer, session_id: Some(session_id), + serialized_ssh_project: None, } } @@ -1440,6 +1446,10 @@ impl Workspace { self.on_prompt_for_open_path = Some(prompt) } + pub fn set_serialized_ssh_project(&mut self, serialized_ssh_project: SerializedSshProject) { + self.serialized_ssh_project = Some(serialized_ssh_project); + } + pub fn set_render_disconnected_overlay( &mut self, render: impl Fn(&mut Self, &mut ViewContext) -> AnyElement + 'static, @@ -4097,7 +4107,9 @@ impl Workspace { } } - let location = if let Some(local_paths) = self.local_paths(cx) { + let location = if let Some(ssh_project) = &self.serialized_ssh_project { + Some(SerializedWorkspaceLocation::Ssh(ssh_project.clone())) + } else if let Some(local_paths) = self.local_paths(cx) { if !local_paths.is_empty() { Some(SerializedWorkspaceLocation::from_local_paths(local_paths)) } else { @@ -5476,6 +5488,70 @@ pub fn join_hosted_project( }) } +pub fn open_ssh_project( + window: WindowHandle, + connection_options: SshConnectionOptions, + session: Arc, + app_state: Arc, + paths: Vec, + cx: &mut AppContext, +) -> Task> { + cx.spawn(|mut cx| async move { + // TODO: Handle multiple paths + let path = paths.iter().next().cloned().unwrap_or_default(); + + let serialized_ssh_project = persistence::DB + .get_or_create_ssh_project( + connection_options.host.clone(), + connection_options.port, + path.to_string_lossy().to_string(), + connection_options.username.clone(), + ) + .await?; + + let project = cx.update(|cx| { + project::Project::ssh( + session, + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + 
app_state.fs.clone(), + cx, + ) + })?; + + for path in paths { + project + .update(&mut cx, |project, cx| { + project.find_or_create_worktree(&path, true, cx) + })? + .await?; + } + + let serialized_workspace = + persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); + + let workspace_id = + if let Some(workspace_id) = serialized_workspace.map(|workspace| workspace.id) { + workspace_id + } else { + persistence::DB.next_id().await? + }; + + cx.update_window(window.into(), |_, cx| { + cx.replace_root_view(|cx| { + let mut workspace = + Workspace::new(Some(workspace_id), project, app_state.clone(), cx); + workspace.set_serialized_ssh_project(serialized_ssh_project); + workspace + }); + })?; + + window.update(&mut cx, |_, cx| cx.activate_window()) + }) +} + pub fn join_dev_server_project( dev_server_project_id: DevServerProjectId, project_id: ProjectId, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index c127a975a9..3104001f99 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -667,7 +667,11 @@ fn handle_open_request( cx.spawn(|mut cx| async move { open_ssh_project( connection_info, - request.open_paths, + request + .open_paths + .into_iter() + .map(|path| path.path) + .collect::>(), app_state, workspace::OpenOptions::default(), &mut cx, From 3fd690ade401f7d665448977c674db4780e23165 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 19 Sep 2024 12:00:13 -0400 Subject: [PATCH 220/270] docs: Update lsp.settings examples for yaml-language-server (#18081) --- docs/src/configuring-zed.md | 21 ++++++++- docs/src/languages/typescript.md | 26 +++++------ docs/src/languages/yaml.md | 80 ++++++++++++++++++++++++++++++-- 3 files changed, 108 insertions(+), 19 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 382c33c216..1befa7d93a 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -575,8 +575,13 @@ Each option controls displaying of a particular toolbar element. 
If all elements The following settings can be overridden for specific language servers: - `initialization_options` +- `settings` -To override settings for a language, add an entry for that language server's name to the `lsp` value. Example: +To override configuration for a language server, add an entry for that language server's name to the `lsp` value. + +Some options are passed via `initialization_options` to the language server. These are for options which must be specified at language server startup and when changed will require restarting the language server. + +For example to pass the `check` option to `rust-analyzer`, use the following configuration: ```json "lsp": { @@ -590,6 +595,20 @@ To override settings for a language, add an entry for that language server's nam } ``` +While other options may be changed at a runtime and should be placed under `settings`: + +```json +"lsp": { + "yaml-language-server": { + "settings": { + "yaml": { + "keyOrdering": true // Enforces alphabetical ordering of keys in maps + } + } + } +} +``` + ## Format On Save - Description: Whether or not to perform a buffer format before saving. 
diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index feb7d76622..080d41efb3 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -72,20 +72,20 @@ You can override these settings in your configuration file: ```json "lsp": { - "$LANGUAGE_SERVER_NAME": { - "initialization_options": { - "preferences": { - "includeInlayParameterNameHints": "all", - "includeInlayParameterNameHintsWhenArgumentMatchesName": true, - "includeInlayFunctionParameterTypeHints": true, - "includeInlayVariableTypeHints": true, - "includeInlayVariableTypeHintsWhenTypeMatchesName": true, - "includeInlayPropertyDeclarationTypeHints": true, - "includeInlayFunctionLikeReturnTypeHints": true, - "includeInlayEnumMemberValueHints": true, - } - } + "$LANGUAGE_SERVER_NAME": { + "initialization_options": { + "preferences": { + "includeInlayParameterNameHints": "all", + "includeInlayParameterNameHintsWhenArgumentMatchesName": true, + "includeInlayFunctionParameterTypeHints": true, + "includeInlayVariableTypeHints": true, + "includeInlayVariableTypeHintsWhenTypeMatchesName": true, + "includeInlayPropertyDeclarationTypeHints": true, + "includeInlayFunctionLikeReturnTypeHints": true, + "includeInlayEnumMemberValueHints": true, + } } + } } ``` diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md index 5ef614394c..7b840d0825 100644 --- a/docs/src/languages/yaml.md +++ b/docs/src/languages/yaml.md @@ -12,7 +12,7 @@ You can configure various [yaml-language-server settings](https://github.com/red ```json "lsp": { "yaml-language-server": { - "initialization_options": { + "settings": { "yaml": { "keyOrdering": true, "format": { @@ -32,9 +32,9 @@ Note, settings keys must be nested, so `yaml.keyOrdering` becomes `{"yaml": { "k ## Schemas -By default yaml-language-server will attempt to determine the correct schema for a given yaml file and retrieve the appropriate JSON Schema from [Json Schema Store]. 
+By default yaml-language-server will attempt to determine the correct schema for a given yaml file and retrieve the appropriate JSON Schema from [Json Schema Store](https://schemastore.org/). -You can override this by [using an inlined schema] reference via a modeline comment at the top of your yaml file: +You can override any auto-detected schema via the `schemas` settings key (demonstrated above) or by providing an [inlined schema](https://github.com/redhat-developer/yaml-language-server#using-inlined-schema) reference via a modeline comment at the top of your yaml file: ```yaml # yaml-language-server: $schema=https://json.schemastore.org/github-action.json @@ -44,12 +44,12 @@ on: types: [oppened] ``` -You can disable this functionality entirely if desired: +You can disable the automatic detection and retrieval of schemas from the JSON Schema if desired: ```json "lsp": { "yaml-language-server": { - "initialization_options": { + "settings": { "yaml": { "schemaStore": { "enable": false @@ -59,3 +59,73 @@ You can disable this functionality entirely if desired: } } ``` + +## Custom Tags + +Yaml-language-server supports [custom tags](https://github.com/redhat-developer/yaml-language-server#adding-custom-tags) which can be used to inject custom application functionality at runtime into your yaml files. 
+ +For example Amazon CloudFormation YAML uses a number of custom tags, to support these you can add the following to your settings.json: + +```json + "lsp": { + "yaml-language-server": { + "settings": { + "yaml": { + "customTags": [ + "!And scalar", + "!And mapping", + "!And sequence", + "!If scalar", + "!If mapping", + "!If sequence", + "!Not scalar", + "!Not mapping", + "!Not sequence", + "!Equals scalar", + "!Equals mapping", + "!Equals sequence", + "!Or scalar", + "!Or mapping", + "!Or sequence", + "!FindInMap scalar", + "!FindInMap mapping", + "!FindInMap sequence", + "!Base64 scalar", + "!Base64 mapping", + "!Base64 sequence", + "!Cidr scalar", + "!Cidr mapping", + "!Cidr sequence", + "!Ref scalar", + "!Ref mapping", + "!Ref sequence", + "!Sub scalar", + "!Sub mapping", + "!Sub sequence", + "!GetAtt scalar", + "!GetAtt mapping", + "!GetAtt sequence", + "!GetAZs scalar", + "!GetAZs mapping", + "!GetAZs sequence", + "!ImportValue scalar", + "!ImportValue mapping", + "!ImportValue sequence", + "!Select scalar", + "!Select mapping", + "!Select sequence", + "!Split scalar", + "!Split mapping", + "!Split sequence", + "!Join scalar", + "!Join mapping", + "!Join sequence", + "!Condition scalar", + "!Condition mapping", + "!Condition sequence" + ] + } + } + } + } +``` From 713b39bac0702b008c461c87764a603010562b65 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 10:13:55 -0600 Subject: [PATCH 221/270] Auto deploy collab staging daily (#18085) This should avoid us breaking the collab build and not noticing for a month Release Notes: - N/A --- .github/workflows/bump_collab_staging.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml index 89cc7c4848..224325d53f 100644 --- a/.github/workflows/bump_collab_staging.yml +++ b/.github/workflows/bump_collab_staging.yml @@ -1,9 +1,9 @@ name: Bump collab-staging Tag on: - push: - branches: - - main + 
schedule: + # Fire every day at 16:00 UTC (At the start of the US workday) + - cron: "0 16 * * *" jobs: update-collab-staging-tag: From 3986bcf9dc23cd32b5155310136f53d9d8a5cc73 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:14:37 -0600 Subject: [PATCH 222/270] Update Rust crate async-trait to v0.1.82 (#18038) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.81` -> `0.1.82` | --- ### Release Notes
dtolnay/async-trait (async-trait) ### [`v0.1.82`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.82) [Compare Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.81...0.1.82) - Prevent elided_named_lifetimes lint being produced in generated code ([#​276](https://redirect.github.com/dtolnay/async-trait/issues/276))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 16ee627d2c..38c3e74ce1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -894,9 +894,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", From 6670c9eb3b5033caf4b78d59aecab9bee5cb09d1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:15:31 -0600 Subject: [PATCH 223/270] Update Rust crate backtrace to v0.3.74 (#18039) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [backtrace](https://redirect.github.com/rust-lang/backtrace-rs) | dependencies | patch | `0.3.73` -> `0.3.74` | | [backtrace](https://redirect.github.com/rust-lang/backtrace-rs) | dev-dependencies | patch | `0.3.73` -> `0.3.74` | --- ### Release Notes
rust-lang/backtrace-rs (backtrace) ### [`v0.3.74`](https://redirect.github.com/rust-lang/backtrace-rs/releases/tag/0.3.74) [Compare Source](https://redirect.github.com/rust-lang/backtrace-rs/compare/0.3.73...0.3.74) #### What's Changed - QNX Neutrino 7.0 support, thanks to [@​nyurik](https://redirect.github.com/nyurik) in [https://github.com/rust-lang/backtrace-rs/pull/648](https://redirect.github.com/rust-lang/backtrace-rs/pull/648) - Cleaned up our Android support. This should massively improve backtraces for ones with the API level sufficient to ship with libunwind, etc. Unfortunately, it comes at the cost of dropping support for older ones! Thanks to [@​fengys](https://redirect.github.com/fengys) in [https://github.com/rust-lang/backtrace-rs/pull/656](https://redirect.github.com/rust-lang/backtrace-rs/pull/656) - Made PrintFmt, which was using the `Enum::__NonExhaustiveVariant` pattern, use `#[non_exhaustive]` for real. Don't @​ me if you were matching on that! Thanks to [@​nyurik](https://redirect.github.com/nyurik) in [https://github.com/rust-lang/backtrace-rs/pull/651](https://redirect.github.com/rust-lang/backtrace-rs/pull/651) - Massively cleaned up the windows code! 
We moved from winapi to windows-sys with windows-targets thanks to [@​CraftSpider](https://redirect.github.com/CraftSpider) and [@​ChrisDenton](https://redirect.github.com/ChrisDenton) in - Don't cast HANDLE to usize and back by [@​CraftSpider](https://redirect.github.com/CraftSpider) in [https://github.com/rust-lang/backtrace-rs/pull/635](https://redirect.github.com/rust-lang/backtrace-rs/pull/635) - Switch from `winapi` to `windows-sys` by [@​CraftSpider](https://redirect.github.com/CraftSpider) in [https://github.com/rust-lang/backtrace-rs/pull/641](https://redirect.github.com/rust-lang/backtrace-rs/pull/641) - Update windows bindings and use windows-targets by [@​ChrisDenton](https://redirect.github.com/ChrisDenton) in [https://github.com/rust-lang/backtrace-rs/pull/653](https://redirect.github.com/rust-lang/backtrace-rs/pull/653) - A bunch of updated dependencies. Thanks [@​djc](https://redirect.github.com/djc) and [@​khuey](https://redirect.github.com/khuey)! - Sorry if you were testing this code in miri! It started yelling about sussy casts. A lot. We did a bunch of internal cleanups that should make it quiet down, thanks to [@​workingjubilee](https://redirect.github.com/workingjubilee) in [https://github.com/rust-lang/backtrace-rs/pull/641](https://redirect.github.com/rust-lang/backtrace-rs/pull/641) - Uhhh we had to tweak `dl_iterate_phdr` in [https://github.com/rust-lang/backtrace-rs/pull/660](https://redirect.github.com/rust-lang/backtrace-rs/pull/660) after Android revealed it was... kind of unsound actually and not doing things like checking for null pointers before making slices! WHOOPS! Thanks to [@​saethlin](https://redirect.github.com/saethlin) for implementing detection for precisely that in rustc! It's really hard to find soundness issues in inherited codebases like this one... 
#### New Contributors - [@​CraftSpider](https://redirect.github.com/CraftSpider) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/635](https://redirect.github.com/rust-lang/backtrace-rs/pull/635) - [@​fengys1996](https://redirect.github.com/fengys1996) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/656](https://redirect.github.com/rust-lang/backtrace-rs/pull/656) - [@​djc](https://redirect.github.com/djc) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/657](https://redirect.github.com/rust-lang/backtrace-rs/pull/657) **Full Changelog**: https://github.com/rust-lang/backtrace-rs/compare/0.3.73...0.3.74
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38c3e74ce1..4a1a584469 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,11 +21,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ - "gimli", + "gimli 0.31.0", ] [[package]] @@ -1493,17 +1493,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.4", + "miniz_oxide 0.8.0", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -3083,7 +3083,7 @@ dependencies = [ "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli", + "gimli 0.29.0", "hashbrown 0.14.5", "log", "regalloc2", @@ -4873,6 +4873,12 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "gimli" +version = "0.31.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" + [[package]] name = "git" version = "0.1.0" @@ -13108,7 +13114,7 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli", + "gimli 0.29.0", "log", "object", "target-lexicon", @@ -13128,7 +13134,7 @@ dependencies = [ "cpp_demangle", "cranelift-bitset", "cranelift-entity", - "gimli", + "gimli 0.29.0", "indexmap 2.4.0", "log", "object", @@ -13242,7 +13248,7 @@ checksum = "2a25199625effa4c13dd790d64bd56884b014c69829431bfe43991c740bd5bc1" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "object", "target-lexicon", "wasmparser 0.215.0", @@ -13539,7 +13545,7 @@ checksum = "073efe897d9ead7fc609874f94580afc831114af5149b6a90ee0a3a39b497fe0" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "regalloc2", "smallvec", "target-lexicon", From 157c57aa8d3e35a5d7d750ae552740b412b5911b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:15:46 -0600 Subject: [PATCH 224/270] Update Rust crate clap to v4.5.17 (#18041) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.16` -> `4.5.17` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.17`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4517---2024-09-04) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.16...v4.5.17) ##### Fixes - *(help)* Style required argument groups - *(derive)* Improve error messages when unsupported fields are used
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4a1a584469..68625d5520 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2282,9 +2282,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", "clap_derive", @@ -2292,9 +2292,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstream", "anstyle", From ce4f07bd3cbfa20a95e14af112e83002bfd486d4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:16:31 -0600 Subject: [PATCH 225/270] Update Rust crate globset to v0.4.15 (#18042) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | 
[globset](https://redirect.github.com/BurntSushi/ripgrep/tree/master/crates/globset) ([source](https://redirect.github.com/BurntSushi/ripgrep/tree/HEAD/crates/globset)) | workspace.dependencies | patch | `0.4.14` -> `0.4.15` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68625d5520..7c298c2a9a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4946,9 +4946,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", From c3bdc1c178190dd223d6b4718905f86822329da3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:18:14 -0600 Subject: [PATCH 226/270] Update Rust crate ignore to v0.4.23 (#18044) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [ignore](https://redirect.github.com/BurntSushi/ripgrep/tree/master/crates/ignore) 
([source](https://redirect.github.com/BurntSushi/ripgrep/tree/HEAD/crates/ignore)) | workspace.dependencies | patch | `0.4.22` -> `0.4.23` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c298c2a9a..26b8847041 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5688,9 +5688,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", @@ -6474,7 +6474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -13528,7 +13528,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] From ac0d5d3152fe09201b907210c917e82fee62d450 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 20 Sep 2024 00:19:13 +0800 Subject: [PATCH 227/270] windows: Fix regional 
indicator symbols broken (#18053) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #18027 Unlike macOS, not all glyphs in color fonts are color glyphs, such as `🇩🇪` in `Segoe UI Emoji`. As a result, attempting to retrieve color information for these glyphs can cause an error, preventing the glyph from being rendered. This PR addresses the issue by setting the `is_emoji` variable to `false` for non-color glyphs within color fonts. Release Notes: - N/A --- .../gpui/src/platform/windows/direct_write.rs | 42 ++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index 6253881f5a..fb53a833d6 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -1063,7 +1063,7 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { // This `cast()` action here should never fail since we are running on Win10+, and // `IDWriteFontFace3` requires Win10 let font_face = &font_face.cast::().unwrap(); - let Some((font_identifier, font_struct, is_emoji)) = + let Some((font_identifier, font_struct, color_font)) = get_font_identifier_and_font_struct(font_face, &self.locale) else { return Ok(()); @@ -1084,6 +1084,8 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { context .index_converter .advance_to_utf16_ix(context.utf16_index); + let is_emoji = color_font + && is_color_glyph(font_face, id, &context.text_system.components.factory); glyphs.push(ShapedGlyph { id, position: point(px(context.width), px(0.0)), @@ -1446,6 +1448,44 @@ fn get_render_target_property( } } +// One would think that with newer DirectWrite method: IDWriteFontFace4::GetGlyphImageFormats +// but that doesn't seem to work for some glyphs, say ❤ +fn is_color_glyph( + font_face: &IDWriteFontFace3, + glyph_id: GlyphId, + factory: &IDWriteFactory5, +) -> bool { + let glyph_run = 
DWRITE_GLYPH_RUN { + fontFace: unsafe { std::mem::transmute_copy(font_face) }, + fontEmSize: 14.0, + glyphCount: 1, + glyphIndices: &(glyph_id.0 as u16), + glyphAdvances: &0.0, + glyphOffsets: &DWRITE_GLYPH_OFFSET { + advanceOffset: 0.0, + ascenderOffset: 0.0, + }, + isSideways: BOOL(0), + bidiLevel: 0, + }; + unsafe { + factory.TranslateColorGlyphRun( + D2D_POINT_2F::default(), + &glyph_run as _, + None, + DWRITE_GLYPH_IMAGE_FORMATS_COLR + | DWRITE_GLYPH_IMAGE_FORMATS_SVG + | DWRITE_GLYPH_IMAGE_FORMATS_PNG + | DWRITE_GLYPH_IMAGE_FORMATS_JPEG + | DWRITE_GLYPH_IMAGE_FORMATS_PREMULTIPLIED_B8G8R8A8, + DWRITE_MEASURING_MODE_NATURAL, + None, + 0, + ) + } + .is_ok() +} + const DEFAULT_LOCALE_NAME: PCWSTR = windows::core::w!("en-US"); const BRUSH_COLOR: D2D1_COLOR_F = D2D1_COLOR_F { r: 1.0, From 8074fba76b4352077fed94364fcfb9d095f177a9 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 19 Sep 2024 12:31:40 -0400 Subject: [PATCH 228/270] Update List to support UI Density (#18079) Tracking issue: #18078 Improve UI Density support for List. UI density is an unstable feature. You can read more about it in the above issue! 
| Before Normal - Before Dense - After Normal - After Dense | |--------------------------------------------------------| | ![Group 8](https://github.com/user-attachments/assets/bb896fcf-e4a6-4776-9308-1405906d2dbe) | | | | | Before Normal - Before Dense - After Normal - After Dense | |--------------------------------------------------------| | ![Group 9](https://github.com/user-attachments/assets/00815a1b-071b-4d02-96bc-36bf37b5ae8b) | Release Notes: - N/A --- crates/ui/src/components/list/list.rs | 12 ++-- crates/ui/src/components/list/list_header.rs | 15 +++-- crates/ui/src/components/list/list_item.rs | 8 +-- .../ui/src/components/list/list_separator.rs | 2 +- .../ui/src/components/list/list_sub_header.rs | 57 +++++++++++-------- 5 files changed, 55 insertions(+), 39 deletions(-) diff --git a/crates/ui/src/components/list/list.rs b/crates/ui/src/components/list/list.rs index a09abf92e4..4bf157ef40 100644 --- a/crates/ui/src/components/list/list.rs +++ b/crates/ui/src/components/list/list.rs @@ -52,13 +52,15 @@ impl ParentElement for List { } impl RenderOnce for List { - fn render(self, _cx: &mut WindowContext) -> impl IntoElement { - v_flex().w_full().py_1().children(self.header).map(|this| { - match (self.children.is_empty(), self.toggle) { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + v_flex() + .w_full() + .py(Spacing::Small.rems(cx)) + .children(self.header) + .map(|this| match (self.children.is_empty(), self.toggle) { (false, _) => this.children(self.children), (true, Some(false)) => this, (true, _) => this.child(Label::new(self.empty_message.clone()).color(Color::Muted)), - } - }) + }) } } diff --git a/crates/ui/src/components/list/list_header.rs b/crates/ui/src/components/list/list_header.rs index 4377efbc46..3b15f8cd3d 100644 --- a/crates/ui/src/components/list/list_header.rs +++ b/crates/ui/src/components/list/list_header.rs @@ -2,6 +2,8 @@ use std::sync::Arc; use crate::{h_flex, prelude::*, Disclosure, Label}; use gpui::{AnyElement, 
ClickEvent}; +use settings::Settings; +use theme::ThemeSettings; #[derive(IntoElement)] pub struct ListHeader { @@ -78,6 +80,8 @@ impl Selectable for ListHeader { impl RenderOnce for ListHeader { fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let ui_density = ThemeSettings::get_global(cx).ui_density; + h_flex() .id(self.label.clone()) .w_full() @@ -85,7 +89,10 @@ impl RenderOnce for ListHeader { .group("list_header") .child( div() - .h_7() + .map(|this| match ui_density { + theme::UiDensity::Comfortable => this.h_5(), + _ => this.h_7(), + }) .when(self.inset, |this| this.px_2()) .when(self.selected, |this| { this.bg(cx.theme().colors().ghost_element_selected) @@ -95,10 +102,10 @@ impl RenderOnce for ListHeader { .items_center() .justify_between() .w_full() - .gap_1() + .gap(Spacing::Small.rems(cx)) .child( h_flex() - .gap_1() + .gap(Spacing::Small.rems(cx)) .children(self.toggle.map(|is_open| { Disclosure::new("toggle", is_open).on_toggle(self.on_toggle.clone()) })) @@ -106,7 +113,7 @@ impl RenderOnce for ListHeader { div() .id("label_container") .flex() - .gap_1() + .gap(Spacing::Small.rems(cx)) .items_center() .children(self.start_slot) .child(Label::new(self.label.clone()).color(Color::Muted)) diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index 6b38b7f963..e1c90894fd 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -162,7 +162,7 @@ impl RenderOnce for ListItem { // When an item is inset draw the indent spacing outside of the item .when(self.inset, |this| { this.ml(self.indent_level as f32 * self.indent_step_size) - .px_1() + .px(Spacing::Small.rems(cx)) }) .when(!self.inset && !self.disabled, |this| { this @@ -185,7 +185,7 @@ impl RenderOnce for ListItem { .w_full() .relative() .gap_1() - .px_1p5() + .px(Spacing::Medium.rems(cx)) .map(|this| match self.spacing { ListItemSpacing::Dense => this, ListItemSpacing::Sparse => this.py_1(), 
@@ -238,7 +238,7 @@ impl RenderOnce for ListItem { .flex_grow() .flex_shrink_0() .flex_basis(relative(0.25)) - .gap_1() + .gap(Spacing::Small.rems(cx)) .overflow_hidden() .children(self.start_slot) .children(self.children), @@ -260,7 +260,7 @@ impl RenderOnce for ListItem { h_flex() .h_full() .absolute() - .right_1p5() + .right(Spacing::Medium.rems(cx)) .top_0() .visible_on_hover("list_item") .child(end_hover_slot), diff --git a/crates/ui/src/components/list/list_separator.rs b/crates/ui/src/components/list/list_separator.rs index b53dc7a043..0d5fdf8d49 100644 --- a/crates/ui/src/components/list/list_separator.rs +++ b/crates/ui/src/components/list/list_separator.rs @@ -8,7 +8,7 @@ impl RenderOnce for ListSeparator { div() .h_px() .w_full() - .my_1p5() + .my(Spacing::Medium.rems(cx)) .bg(cx.theme().colors().border_variant) } } diff --git a/crates/ui/src/components/list/list_sub_header.rs b/crates/ui/src/components/list/list_sub_header.rs index 2aa9387129..0ed072ebbf 100644 --- a/crates/ui/src/components/list/list_sub_header.rs +++ b/crates/ui/src/components/list/list_sub_header.rs @@ -39,30 +39,37 @@ impl Selectable for ListSubHeader { impl RenderOnce for ListSubHeader { fn render(self, cx: &mut WindowContext) -> impl IntoElement { - h_flex().flex_1().w_full().relative().pb_1().px_0p5().child( - div() - .h_6() - .when(self.inset, |this| this.px_2()) - .when(self.selected, |this| { - this.bg(cx.theme().colors().ghost_element_selected) - }) - .flex() - .flex_1() - .w_full() - .gap_1() - .items_center() - .justify_between() - .child( - div() - .flex() - .gap_1() - .items_center() - .children( - self.start_slot - .map(|i| Icon::new(i).color(Color::Muted).size(IconSize::Small)), - ) - .child(Label::new(self.label.clone()).color(Color::Muted)), - ), - ) + h_flex() + .flex_1() + .w_full() + .relative() + .pb(Spacing::Small.rems(cx)) + .px(Spacing::XSmall.rems(cx)) + .child( + div() + .h_6() + .when(self.inset, |this| this.px_2()) + .when(self.selected, |this| { + 
this.bg(cx.theme().colors().ghost_element_selected) + }) + .flex() + .flex_1() + .w_full() + .gap_1() + .items_center() + .justify_between() + .child( + div() + .flex() + .gap_1() + .items_center() + .children( + self.start_slot.map(|i| { + Icon::new(i).color(Color::Muted).size(IconSize::Small) + }), + ) + .child(Label::new(self.label.clone()).color(Color::Muted)), + ), + ) } } From 1fc391f696a828780b6a651df0b797be91aee91e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 13:14:15 -0400 Subject: [PATCH 229/270] Make `Buffer::apply_ops` infallible (#18089) This PR makes the `Buffer::apply_ops` method infallible for `text::Buffer` and `language::Buffer`. We discovered that `text::Buffer::apply_ops` was only fallible due to `apply_undo`, which didn't actually need to be fallible. Release Notes: - N/A --- crates/assistant/src/context.rs | 8 ++-- crates/assistant/src/context/context_tests.rs | 8 +--- crates/assistant/src/context_store.rs | 6 +-- crates/channel/src/channel_buffer.rs | 4 +- crates/channel/src/channel_store.rs | 2 +- crates/collab/src/db/queries/buffers.rs | 4 +- crates/collab/src/db/tests/buffer_tests.rs | 18 ++++---- crates/language/src/buffer.rs | 5 +- crates/language/src/buffer_tests.rs | 46 ++++++++----------- crates/multi_buffer/src/multi_buffer.rs | 12 ++--- crates/project/src/buffer_store.rs | 9 ++-- crates/text/src/tests.rs | 32 ++++++------- crates/text/src/text.rs | 39 +++++++--------- 13 files changed, 85 insertions(+), 108 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d72b04e3cd..830c098049 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -683,7 +683,7 @@ impl Context { buffer.set_text(saved_context.text.as_str(), cx) }); let operations = saved_context.into_ops(&this.buffer, cx); - this.apply_ops(operations, cx).unwrap(); + this.apply_ops(operations, cx); this } @@ -756,7 +756,7 @@ impl Context { &mut self, ops: impl IntoIterator, cx: 
&mut ModelContext, - ) -> Result<()> { + ) { let mut buffer_ops = Vec::new(); for op in ops { match op { @@ -765,10 +765,8 @@ impl Context { } } self.buffer - .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?; + .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx)); self.flush_ops(cx); - - Ok(()) } fn flush_ops(&mut self, cx: &mut ModelContext) { diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index 842ac05078..2d6a2894c9 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -1166,9 +1166,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std ); network.lock().broadcast(replica_id, ops_to_send); - context - .update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)); } else if rng.gen_bool(0.1) && replica_id != 0 { log::info!("Context {}: disconnecting", context_index); network.lock().disconnect_peer(replica_id); @@ -1180,9 +1178,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std .map(ContextOperation::from_proto) .collect::>>() .unwrap(); - context - .update(cx, |context, cx| context.apply_ops(ops, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops, cx)); } } } diff --git a/crates/assistant/src/context_store.rs b/crates/assistant/src/context_store.rs index 867d906791..f57a2fbca6 100644 --- a/crates/assistant/src/context_store.rs +++ b/crates/assistant/src/context_store.rs @@ -223,7 +223,7 @@ impl ContextStore { if let Some(context) = this.loaded_context_for_id(&context_id, cx) { let operation_proto = envelope.payload.operation.context("invalid operation")?; let operation = ContextOperation::from_proto(operation_proto)?; - context.update(cx, |context, cx| context.apply_ops([operation], cx))?; + context.update(cx, |context, cx| 
context.apply_ops([operation], cx)); } Ok(()) })? @@ -394,7 +394,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context @@ -531,7 +531,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index df3e66483f..755e7400e1 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -66,7 +66,7 @@ impl ChannelBuffer { let capability = channel_store.read(cx).channel_capability(channel.id); language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text) })?; - buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??; + buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; let subscription = client.subscribe_to_entity(channel.id.0)?; @@ -151,7 +151,7 @@ impl ChannelBuffer { cx.notify(); this.buffer .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) - })??; + })?; Ok(()) } diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 9bd5fd564f..fc5b12cfae 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -1007,7 +1007,7 @@ impl ChannelStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.apply_ops(incoming_operations, cx)?; + buffer.apply_ops(incoming_operations, cx); anyhow::Ok(outgoing_operations) }) .log_err(); diff --git 
a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 7b19dee315..06ad2b4594 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -689,9 +689,7 @@ impl Database { } let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text); - text_buffer - .apply_ops(operations.into_iter().filter_map(operation_from_wire)) - .unwrap(); + text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); let base_text = text_buffer.text(); let epoch = buffer.epoch + 1; diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 55a8f216c4..adc571580a 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -96,16 +96,14 @@ async fn test_channel_buffers(db: &Arc) { text::BufferId::new(1).unwrap(), buffer_response_b.base_text, ); - buffer_b - .apply_ops(buffer_response_b.operations.into_iter().map(|operation| { - let operation = proto::deserialize_operation(operation).unwrap(); - if let language::Operation::Buffer(operation) = operation { - operation - } else { - unreachable!() - } - })) - .unwrap(); + buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })); assert_eq!(buffer_b.text(), "hello, cruel world"); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 43fe1565ac..08fc1ccdb4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1972,7 +1972,7 @@ impl Buffer { &mut self, ops: I, cx: &mut ModelContext, - ) -> Result<()> { + ) { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = self.version.clone(); @@ -1991,14 +1991,13 @@ impl Buffer { } }) .collect::>(); - 
self.text.apply_ops(buffer_ops)?; + self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. cx.notify(); - Ok(()) } fn flush_deferred_ops(&mut self, cx: &mut ModelContext) { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 50dea8d256..23faa33316 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -308,7 +308,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) { // Incorporating a set of remote ops emits a single edited event, // followed by a dirty changed event. buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap(); + buffer.apply_ops(buffer1_ops.lock().drain(..), cx); }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), @@ -332,7 +332,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) { // Incorporating the remote ops again emits a single edited event, // followed by a dirty changed event. 
buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap(); + buffer.apply_ops(buffer1_ops.lock().drain(..), cx); }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), @@ -2274,13 +2274,11 @@ fn test_serialization(cx: &mut gpui::AppContext) { .block(buffer1.read(cx).serialize_ops(None, cx)); let buffer2 = cx.new_model(|cx| { let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| proto::deserialize_operation(op).unwrap()), + cx, + ); buffer }); assert_eq!(buffer2.read(cx).text(), "abcDF"); @@ -2401,13 +2399,11 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { .block(base_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = Buffer::from_proto(i as ReplicaId, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| proto::deserialize_operation(op).unwrap()), + cx, + ); buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.handle(), move |buffer, _, event, _| { @@ -2523,14 +2519,12 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { None, ) .unwrap(); - new_buffer - .apply_ops( - old_buffer_ops - .into_iter() - .map(|op| deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + new_buffer.apply_ops( + old_buffer_ops + .into_iter() + .map(|op| deserialize_operation(op).unwrap()), + cx, + ); log::info!( "New replica {} text: {:?}", new_buffer.replica_id(), @@ -2570,7 +2564,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { ops ); new_buffer.update(cx, |new_buffer, cx| { - new_buffer.apply_ops(ops, cx).unwrap(); + new_buffer.apply_ops(ops, 
cx); }); } } @@ -2598,7 +2592,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { ops.len(), ops ); - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap()); + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } } _ => {} diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 9dee092dea..29bd9a8068 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5019,13 +5019,11 @@ mod tests { .background_executor() .block(host_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| language::proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| language::proto::deserialize_operation(op).unwrap()), + cx, + ); buffer }); let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx)); diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index ead3235997..0045aba2e8 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -644,7 +644,7 @@ impl BufferStore { } hash_map::Entry::Occupied(mut entry) => { if let OpenBuffer::Operations(operations) = entry.get_mut() { - buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?; + buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx)); } else if entry.get().upgrade().is_some() { if is_remote { return Ok(()); @@ -1051,12 +1051,12 @@ impl BufferStore { match this.opened_buffers.entry(buffer_id) { hash_map::Entry::Occupied(mut e) => match e.get_mut() { OpenBuffer::Strong(buffer) => { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops), OpenBuffer::Weak(buffer) => { if let 
Some(buffer) = buffer.upgrade() { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } } }, @@ -1217,7 +1217,8 @@ impl BufferStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)) + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); + anyhow::Ok(()) }); if let Err(error) = result { diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 6f748fb588..8c5d7014ee 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -515,25 +515,25 @@ fn test_undo_redo() { let entries = buffer.history.undo_stack.clone(); assert_eq!(entries.len(), 3); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); 
assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1234"); } @@ -692,12 +692,12 @@ fn test_concurrent_edits() { let buf3_op = buffer3.edit([(5..6, "56")]); assert_eq!(buffer3.text(), "abcde56"); - buffer1.apply_op(buf2_op.clone()).unwrap(); - buffer1.apply_op(buf3_op.clone()).unwrap(); - buffer2.apply_op(buf1_op.clone()).unwrap(); - buffer2.apply_op(buf3_op).unwrap(); - buffer3.apply_op(buf1_op).unwrap(); - buffer3.apply_op(buf2_op).unwrap(); + buffer1.apply_op(buf2_op.clone()); + buffer1.apply_op(buf3_op.clone()); + buffer2.apply_op(buf1_op.clone()); + buffer2.apply_op(buf3_op); + buffer3.apply_op(buf1_op); + buffer3.apply_op(buf2_op); assert_eq!(buffer1.text(), "a12c34e56"); assert_eq!(buffer2.text(), "a12c34e56"); @@ -756,7 +756,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { replica_id, ops.len() ); - buffer.apply_ops(ops).unwrap(); + buffer.apply_ops(ops); } } _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 9630ec5b80..8d2cd97aac 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -38,7 +38,6 @@ pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree, TreeMap}; use undo_map::UndoMap; -use util::ResultExt; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; @@ -927,23 +926,22 @@ impl Buffer { self.snapshot.line_ending = line_ending; } - pub fn apply_ops>(&mut self, ops: I) -> Result<()> { + pub fn apply_ops>(&mut self, ops: I) { let mut deferred_ops = Vec::new(); for op in ops { self.history.push(op.clone()); if self.can_apply_op(&op) { - self.apply_op(op)?; + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - self.flush_deferred_ops()?; - Ok(()) + self.flush_deferred_ops(); } - fn apply_op(&mut self, op: Operation) -> Result<()> { + fn 
apply_op(&mut self, op: Operation) { match op { Operation::Edit(edit) => { if !self.version.observed(edit.timestamp) { @@ -960,7 +958,7 @@ impl Buffer { } Operation::Undo(undo) => { if !self.version.observed(undo.timestamp) { - self.apply_undo(&undo)?; + self.apply_undo(&undo); self.snapshot.version.observe(undo.timestamp); self.lamport_clock.observe(undo.timestamp); } @@ -974,7 +972,6 @@ impl Buffer { true } }); - Ok(()) } fn apply_remote_edit( @@ -1217,7 +1214,7 @@ impl Buffer { fragment_ids } - fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { + fn apply_undo(&mut self, undo: &UndoOperation) { self.snapshot.undo_map.insert(undo); let mut edits = Patch::default(); @@ -1268,22 +1265,20 @@ impl Buffer { self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.subscriptions.publish_mut(&edits); - Ok(()) } - fn flush_deferred_ops(&mut self) -> Result<()> { + fn flush_deferred_ops(&mut self) { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { - self.apply_op(op)?; + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - Ok(()) } fn can_apply_op(&self, op: &Operation) -> bool { @@ -1352,7 +1347,7 @@ impl Buffer { if let Some(entry) = self.history.pop_undo() { let transaction = entry.transaction.clone(); let transaction_id = transaction.id; - let op = self.undo_or_redo(transaction).unwrap(); + let op = self.undo_or_redo(transaction); Some((transaction_id, op)) } else { None @@ -1365,7 +1360,7 @@ impl Buffer { .remove_from_undo(transaction_id)? 
.transaction .clone(); - self.undo_or_redo(transaction).log_err() + Some(self.undo_or_redo(transaction)) } pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec { @@ -1378,7 +1373,7 @@ impl Buffer { transactions .into_iter() - .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .map(|transaction| self.undo_or_redo(transaction)) .collect() } @@ -1394,7 +1389,7 @@ impl Buffer { if let Some(entry) = self.history.pop_redo() { let transaction = entry.transaction.clone(); let transaction_id = transaction.id; - let op = self.undo_or_redo(transaction).unwrap(); + let op = self.undo_or_redo(transaction); Some((transaction_id, op)) } else { None @@ -1411,11 +1406,11 @@ impl Buffer { transactions .into_iter() - .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .map(|transaction| self.undo_or_redo(transaction)) .collect() } - fn undo_or_redo(&mut self, transaction: Transaction) -> Result { + fn undo_or_redo(&mut self, transaction: Transaction) -> Operation { let mut counts = HashMap::default(); for edit_id in transaction.edit_ids { counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); @@ -1426,11 +1421,11 @@ impl Buffer { version: self.version(), counts, }; - self.apply_undo(&undo)?; + self.apply_undo(&undo); self.snapshot.version.observe(undo.timestamp); let operation = Operation::Undo(undo); self.history.push(operation.clone()); - Ok(operation) + operation } pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) { @@ -1762,7 +1757,7 @@ impl Buffer { self.replica_id, transaction ); - ops.push(self.undo_or_redo(transaction).unwrap()); + ops.push(self.undo_or_redo(transaction)); } } ops From 27c1106fadef97dc56d17d9359fd7514b71c8643 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Thu, 19 Sep 2024 13:26:14 -0400 Subject: [PATCH 230/270] Fix bug where copying from assistant panel appends extra newline to clipboard (#18090) Closes https://github.com/zed-industries/zed/issues/17661 Release Notes: - Fixed a bug where copying from the assistant panel appended an additional newline to the end of the clipboard contents. --- crates/assistant/src/assistant_panel.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index da176ebeee..364c6f9663 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3533,7 +3533,9 @@ impl ContextEditor { for chunk in context.buffer().read(cx).text_for_range(range) { text.push_str(chunk); } - text.push('\n'); + if message.offset_range.end < selection.range().end { + text.push('\n'); + } } } } From 00b1c81c9f8f209667140036da4c9ac578031546 Mon Sep 17 00:00:00 2001 From: David Soria Parra <167242713+dsp-ant@users.noreply.github.com> Date: Thu, 19 Sep 2024 20:51:48 +0100 Subject: [PATCH 231/270] context_servers: Remove context_type from ResourceContent (#18097) This is removed in the protocol Release Notes: - N/A --- crates/context_servers/src/types.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index c0e9a79f15..cd95ecd7ad 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -239,7 +239,6 @@ pub struct Resource { pub struct ResourceContent { pub uri: Url, pub mime_type: Option, - pub content_type: String, pub text: Option, pub data: Option, } From fbbf0393cbe9b2094bbdd496a5d5d15419eeaeb3 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 14:04:46 -0600 Subject: [PATCH 232/270] ssh-remoting: Fix go to definition out of worktree (#18094) Release Notes: - ssh-remoting: Fixed go to definition outside of worktree --------- 
Co-authored-by: Mikayla --- crates/client/src/client.rs | 4 + crates/project/src/lsp_store.rs | 21 ++- crates/project/src/project.rs | 181 ++++++++----------- crates/project/src/worktree_store.rs | 173 ++++++++++++++---- crates/remote/src/ssh_session.rs | 7 +- crates/remote_server/src/headless_project.rs | 17 +- crates/rpc/src/proto_client.rs | 6 + 7 files changed, 251 insertions(+), 158 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 09286300d9..a8387f7c5a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1621,6 +1621,10 @@ impl ProtoClient for Client { fn message_handler_set(&self) -> &parking_lot::Mutex { &self.handler_set } + + fn goes_via_collab(&self) -> bool { + true + } } #[derive(Serialize, Deserialize)] diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 58d9ba8926..5c32c9030d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -534,6 +534,9 @@ impl LspStore { } WorktreeStoreEvent::WorktreeRemoved(_, id) => self.remove_worktree(*id, cx), WorktreeStoreEvent::WorktreeOrderChanged => {} + WorktreeStoreEvent::WorktreeUpdateSent(worktree) => { + worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree)); + } } } @@ -764,24 +767,22 @@ impl LspStore { self.active_entry = active_entry; } - pub(crate) fn send_diagnostic_summaries( - &self, - worktree: &mut Worktree, - ) -> Result<(), anyhow::Error> { + pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { if let Some(client) = self.downstream_client.clone() { if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { for (path, summaries) in summaries { for (&server_id, summary) in summaries { - client.send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, - worktree_id: worktree.id().to_proto(), - summary: Some(summary.to_proto(server_id, path)), - })?; + client + .send(proto::UpdateDiagnosticSummary { + 
project_id: self.project_id, + worktree_id: worktree.id().to_proto(), + summary: Some(summary.to_proto(server_id, path)), + }) + .log_err(); } } } } - Ok(()) } pub fn request_lsp( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f4816cf0cd..fcf10d11c2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,7 +31,7 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - AsyncWriteExt, FutureExt, StreamExt, + AsyncWriteExt, StreamExt, }; use git::{blame::Blame, repository::GitRepository}; @@ -152,7 +152,7 @@ pub struct Project { _subscriptions: Vec, buffers_needing_diff: HashSet>, git_diff_debouncer: DebouncedDelay, - remotely_created_buffers: Arc>, + remotely_created_models: Arc>, terminals: Terminals, node: Option>, tasks: Model, @@ -169,26 +169,28 @@ pub struct Project { } #[derive(Default)] -struct RemotelyCreatedBuffers { +struct RemotelyCreatedModels { + worktrees: Vec>, buffers: Vec>, retain_count: usize, } -struct RemotelyCreatedBufferGuard { - remote_buffers: std::sync::Weak>, +struct RemotelyCreatedModelGuard { + remote_models: std::sync::Weak>, } -impl Drop for RemotelyCreatedBufferGuard { +impl Drop for RemotelyCreatedModelGuard { fn drop(&mut self) { - if let Some(remote_buffers) = self.remote_buffers.upgrade() { - let mut remote_buffers = remote_buffers.lock(); + if let Some(remote_models) = self.remote_models.upgrade() { + let mut remote_models = remote_models.lock(); assert!( - remote_buffers.retain_count > 0, - "RemotelyCreatedBufferGuard dropped too many times" + remote_models.retain_count > 0, + "RemotelyCreatedModelGuard dropped too many times" ); - remote_buffers.retain_count -= 1; - if remote_buffers.retain_count == 0 { - remote_buffers.buffers.clear(); + remote_models.retain_count -= 1; + if remote_models.retain_count == 0 { + remote_models.buffers.clear(); + remote_models.worktrees.clear(); } } } @@ -620,7 +622,7 
@@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| WorktreeStore::new(false, fs.clone())); + let worktree_store = cx.new_model(|_| WorktreeStore::new(None, false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -687,7 +689,7 @@ impl Project { dev_server_project_id: None, search_history: Self::new_search_history(), environment, - remotely_created_buffers: Default::default(), + remotely_created_models: Default::default(), last_formatting_failure: None, buffers_being_formatted: Default::default(), search_included_history: Self::new_search_history(), @@ -714,11 +716,8 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| { - let mut worktree_store = WorktreeStore::new(false, fs.clone()); - worktree_store.set_upstream_client(ssh.clone().into()); - worktree_store - }); + let worktree_store = + cx.new_model(|_| WorktreeStore::new(Some(ssh.clone().into()), false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -773,7 +772,7 @@ impl Project { dev_server_project_id: None, search_history: Self::new_search_history(), environment, - remotely_created_buffers: Default::default(), + remotely_created_models: Default::default(), last_formatting_failure: None, buffers_being_formatted: Default::default(), search_included_history: Self::new_search_history(), @@ -787,8 +786,9 @@ impl Project { ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.worktree_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.lsp_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer); - client.add_model_message_handler(Self::handle_update_worktree); client.add_model_message_handler(Self::handle_create_buffer_for_peer); + client.add_model_message_handler(Self::handle_update_worktree); + 
client.add_model_message_handler(Self::handle_update_project); client.add_model_request_handler(BufferStore::handle_update_buffer); BufferStore::init(&client); LspStore::init(&client); @@ -867,8 +867,7 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - let mut store = WorktreeStore::new(true, fs.clone()); - store.set_upstream_client(client.clone().into()); + let mut store = WorktreeStore::new(Some(client.clone().into()), true, fs.clone()); if let Some(dev_server_project_id) = response.payload.dev_server_project_id { store.set_dev_server_project_id(DevServerProjectId(dev_server_project_id)); } @@ -955,7 +954,7 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), - remotely_created_buffers: Arc::new(Mutex::new(RemotelyCreatedBuffers::default())), + remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), last_formatting_failure: None, buffers_being_formatted: Default::default(), }; @@ -1259,43 +1258,6 @@ impl Project { } } - fn metadata_changed(&mut self, cx: &mut ModelContext) { - cx.notify(); - - let ProjectClientState::Shared { remote_id } = self.client_state else { - return; - }; - let project_id = remote_id; - - let update_project = self.client.request(proto::UpdateProject { - project_id, - worktrees: self.worktree_metadata_protos(cx), - }); - cx.spawn(|this, mut cx| async move { - update_project.await?; - this.update(&mut cx, |this, cx| { - let client = this.client.clone(); - let worktrees = this.worktree_store.read(cx).worktrees().collect::>(); - - for worktree in worktrees { - worktree.update(cx, |worktree, cx| { - let client = client.clone(); - worktree.observe_updates(project_id, cx, { - move |update| client.request(update).map(|result| result.is_ok()) - }); - - this.lsp_store.update(cx, |lsp_store, _| { - lsp_store.send_diagnostic_summaries(worktree) - }) 
- })?; - } - - anyhow::Ok(()) - }) - }) - .detach_and_log_err(cx); - } - pub fn task_inventory(&self) -> &Model { &self.tasks } @@ -1513,7 +1475,7 @@ impl Project { buffer_store.shared(project_id, self.client.clone().into(), cx) }); self.worktree_store.update(cx, |worktree_store, cx| { - worktree_store.set_shared(true, cx); + worktree_store.shared(project_id, self.client.clone().into(), cx); }); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.shared(project_id, self.client.clone().into(), cx) @@ -1526,7 +1488,6 @@ impl Project { remote_id: project_id, }; - self.metadata_changed(cx); cx.emit(Event::RemoteIdChanged(Some(project_id))); cx.notify(); Ok(()) @@ -1540,7 +1501,11 @@ impl Project { self.buffer_store .update(cx, |buffer_store, _| buffer_store.forget_shared_buffers()); self.set_collaborators_from_proto(message.collaborators, cx)?; - self.metadata_changed(cx); + + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.send_project_updates(cx); + }); + cx.notify(); cx.emit(Event::Reshared); Ok(()) } @@ -1576,7 +1541,6 @@ impl Project { pub fn unshare(&mut self, cx: &mut ModelContext) -> Result<()> { self.unshare_internal(cx)?; - self.metadata_changed(cx); cx.notify(); Ok(()) } @@ -1598,7 +1562,7 @@ impl Project { self.collaborators.clear(); self.client_subscriptions.clear(); self.worktree_store.update(cx, |store, cx| { - store.set_shared(false, cx); + store.unshared(cx); }); self.buffer_store.update(cx, |buffer_store, cx| { buffer_store.forget_shared_buffers(); @@ -1867,9 +1831,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { { - let mut remotely_created_buffers = self.remotely_created_buffers.lock(); - if remotely_created_buffers.retain_count > 0 { - remotely_created_buffers.buffers.push(buffer.clone()) + let mut remotely_created_models = self.remotely_created_models.lock(); + if remotely_created_models.retain_count > 0 { + remotely_created_models.buffers.push(buffer.clone()) } } @@ -2110,10 +2074,17 @@ impl Project { 
cx.emit(Event::WorktreeRemoved(*id)); } WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged), + WorktreeStoreEvent::WorktreeUpdateSent(_) => {} } } fn on_worktree_added(&mut self, worktree: &Model, cx: &mut ModelContext) { + { + let mut remotely_created_models = self.remotely_created_models.lock(); + if remotely_created_models.retain_count > 0 { + remotely_created_models.worktrees.push(worktree.clone()) + } + } cx.observe(worktree, |_, _, cx| cx.notify()).detach(); cx.subscribe(worktree, |this, worktree, event, cx| { let is_local = worktree.read(cx).is_local(); @@ -2140,7 +2111,7 @@ impl Project { } }) .detach(); - self.metadata_changed(cx); + cx.notify(); } fn on_worktree_removed(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { @@ -2171,7 +2142,7 @@ impl Project { inventory.remove_worktree_sources(id_to_remove); }); - self.metadata_changed(cx); + cx.notify(); } fn on_buffer_event( @@ -3012,7 +2983,7 @@ impl Project { #[inline(never)] fn definition_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3025,7 +2996,7 @@ impl Project { ) } pub fn definition( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3035,7 +3006,7 @@ impl Project { } fn declaration_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3049,7 +3020,7 @@ impl Project { } pub fn declaration( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3059,7 +3030,7 @@ impl Project { } fn type_definition_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3073,7 +3044,7 @@ impl Project { } pub fn type_definition( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3083,7 +3054,7 @@ impl Project { } pub fn implementation( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3098,7 +3069,7 @@ impl Project { } pub fn references( - &self, + &mut 
self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3113,7 +3084,7 @@ impl Project { } fn document_highlights_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3127,7 +3098,7 @@ impl Project { } pub fn document_highlights( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3514,7 +3485,7 @@ impl Project { query: Some(query.to_proto()), limit: limit as _, }); - let guard = self.retain_remotely_created_buffers(cx); + let guard = self.retain_remotely_created_models(cx); cx.spawn(move |this, mut cx| async move { let response = request.await?; @@ -3536,7 +3507,7 @@ impl Project { } pub fn request_lsp( - &self, + &mut self, buffer_handle: Model, server: LanguageServerToQuery, request: R, @@ -3546,8 +3517,14 @@ impl Project { ::Result: Send, ::Params: Send, { - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.request_lsp(buffer_handle, server, request, cx) + }); + cx.spawn(|_, _| async move { + let result = task.await; + drop(guard); + result }) } @@ -4095,6 +4072,7 @@ impl Project { })? 
} + // Collab sends UpdateWorktree protos as messages async fn handle_update_worktree( this: Model, envelope: TypedEnvelope, @@ -4130,19 +4108,21 @@ impl Project { BufferStore::handle_update_buffer(buffer_store, envelope, cx).await } - fn retain_remotely_created_buffers( + fn retain_remotely_created_models( &mut self, cx: &mut ModelContext, - ) -> RemotelyCreatedBufferGuard { + ) -> RemotelyCreatedModelGuard { { - let mut remotely_created_buffers = self.remotely_created_buffers.lock(); - if remotely_created_buffers.retain_count == 0 { - remotely_created_buffers.buffers = self.buffer_store.read(cx).buffers().collect(); + let mut remotely_create_models = self.remotely_created_models.lock(); + if remotely_create_models.retain_count == 0 { + remotely_create_models.buffers = self.buffer_store.read(cx).buffers().collect(); + remotely_create_models.worktrees = + self.worktree_store.read(cx).worktrees().collect(); } - remotely_created_buffers.retain_count += 1; + remotely_create_models.retain_count += 1; } - RemotelyCreatedBufferGuard { - remote_buffers: Arc::downgrade(&self.remotely_created_buffers), + RemotelyCreatedModelGuard { + remote_models: Arc::downgrade(&self.remotely_created_models), } } @@ -4637,16 +4617,11 @@ impl Project { worktrees: Vec, cx: &mut ModelContext, ) -> Result<()> { - self.metadata_changed(cx); - self.worktree_store.update(cx, |worktree_store, cx| { - worktree_store.set_worktrees_from_proto( - worktrees, - self.replica_id(), - self.remote_id().ok_or_else(|| anyhow!("invalid project"))?, - self.client.clone().into(), - cx, - ) - }) + cx.notify(); + let result = self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.set_worktrees_from_proto(worktrees, self.replica_id(), cx) + }); + result } fn set_collaborators_from_proto( diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 07764d4a05..7fae8b9e1d 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ 
-39,8 +39,10 @@ struct MatchingEntry { pub struct WorktreeStore { next_entry_id: Arc, upstream_client: Option, + downstream_client: Option, + remote_id: u64, dev_server_project_id: Option, - is_shared: bool, + retain_worktrees: bool, worktrees: Vec, worktrees_reordered: bool, #[allow(clippy::type_complexity)] @@ -53,6 +55,7 @@ pub enum WorktreeStoreEvent { WorktreeAdded(Model), WorktreeRemoved(EntityId, WorktreeId), WorktreeOrderChanged, + WorktreeUpdateSent(Model), } impl EventEmitter for WorktreeStore {} @@ -66,23 +69,25 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_expand_project_entry); } - pub fn new(retain_worktrees: bool, fs: Arc) -> Self { + pub fn new( + upstream_client: Option, + retain_worktrees: bool, + fs: Arc, + ) -> Self { Self { next_entry_id: Default::default(), loading_worktrees: Default::default(), - upstream_client: None, dev_server_project_id: None, - is_shared: retain_worktrees, + downstream_client: None, worktrees: Vec::new(), worktrees_reordered: false, + retain_worktrees, + remote_id: 0, + upstream_client, fs, } } - pub fn set_upstream_client(&mut self, client: AnyProtoClient) { - self.upstream_client = Some(client); - } - pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) { self.dev_server_project_id = Some(id); } @@ -201,6 +206,13 @@ impl WorktreeStore { path: abs_path.clone(), }) .await?; + + if let Some(existing_worktree) = this.read_with(&cx, |this, cx| { + this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx) + })? 
{ + return Ok(existing_worktree); + } + let worktree = cx.update(|cx| { Worktree::remote( 0, @@ -302,7 +314,10 @@ impl WorktreeStore { } pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { - let push_strong_handle = self.is_shared || worktree.read(cx).is_visible(); + let worktree_id = worktree.read(cx).id(); + debug_assert!(!self.worktrees().any(|w| w.read(cx).id() == worktree_id)); + + let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible(); let handle = if push_strong_handle { WorktreeHandle::Strong(worktree.clone()) } else { @@ -322,13 +337,15 @@ impl WorktreeStore { } cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone())); + self.send_project_updates(cx); let handle_id = worktree.entity_id(); - cx.observe_release(worktree, move |_, worktree, cx| { + cx.observe_release(worktree, move |this, worktree, cx| { cx.emit(WorktreeStoreEvent::WorktreeRemoved( handle_id, worktree.id(), )); + this.send_project_updates(cx); }) .detach(); } @@ -349,6 +366,7 @@ impl WorktreeStore { false } }); + self.send_project_updates(cx); } pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { @@ -359,8 +377,6 @@ impl WorktreeStore { &mut self, worktrees: Vec, replica_id: ReplicaId, - remote_id: u64, - client: AnyProtoClient, cx: &mut ModelContext, ) -> Result<()> { let mut old_worktrees_by_id = self @@ -372,18 +388,31 @@ impl WorktreeStore { }) .collect::>(); + let client = self + .upstream_client + .clone() + .ok_or_else(|| anyhow!("invalid project"))?; + for worktree in worktrees { if let Some(old_worktree) = old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id)) { - self.worktrees.push(WorktreeHandle::Strong(old_worktree)); + let push_strong_handle = + self.retain_worktrees || old_worktree.read(cx).is_visible(); + let handle = if push_strong_handle { + WorktreeHandle::Strong(old_worktree.clone()) + } else { + WorktreeHandle::Weak(old_worktree.downgrade()) + }; + self.worktrees.push(handle); } else { self.add( - 
&Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx), + &Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx), cx, ); } } + self.send_project_updates(cx); Ok(()) } @@ -446,33 +475,109 @@ impl WorktreeStore { } } - pub fn set_shared(&mut self, is_shared: bool, cx: &mut ModelContext) { - self.is_shared = is_shared; + pub fn send_project_updates(&mut self, cx: &mut ModelContext) { + let Some(downstream_client) = self.downstream_client.clone() else { + return; + }; + let project_id = self.remote_id; + + let update = proto::UpdateProject { + project_id, + worktrees: self.worktree_metadata_protos(cx), + }; + + // collab has bad concurrency guarantees, so we send requests in serial. + let update_project = if downstream_client.goes_via_collab() { + Some(downstream_client.request(update)) + } else { + downstream_client.send(update).log_err(); + None + }; + cx.spawn(|this, mut cx| async move { + if let Some(update_project) = update_project { + update_project.await?; + } + + this.update(&mut cx, |this, cx| { + let worktrees = this.worktrees().collect::>(); + + for worktree in worktrees { + worktree.update(cx, |worktree, cx| { + let client = downstream_client.clone(); + worktree.observe_updates(project_id, cx, { + move |update| { + let client = client.clone(); + async move { + if client.goes_via_collab() { + client.request(update).map(|result| result.is_ok()).await + } else { + client.send(update).is_ok() + } + } + } + }); + }); + + cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone())) + } + + anyhow::Ok(()) + }) + }) + .detach_and_log_err(cx); + } + + pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec { + self.worktrees() + .map(|worktree| { + let worktree = worktree.read(cx); + proto::WorktreeMetadata { + id: worktree.id().to_proto(), + root_name: worktree.root_name().into(), + visible: worktree.is_visible(), + abs_path: worktree.abs_path().to_string_lossy().into(), + } + }) + .collect() + } + + pub fn shared( + 
&mut self, + remote_id: u64, + downsteam_client: AnyProtoClient, + cx: &mut ModelContext, + ) { + self.retain_worktrees = true; + self.remote_id = remote_id; + self.downstream_client = Some(downsteam_client); // When shared, retain all worktrees - if is_shared { - for worktree_handle in self.worktrees.iter_mut() { - match worktree_handle { - WorktreeHandle::Strong(_) => {} - WorktreeHandle::Weak(worktree) => { - if let Some(worktree) = worktree.upgrade() { - *worktree_handle = WorktreeHandle::Strong(worktree); - } + for worktree_handle in self.worktrees.iter_mut() { + match worktree_handle { + WorktreeHandle::Strong(_) => {} + WorktreeHandle::Weak(worktree) => { + if let Some(worktree) = worktree.upgrade() { + *worktree_handle = WorktreeHandle::Strong(worktree); } } } } + self.send_project_updates(cx); + } + + pub fn unshared(&mut self, cx: &mut ModelContext) { + self.retain_worktrees = false; + self.downstream_client.take(); + // When not shared, only retain the visible worktrees - else { - for worktree_handle in self.worktrees.iter_mut() { - if let WorktreeHandle::Strong(worktree) = worktree_handle { - let is_visible = worktree.update(cx, |worktree, _| { - worktree.stop_observing_updates(); - worktree.is_visible() - }); - if !is_visible { - *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); - } + for worktree_handle in self.worktrees.iter_mut() { + if let WorktreeHandle::Strong(worktree) = worktree_handle { + let is_visible = worktree.update(cx, |worktree, _| { + worktree.stop_observing_updates(); + worktree.is_visible() + }); + if !is_visible { + *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); } } } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 4aab731e64..10608b74f3 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -247,7 +247,8 @@ impl SshSession { let line_ix = start_ix + ix; let content = &stderr_buffer[start_ix..line_ix]; start_ix = line_ix + 1; - if 
let Ok(record) = serde_json::from_slice::(content) { + if let Ok(mut record) = serde_json::from_slice::(content) { + record.message = format!("(remote) {}", record.message); record.log(log::logger()) } else { eprintln!("(remote) {}", String::from_utf8_lossy(content)); @@ -469,6 +470,10 @@ impl ProtoClient for SshSession { fn message_handler_set(&self) -> &Mutex { &self.state } + + fn goes_via_collab(&self) -> bool { + false + } } impl SshClientState { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index bbd82281d8..54f48e3626 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -44,7 +44,11 @@ impl HeadlessProject { pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); - let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); + let worktree_store = cx.new_model(|cx| { + let mut store = WorktreeStore::new(None, true, fs.clone()); + store.shared(SSH_PROJECT_ID, session.clone().into(), cx); + store + }); let buffer_store = cx.new_model(|cx| { let mut buffer_store = BufferStore::new(worktree_store.clone(), Some(SSH_PROJECT_ID), cx); @@ -196,18 +200,11 @@ impl HeadlessProject { .await?; this.update(&mut cx, |this, cx| { - let session = this.session.clone(); this.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(&worktree, cx); }); - worktree.update(cx, |worktree, cx| { - worktree.observe_updates(0, cx, move |update| { - session.send(update).ok(); - futures::future::ready(true) - }); - proto::AddWorktreeResponse { - worktree_id: worktree.id().to_proto(), - } + worktree.update(cx, |worktree, _| proto::AddWorktreeResponse { + worktree_id: worktree.id().to_proto(), }) }) } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 4a990a8433..89ef580cdf 100644 --- a/crates/rpc/src/proto_client.rs +++ 
b/crates/rpc/src/proto_client.rs @@ -27,6 +27,8 @@ pub trait ProtoClient: Send + Sync { fn send_response(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>; fn message_handler_set(&self) -> &parking_lot::Mutex; + + fn goes_via_collab(&self) -> bool; } #[derive(Default)] @@ -139,6 +141,10 @@ impl AnyProtoClient { Self(client) } + pub fn goes_via_collab(&self) -> bool { + self.0.goes_via_collab() + } + pub fn request( &self, request: T, From 28a54ce122fdd5efb2e23cc77a5efab78c07061c Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Thu, 19 Sep 2024 14:16:01 -0600 Subject: [PATCH 233/270] Add diagnostic information to context of inline assistant (#18096) Release Notes: - Added Diagnostic information to inline assistant. This enables users to just say "Fix this" and have the model know what the errors are. --- assets/prompts/content_prompt.hbs | 11 +++++++++++ crates/assistant/src/prompts.rs | 23 +++++++++++++++++++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/assets/prompts/content_prompt.hbs b/assets/prompts/content_prompt.hbs index cf4141349b..e944e230f5 100644 --- a/assets/prompts/content_prompt.hbs +++ b/assets/prompts/content_prompt.hbs @@ -47,6 +47,17 @@ And here's the section to rewrite based on that prompt again for reference: {{{rewrite_section}}} + +{{#if diagnostic_errors}} +{{#each diagnostic_errors}} + + {{line_number}} + {{error_message}} + {{code_content}} + +{{/each}} +{{/if}} + {{/if}} Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved. 
diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index ae2ab4787e..7d99a70d14 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -4,13 +4,20 @@ use fs::Fs; use futures::StreamExt; use gpui::AssetSource; use handlebars::{Handlebars, RenderError}; -use language::{BufferSnapshot, LanguageName}; +use language::{BufferSnapshot, LanguageName, Point}; use parking_lot::Mutex; use serde::Serialize; use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration}; use text::LineEnding; use util::ResultExt; +#[derive(Serialize)] +pub struct ContentPromptDiagnosticContext { + pub line_number: usize, + pub error_message: String, + pub code_content: String, +} + #[derive(Serialize)] pub struct ContentPromptContext { pub content_type: String, @@ -20,6 +27,7 @@ pub struct ContentPromptContext { pub document_content: String, pub user_prompt: String, pub rewrite_section: Option, + pub diagnostic_errors: Vec, } #[derive(Serialize)] @@ -261,6 +269,17 @@ impl PromptBuilder { } else { None }; + let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false); + let diagnostic_errors: Vec = diagnostics + .map(|entry| { + let start = entry.range.start; + ContentPromptDiagnosticContext { + line_number: (start.row + 1) as usize, + error_message: entry.diagnostic.message.clone(), + code_content: buffer.text_for_range(entry.range.clone()).collect(), + } + }) + .collect(); let context = ContentPromptContext { content_type: content_type.to_string(), @@ -270,8 +289,8 @@ impl PromptBuilder { document_content, user_prompt, rewrite_section, + diagnostic_errors, }; - self.handlebars.lock().render("content_prompt", &context) } From 82e6b1e0e5fe4f2a04fba2fb6f3e7d1aae0974a1 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 19 Sep 2024 17:22:11 -0400 Subject: [PATCH 234/270] docs: Update glibc requirements for current binaries (#18101) --- docs/src/linux.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/docs/src/linux.md b/docs/src/linux.md index 812a3707d0..17334c325c 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer; Amazon Linux >2023) + - x86_64 (Intel/AMD): glibc version >= 2.35 (Ubuntu 22 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). @@ -24,7 +24,7 @@ Both Nix and Alpine have third-party Zed packages available (though they are cur You will need to build from source for: - architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) -- Amazon Linux 2 on x86_64 +- Amazon Linux - Rocky Linux 9.3 ## Other ways to install Zed on Linux From edf2c192500194192320ff21e86a2846e5089d48 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 15:28:30 -0600 Subject: [PATCH 235/270] Hide GPU problems from Slack (#18087) Release Notes: - N/A --------- Co-authored-by: Marshall Co-authored-by: Marshall Bowers --- crates/collab/src/api/events.rs | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 1be8f9c37b..008c76e048 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -18,8 +18,8 @@ use sha2::{Digest, Sha256}; use std::sync::{Arc, OnceLock}; use telemetry_events::{ ActionEvent, AppEvent, AssistantEvent, CallEvent, 
CpuEvent, EditEvent, EditorEvent, Event, - EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent, - SettingEvent, + EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic, + ReplEvent, SettingEvent, }; use uuid::Uuid; @@ -296,10 +296,11 @@ pub async fn post_panic( version = %panic.app_version, os_name = %panic.os_name, os_version = %panic.os_version.clone().unwrap_or_default(), - installation_id = %panic.installation_id.unwrap_or_default(), + installation_id = %panic.installation_id.clone().unwrap_or_default(), description = %panic.payload, backtrace = %panic.backtrace.join("\n"), - "panic report"); + "panic report" + ); let backtrace = if panic.backtrace.len() > 25 { let total = panic.backtrace.len(); @@ -317,6 +318,11 @@ pub async fn post_panic( } else { panic.backtrace.join("\n") }; + + if !report_to_slack(&panic) { + return Ok(()); + } + let backtrace_with_summary = panic.payload + "\n" + &backtrace; if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { @@ -357,6 +363,23 @@ pub async fn post_panic( Ok(()) } +fn report_to_slack(panic: &Panic) -> bool { + if panic.os_name == "Linux" { + if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { + return false; + } + + if panic + .payload + .contains("GPU has crashed, and no debug information is available") + { + return false; + } + } + + true +} + pub async fn post_events( Extension(app): Extension>, TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, From 740803d745e1fe3b711c3c1a05ce3a2616f123cb Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 19 Sep 2024 19:43:32 -0400 Subject: [PATCH 236/270] Bump release_notes to v2 endpoint (#18108) Partially addresses https://github.com/zed-industries/zed/issues/17527 SCR-20240919-rcik Release Notes: - Enhanced the `auto update: view release notes locally` feature to display release notes for each patch version associated with the installed minor version. 
--- crates/auto_update/src/auto_update.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index cfda6d6e58..1fe89cce0f 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -268,7 +268,7 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext Date: Thu, 19 Sep 2024 17:49:22 -0600 Subject: [PATCH 237/270] Fix prompt reloading in dev mode (#18095) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I think I nulled out the repo path to test the non dev mode case and then forgot to reenable it 🤦‍♂️ . Release Notes: - N/A --- crates/assistant/src/prompts.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 7d99a70d14..3b9f75bac9 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -90,10 +90,9 @@ impl PromptBuilder { /// and application context. /// * `handlebars` - An `Arc>` for registering and updating templates. fn watch_fs_for_template_overrides( - mut params: PromptLoadingParams, + params: PromptLoadingParams, handlebars: Arc>>, ) { - params.repo_path = None; let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref()); params.cx.background_executor() .spawn(async move { From 15b4130fa551e38841b69f135218ef75cfb010db Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 19 Sep 2024 17:50:00 -0600 Subject: [PATCH 238/270] Introduce the ability to cycle between alternative inline assists (#18098) Release Notes: - Added a new `assistant.inline_alternatives` setting to configure additional models that will be used to perform inline assists in parallel. 
--------- Co-authored-by: Nathan Co-authored-by: Roy Co-authored-by: Adam --- assets/keymaps/default-linux.json | 7 + assets/keymaps/default-macos.json | 7 + crates/assistant/src/assistant.rs | 13 + crates/assistant/src/assistant_settings.rs | 13 +- crates/assistant/src/inline_assistant.rs | 702 ++++++++++++++++----- crates/language_model/src/registry.rs | 32 + crates/multi_buffer/src/multi_buffer.rs | 20 + docs/src/assistant/configuration.md | 26 + 8 files changed, 642 insertions(+), 178 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 542f6c2df4..f15c4dfe22 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -520,6 +520,13 @@ "alt-enter": "editor::Newline" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 77fac3254b..a58112b3c0 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -527,6 +527,13 @@ "ctrl-enter": "assistant::InlineAssist" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index d7466878c9..8b9c66ee55 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -69,6 +69,8 @@ actions!( ConfirmCommand, NewContext, ToggleModelSelector, + CycleNextInlineAssist, + CyclePreviousInlineAssist ] ); @@ -359,8 +361,19 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) { let settings = AssistantSettings::get_global(cx); let provider_name = 
LanguageModelProviderId::from(settings.default_model.provider.clone()); let model_id = LanguageModelId::from(settings.default_model.model.clone()); + let inline_alternatives = settings + .inline_alternatives + .iter() + .map(|alternative| { + ( + LanguageModelProviderId::from(alternative.provider.clone()), + LanguageModelId::from(alternative.model.clone()), + ) + }) + .collect::>(); LanguageModelRegistry::global(cx).update(cx, |registry, cx| { registry.select_active_model(&provider_name, &model_id, cx); + registry.select_inline_alternative_models(inline_alternatives, cx); }); } diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index e2c6a8eb24..5aa379bae3 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -59,6 +59,7 @@ pub struct AssistantSettings { pub default_width: Pixels, pub default_height: Pixels, pub default_model: LanguageModelSelection, + pub inline_alternatives: Vec, pub using_outdated_settings_version: bool, } @@ -236,6 +237,7 @@ impl AssistantSettingsContent { }) } }), + inline_alternatives: None, }, VersionedAssistantSettingsContent::V2(settings) => settings.clone(), }, @@ -254,6 +256,7 @@ impl AssistantSettingsContent { .id() .to_string(), }), + inline_alternatives: None, }, } } @@ -369,6 +372,7 @@ impl Default for VersionedAssistantSettingsContent { default_width: None, default_height: None, default_model: None, + inline_alternatives: None, }) } } @@ -397,6 +401,8 @@ pub struct AssistantSettingsContentV2 { default_height: Option, /// The default model to use when creating new contexts. default_model: Option, + /// Additional models with which to generate alternatives when performing inline assists. 
+ inline_alternatives: Option>, } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] @@ -517,10 +523,8 @@ impl Settings for AssistantSettings { &mut settings.default_height, value.default_height.map(Into::into), ); - merge( - &mut settings.default_model, - value.default_model.map(Into::into), - ); + merge(&mut settings.default_model, value.default_model); + merge(&mut settings.inline_alternatives, value.inline_alternatives); // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference } @@ -574,6 +578,7 @@ mod tests { provider: "test-provider".into(), model: "gpt-99".into(), }), + inline_alternatives: None, enabled: None, button: None, dock: None, diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index c9360213ae..428b33f3bb 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1,7 +1,7 @@ use crate::{ assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder, - AssistantPanel, AssistantPanelEvent, CharOperation, LineDiff, LineOperation, ModelSelector, - StreamingDiff, + AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist, + CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff, }; use anyhow::{anyhow, Context as _, Result}; use client::{telemetry::Telemetry, ErrorExt}; @@ -25,13 +25,13 @@ use futures::{ SinkExt, Stream, StreamExt, }; use gpui::{ - anchored, deferred, point, AppContext, ClickEvent, EventEmitter, FocusHandle, FocusableView, - FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, TextStyle, - UpdateGlobal, View, ViewContext, WeakView, WindowContext, + anchored, deferred, point, AnyElement, AppContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, + TextStyle, UpdateGlobal, View, ViewContext, WeakView, 
WindowContext, }; use language::{Buffer, IndentKind, Point, Selection, TransactionId}; use language_model::{ - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; @@ -41,7 +41,7 @@ use smol::future::FutureExt; use std::{ cmp, future::{self, Future}, - mem, + iter, mem, ops::{Range, RangeInclusive}, pin::Pin, sync::Arc, @@ -85,7 +85,7 @@ pub struct InlineAssistant { async_watch::Receiver, ), >, - confirmed_assists: HashMap>, + confirmed_assists: HashMap>, prompt_history: VecDeque, prompt_builder: Arc, telemetry: Option>, @@ -157,7 +157,7 @@ impl InlineAssistant { if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = &assist.codegen.read(cx).status { + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { self.finish_assist(assist_id, false, cx) } } @@ -553,7 +553,7 @@ impl InlineAssistant { let assist_range = assist.range.to_offset(&buffer); if assist_range.contains(&selection.start) && assist_range.contains(&selection.end) { - if matches!(assist.codegen.read(cx).status, CodegenStatus::Pending) { + if matches!(assist.codegen.read(cx).status(cx), CodegenStatus::Pending) { self.dismiss_assist(*assist_id, cx); } else { self.finish_assist(*assist_id, false, cx); @@ -671,7 +671,7 @@ impl InlineAssistant { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; if matches!( - assist.codegen.read(cx).status, + assist.codegen.read(cx).status(cx), CodegenStatus::Error(_) | CodegenStatus::Done ) { let assist_range = assist.range.to_offset(&snapshot); @@ -774,7 +774,9 @@ impl InlineAssistant { if undo { assist.codegen.update(cx, |codegen, cx| codegen.undo(cx)); } else { - 
self.confirmed_assists.insert(assist_id, assist.codegen); + let confirmed_alternative = assist.codegen.read(cx).active_alternative().clone(); + self.confirmed_assists + .insert(assist_id, confirmed_alternative); } } @@ -978,12 +980,7 @@ impl InlineAssistant { assist .codegen .update(cx, |codegen, cx| { - codegen.start( - assist.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + codegen.start(user_prompt, assistant_panel_context, cx) }) .log_err(); @@ -1008,7 +1005,7 @@ impl InlineAssistant { pub fn assist_status(&self, assist_id: InlineAssistId, cx: &AppContext) -> InlineAssistStatus { if let Some(assist) = self.assists.get(&assist_id) { - match &assist.codegen.read(cx).status { + match assist.codegen.read(cx).status(cx) { CodegenStatus::Idle => InlineAssistStatus::Idle, CodegenStatus::Pending => InlineAssistStatus::Pending, CodegenStatus::Done => InlineAssistStatus::Done, @@ -1037,16 +1034,16 @@ impl InlineAssistant { for assist_id in assist_ids { if let Some(assist) = self.assists.get(assist_id) { let codegen = assist.codegen.read(cx); - let buffer = codegen.buffer.read(cx).read(cx); - foreground_ranges.extend(codegen.last_equal_ranges().iter().cloned()); + let buffer = codegen.buffer(cx).read(cx).read(cx); + foreground_ranges.extend(codegen.last_equal_ranges(cx).iter().cloned()); let pending_range = - codegen.edit_position.unwrap_or(assist.range.start)..assist.range.end; + codegen.edit_position(cx).unwrap_or(assist.range.start)..assist.range.end; if pending_range.end.to_offset(&buffer) > pending_range.start.to_offset(&buffer) { gutter_pending_ranges.push(pending_range); } - if let Some(edit_position) = codegen.edit_position { + if let Some(edit_position) = codegen.edit_position(cx) { let edited_range = assist.range.start..edit_position; if edited_range.end.to_offset(&buffer) > edited_range.start.to_offset(&buffer) { gutter_transformed_ranges.push(edited_range); @@ -1054,7 +1051,8 @@ impl InlineAssistant { } if assist.decorations.is_some() { - 
inserted_row_ranges.extend(codegen.diff.inserted_row_ranges.iter().cloned()); + inserted_row_ranges + .extend(codegen.diff(cx).inserted_row_ranges.iter().cloned()); } } } @@ -1125,9 +1123,9 @@ impl InlineAssistant { }; let codegen = assist.codegen.read(cx); - let old_snapshot = codegen.snapshot.clone(); - let old_buffer = codegen.old_buffer.clone(); - let deleted_row_ranges = codegen.diff.deleted_row_ranges.clone(); + let old_snapshot = codegen.snapshot(cx); + let old_buffer = codegen.old_buffer(cx); + let deleted_row_ranges = codegen.diff(cx).deleted_row_ranges.clone(); editor.update(cx, |editor, cx| { let old_blocks = mem::take(&mut decorations.removed_line_block_ids); @@ -1406,8 +1404,15 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let gutter_dimensions = *self.gutter_dimensions.lock(); - let status = &self.codegen.read(cx).status; - let buttons = match status { + let codegen = self.codegen.read(cx); + + let mut buttons = Vec::new(); + if codegen.alternative_count(cx) > 1 { + buttons.push(self.render_cycle_controls(cx)); + } + + let status = codegen.status(cx); + buttons.extend(match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ -1416,14 +1421,16 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("start", IconName::SparkleAlt) .icon_color(Color::Muted) .shape(IconButtonShape::Square) .tooltip(|cx| Tooltip::for_action("Transform", &menu::Confirm, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StartRequested)), - ), + ) + .into_any_element(), ] } CodegenStatus::Pending => { @@ -1434,7 +1441,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::text("Cancel Assist", cx)) .on_click( cx.listener(|_, _, cx| 
cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("stop", IconName::Stop) .icon_color(Color::Error) .shape(IconButtonShape::Square) @@ -1446,9 +1454,8 @@ impl Render for PromptEditor { cx, ) }) - .on_click( - cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested)), - ), + .on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested))) + .into_any_element(), ] } CodegenStatus::Error(_) | CodegenStatus::Done => { @@ -1459,7 +1466,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), if self.edited_since_done || matches!(status, CodegenStatus::Error(_)) { IconButton::new("restart", IconName::RotateCw) .icon_color(Color::Info) @@ -1475,6 +1483,7 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::StartRequested); })) + .into_any_element() } else { IconButton::new("confirm", IconName::Check) .icon_color(Color::Info) @@ -1483,12 +1492,14 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::ConfirmRequested); })) + .into_any_element() }, ] } - }; + }); h_flex() + .key_context("PromptEditor") .bg(cx.theme().colors().editor_background) .border_y_1() .border_color(cx.theme().status().info_border) @@ -1498,6 +1509,8 @@ impl Render for PromptEditor { .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) .on_action(cx.listener(Self::move_down)) + .capture_action(cx.listener(Self::cycle_prev)) + .capture_action(cx.listener(Self::cycle_next)) .child( h_flex() .w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0)) @@ -1532,7 +1545,7 @@ impl Render for PromptEditor { ), ) .map(|el| { - let CodegenStatus::Error(error) = &self.codegen.read(cx).status else { + let CodegenStatus::Error(error) = self.codegen.read(cx).status(cx) else { 
return el; }; @@ -1776,7 +1789,7 @@ impl PromptEditor { } fn handle_codegen_changed(&mut self, _: Model, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { self.editor .update(cx, |editor, _| editor.set_read_only(false)); @@ -1807,7 +1820,7 @@ impl PromptEditor { } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle | CodegenStatus::Done | CodegenStatus::Error(_) => { cx.emit(PromptEditorEvent::CancelRequested); } @@ -1818,7 +1831,7 @@ impl PromptEditor { } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { cx.emit(PromptEditorEvent::StartRequested); } @@ -1878,6 +1891,79 @@ impl PromptEditor { } } + fn cycle_prev(&mut self, _: &CyclePreviousInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)); + } + + fn cycle_next(&mut self, _: &CycleNextInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)); + } + + fn render_cycle_controls(&self, cx: &ViewContext) -> AnyElement { + let codegen = self.codegen.read(cx); + let disabled = matches!(codegen.status(cx), CodegenStatus::Idle); + + h_flex() + .child( + IconButton::new("previous", IconName::ChevronLeft) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Alternative", + &CyclePreviousInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)) + })), + ) + .child( + Label::new(format!( + "{}/{}", + codegen.active_alternative + 1, + codegen.alternative_count(cx) 
+ )) + .size(LabelSize::Small) + .color(if disabled { + Color::Disabled + } else { + Color::Muted + }), + ) + .child( + IconButton::new("next", IconName::ChevronRight) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Alternative", + &CycleNextInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)) + })), + ) + .into_any_element() + } + fn render_token_count(&self, cx: &mut ViewContext) -> Option { let model = LanguageModelRegistry::read_global(cx).active_model()?; let token_counts = self.token_counts?; @@ -2124,7 +2210,7 @@ impl InlineAssist { return; }; - if let CodegenStatus::Error(error) = &codegen.read(cx).status { + if let CodegenStatus::Error(error) = codegen.read(cx).status(cx) { if assist.decorations.is_none() { if let Some(workspace) = assist .workspace @@ -2185,12 +2271,9 @@ impl InlineAssist { return future::ready(Err(anyhow!("no user prompt"))).boxed(); }; let assistant_panel_context = self.assistant_panel_context(cx); - self.codegen.read(cx).count_tokens( - self.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + self.codegen + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) } } @@ -2201,19 +2284,216 @@ struct InlineAssistDecorations { end_block_id: CustomBlockId, } -#[derive(Debug)] +#[derive(Copy, Clone, Debug)] pub enum CodegenEvent { Finished, Undone, } pub struct Codegen { + alternatives: Vec>, + active_alternative: usize, + subscriptions: Vec, + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, +} + +impl Codegen { + pub fn new( + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, + cx: &mut ModelContext, + ) -> Self { + let codegen = cx.new_model(|cx| { + 
CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + telemetry.clone(), + builder.clone(), + cx, + ) + }); + let mut this = Self { + alternatives: vec![codegen], + active_alternative: 0, + subscriptions: Vec::new(), + buffer, + range, + initial_transaction_id, + telemetry, + builder, + }; + this.activate(0, cx); + this + } + + fn subscribe_to_alternative(&mut self, cx: &mut ModelContext) { + let codegen = self.active_alternative().clone(); + self.subscriptions.clear(); + self.subscriptions + .push(cx.observe(&codegen, |_, _, cx| cx.notify())); + self.subscriptions + .push(cx.subscribe(&codegen, |_, _, event, cx| cx.emit(*event))); + } + + fn active_alternative(&self) -> &Model { + &self.alternatives[self.active_alternative] + } + + fn status<'a>(&self, cx: &'a AppContext) -> &'a CodegenStatus { + &self.active_alternative().read(cx).status + } + + fn alternative_count(&self, cx: &AppContext) -> usize { + LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + .len() + + 1 + } + + pub fn cycle_prev(&mut self, cx: &mut ModelContext) { + let next_active_ix = if self.active_alternative == 0 { + self.alternatives.len() - 1 + } else { + self.active_alternative - 1 + }; + self.activate(next_active_ix, cx); + } + + pub fn cycle_next(&mut self, cx: &mut ModelContext) { + let next_active_ix = (self.active_alternative + 1) % self.alternatives.len(); + self.activate(next_active_ix, cx); + } + + fn activate(&mut self, index: usize, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(false, cx)); + self.active_alternative = index; + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(true, cx)); + self.subscribe_to_alternative(cx); + cx.notify(); + } + + pub fn start( + &mut self, + user_prompt: String, + assistant_panel_context: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let alternative_models = LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + 
.to_vec(); + + self.active_alternative() + .update(cx, |alternative, cx| alternative.undo(cx)); + self.activate(0, cx); + self.alternatives.truncate(1); + + for _ in 0..alternative_models.len() { + self.alternatives.push(cx.new_model(|cx| { + CodegenAlternative::new( + self.buffer.clone(), + self.range.clone(), + false, + self.telemetry.clone(), + self.builder.clone(), + cx, + ) + })); + } + + let primary_model = LanguageModelRegistry::read_global(cx) + .active_model() + .context("no active model")?; + + for (model, alternative) in iter::once(primary_model) + .chain(alternative_models) + .zip(&self.alternatives) + { + alternative.update(cx, |alternative, cx| { + alternative.start( + user_prompt.clone(), + assistant_panel_context.clone(), + model.clone(), + cx, + ) + })?; + } + + Ok(()) + } + + pub fn stop(&mut self, cx: &mut ModelContext) { + for codegen in &self.alternatives { + codegen.update(cx, |codegen, cx| codegen.stop(cx)); + } + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.undo(cx)); + + self.buffer.update(cx, |buffer, cx| { + if let Some(transaction_id) = self.initial_transaction_id.take() { + buffer.undo_transaction(transaction_id, cx); + buffer.refresh_preview(cx); + } + }); + } + + pub fn count_tokens( + &self, + user_prompt: String, + assistant_panel_context: Option, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + self.active_alternative() + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) + } + + pub fn buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).buffer.clone() + } + + pub fn old_buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).old_buffer.clone() + } + + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { + self.active_alternative().read(cx).snapshot.clone() + } + + pub fn edit_position(&self, cx: &AppContext) -> Option { + self.active_alternative().read(cx).edit_position + } 
+ + fn diff<'a>(&self, cx: &'a AppContext) -> &'a Diff { + &self.active_alternative().read(cx).diff + } + + pub fn last_equal_ranges<'a>(&self, cx: &'a AppContext) -> &'a [Range] { + self.active_alternative().read(cx).last_equal_ranges() + } +} + +impl EventEmitter for Codegen {} + +pub struct CodegenAlternative { buffer: Model, old_buffer: Model, snapshot: MultiBufferSnapshot, edit_position: Option, + range: Range, last_equal_ranges: Vec>, - initial_transaction_id: Option, transformation_transaction_id: Option, status: CodegenStatus, generation: Task<()>, @@ -2221,6 +2501,9 @@ pub struct Codegen { telemetry: Option>, _subscription: gpui::Subscription, builder: Arc, + active: bool, + edits: Vec<(Range, String)>, + line_operations: Vec, } enum CodegenStatus { @@ -2242,13 +2525,13 @@ impl Diff { } } -impl EventEmitter for Codegen {} +impl EventEmitter for CodegenAlternative {} -impl Codegen { +impl CodegenAlternative { pub fn new( buffer: Model, range: Range, - initial_transaction_id: Option, + active: bool, telemetry: Option>, builder: Arc, cx: &mut ModelContext, @@ -2287,8 +2570,33 @@ impl Codegen { diff: Diff::default(), telemetry, _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), - initial_transaction_id, builder, + active, + edits: Vec::new(), + line_operations: Vec::new(), + range, + } + } + + fn set_active(&mut self, active: bool, cx: &mut ModelContext) { + if active != self.active { + self.active = active; + + if self.active { + let edits = self.edits.clone(); + self.apply_edits(edits, cx); + if matches!(self.status, CodegenStatus::Pending) { + let line_operations = self.line_operations.clone(); + self.reapply_line_based_diff(line_operations, cx); + } else { + self.reapply_batch_diff(cx).detach(); + } + } else if let Some(transaction_id) = self.transformation_transaction_id.take() { + self.buffer.update(cx, |buffer, cx| { + buffer.undo_transaction(transaction_id, cx); + buffer.forget_transaction(transaction_id, cx); + }); + } } } @@ -2313,14 
+2621,12 @@ impl Codegen { pub fn count_tokens( &self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, cx: &AppContext, ) -> BoxFuture<'static, Result> { if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { - let request = - self.build_request(user_prompt, assistant_panel_context.clone(), edit_range, cx); + let request = self.build_request(user_prompt, assistant_panel_context.clone(), cx); match request { Ok(request) => { let total_count = model.count_tokens(request.clone(), cx); @@ -2345,39 +2651,31 @@ impl Codegen { pub fn start( &mut self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, + model: Arc, cx: &mut ModelContext, ) -> Result<()> { - let model = LanguageModelRegistry::read_global(cx) - .active_model() - .context("no active model")?; - if let Some(transformation_transaction_id) = self.transformation_transaction_id.take() { self.buffer.update(cx, |buffer, cx| { buffer.undo_transaction(transformation_transaction_id, cx); }); } - self.edit_position = Some(edit_range.start.bias_right(&self.snapshot)); + self.edit_position = Some(self.range.start.bias_right(&self.snapshot)); let telemetry_id = model.telemetry_id(); - let chunks: LocalBoxFuture>>> = if user_prompt - .trim() - .to_lowercase() - == "delete" - { - async { Ok(stream::empty().boxed()) }.boxed_local() - } else { - let request = - self.build_request(user_prompt, assistant_panel_context, edit_range.clone(), cx)?; + let chunks: LocalBoxFuture>>> = + if user_prompt.trim().to_lowercase() == "delete" { + async { Ok(stream::empty().boxed()) }.boxed_local() + } else { + let request = self.build_request(user_prompt, assistant_panel_context, cx)?; - let chunks = - cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }); - async move { Ok(chunks.await?.boxed()) }.boxed_local() - }; - self.handle_stream(telemetry_id, edit_range, chunks, cx); + let chunks = cx + .spawn(|_, cx| async move { 
model.stream_completion_text(request, &cx).await }); + async move { Ok(chunks.await?.boxed()) }.boxed_local() + }; + self.handle_stream(telemetry_id, chunks, cx); Ok(()) } @@ -2385,11 +2683,10 @@ impl Codegen { &self, user_prompt: String, assistant_panel_context: Option, - edit_range: Range, cx: &AppContext, ) -> Result { let buffer = self.buffer.read(cx).snapshot(cx); - let language = buffer.language_at(edit_range.start); + let language = buffer.language_at(self.range.start); let language_name = if let Some(language) = language.as_ref() { if Arc::ptr_eq(language, &language::PLAIN_TEXT) { None @@ -2401,8 +2698,8 @@ impl Codegen { }; let language_name = language_name.as_ref(); - let start = buffer.point_to_buffer_offset(edit_range.start); - let end = buffer.point_to_buffer_offset(edit_range.end); + let start = buffer.point_to_buffer_offset(self.range.start); + let end = buffer.point_to_buffer_offset(self.range.end); let (buffer, range) = if let Some((start, end)) = start.zip(end) { let (start_buffer, start_buffer_offset) = start; let (end_buffer, end_buffer_offset) = end; @@ -2442,16 +2739,15 @@ impl Codegen { pub fn handle_stream( &mut self, model_telemetry_id: String, - edit_range: Range, stream: impl 'static + Future>>>, cx: &mut ModelContext, ) { let snapshot = self.snapshot.clone(); let selected_text = snapshot - .text_for_range(edit_range.start..edit_range.end) + .text_for_range(self.range.start..self.range.end) .collect::(); - let selection_start = edit_range.start.to_point(&snapshot); + let selection_start = self.range.start.to_point(&snapshot); // Start with the indentation of the first line in the selection let mut suggested_line_indent = snapshot @@ -2462,7 +2758,7 @@ impl Codegen { // If the first line in the selection does not have indentation, check the following lines if suggested_line_indent.len == 0 && suggested_line_indent.kind == IndentKind::Space { - for row in selection_start.row..=edit_range.end.to_point(&snapshot).row { + for row in 
selection_start.row..=self.range.end.to_point(&snapshot).row { let line_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); // Prefer tabs if a line in the selection uses tabs as indentation if line_indent.kind == IndentKind::Tab { @@ -2475,7 +2771,7 @@ impl Codegen { let telemetry = self.telemetry.clone(); self.diff = Diff::default(); self.status = CodegenStatus::Pending; - let mut edit_start = edit_range.start.to_offset(&snapshot); + let mut edit_start = self.range.start.to_offset(&snapshot); self.generation = cx.spawn(|codegen, mut cx| { async move { let chunks = stream.await; @@ -2597,68 +2893,42 @@ impl Codegen { Ok(()) }); - while let Some((char_ops, line_diff)) = diff_rx.next().await { + while let Some((char_ops, line_ops)) = diff_rx.next().await { codegen.update(&mut cx, |codegen, cx| { codegen.last_equal_ranges.clear(); - let transaction = codegen.buffer.update(cx, |buffer, cx| { - // Avoid grouping assistant edits with user edits. - buffer.finalize_last_transaction(cx); + let edits = char_ops + .into_iter() + .filter_map(|operation| match operation { + CharOperation::Insert { text } => { + let edit_start = snapshot.anchor_after(edit_start); + Some((edit_start..edit_start, text)) + } + CharOperation::Delete { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + Some((edit_range, String::new())) + } + CharOperation::Keep { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + codegen.last_equal_ranges.push(edit_range); + None + } + }) + .collect::>(); - buffer.start_transaction(cx); - buffer.edit( - char_ops - .into_iter() - .filter_map(|operation| match operation { - CharOperation::Insert { text } => { - let edit_start = snapshot.anchor_after(edit_start); - Some((edit_start..edit_start, text)) - } - CharOperation::Delete { 
bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - Some((edit_range, String::new())) - } - CharOperation::Keep { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - codegen.last_equal_ranges.push(edit_range); - None - } - }), - None, - cx, - ); - codegen.edit_position = Some(snapshot.anchor_after(edit_start)); - - buffer.end_transaction(cx) - }); - - if let Some(transaction) = transaction { - if let Some(first_transaction) = - codegen.transformation_transaction_id - { - // Group all assistant edits into the first transaction. - codegen.buffer.update(cx, |buffer, cx| { - buffer.merge_transactions( - transaction, - first_transaction, - cx, - ) - }); - } else { - codegen.transformation_transaction_id = Some(transaction); - codegen.buffer.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(cx) - }); - } + if codegen.active { + codegen.apply_edits(edits.iter().cloned(), cx); + codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx); } - - codegen.reapply_line_based_diff(edit_range.clone(), line_diff, cx); + codegen.edits.extend(edits); + codegen.line_operations = line_ops; + codegen.edit_position = Some(snapshot.anchor_after(edit_start)); cx.notify(); })?; @@ -2667,9 +2937,8 @@ impl Codegen { // Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer. // That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff. // It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`. 
- let batch_diff_task = codegen.update(&mut cx, |codegen, cx| { - codegen.reapply_batch_diff(edit_range.clone(), cx) - })?; + let batch_diff_task = + codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?; let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task); line_based_stream_diff?; @@ -2713,24 +2982,45 @@ impl Codegen { buffer.undo_transaction(transaction_id, cx); buffer.refresh_preview(cx); } - - if let Some(transaction_id) = self.initial_transaction_id.take() { - buffer.undo_transaction(transaction_id, cx); - buffer.refresh_preview(cx); - } }); } + fn apply_edits( + &mut self, + edits: impl IntoIterator, String)>, + cx: &mut ModelContext, + ) { + let transaction = self.buffer.update(cx, |buffer, cx| { + // Avoid grouping assistant edits with user edits. + buffer.finalize_last_transaction(cx); + buffer.start_transaction(cx); + buffer.edit(edits, None, cx); + buffer.end_transaction(cx) + }); + + if let Some(transaction) = transaction { + if let Some(first_transaction) = self.transformation_transaction_id { + // Group all assistant edits into the first transaction. 
+ self.buffer.update(cx, |buffer, cx| { + buffer.merge_transactions(transaction, first_transaction, cx) + }); + } else { + self.transformation_transaction_id = Some(transaction); + self.buffer + .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); + } + } + } + fn reapply_line_based_diff( &mut self, - edit_range: Range, - line_operations: Vec, + line_operations: impl IntoIterator, cx: &mut ModelContext, ) { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); let mut old_row = old_range.start.row; let mut new_row = new_range.start.row; @@ -2781,15 +3071,11 @@ impl Codegen { } } - fn reapply_batch_diff( - &mut self, - edit_range: Range, - cx: &mut ModelContext, - ) -> Task<()> { + fn reapply_batch_diff(&mut self, cx: &mut ModelContext) -> Task<()> { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); cx.spawn(|codegen, mut cx| async move { let (deleted_row_ranges, inserted_row_ranges) = cx @@ -3073,10 +3359,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3087,7 +3373,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range, future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3145,10 +3430,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - 
Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3159,7 +3444,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3220,10 +3504,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3234,7 +3518,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3294,10 +3577,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3308,7 +3591,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3338,6 +3620,78 @@ mod tests { ); } + #[gpui::test] + async fn test_inactive_codegen_alternative(cx: &mut TestAppContext) { + cx.update(LanguageModelRegistry::test); + cx.set_global(cx.update(SettingsStore::test)); + cx.update(language_settings::init); + + let text = indoc! 
{" + fn main() { + let x = 0; + } + "}; + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let range = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 14)) + }); + let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); + let codegen = cx.new_model(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + None, + prompt_builder, + cx, + ) + }); + + let (chunks_tx, chunks_rx) = mpsc::unbounded(); + codegen.update(cx, |codegen, cx| { + codegen.handle_stream( + String::new(), + future::ready(Ok(chunks_rx.map(Ok).boxed())), + cx, + ) + }); + + chunks_tx + .unbounded_send("let mut x = 0;\nx += 1;".to_string()) + .unwrap(); + drop(chunks_tx); + cx.run_until_parked(); + + // The codegen is inactive, so the buffer doesn't get modified. + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + + // Activating the codegen applies the changes. + codegen.update(cx, |codegen, cx| codegen.set_active(true, cx)); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! {" + fn main() { + let mut x = 0; + x += 1; + } + "} + ); + + // Deactivating the codegen undoes the changes. 
+ codegen.update(cx, |codegen, cx| codegen.set_active(false, cx)); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + } + #[gpui::test] async fn test_strip_invalid_spans_from_codeblock() { assert_chunks("Lorem ipsum dolor", "Lorem ipsum dolor").await; diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index b3c8ef5f57..e1ba1c5886 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -76,6 +76,7 @@ impl Global for GlobalLanguageModelRegistry {} pub struct LanguageModelRegistry { active_model: Option, providers: BTreeMap>, + inline_alternatives: Vec>, } pub struct ActiveModel { @@ -229,6 +230,37 @@ impl LanguageModelRegistry { pub fn active_model(&self) -> Option> { self.active_model.as_ref()?.model.clone() } + + /// Selects and sets the inline alternatives for language models based on + /// provider name and id. + pub fn select_inline_alternative_models( + &mut self, + alternatives: impl IntoIterator, + cx: &mut ModelContext, + ) { + let mut selected_alternatives = Vec::new(); + + for (provider_id, model_id) in alternatives { + if let Some(provider) = self.providers.get(&provider_id) { + if let Some(model) = provider + .provided_models(cx) + .iter() + .find(|m| m.id() == model_id) + { + selected_alternatives.push(model.clone()); + } + } + } + + self.inline_alternatives = selected_alternatives; + } + + /// The models to use for inline assists. Returns the union of the active + /// model and all inline alternatives. When there are multiple models, the + /// user will be able to cycle through results. 
+ pub fn inline_alternative_models(&self) -> &[Arc] { + &self.inline_alternatives + } } #[cfg(test)] diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 29bd9a8068..c163dbc07a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1106,6 +1106,26 @@ impl MultiBuffer { } } + pub fn forget_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut ModelContext, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.forget_transaction(transaction_id); + }); + } else if let Some(transaction) = self.history.forget(transaction_id) { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(state) = self.buffers.borrow_mut().get_mut(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.forget_transaction(buffer_transaction_id); + }); + } + } + } + } + pub fn stream_excerpts_with_context_lines( &mut self, buffer: Model, diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index bcdf461e2c..17b52a27d8 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -20,6 +20,7 @@ To further customize providers, you can use `settings.json` to do that as follow - [Configuring endpoints](#custom-endpoint) - [Configuring timeouts](#provider-timeout) - [Configuring default model](#default-model) +- [Configuring alternative models for inline assists](#alternative-assists) ### Zed AI {#zed-ai} @@ -264,6 +265,31 @@ You can also manually edit the `default_model` object in your settings: } ``` +#### Configuring alternative models for inline assists {#alternative-assists} + +You can configure additional models that will be used to perform inline assists in parallel. When you do this, +the inline assist UI will surface controls to cycle between the alternatives generated by each model. 
The models +you specify here are always used in _addition_ to your default model. For example, the following configuration +will generate two outputs for every assist. One with Claude 3.5 Sonnet, and one with GPT-4o. + +```json +{ + "assistant": { + "default_model": { + "provider": "zed.dev", + "model": "claude-3-5-sonnet" + }, + "inline_alternatives": [ + { + "provider": "zed.dev", + "model": "gpt-4o" + } + ], + "version": "2" + } +} +``` + #### Common Panel Settings | key | type | default | description | From 8103ac12bfc596f0f32f041239e0e26f9c2ee4cc Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Fri, 20 Sep 2024 06:36:50 +0300 Subject: [PATCH 239/270] ssh-remoting: Tidy up the code a bit after #18094 (#18102) Release Notes: - N/A --- crates/client/src/client.rs | 2 +- crates/project/src/worktree_store.rs | 7 ++++--- crates/remote/src/ssh_session.rs | 2 +- crates/rpc/src/proto_client.rs | 6 +++--- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index a8387f7c5a..48bd646d8a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1622,7 +1622,7 @@ impl ProtoClient for Client { &self.handler_set } - fn goes_via_collab(&self) -> bool { + fn is_via_collab(&self) -> bool { true } } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 7fae8b9e1d..5c3b2a00a9 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -313,9 +313,10 @@ impl WorktreeStore { }) } + #[track_caller] pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { let worktree_id = worktree.read(cx).id(); - debug_assert!(!self.worktrees().any(|w| w.read(cx).id() == worktree_id)); + debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id)); let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible(); let handle = if push_strong_handle { @@ 
-487,7 +488,7 @@ impl WorktreeStore { }; // collab has bad concurrency guarantees, so we send requests in serial. - let update_project = if downstream_client.goes_via_collab() { + let update_project = if downstream_client.is_via_collab() { Some(downstream_client.request(update)) } else { downstream_client.send(update).log_err(); @@ -508,7 +509,7 @@ impl WorktreeStore { move |update| { let client = client.clone(); async move { - if client.goes_via_collab() { + if client.is_via_collab() { client.request(update).map(|result| result.is_ok()).await } else { client.send(update).is_ok() diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 10608b74f3..2bd18aa37e 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -471,7 +471,7 @@ impl ProtoClient for SshSession { &self.state } - fn goes_via_collab(&self) -> bool { + fn is_via_collab(&self) -> bool { false } } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 89ef580cdf..8809910276 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -28,7 +28,7 @@ pub trait ProtoClient: Send + Sync { fn message_handler_set(&self) -> &parking_lot::Mutex; - fn goes_via_collab(&self) -> bool; + fn is_via_collab(&self) -> bool; } #[derive(Default)] @@ -141,8 +141,8 @@ impl AnyProtoClient { Self(client) } - pub fn goes_via_collab(&self) -> bool { - self.0.goes_via_collab() + pub fn is_via_collab(&self) -> bool { + self.0.is_via_collab() } pub fn request( From 579267f399816ae9e54b79c92949384a0ac8455a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 08:04:26 +0200 Subject: [PATCH 240/270] docs: Update JavaScript docs and remove TBDs (#17989) Release Notes: - N/A --- docs/src/languages/javascript.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index 8fb84881ad..7e74cbbfae 100644 --- 
a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -26,17 +26,15 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t } ``` - +Zed supports JSDoc syntax in JavaScript and TypeScript comments that match the JSDoc syntax. Zed uses [tree-sitter/tree-sitter-jsdoc](https://github.com/tree-sitter/tree-sitter-jsdoc) for parsing and highlighting JSDoc. ## ESLint From 93730983dd31bad1855edd3d5943a617f83f2b40 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 08:04:49 +0200 Subject: [PATCH 241/270] ssh remoting: Restore items/buffers when opening SSH project (#18083) Demo: https://github.com/user-attachments/assets/ab79ed0d-13a6-4ae7-8e76-6365fc322ec4 Release Notes: - N/A Co-authored-by: Bennet --- crates/editor/src/items.rs | 8 ++++++-- crates/workspace/src/workspace.rs | 27 +++++++++++++++++---------- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index e3e8ca604b..3d04eb82d3 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1087,10 +1087,14 @@ impl SerializableItem for Editor { let workspace_id = workspace.database_id()?; let buffer = self.buffer().read(cx).as_singleton()?; + let path = buffer + .read(cx) + .file() + .map(|file| file.full_path(cx)) + .and_then(|full_path| project.read(cx).find_project_path(&full_path, cx)) + .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)); let is_dirty = buffer.read(cx).is_dirty(); - let local_file = buffer.read(cx).file().and_then(|file| file.as_local()); - let path = local_file.map(|file| file.abs_path(cx)); let mtime = buffer.read(cx).saved_mtime(); let snapshot = buffer.read(cx).snapshot(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 5855dcce1e..92a85299f4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1114,18 +1114,16 @@ impl Workspace { } // Get 
project paths for all of the abs_paths - let mut worktree_roots: HashSet> = Default::default(); let mut project_paths: Vec<(PathBuf, Option)> = Vec::with_capacity(paths_to_open.len()); for path in paths_to_open.into_iter() { - if let Some((worktree, project_entry)) = cx + if let Some((_, project_entry)) = cx .update(|cx| { Workspace::project_path_for_path(project_handle.clone(), &path, true, cx) })? .await .log_err() { - worktree_roots.extend(worktree.update(&mut cx, |tree, _| tree.abs_path()).ok()); project_paths.push((path, Some(project_entry))); } else { project_paths.push((path, None)); @@ -5532,12 +5530,13 @@ pub fn open_ssh_project( let serialized_workspace = persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); - let workspace_id = - if let Some(workspace_id) = serialized_workspace.map(|workspace| workspace.id) { - workspace_id - } else { - persistence::DB.next_id().await? - }; + let workspace_id = if let Some(workspace_id) = + serialized_workspace.as_ref().map(|workspace| workspace.id) + { + workspace_id + } else { + persistence::DB.next_id().await? + }; cx.update_window(window.into(), |_, cx| { cx.replace_root_view(|cx| { @@ -5548,7 +5547,15 @@ pub fn open_ssh_project( }); })?; - window.update(&mut cx, |_, cx| cx.activate_window()) + window + .update(&mut cx, |_, cx| { + cx.activate_window(); + + open_items(serialized_workspace, vec![], app_state, cx) + })? + .await?; + + Ok(()) }) } From ace4d5185dbd53023f8b583df781bc96f891b80a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 10:53:06 +0200 Subject: [PATCH 242/270] settings: Show notification when user/project settings fail to parse (#18122) Closes #16876 We only ever showed parsing errors, but not if something failed to deserialize. Basically, if you had a stray `,` somewhere, we'd show a notification for user errors, but only squiggly lines if you had a `[]` instead of a `{}`. The squiggly lines would only show up when there were schema errors. 
In the case of `formatter` settings, for example, if someone put in a `{}` instead of `[]`, we'd never show anything. With this change we always show a notification if parsing user or project settings fails. (Right now, the error message might still be bad, but that's a separate change) Release Notes: - Added a notification to warn users if their user settings or project-local settings failed to deserialize. Demo: https://github.com/user-attachments/assets/e5c48165-f2f7-4b5c-9c6d-6ea74f678683 --- crates/language/src/language_settings.rs | 7 ++ crates/project/src/project.rs | 26 ++++++- crates/project/src/project_settings.rs | 45 ++++++++++-- crates/settings/src/settings.rs | 4 +- crates/settings/src/settings_store.rs | 93 ++++++++++++++++-------- crates/workspace/src/workspace.rs | 19 ++++- crates/zed/src/main.rs | 28 ++++--- 7 files changed, 172 insertions(+), 50 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 77c9a1d18c..6121cb6a39 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1152,6 +1152,13 @@ mod tests { ); } + #[test] + fn test_formatter_deserialization_invalid() { + let raw_auto = "{\"formatter\": {}}"; + let result: Result = serde_json::from_str(raw_auto); + assert!(result.is_err()); + } + #[test] pub fn test_resolve_language_servers() { fn language_server_names(names: &[&str]) -> Vec { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fcf10d11c2..435c143024 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -59,12 +59,14 @@ use node_runtime::NodeRuntime; use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; pub use prettier_store::PrettierStore; -use project_settings::{ProjectSettings, SettingsObserver}; +use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; use 
remote::SshSession; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; use search_history::SearchHistory; -use settings::{watch_config_file, Settings, SettingsLocation, SettingsStore}; +use settings::{ + watch_config_file, InvalidSettingsError, Settings, SettingsLocation, SettingsStore, +}; use smol::channel::Receiver; use snippet::Snippet; use snippet_provider::SnippetProvider; @@ -230,6 +232,7 @@ pub enum Event { LanguageServerRemoved(LanguageServerId), LanguageServerLog(LanguageServerId, LanguageServerLogType, String), Notification(String), + LocalSettingsUpdated(Result<(), InvalidSettingsError>), LanguageServerPrompt(LanguageServerPromptRequest), LanguageNotFound(Model), ActiveEntryChanged(Option), @@ -644,6 +647,8 @@ impl Project { let settings_observer = cx.new_model(|cx| { SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx) }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let environment = ProjectEnvironment::new(&worktree_store, env, cx); let lsp_store = cx.new_model(|cx| { @@ -729,6 +734,8 @@ impl Project { let settings_observer = cx.new_model(|cx| { SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let environment = ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { @@ -913,6 +920,8 @@ impl Project { cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let mut this = Self { buffer_ordered_messages_tx: tx, @@ -2058,6 +2067,19 @@ impl Project { } } + fn on_settings_observer_event( + &mut self, + _: Model, + event: &SettingsObserverEvent, + cx: &mut ModelContext, + ) { + match event { + SettingsObserverEvent::LocalSettingsUpdated(error) => 
{ + cx.emit(Event::LocalSettingsUpdated(error.clone())) + } + } + } + fn on_worktree_store_event( &mut self, _: Model, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 2eeb840896..9a7c80703c 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,11 +1,11 @@ use collections::HashMap; use fs::Fs; -use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Model, ModelContext}; +use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore}; +use settings::{InvalidSettingsError, Settings, SettingsSources, SettingsStore}; use std::{ path::{Path, PathBuf}, sync::Arc, @@ -176,6 +176,13 @@ pub enum SettingsObserverMode { Remote, } +#[derive(Clone, Debug, PartialEq)] +pub enum SettingsObserverEvent { + LocalSettingsUpdated(Result<(), InvalidSettingsError>), +} + +impl EventEmitter for SettingsObserver {} + pub struct SettingsObserver { mode: SettingsObserverMode, downstream_client: Option, @@ -415,11 +422,16 @@ impl SettingsObserver { ) { let worktree_id = worktree.read(cx).id(); let remote_worktree_id = worktree.read(cx).id(); - cx.update_global::(|store, cx| { + + let result = cx.update_global::>(|store, cx| { for (directory, file_content) in settings_contents { - store - .set_local_settings(worktree_id, directory.clone(), file_content.as_deref(), cx) - .log_err(); + store.set_local_settings( + worktree_id, + directory.clone(), + file_content.as_deref(), + cx, + )?; + if let Some(downstream_client) = &self.downstream_client { downstream_client .send(proto::UpdateWorktreeSettings { @@ -431,6 +443,25 @@ impl SettingsObserver { .log_err(); } } - }) + anyhow::Ok(()) + }); + + match result { + Err(error) => { + if let Ok(error) = 
error.downcast::() { + if let InvalidSettingsError::LocalSettings { + ref path, + ref message, + } = error + { + log::error!("Failed to set local settings in {:?}: {:?}", path, message); + cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Err(error))); + } + } + } + Ok(()) => { + cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Ok(()))); + } + } } } diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 5ece3f867e..f1f8591bba 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -13,7 +13,9 @@ pub use editable_setting_control::*; pub use json_schema::*; pub use keymap_file::KeymapFile; pub use settings_file::*; -pub use settings_store::{Settings, SettingsLocation, SettingsSources, SettingsStore}; +pub use settings_store::{ + InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore, +}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 3ef8bffe2d..20bf52f2c5 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -3,6 +3,7 @@ use collections::{btree_map, hash_map, BTreeMap, HashMap}; use fs::Fs; use futures::{channel::mpsc, future::LocalBoxFuture, FutureExt, StreamExt}; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Global, Task, UpdateGlobal}; +use paths::local_settings_file_relative_path; use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema}; use serde::{de::DeserializeOwned, Deserialize as _, Serialize}; use smallvec::SmallVec; @@ -10,7 +11,7 @@ use std::{ any::{type_name, Any, TypeId}, fmt::Debug, ops::Range, - path::Path, + path::{Path, PathBuf}, str, sync::{Arc, LazyLock}, }; @@ -694,9 +695,14 @@ impl SettingsStore { .deserialize_setting(&self.raw_extension_settings) .log_err(); - let user_settings = setting_value - .deserialize_setting(&self.raw_user_settings) 
- .log_err(); + let user_settings = match setting_value.deserialize_setting(&self.raw_user_settings) { + Ok(settings) => Some(settings), + Err(error) => { + return Err(anyhow!(InvalidSettingsError::UserSettings { + message: error.to_string() + })); + } + }; let mut release_channel_settings = None; if let Some(release_settings) = &self @@ -746,34 +752,43 @@ impl SettingsStore { break; } - if let Some(local_settings) = - setting_value.deserialize_setting(local_settings).log_err() - { - paths_stack.push(Some((*root_id, path.as_ref()))); - project_settings_stack.push(local_settings); + match setting_value.deserialize_setting(local_settings) { + Ok(local_settings) => { + paths_stack.push(Some((*root_id, path.as_ref()))); + project_settings_stack.push(local_settings); - // If a local settings file changed, then avoid recomputing local - // settings for any path outside of that directory. - if changed_local_path.map_or(false, |(changed_root_id, changed_local_path)| { - *root_id != changed_root_id || !path.starts_with(changed_local_path) - }) { - continue; - } - - if let Some(value) = setting_value - .load_setting( - SettingsSources { - default: &default_settings, - extensions: extension_settings.as_ref(), - user: user_settings.as_ref(), - release_channel: release_channel_settings.as_ref(), - project: &project_settings_stack.iter().collect::>(), + // If a local settings file changed, then avoid recomputing local + // settings for any path outside of that directory. 
+ if changed_local_path.map_or( + false, + |(changed_root_id, changed_local_path)| { + *root_id != changed_root_id || !path.starts_with(changed_local_path) }, - cx, - ) - .log_err() - { - setting_value.set_local_value(*root_id, path.clone(), value); + ) { + continue; + } + + if let Some(value) = setting_value + .load_setting( + SettingsSources { + default: &default_settings, + extensions: extension_settings.as_ref(), + user: user_settings.as_ref(), + release_channel: release_channel_settings.as_ref(), + project: &project_settings_stack.iter().collect::>(), + }, + cx, + ) + .log_err() + { + setting_value.set_local_value(*root_id, path.clone(), value); + } + } + Err(error) => { + return Err(anyhow!(InvalidSettingsError::LocalSettings { + path: path.join(local_settings_file_relative_path()), + message: error.to_string() + })); } } } @@ -782,6 +797,24 @@ impl SettingsStore { } } +#[derive(Debug, Clone, PartialEq)] +pub enum InvalidSettingsError { + LocalSettings { path: PathBuf, message: String }, + UserSettings { message: String }, +} + +impl std::fmt::Display for InvalidSettingsError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + InvalidSettingsError::LocalSettings { message, .. 
} + | InvalidSettingsError::UserSettings { message } => { + write!(f, "{}", message) + } + } + } +} +impl std::error::Error for InvalidSettingsError {} + impl Debug for SettingsStore { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("SettingsStore") diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 92a85299f4..1fbeab38a2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -64,7 +64,7 @@ use project::{ use remote::{SshConnectionOptions, SshSession}; use serde::Deserialize; use session::AppSession; -use settings::Settings; +use settings::{InvalidSettingsError, Settings}; use shared_screen::SharedScreen; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -832,6 +832,23 @@ impl Workspace { } } + project::Event::LocalSettingsUpdated(result) => { + struct LocalSettingsUpdated; + let id = NotificationId::unique::(); + + match result { + Err(InvalidSettingsError::LocalSettings { message, path }) => { + let full_message = + format!("Failed to set local settings in {:?}:\n{}", path, message); + this.show_notification(id, cx, |cx| { + cx.new_view(|_| MessageNotification::new(full_message.clone())) + }) + } + Err(_) => {} + Ok(_) => this.dismiss_notification(&id, cx), + } + } + project::Event::Notification(message) => { struct ProjectNotification; diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3104001f99..6ecdbb224f 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -34,7 +34,9 @@ use parking_lot::Mutex; use recent_projects::open_ssh_project; use release_channel::{AppCommitSha, AppVersion}; use session::{AppSession, Session}; -use settings::{handle_settings_file_changes, watch_config_file, Settings, SettingsStore}; +use settings::{ + handle_settings_file_changes, watch_config_file, InvalidSettingsError, Settings, SettingsStore, +}; use simplelog::ConfigBuilder; use smol::process::Command; use std::{ @@ -626,20 +628,28 
@@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { for workspace in workspace::local_workspace_windows(cx) { workspace - .update(cx, |workspace, cx| match &error { - Some(error) => { - workspace.show_notification(id.clone(), cx, |cx| { - cx.new_view(|_| { - MessageNotification::new(format!("Invalid settings file\n{error}")) + .update(cx, |workspace, cx| { + match error + .as_ref() + .and_then(|error| error.downcast_ref::()) + { + Some(InvalidSettingsError::UserSettings { message }) => { + workspace.show_notification(id.clone(), cx, |cx| { + cx.new_view(|_| { + MessageNotification::new(format!( + "Invalid user settings file\n{message}" + )) .with_click_message("Open settings file") .on_click(|cx| { cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); cx.emit(DismissEvent); }) - }) - }); + }) + }); + } + None => workspace.dismiss_notification(&id, cx), + _ => {} } - None => workspace.dismiss_notification(&id, cx), }) .log_err(); } From 97708fdf43bbd15e3b978412d6682502df2f0d70 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 11:10:19 +0200 Subject: [PATCH 243/270] settings: Follow-up fix to show more errors (#18123) The condition added in #18122 was too strict. 
Release Notes: - N/A --- crates/zed/src/main.rs | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 6ecdbb224f..d3eb97c9aa 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -629,26 +629,28 @@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { for workspace in workspace::local_workspace_windows(cx) { workspace .update(cx, |workspace, cx| { - match error - .as_ref() - .and_then(|error| error.downcast_ref::()) - { - Some(InvalidSettingsError::UserSettings { message }) => { - workspace.show_notification(id.clone(), cx, |cx| { - cx.new_view(|_| { - MessageNotification::new(format!( - "Invalid user settings file\n{message}" - )) - .with_click_message("Open settings file") - .on_click(|cx| { - cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); - cx.emit(DismissEvent); + match error.as_ref() { + Some(error) => { + if let Some(InvalidSettingsError::LocalSettings { .. }) = + error.downcast_ref::() + { + // Local settings will be displayed by the projects + } else { + workspace.show_notification(id.clone(), cx, |cx| { + cx.new_view(|_| { + MessageNotification::new(format!( + "Invalid user settings file\n{error}" + )) + .with_click_message("Open settings file") + .on_click(|cx| { + cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); + cx.emit(DismissEvent); + }) }) - }) - }); + }); + } } None => workspace.dismiss_notification(&id, cx), - _ => {} } }) .log_err(); From ca033e647507ab8b31bc3a4a249f501ccecb0f9c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 08:35:13 -0400 Subject: [PATCH 244/270] Revert "Update nightly tag every night (#17879)" (#18133) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR reverts #17879, as it wasn't working. 
When a GitHub Action pushes a tag, it does not trigger workflows for push events for that tag: > When you use the repository's `GITHUB_TOKEN` to perform tasks, events triggered by the `GITHUB_TOKEN`, with the exception of `workflow_dispatch` and `repository_dispatch`, will not create a new workflow run. This prevents you from accidentally creating recursive workflow runs. For example, if a workflow run pushes code using the repository's `GITHUB_TOKEN`, a new workflow will not run even when the repository contains a workflow configured to run when `push` events occur. > > — [source](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) This reverts commit 761129e3739efacb7b8763eaa0fa8a109e935447. Release Notes: - N/A --- .github/workflows/bump_nightly_tag.yml | 23 ----------------------- .github/workflows/release_nightly.yml | 3 +++ 2 files changed, 3 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/bump_nightly_tag.yml diff --git a/.github/workflows/bump_nightly_tag.yml b/.github/workflows/bump_nightly_tag.yml deleted file mode 100644 index 0959ae9677..0000000000 --- a/.github/workflows/bump_nightly_tag.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Update Nightly Tag - -on: - schedule: - # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) - - cron: "0 7 * * *" - -jobs: - update-nightly-tag: - if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - with: - fetch-depth: 0 - - - name: Update nightly tag - run: | - git config user.name github-actions - git config user.email github-actions@github.com - git tag -f nightly - git push origin nightly --force diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 17db66a264..bcaa60b775 100644 --- 
a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -1,6 +1,9 @@ name: Release Nightly on: + schedule: + # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) + - cron: "0 7 * * *" push: tags: - "nightly" From 90a12f55642410e38df65d7f8381d6ecb3d0c1c2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 14:35:45 +0200 Subject: [PATCH 245/270] ssh remoting: Do not double-register LspAdapters (#18132) This fixes the bug with hover tooltips appearing multiple times. Turns out every time we receive the `CreateLanguageServer` message we'd add a new adapter but only have a single server running for all of them. And we send a `CreateLanguageServer` message every time you open a buffer. What this does is to only add a new adapter if it hasn't already been registered, which is also what we do locally. Release Notes: - N/A --- crates/language/src/language_registry.rs | 34 ++++++++++++++++-- crates/project/src/lsp_store.rs | 44 ++++++++++++------------ 2 files changed, 54 insertions(+), 24 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 17ebef50e8..e264517d5b 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -326,13 +326,43 @@ impl LanguageRegistry { Some(load_lsp_adapter()) } - pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc) { + pub fn register_lsp_adapter( + &self, + language_name: LanguageName, + adapter: Arc, + ) -> Arc { + let cached = CachedLspAdapter::new(adapter); self.state .write() .lsp_adapters .entry(language_name) .or_default() - .push(CachedLspAdapter::new(adapter)); + .push(cached.clone()); + cached + } + + pub fn get_or_register_lsp_adapter( + + &self, + language_name: LanguageName, + server_name: LanguageServerName, + build_adapter: impl FnOnce() -> Arc + 'static, + ) -> Arc { + let registered = self + .state + .write() + .lsp_adapters
.entry(language_name.clone()) + .or_default() + .iter() + .find(|cached_adapter| cached_adapter.name == server_name) + .cloned(); + + if let Some(found) = registered { + found + } else { + let adapter = build_adapter(); + self.register_lsp_adapter(language_name, adapter) + } } /// Register a fake language server and adapter diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 5c32c9030d..92f37f87af 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4475,7 +4475,7 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let name = LanguageServerName::from_proto(envelope.payload.name); + let server_name = LanguageServerName::from_proto(envelope.payload.name); let binary = envelope .payload @@ -4494,6 +4494,14 @@ impl LspStore { let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; this.update(&mut cx, |this, cx| { + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return Err(anyhow!("worktree not found")); + }; + this.languages .register_language(language_name.clone(), None, matcher.clone(), { let language_name = language_name.clone(); @@ -4513,28 +4521,20 @@ impl LspStore { .spawn(this.languages.language_for_name(language_name.0.as_ref())) .detach(); - let adapter = Arc::new(SshLspAdapter::new( - name, - binary, - envelope.payload.initialization_options, - envelope.payload.code_action_kinds, - )); - - this.languages - .register_lsp_adapter(language_name.clone(), adapter.clone()); - let Some(worktree) = this - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - else { - return Err(anyhow!("worktree not found")); - }; - this.start_language_server( - &worktree, - CachedLspAdapter::new(adapter), - language_name, - cx, + let adapter = this.languages.get_or_register_lsp_adapter( + language_name.clone(), + server_name.clone(), + || { + 
Arc::new(SshLspAdapter::new( + server_name, + binary, + envelope.payload.initialization_options, + envelope.payload.code_action_kinds, + )) + }, ); + + this.start_language_server(&worktree, adapter, language_name, cx); Ok(()) })??; Ok(proto::Ack {}) From 16d2afc662ae43cd404279c068bb26557306b9c7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 08:46:23 -0400 Subject: [PATCH 246/270] ci: Bump `nightly` tag on scheduled Nightly builds (#18134) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR makes it so after a scheduled Nightly build we also update the `nightly` tag to keep things in sync. It's safe to bump the tag within this Action, as it won't trigger another Nightly build due to GitHub's recursive Action protections: > When you use the repository's `GITHUB_TOKEN` to perform tasks, events triggered by the `GITHUB_TOKEN`, with the exception of `workflow_dispatch` and `repository_dispatch`, will not create a new workflow run. This prevents you from accidentally creating recursive workflow runs. For example, if a workflow run pushes code using the repository's `GITHUB_TOKEN`, a new workflow will not run even when the repository contains a workflow configured to run when `push` events occur. 
> > — [source](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) Release Notes: - N/A --- .github/workflows/release_nightly.yml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index bcaa60b775..2b973dcddc 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -171,3 +171,28 @@ jobs: - name: Upload Zed Nightly run: script/upload-nightly linux-targz + + update-nightly-tag: + name: Update nightly tag + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + needs: + - bundle-mac + - bundle-linux-x86 + - bundle-linux-arm + steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + fetch-depth: 0 + + - name: Update nightly tag + run: | + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current commit. Skipping tagging." 
+ exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force From d6c184b494a0c9a9a46d4ffdb5483ba65967ab0b Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 20 Sep 2024 09:23:11 -0400 Subject: [PATCH 247/270] Detect 'MD' extension as Markdown (#18135) --- crates/languages/src/markdown/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 6b518ec8b6..ce3b294b4e 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -1,6 +1,6 @@ name = "Markdown" grammar = "markdown" -path_suffixes = ["md", "mdx", "mdwn", "markdown"] +path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"] word_characters = ["-"] brackets = [ { start = "{", end = "}", close = true, newline = true }, From 5f1046b3cd5290112f6dd464e49bc58661fd2179 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 20 Sep 2024 10:28:22 -0400 Subject: [PATCH 248/270] Make evals handle failures more gracefully (#18082) Now when an individual project eval fails, instead of panicking we add it to a list of failures that we collect and report at the end (and make the exit code nonzero). 
Release Notes: - N/A --- crates/evals/src/eval.rs | 315 ++++++++++++++++++++++++--------------- 1 file changed, 195 insertions(+), 120 deletions(-) diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 708cfa7511..0580053373 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -12,13 +12,16 @@ use language::LanguageRegistry; use node_runtime::FakeNodeRuntime; use open_ai::OpenAiEmbeddingModel; use project::Project; -use semantic_index::{OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status}; +use semantic_index::{ + EmbeddingProvider, OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status, +}; use serde::{Deserialize, Serialize}; use settings::SettingsStore; use smol::channel::bounded; use smol::io::AsyncReadExt; use smol::Timer; use std::ops::RangeInclusive; +use std::path::PathBuf; use std::time::Duration; use std::{ fs, @@ -237,6 +240,14 @@ async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result Ok(()) } +#[derive(Default, Debug)] +struct Counts { + covered_results: usize, + overlapped_results: usize, + covered_files: usize, + total_results: usize, +} + async fn run_evaluation( only_repo: Option, executor: &BackgroundExecutor, @@ -297,12 +308,11 @@ async fn run_evaluation( cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx)) .unwrap(); - let mut covered_result_count = 0; - let mut overlapped_result_count = 0; - let mut covered_file_count = 0; - let mut total_result_count = 0; + let mut counts = Counts::default(); eprint!("Running evals."); + let mut failures = Vec::new(); + for evaluation_project in evaluations { if only_repo .as_ref() @@ -314,27 +324,24 @@ async fn run_evaluation( eprint!("\r\x1B[2K"); eprint!( "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. 
Project: {}...", - covered_result_count, - total_result_count, - overlapped_result_count, - total_result_count, - covered_file_count, - total_result_count, + counts.covered_results, + counts.total_results, + counts.overlapped_results, + counts.total_results, + counts.covered_files, + counts.total_results, evaluation_project.repo ); - let repo_db_path = - db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); - let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx) - .await - .unwrap(); - let repo_dir = repos_dir.join(&evaluation_project.repo); if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() { eprintln!("Skipping {}: directory not found", evaluation_project.repo); continue; } + let repo_db_path = + db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); + let project = cx .update(|cx| { Project::local( @@ -349,125 +356,193 @@ async fn run_evaluation( }) .unwrap(); - let (worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree(repo_dir, true, cx) - })? - .await?; + let repo = evaluation_project.repo.clone(); + if let Err(err) = run_eval_project( + evaluation_project, + &user_store, + repo_db_path, + &repo_dir, + &mut counts, + project, + embedding_provider.clone(), + fs.clone(), + cx, + ) + .await + { + eprintln!("{repo} eval failed with error: {:?}", err); - worktree - .update(cx, |worktree, _| { - worktree.as_local().unwrap().scan_complete() - }) - .unwrap() - .await; + failures.push((repo, err)); + } + } - let project_index = cx - .update(|cx| semantic_index.create_project_index(project.clone(), cx)) - .unwrap(); - wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + eprintln!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. 
{} failed.", + counts.covered_results, + counts.total_results, + counts.overlapped_results, + counts.total_results, + counts.covered_files, + counts.total_results, + failures.len(), + ); - for query in evaluation_project.queries { - let results = cx - .update(|cx| { + if failures.is_empty() { + Ok(()) + } else { + eprintln!("Failures:\n"); + + for (index, (repo, failure)) in failures.iter().enumerate() { + eprintln!("Failure #{} - {repo}\n{:?}", index + 1, failure); + } + + Err(anyhow::anyhow!("Some evals failed.")) + } +} + +#[allow(clippy::too_many_arguments)] +async fn run_eval_project( + evaluation_project: EvaluationProject, + user_store: &Model, + repo_db_path: PathBuf, + repo_dir: &Path, + counts: &mut Counts, + project: Model, + embedding_provider: Arc, + fs: Arc, + cx: &mut AsyncAppContext, +) -> Result<(), anyhow::Error> { + let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider, cx).await?; + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(repo_dir, true, cx) + })? + .await?; + + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + })? + .await; + + let project_index = cx.update(|cx| semantic_index.create_project_index(project.clone(), cx))?; + wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + + for query in evaluation_project.queries { + let results = { + // Retry search up to 3 times in case of timeout, network failure, etc. 
+ let mut retries_remaining = 3; + let mut result; + + loop { + match cx.update(|cx| { let project_index = project_index.read(cx); project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) - }) - .unwrap() - .await - .unwrap(); - - let results = SemanticDb::load_results(results, &fs.clone(), &cx) - .await - .unwrap(); - - let mut project_covered_result_count = 0; - let mut project_overlapped_result_count = 0; - let mut project_covered_file_count = 0; - let mut covered_result_indices = Vec::new(); - for expected_result in &query.expected_results { - let mut file_matched = false; - let mut range_overlapped = false; - let mut range_covered = false; - - for (ix, result) in results.iter().enumerate() { - if result.path.as_ref() == Path::new(&expected_result.file) { - file_matched = true; - let start_matched = - result.row_range.contains(&expected_result.lines.start()); - let end_matched = result.row_range.contains(&expected_result.lines.end()); - - if start_matched || end_matched { - range_overlapped = true; - } - - if start_matched && end_matched { - range_covered = true; - covered_result_indices.push(ix); + }) { + Ok(task) => match task.await { + Ok(answer) => { + result = Ok(answer); break; } + Err(err) => { + result = Err(err); + } + }, + Err(err) => { + result = Err(err); } } - if range_covered { - project_covered_result_count += 1 - }; - if range_overlapped { - project_overlapped_result_count += 1 - }; - if file_matched { - project_covered_file_count += 1 - }; + if retries_remaining > 0 { + eprintln!( + "Retrying search after it failed on query {:?} with {:?}", + query, result + ); + retries_remaining -= 1; + } else { + eprintln!( + "Ran out of retries; giving up on search which failed on query {:?} with {:?}", + query, result + ); + break; + } } - let outcome_repo = evaluation_project.repo.clone(); - let query_results = EvaluationQueryOutcome { - repo: outcome_repo, - query: query.query, - total_result_count: query.expected_results.len(), - 
covered_result_count: project_covered_result_count, - overlapped_result_count: project_overlapped_result_count, - covered_file_count: project_covered_file_count, - expected_results: query.expected_results, - actual_results: results - .iter() - .map(|result| EvaluationSearchResult { - file: result.path.to_string_lossy().to_string(), - lines: result.row_range.clone(), - }) - .collect(), - covered_result_indices, + SemanticDb::load_results(result?, &fs.clone(), &cx).await? + }; + + let mut project_covered_result_count = 0; + let mut project_overlapped_result_count = 0; + let mut project_covered_file_count = 0; + let mut covered_result_indices = Vec::new(); + for expected_result in &query.expected_results { + let mut file_matched = false; + let mut range_overlapped = false; + let mut range_covered = false; + + for (ix, result) in results.iter().enumerate() { + if result.path.as_ref() == Path::new(&expected_result.file) { + file_matched = true; + let start_matched = result.row_range.contains(&expected_result.lines.start()); + let end_matched = result.row_range.contains(&expected_result.lines.end()); + + if start_matched || end_matched { + range_overlapped = true; + } + + if start_matched && end_matched { + range_covered = true; + covered_result_indices.push(ix); + break; + } + } + } + + if range_covered { + project_covered_result_count += 1 + }; + if range_overlapped { + project_overlapped_result_count += 1 + }; + if file_matched { + project_covered_file_count += 1 }; - - overlapped_result_count += query_results.overlapped_result_count; - covered_result_count += query_results.covered_result_count; - covered_file_count += query_results.covered_file_count; - total_result_count += query_results.total_result_count; - - println!("{}", serde_json::to_string(&query_results).unwrap()); } + let outcome_repo = evaluation_project.repo.clone(); - user_store - .update(cx, |_, _| { - drop(semantic_index); - drop(project); - drop(worktree); - drop(project_index); - }) - .unwrap(); + 
let query_results = EvaluationQueryOutcome { + repo: outcome_repo, + query: query.query, + total_result_count: query.expected_results.len(), + covered_result_count: project_covered_result_count, + overlapped_result_count: project_overlapped_result_count, + covered_file_count: project_covered_file_count, + expected_results: query.expected_results, + actual_results: results + .iter() + .map(|result| EvaluationSearchResult { + file: result.path.to_string_lossy().to_string(), + lines: result.row_range.clone(), + }) + .collect(), + covered_result_indices, + }; + + counts.overlapped_results += query_results.overlapped_result_count; + counts.covered_results += query_results.covered_result_count; + counts.covered_files += query_results.covered_file_count; + counts.total_results += query_results.total_result_count; + + println!("{}", serde_json::to_string(&query_results)?); } - eprint!( - "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.", - covered_result_count, - total_result_count, - overlapped_result_count, - total_result_count, - covered_file_count, - total_result_count, - ); - - Ok(()) + user_store.update(cx, |_, _| { + drop(semantic_index); + drop(project); + drop(worktree); + drop(project_index); + }) } async fn wait_for_indexing_complete( @@ -524,7 +599,7 @@ async fn fetch_eval_repos( let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json"); let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); - eprint!("Fetching evaluation repositories..."); + eprintln!("Fetching evaluation repositories..."); executor .scoped(move |scope| { From ab1d466c5f46fbaf84615dc39f142cfe0c0880e3 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 10:48:27 -0400 Subject: [PATCH 249/270] Remove `replica_id` from `MultiBuffer`s (#18141) This PR removes the `replica_id` field from the `MultiBuffer` struct. 
We were only ever referencing this field to pass when constructing a `MultiBuffer`, and never used it outside of that. Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 5 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/collab/src/tests/following_tests.rs | 2 +- .../src/copilot_completion_provider.rs | 4 +- crates/diagnostics/src/diagnostics.rs | 7 +-- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/editor.rs | 22 ++------ crates/editor/src/editor_tests.rs | 20 +++---- crates/editor/src/git.rs | 2 +- crates/editor/src/hunk_diff.rs | 2 +- crates/editor/src/inlay_hint_cache.rs | 4 +- crates/editor/src/items.rs | 3 +- crates/editor/src/movement.rs | 2 +- crates/editor/src/test/editor_test_context.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 56 ++++++++----------- crates/search/src/project_search.rs | 3 +- 16 files changed, 54 insertions(+), 84 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 364c6f9663..22237eeb07 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -2814,9 +2814,8 @@ impl ContextEditor { } else { // If there are multiple buffers or suggestion groups, create a multibuffer let multibuffer = cx.new_model(|cx| { - let replica_id = project.read(cx).replica_id(); - let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite) - .with_title(resolved_step.title.clone()); + let mut multibuffer = + MultiBuffer::new(Capability::ReadWrite).with_title(resolved_step.title.clone()); for (buffer, groups) in &resolved_step.suggestion_groups { let excerpt_ids = multibuffer.push_excerpts( buffer.clone(), diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 428b33f3bb..d95b54d3c6 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1145,7 +1145,7 @@ impl InlineAssistant { let 
deleted_lines_editor = cx.new_view(|cx| { let multi_buffer = cx.new_model(|_| { - MultiBuffer::without_headers(0, language::Capability::ReadOnly) + MultiBuffer::without_headers(language::Capability::ReadOnly) }); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e66b66a1b4..9a39d6f3eb 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -289,7 +289,7 @@ async fn test_basic_following( .get_open_buffer(&(worktree_id, "2.txt").into(), cx) .unwrap() }); - let mut result = MultiBuffer::new(0, Capability::ReadWrite); + let mut result = MultiBuffer::new(Capability::ReadWrite); result.push_excerpts( buffer_a1, [ExcerptRange { diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index c54fefad6f..3a3361cda1 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -767,7 +767,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -1018,7 +1018,7 @@ mod tests { .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( private_buffer.clone(), [ExcerptRange { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index eec4f735ec..6876388542 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ 
b/crates/diagnostics/src/diagnostics.rs @@ -156,12 +156,7 @@ impl ProjectDiagnosticsEditor { cx.on_focus_out(&focus_handle, |this, _event, cx| this.focus_out(cx)) .detach(); - let excerpts = cx.new_model(|cx| { - MultiBuffer::new( - project_handle.read(cx).replica_id(), - project_handle.read(cx).capability(), - ) - }); + let excerpts = cx.new_model(|cx| MultiBuffer::new(project_handle.read(cx).capability())); let editor = cx.new_view(|cx| { let mut editor = Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), false, cx); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 3a298832de..efa026a56c 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1671,7 +1671,7 @@ mod tests { let mut excerpt_ids = Vec::new(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); excerpt_ids.extend(multi_buffer.push_excerpts( buffer1.clone(), [ExcerptRange { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f797f82832..eb2dafc24d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2155,10 +2155,6 @@ impl Editor { }); } - pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { - self.buffer.read(cx).replica_id() - } - pub fn leader_peer_id(&self) -> Option { self.leader_peer_id } @@ -4758,8 +4754,6 @@ impl Editor { title: String, mut cx: AsyncWindowContext, ) -> Result<()> { - let replica_id = this.update(&mut cx, |this, cx| this.replica_id(cx))?; - let mut entries = transaction.0.into_iter().collect::>(); cx.update(|cx| { entries.sort_unstable_by_key(|(buffer, _)| { @@ -4802,8 +4796,7 @@ impl Editor { let mut ranges_to_highlight = Vec::new(); let excerpt_buffer = cx.new_model(|cx| { - let mut multibuffer = - MultiBuffer::new(replica_id, Capability::ReadWrite).with_title(title); + let 
mut multibuffer = MultiBuffer::new(Capability::ReadWrite).with_title(title); for (buffer_handle, transaction) in &entries { let buffer = buffer_handle.read(cx); ranges_to_highlight.extend( @@ -9610,7 +9603,6 @@ impl Editor { }) }) } else if !definitions.is_empty() { - let replica_id = self.replica_id(cx); cx.spawn(|editor, mut cx| async move { let (title, location_tasks, workspace) = editor .update(&mut cx, |editor, cx| { @@ -9663,9 +9655,7 @@ impl Editor { }; let opened = workspace .update(&mut cx, |workspace, cx| { - Self::open_locations_in_multibuffer( - workspace, locations, replica_id, title, split, cx, - ) + Self::open_locations_in_multibuffer(workspace, locations, title, split, cx) }) .ok(); @@ -9762,7 +9752,6 @@ impl Editor { } let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?; - let replica_id = self.replica_id(cx); let workspace = self.workspace()?; let project = workspace.read(cx).project().clone(); let references = project.update(cx, |project, cx| project.references(&buffer, head, cx)); @@ -9803,9 +9792,7 @@ impl Editor { ) }) .unwrap(); - Self::open_locations_in_multibuffer( - workspace, locations, replica_id, title, false, cx, - ); + Self::open_locations_in_multibuffer(workspace, locations, title, false, cx); Navigated::Yes }) })) @@ -9815,7 +9802,6 @@ impl Editor { pub fn open_locations_in_multibuffer( workspace: &mut Workspace, mut locations: Vec, - replica_id: ReplicaId, title: String, split: bool, cx: &mut ViewContext, @@ -9827,7 +9813,7 @@ impl Editor { let capability = workspace.project().read(cx).capability(); let excerpt_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(replica_id, capability); + let mut multibuffer = MultiBuffer::new(capability); while let Some(location) = locations.next() { let buffer = location.buffer.read(cx); let mut ranges_for_buffer = Vec::new(); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index e11b38ba59..589673447d 100644 --- 
a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -2822,7 +2822,7 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) { Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx) }); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( toml_buffer.clone(), [ExcerptRange { @@ -6671,7 +6671,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { .unwrap(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, ReadWrite); + let mut multi_buffer = MultiBuffer::new(ReadWrite); multi_buffer.push_excerpts( buffer_1.clone(), [ @@ -8614,7 +8614,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer.clone(), [ @@ -8698,7 +8698,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { }); let buffer = cx.new_model(|cx| Buffer::local(initial_text, cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts(buffer, excerpt_ranges, cx); multibuffer }); @@ -8757,7 +8757,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let mut excerpt1_id = None; let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); excerpt1_id = multibuffer .push_excerpts( buffer.clone(), @@ -8842,7 +8842,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { let buffer = 
cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let mut excerpt1_id = None; let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); excerpt1_id = multibuffer .push_excerpts( buffer.clone(), @@ -9230,7 +9230,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) { let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); let leader = pane.update(cx, |_, cx| { - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(ReadWrite)); cx.new_view(|cx| build_editor(multibuffer.clone(), cx)) }); @@ -10685,7 +10685,7 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -10825,7 +10825,7 @@ async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) { let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx)); let multi_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -11764,7 +11764,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) }); let multi_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 665c649e6e..63b083faa8 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -195,7 +195,7 @@ mod tests { cx.background_executor.run_until_parked(); let multibuffer = 
cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 5dc73634bd..361ea6246e 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -764,7 +764,7 @@ fn editor_with_deleted_text( let parent_editor = cx.view().downgrade(); let editor = cx.new_view(|cx| { let multi_buffer = - cx.new_model(|_| MultiBuffer::without_headers(0, language::Capability::ReadOnly)); + cx.new_model(|_| MultiBuffer::without_headers(language::Capability::ReadOnly)); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( diff_base_buffer, diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 24ccf64c4c..ca2db70a70 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -2607,7 +2607,7 @@ pub mod tests { .await .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -2957,7 +2957,7 @@ pub mod tests { }) .await .unwrap(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { let buffer_1_excerpts = multibuffer.push_excerpts( buffer_1.clone(), diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 3d04eb82d3..1d301f2ee6 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -68,7 +68,6 @@ impl FollowableItem for Editor { unreachable!() }; - let replica_id = project.read(cx).replica_id(); let buffer_ids = state .excerpts .iter() @@ -92,7 +91,7 @@ impl FollowableItem 
for Editor { if state.singleton && buffers.len() == 1 { multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx) } else { - multibuffer = MultiBuffer::new(replica_id, project.read(cx).capability()); + multibuffer = MultiBuffer::new(project.read(cx).capability()); let mut excerpts = state.excerpts.into_iter().peekable(); while let Some(excerpt) = excerpts.peek() { let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index a9f27d53a6..19e2a4ea95 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -928,7 +928,7 @@ mod tests { let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer.clone(), [ diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 6f8a495895..3e4ef174d4 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -75,7 +75,7 @@ impl EditorTestContext { cx: &mut gpui::TestAppContext, excerpts: [&str; COUNT], ) -> EditorTestContext { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); let buffer = cx.new_model(|cx| { for excerpt in excerpts.into_iter() { let (text, ranges) = marked_text_ranges(excerpt, false); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index c163dbc07a..f6a61f562a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -67,7 +67,6 @@ pub struct MultiBuffer { subscriptions: Topic, /// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`] singleton: bool, - replica_id: 
ReplicaId, history: History, title: Option, capability: Capability, @@ -350,7 +349,7 @@ impl std::ops::Deref for MultiBufferIndentGuide { } impl MultiBuffer { - pub fn new(replica_id: ReplicaId, capability: Capability) -> Self { + pub fn new(capability: Capability) -> Self { Self { snapshot: RefCell::new(MultiBufferSnapshot { show_headers: true, @@ -360,7 +359,6 @@ impl MultiBuffer { subscriptions: Topic::default(), singleton: false, capability, - replica_id, title: None, history: History { next_transaction_id: clock::Lamport::default(), @@ -372,14 +370,13 @@ impl MultiBuffer { } } - pub fn without_headers(replica_id: ReplicaId, capability: Capability) -> Self { + pub fn without_headers(capability: Capability) -> Self { Self { snapshot: Default::default(), buffers: Default::default(), subscriptions: Default::default(), singleton: false, capability, - replica_id, history: History { next_transaction_id: Default::default(), undo_stack: Default::default(), @@ -414,7 +411,6 @@ impl MultiBuffer { subscriptions: Default::default(), singleton: self.singleton, capability: self.capability, - replica_id: self.replica_id, history: self.history.clone(), title: self.title.clone(), } @@ -430,7 +426,7 @@ impl MultiBuffer { } pub fn singleton(buffer: Model, cx: &mut ModelContext) -> Self { - let mut this = Self::new(buffer.read(cx).replica_id(), buffer.read(cx).capability()); + let mut this = Self::new(buffer.read(cx).capability()); this.singleton = true; this.push_excerpts( buffer, @@ -444,10 +440,6 @@ impl MultiBuffer { this } - pub fn replica_id(&self) -> ReplicaId { - self.replica_id - } - /// Returns an up-to-date snapshot of the MultiBuffer. 
pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); @@ -2011,7 +2003,7 @@ impl MultiBuffer { excerpts: [(&str, Vec>); COUNT], cx: &mut gpui::AppContext, ) -> Model { - let multi = cx.new_model(|_| Self::new(0, Capability::ReadWrite)); + let multi = cx.new_model(|_| Self::new(Capability::ReadWrite)); for (text, ranges) in excerpts { let buffer = cx.new_model(|cx| Buffer::local(text, cx)); let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange { @@ -2032,7 +2024,7 @@ impl MultiBuffer { pub fn build_random(rng: &mut impl rand::Rng, cx: &mut gpui::AppContext) -> Model { cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); let mutation_count = rng.gen_range(1..=5); multibuffer.randomly_edit_excerpts(rng, mutation_count, cx); multibuffer @@ -5063,7 +5055,7 @@ mod tests { fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let events = Arc::new(RwLock::new(Vec::::new())); multibuffer.update(cx, |_, cx| { @@ -5306,8 +5298,8 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'm'), cx)); - let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); - let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let follower_edit_event_count = Arc::new(RwLock::new(0)); 
follower_multibuffer.update(cx, |_, cx| { @@ -5410,7 +5402,7 @@ mod tests { #[gpui::test] fn test_expand_excerpts(cx: &mut AppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts_with_context_lines( @@ -5486,7 +5478,7 @@ mod tests { #[gpui::test] fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts_with_context_lines( buffer.clone(), @@ -5539,7 +5531,7 @@ mod tests { #[gpui::test] async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { let snapshot = buffer.read(cx); let ranges = vec![ @@ -5589,7 +5581,7 @@ mod tests { #[gpui::test] fn test_empty_multibuffer(cx: &mut AppContext) { - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot.text(), ""); @@ -5628,7 +5620,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("efghi", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, 
Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -5685,7 +5677,7 @@ mod tests { fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); // Create an insertion id in buffer 1 that doesn't exist in buffer 2. // Add an excerpt from buffer 1 that spans this new insertion. @@ -5819,7 +5811,7 @@ mod tests { .unwrap_or(10); let mut buffers: Vec> = Vec::new(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_ids = Vec::::new(); let mut expected_excerpts = Vec::<(Model, Range)>::new(); let mut anchors = Vec::new(); @@ -6283,7 +6275,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("1234", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("5678", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let group_interval = multibuffer.read(cx).history.group_interval; multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( @@ -6418,7 +6410,7 @@ mod tests { fn test_excerpts_in_ranges_no_ranges(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( 
buffer_1.clone(), @@ -6496,7 +6488,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut expected_excerpt_id = ExcerptId(0); multibuffer.update(cx, |multibuffer, cx| { @@ -6557,7 +6549,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6623,7 +6615,7 @@ mod tests { let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'r'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); let mut excerpt_3_id = ExcerptId(0); @@ -6698,7 +6690,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6764,7 +6756,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| 
Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6829,7 +6821,7 @@ mod tests { fn test_split_ranges(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( buffer_1.clone(), @@ -6885,7 +6877,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'm'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( buffer_1.clone(), diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index c43d4ed454..fac3c55bf4 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -176,12 +176,11 @@ pub struct ProjectSearchBar { impl ProjectSearch { pub fn new(project: Model, cx: &mut ModelContext) -> Self { - let replica_id = project.read(cx).replica_id(); let capability = project.read(cx).capability(); Self { project, - excerpts: cx.new_model(|_| MultiBuffer::new(replica_id, capability)), + excerpts: cx.new_model(|_| MultiBuffer::new(capability)), pending_search: 
Default::default(), match_ranges: Default::default(), active_query: None, From 759646e0a35a2c4586817b79028cb347e3749de4 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 11:45:03 -0400 Subject: [PATCH 250/270] editor: Improve rewrapping when working with comments at different indentation levels (#18146) This PR improves the `editor::Rewrap` command when working with comments that were not all at the same indentation level. We now use a heuristic of finding the most common indentation level for each line, using the deepest indent in the event of a tie. It also removes an `.unwrap()` that would previously lead to a panic in this case. Instead of unwrapping we now log an error to the logs and skip rewrapping for that selection. Release Notes: - Improved the behavior of `editor: rewrap` when working with a selection that contained comments at different indentation levels. --- crates/editor/src/editor.rs | 46 ++++++++++++++++--- crates/editor/src/editor_tests.rs | 74 +++++++++++++++++++++++++++++++ crates/language/src/buffer.rs | 4 +- 3 files changed, 116 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index eb2dafc24d..33eb51cb0e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6736,9 +6736,31 @@ impl Editor { } } - let row = selection.head().row; - let indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); - let indent_end = Point::new(row, indent_size.len); + // Since not all lines in the selection may be at the same indent + // level, choose the indent size that is the most common between all + // of the lines. + // + // If there is a tie, we use the deepest indent. 
+ let (indent_size, indent_end) = { + let mut indent_size_occurrences = HashMap::default(); + let mut rows_by_indent_size = HashMap::>::default(); + + for row in start_row..=end_row { + let indent = buffer.indent_size_for_line(MultiBufferRow(row)); + rows_by_indent_size.entry(indent).or_default().push(row); + *indent_size_occurrences.entry(indent).or_insert(0) += 1; + } + + let indent_size = indent_size_occurrences + .into_iter() + .max_by_key(|(indent, count)| (*count, indent.len)) + .map(|(indent, _)| indent) + .unwrap_or_default(); + let row = rows_by_indent_size[&indent_size][0]; + let indent_end = Point::new(row, indent_size.len); + + (indent_size, indent_end) + }; let mut line_prefix = indent_size.chars().collect::(); @@ -6788,10 +6810,22 @@ impl Editor { let start = Point::new(start_row, 0); let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row))); let selection_text = buffer.text_for_range(start..end).collect::(); - let unwrapped_text = selection_text + let Some(lines_without_prefixes) = selection_text .lines() - .map(|line| line.strip_prefix(&line_prefix).unwrap()) - .join(" "); + .map(|line| { + line.strip_prefix(&line_prefix) + .or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start())) + .ok_or_else(|| { + anyhow!("line did not start with prefix {line_prefix:?}: {line:?}") + }) + }) + .collect::, _>>() + .log_err() + else { + continue; + }; + + let unwrapped_text = lines_without_prefixes.join(" "); let wrap_column = buffer .settings_at(Point::new(start_row, 0), cx) .preferred_line_length as usize; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 589673447d..85684db818 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4249,6 +4249,80 @@ async fn test_rewrap(cx: &mut TestAppContext) { cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); cx.assert_editor_state(wrapped_text); } + + // Test rewrapping unaligned comments in a selection. 
+ { + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into(), "/// ".into()], + ..LanguageConfig::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + let unwrapped_text = indoc! {" + fn foo() { + if true { + « // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae. + // Praesent semper egestas tellus id dignissim.ˇ» + do_something(); + } else { + // + } + + } + "}; + + let wrapped_text = indoc! {" + fn foo() { + if true { + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus + // mollis elit purus, a ornare lacus gravida vitae. Praesent semper + // egestas tellus id dignissim.ˇ + do_something(); + } else { + // + } + + } + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + + let unwrapped_text = indoc! {" + fn foo() { + if true { + «ˇ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae. + // Praesent semper egestas tellus id dignissim.» + do_something(); + } else { + // + } + + } + "}; + + let wrapped_text = indoc! {" + fn foo() { + if true { + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus + // mollis elit purus, a ornare lacus gravida vitae. Praesent semper + // egestas tellus id dignissim.ˇ + do_something(); + } else { + // + } + + } + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + } } #[gpui::test] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 08fc1ccdb4..acb57273e3 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -144,7 +144,7 @@ pub struct BufferSnapshot { /// The kind and amount of indentation in a particular line. 
For now, /// assumes that indentation is all the same character. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub struct IndentSize { /// The number of bytes that comprise the indentation. pub len: u32, @@ -153,7 +153,7 @@ pub struct IndentSize { } /// A whitespace character that's used for indentation. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub enum IndentKind { /// An ASCII space character. #[default] From f8195c41e0019b77a56a2eb96c346b601a6c8b89 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 20 Sep 2024 11:52:57 -0400 Subject: [PATCH 251/270] docs: Switch proxy example to socks5h not socks5 (#18142) Very rarely when you have a SOCKS proxy configured do you want local DNS. `socks5` does local DNS. `socks5h` does remote DNS. --- assets/settings/default.json | 2 +- docs/src/configuring-zed.md | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index a9e1865258..537ad12082 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1029,7 +1029,7 @@ // environment variables. // // Examples: - // - "proxy": "socks5://localhost:10808" + // - "proxy": "socks5h://localhost:10808" // - "proxy": "http://127.0.0.1:10809" "proxy": null, // Set to configure aliases for the command palette. diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 1befa7d93a..de7433bf5d 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1127,10 +1127,10 @@ The following URI schemes are supported: - `http` - `https` -- `socks4` -- `socks4a` -- `socks5` -- `socks5h` +- `socks4` - SOCKS4 proxy with local DNS +- `socks4a` - SOCKS4 proxy with remote DNS +- `socks5` - SOCKS5 proxy with local DNS +- `socks5h` - SOCKS5 proxy with remote DNS `http` will be used when no scheme is specified. 
@@ -1148,7 +1148,7 @@ Or to set a `socks5` proxy: ```json { - "proxy": "socks5://localhost:10808" + "proxy": "socks5h://localhost:10808" } ``` From 99bef273009a62b416300daa22b9a14910b5ca91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Sat, 21 Sep 2024 00:20:14 +0800 Subject: [PATCH 252/270] Add escape string highlights to JSON and JSONC files (#18138) Release Notes: - Added escape string highlights to JSON and JSONC files --- crates/languages/src/json/highlights.scm | 1 + crates/languages/src/jsonc/highlights.scm | 1 + 2 files changed, 2 insertions(+) diff --git a/crates/languages/src/json/highlights.scm b/crates/languages/src/json/highlights.scm index 7116805109..8cf7a6d20d 100644 --- a/crates/languages/src/json/highlights.scm +++ b/crates/languages/src/json/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string +(escape_sequence) @string.escape (pair key: (string) @property.json_key) diff --git a/crates/languages/src/jsonc/highlights.scm b/crates/languages/src/jsonc/highlights.scm index 7116805109..8cf7a6d20d 100644 --- a/crates/languages/src/jsonc/highlights.scm +++ b/crates/languages/src/jsonc/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string +(escape_sequence) @string.escape (pair key: (string) @property.json_key) From d97427f69eb46b62b4decac7ee88f5890a8a575c Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:48:48 -0400 Subject: [PATCH 253/270] chore: Update flake inputs (#18150) Release Notes: - N/A --- flake.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/flake.lock b/flake.lock index 2b421a9efb..a5b7a7a6ae 100644 --- a/flake.lock +++ b/flake.lock @@ -23,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1726554553, - "narHash": "sha256-xakDhIS1c1VgJc/NMOLj05yBsTdlXKMEYz6wC8Hdshc=", + "lastModified": 1726813972, + "narHash": 
"sha256-t6turZgoSAVgj7hn5mxzNlLOeVeZvymFo8+ymB52q34=", "owner": "nix-community", "repo": "fenix", - "rev": "1f59d7585aa06d2c327960d397bea4067d8fee98", + "rev": "251caeafc75b710282ee7e375800f75f4c8c5727", "type": "github" }, "original": { @@ -53,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726463316, - "narHash": "sha256-gI9kkaH0ZjakJOKrdjaI/VbaMEo9qBbSUl93DnU7f4c=", + "lastModified": 1726642912, + "narHash": "sha256-wiZzKGHRAhItEuoE599Wm3ic+Lg/NykuBvhb+awf7N8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172", + "rev": "395c52d142ec1df377acd67db6d4a22950b02a98", "type": "github" }, "original": { From 9f6ff29a54aeeb1fac22e3d5315d47705d47cb31 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:57:35 -0400 Subject: [PATCH 254/270] Reuse OpenAI low_speed_timeout setting for zed.dev provider (#18144) Release Notes: - N/A --- Cargo.lock | 1 + crates/language_model/Cargo.toml | 1 + crates/language_model/src/provider/cloud.rs | 22 +++++++++++++++++++-- crates/language_model/src/settings.rs | 9 +++++++++ 4 files changed, 31 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26b8847041..a19506829e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6285,6 +6285,7 @@ dependencies = [ "http_client", "image", "inline_completion_button", + "isahc", "language", "log", "menu", diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index ef273ac44f..b63428c544 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -32,6 +32,7 @@ futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true +isahc.workspace = true inline_completion_button.workspace = true log.workspace = true menu.workspace = true diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index f8f64ff3b8..58efb4cfe1 
100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -19,6 +19,7 @@ use gpui::{ Subscription, Task, }; use http_client::{AsyncBody, HttpClient, Method, Response}; +use isahc::config::Configurable; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::value::RawValue; @@ -27,6 +28,7 @@ use smol::{ io::{AsyncReadExt, BufReader}, lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}, }; +use std::time::Duration; use std::{ future, sync::{Arc, LazyLock}, @@ -56,6 +58,7 @@ fn zed_cloud_provider_additional_models() -> &'static [AvailableModel] { #[derive(Default, Clone, Debug, PartialEq)] pub struct ZedDotDevSettings { pub available_models: Vec, + pub low_speed_timeout: Option, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] @@ -380,6 +383,7 @@ impl CloudLanguageModel { client: Arc, llm_api_token: LlmApiToken, body: PerformCompletionParams, + low_speed_timeout: Option, ) -> Result> { let http_client = &client.http_client(); @@ -387,7 +391,11 @@ impl CloudLanguageModel { let mut did_retry = false; let response = loop { - let request = http_client::Request::builder() + let mut request_builder = http_client::Request::builder(); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + }; + let request = request_builder .method(Method::POST) .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref()) .header("Content-Type", "application/json") @@ -501,8 +509,11 @@ impl LanguageModel for CloudLanguageModel { fn stream_completion( &self, request: LanguageModelRequest, - _cx: &AsyncAppContext, + cx: &AsyncAppContext, ) -> BoxFuture<'static, Result>>> { + let openai_low_speed_timeout = + AllLanguageModelSettings::try_read_global(cx, |s| s.openai.low_speed_timeout.unwrap()); + match &self.model { CloudModel::Anthropic(model) => { let request = 
request.into_anthropic(model.id().into(), model.max_output_tokens()); @@ -519,6 +530,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(map_to_language_model_completion_events(Box::pin( @@ -542,6 +554,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + openai_low_speed_timeout, ) .await?; Ok(open_ai::extract_text_from_events(response_lines(response))) @@ -569,6 +582,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(google_ai::extract_text_from_events(response_lines( @@ -599,6 +613,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(open_ai::extract_text_from_events(response_lines(response))) @@ -650,6 +665,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; @@ -694,6 +710,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; @@ -741,6 +758,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 80749c0bdb..8888d51e11 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -231,6 +231,7 @@ pub struct GoogleSettingsContent { #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] pub struct ZedDotDevSettingsContent { available_models: Option>, + pub low_speed_timeout_in_seconds: Option, } #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] @@ -333,6 +334,14 @@ impl settings::Settings for AllLanguageModelSettings { .as_ref() .and_then(|s| s.available_models.clone()), ); + if let Some(low_speed_timeout_in_seconds) = value + .zed_dot_dev + .as_ref() + .and_then(|s| s.low_speed_timeout_in_seconds) + { + settings.zed_dot_dev.low_speed_timeout = + Some(Duration::from_secs(low_speed_timeout_in_seconds)); + } merge( &mut settings.google.api_url, From 
8bd624b5db035862ecb89a4cf126167f572712af Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 13:06:43 -0400 Subject: [PATCH 255/270] editor: Remove unneeded blank lines in rewrap test cases (#18152) This PR removes some unneeded blank lines from some of the test cases for `editor::Rewrap`. These weren't meaningful to the test, and their presence could be confusing. Release Notes: - N/A --- crates/editor/src/editor_tests.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 85684db818..5927c22cb0 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4270,7 +4270,6 @@ async fn test_rewrap(cx: &mut TestAppContext) { } else { // } - } "}; @@ -4284,7 +4283,6 @@ async fn test_rewrap(cx: &mut TestAppContext) { } else { // } - } "}; From 601090511bde0cd39985f670d7d2acc895f2594c Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 20 Sep 2024 13:25:06 -0400 Subject: [PATCH 256/270] Remove `system_id` from all events but `editor_events` (#18154) Release Notes: - N/A --- crates/collab/src/api/events.rs | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 008c76e048..f8ae532013 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -905,7 +905,6 @@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -944,7 +943,6 @@ impl CpuEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -968,7 +966,6 @@ pub struct MemoryEventRow { os_version: String, // 
ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1000,7 +997,6 @@ impl MemoryEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1024,7 +1020,6 @@ pub struct AppEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1055,7 +1050,6 @@ impl AppEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1078,7 +1072,6 @@ pub struct SettingEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1109,7 +1102,6 @@ impl SettingEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1133,7 +1125,6 @@ pub struct ExtensionEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1169,7 +1160,6 @@ impl ExtensionEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, 
@@ -1260,7 +1250,6 @@ pub struct EditEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, // Note: This column name has a typo in the ClickHouse table. #[serde(rename = "sesssion_id")] @@ -1298,7 +1287,6 @@ impl EditEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, From 5d12e3ce3a318577ff09811bdf57c91674b1beea Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 20 Sep 2024 14:43:26 -0400 Subject: [PATCH 257/270] preview tabs: Toggle preview tab when saving (#18158) Release Notes: - Saving a preview tab will now mark it as a permanent tab --- crates/workspace/src/pane.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index a5f83f961f..82300690e7 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1595,8 +1595,13 @@ impl Pane { } if can_save { - pane.update(cx, |_, cx| item.save(should_format, project, cx))? - .await?; + pane.update(cx, |pane, cx| { + if pane.is_active_preview_item(item.item_id()) { + pane.set_preview_item_id(None, cx); + } + item.save(should_format, project, cx) + })? + .await?; } else if can_save_as { let abs_path = pane.update(cx, |pane, cx| { pane.workspace From 7dac5594cdb02259c455cee90f57fb610b8c6162 Mon Sep 17 00:00:00 2001 From: Daste Date: Fri, 20 Sep 2024 20:44:13 +0200 Subject: [PATCH 258/270] file_finder: Display file icons (#18091) This PR adds file icons (like in tabs, the project panel and tab switcher) to the file finder popup. It's similar to [tab_switcher icons](https://github.com/zed-industries/zed/pull/17115), but simpler, because we're only dealing with actual files. 
Release Notes: - Added icons to the file finder. Screenshot: ![image](https://github.com/user-attachments/assets/bd6a54c1-cdbd-415a-9a82-0cc7a0bb6ca2) --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 3 +++ assets/settings/default.json | 5 ++++ crates/file_finder/Cargo.toml | 3 +++ crates/file_finder/src/file_finder.rs | 21 +++++++++++++-- .../file_finder/src/file_finder_settings.rs | 27 +++++++++++++++++++ 5 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 crates/file_finder/src/file_finder_settings.rs diff --git a/Cargo.lock b/Cargo.lock index a19506829e..dd07dfa1cf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4326,6 +4326,7 @@ dependencies = [ "ctor", "editor", "env_logger", + "file_icons", "futures 0.3.30", "fuzzy", "gpui", @@ -4333,7 +4334,9 @@ dependencies = [ "menu", "picker", "project", + "schemars", "serde", + "serde_derive", "serde_json", "settings", "text", diff --git a/assets/settings/default.json b/assets/settings/default.json index 537ad12082..8424c5733d 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -496,6 +496,11 @@ // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. "enable_preview_from_code_navigation": false }, + // Settings related to the file finder. + "file_finder": { + // Whether to show file icons in the file finder. + "file_icons": true + }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. 
"remove_trailing_whitespace_on_save": true, diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index 8f17b191a5..2b4aa5fe30 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -16,14 +16,17 @@ doctest = false anyhow.workspace = true collections.workspace = true editor.workspace = true +file_icons.workspace = true futures.workspace = true fuzzy.workspace = true gpui.workspace = true menu.workspace = true picker.workspace = true project.workspace = true +schemars.workspace = true settings.workspace = true serde.workspace = true +serde_derive.workspace = true text.workspace = true theme.workspace = true ui.workspace = true diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 50a14b62db..e1e0998f8a 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1,11 +1,14 @@ #[cfg(test)] mod file_finder_tests; +mod file_finder_settings; mod new_path_prompt; mod open_path_prompt; use collections::HashMap; use editor::{scroll::Autoscroll, Bias, Editor}; +use file_finder_settings::FileFinderSettings; +use file_icons::FileIcons; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, @@ -39,7 +42,12 @@ pub struct FileFinder { init_modifiers: Option, } +pub fn init_settings(cx: &mut AppContext) { + FileFinderSettings::register(cx); +} + pub fn init(cx: &mut AppContext) { + init_settings(cx); cx.observe_new_views(FileFinder::register).detach(); cx.observe_new_views(NewPathPrompt::register).detach(); cx.observe_new_views(OpenPathPrompt::register).detach(); @@ -1041,12 +1049,14 @@ impl PickerDelegate for FileFinderDelegate { selected: bool, cx: &mut ViewContext>, ) -> Option { + let settings = FileFinderSettings::get_global(cx); + let path_match = self .matches .get(ix) .expect("Invalid matches state: no element for index {ix}"); - let icon = match 
&path_match { + let history_icon = match &path_match { Match::History { .. } => Icon::new(IconName::HistoryRerun) .color(Color::Muted) .size(IconSize::Small) @@ -1059,10 +1069,17 @@ impl PickerDelegate for FileFinderDelegate { let (file_name, file_name_positions, full_path, full_path_positions) = self.labels_for_match(path_match, cx, ix); + let file_icon = if settings.file_icons { + FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path) + } else { + None + }; + Some( ListItem::new(ix) .spacing(ListItemSpacing::Sparse) - .end_slot::(Some(icon)) + .start_slot::(file_icon) + .end_slot::(history_icon) .inset(true) .selected(selected) .child( diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs new file mode 100644 index 0000000000..c02008c917 --- /dev/null +++ b/crates/file_finder/src/file_finder_settings.rs @@ -0,0 +1,27 @@ +use anyhow::Result; +use schemars::JsonSchema; +use serde_derive::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +pub struct FileFinderSettings { + pub file_icons: bool, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct FileFinderSettingsContent { + /// Whether to show file icons in the file finder. 
+ /// + /// Default: true + pub file_icons: Option, +} + +impl Settings for FileFinderSettings { + const KEY: Option<&'static str> = Some("file_finder"); + + type FileContent = FileFinderSettingsContent; + + fn load(sources: SettingsSources, _: &mut gpui::AppContext) -> Result { + sources.json_merge() + } +} From 45388805ad4bc5e27c0fcdd6936fb5bce687a8ff Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 20 Sep 2024 13:02:39 -0600 Subject: [PATCH 259/270] vim: gq (#18156) Closes #ISSUE Release Notes: - vim: Added gq/gw for rewrapping lines --- assets/keymaps/vim.json | 13 +++- crates/editor/src/editor.rs | 6 +- crates/vim/src/normal.rs | 30 +++++++- crates/vim/src/rewrap.rs | 114 ++++++++++++++++++++++++++++++ crates/vim/src/state.rs | 3 + crates/vim/src/vim.rs | 2 + crates/vim/test_data/test_gq.json | 12 ++++ 7 files changed, 177 insertions(+), 3 deletions(-) create mode 100644 crates/vim/src/rewrap.rs create mode 100644 crates/vim/test_data/test_gq.json diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 18b38384ef..8d933f19af 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -124,7 +124,6 @@ "g i": "vim::InsertAtPrevious", "g ,": "vim::ChangeListNewer", "g ;": "vim::ChangeListOlder", - "g q": "editor::Rewrap", "shift-h": "vim::WindowTop", "shift-m": "vim::WindowMiddle", "shift-l": "vim::WindowBottom", @@ -240,6 +239,8 @@ "g shift-u": ["vim::PushOperator", "Uppercase"], "g ~": ["vim::PushOperator", "OppositeCase"], "\"": ["vim::PushOperator", "Register"], + "g q": ["vim::PushOperator", "Rewrap"], + "g w": ["vim::PushOperator", "Rewrap"], "q": "vim::ToggleRecord", "shift-q": "vim::ReplayLastRecording", "@": ["vim::PushOperator", "ReplayRegister"], @@ -301,6 +302,7 @@ "i": ["vim::PushOperator", { "Object": { "around": false } }], "a": ["vim::PushOperator", { "Object": { "around": true } }], "g c": "vim::ToggleComments", + "g q": "vim::Rewrap", "\"": ["vim::PushOperator", "Register"], // tree-sitter related commands "[ x": 
"editor::SelectLargerSyntaxNode", @@ -428,6 +430,15 @@ "~": "vim::CurrentLine" } }, + { + "context": "vim_operator == gq", + "bindings": { + "g q": "vim::CurrentLine", + "q": "vim::CurrentLine", + "g w": "vim::CurrentLine", + "w": "vim::CurrentLine" + } + }, { "context": "vim_operator == y", "bindings": { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 33eb51cb0e..1f4a9376d2 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6705,6 +6705,10 @@ impl Editor { } pub fn rewrap(&mut self, _: &Rewrap, cx: &mut ViewContext) { + self.rewrap_impl(true, cx) + } + + pub fn rewrap_impl(&mut self, only_text: bool, cx: &mut ViewContext) { let buffer = self.buffer.read(cx).snapshot(cx); let selections = self.selections.all::(cx); let mut selections = selections.iter().peekable(); @@ -6725,7 +6729,7 @@ impl Editor { continue; } - let mut should_rewrap = false; + let mut should_rewrap = !only_text; if let Some(language_scope) = buffer.language_scope_at(selection.head()) { match language_scope.language_name().0.as_ref() { diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 741e09f178..10bf3c8e8d 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -168,6 +168,7 @@ impl Vim { Some(Operator::Yank) => self.yank_motion(motion, times, cx), Some(Operator::AddSurrounds { target: None }) => {} Some(Operator::Indent) => self.indent_motion(motion, times, IndentDirection::In, cx), + Some(Operator::Rewrap) => self.rewrap_motion(motion, times, cx), Some(Operator::Outdent) => self.indent_motion(motion, times, IndentDirection::Out, cx), Some(Operator::Lowercase) => { self.change_case_motion(motion, times, CaseTarget::Lowercase, cx) @@ -199,6 +200,7 @@ impl Vim { Some(Operator::Outdent) => { self.indent_object(object, around, IndentDirection::Out, cx) } + Some(Operator::Rewrap) => self.rewrap_object(object, around, cx), Some(Operator::Lowercase) => { self.change_case_object(object, around, 
CaseTarget::Lowercase, cx) } @@ -478,8 +480,9 @@ impl Vim { } #[cfg(test)] mod test { - use gpui::{KeyBinding, TestAppContext}; + use gpui::{KeyBinding, TestAppContext, UpdateGlobal}; use indoc::indoc; + use language::language_settings::AllLanguageSettings; use settings::SettingsStore; use crate::{ @@ -1386,4 +1389,29 @@ mod test { cx.simulate_shared_keystrokes("2 0 r - ").await; cx.shared_state().await.assert_eq("ˇhello world\n"); } + + #[gpui::test] + async fn test_gq(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("textwidth=5").await; + + cx.update(|cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.update_user_settings::(cx, |settings| { + settings.defaults.preferred_line_length = Some(5); + }); + }) + }); + + cx.set_shared_state("ˇth th th th th th\n").await; + cx.simulate_shared_keystrokes("g q q").await; + cx.shared_state().await.assert_eq("th th\nth th\nˇth th\n"); + + cx.set_shared_state("ˇth th th th th th\nth th th th th th\n") + .await; + cx.simulate_shared_keystrokes("v j g q").await; + cx.shared_state() + .await + .assert_eq("th th\nth th\nth th\nth th\nth th\nˇth th\n"); + } } diff --git a/crates/vim/src/rewrap.rs b/crates/vim/src/rewrap.rs new file mode 100644 index 0000000000..3e61b3c3a1 --- /dev/null +++ b/crates/vim/src/rewrap.rs @@ -0,0 +1,114 @@ +use crate::{motion::Motion, object::Object, state::Mode, Vim}; +use collections::HashMap; +use editor::{display_map::ToDisplayPoint, scroll::Autoscroll, Bias, Editor}; +use gpui::actions; +use language::SelectionGoal; +use ui::ViewContext; + +actions!(vim, [Rewrap]); + +pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { + Vim::action(editor, cx, |vim, _: &Rewrap, cx| { + vim.record_current_action(cx); + vim.take_count(cx); + vim.store_visual_marks(cx); + vim.update_editor(cx, |vim, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut positions = vim.save_selection_starts(editor, cx); + 
editor.rewrap_impl(false, cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + if let Some(anchor) = positions.remove(&selection.id) { + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + } + }); + }); + }); + }); + if vim.mode.is_visual() { + vim.switch_mode(Mode::Normal, true, cx) + } + }); +} + +impl Vim { + pub(crate) fn rewrap_motion( + &mut self, + motion: Motion, + times: Option, + cx: &mut ViewContext, + ) { + self.stop_recording(cx); + self.update_editor(cx, |_, editor, cx| { + let text_layout_details = editor.text_layout_details(cx); + editor.transact(cx, |editor, cx| { + let mut selection_starts: HashMap<_, _> = Default::default(); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = map.display_point_to_anchor(selection.head(), Bias::Right); + selection_starts.insert(selection.id, anchor); + motion.expand_selection(map, selection, times, false, &text_layout_details); + }); + }); + editor.rewrap_impl(false, cx); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = selection_starts.remove(&selection.id).unwrap(); + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + }); + }); + }); + }); + } + + pub(crate) fn rewrap_object( + &mut self, + object: Object, + around: bool, + cx: &mut ViewContext, + ) { + self.stop_recording(cx); + self.update_editor(cx, |_, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut original_positions: HashMap<_, _> = Default::default(); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = map.display_point_to_anchor(selection.head(), Bias::Right); + original_positions.insert(selection.id, anchor); + object.expand_selection(map, selection, around); + }); + }); + editor.rewrap_impl(false, cx); + 
editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = original_positions.remove(&selection.id).unwrap(); + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + }); + }); + }); + }); + } +} + +#[cfg(test)] +mod test { + use crate::test::NeovimBackedTestContext; + + #[gpui::test] + async fn test_indent_gv(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("shiftwidth=4").await; + + cx.set_shared_state("ˇhello\nworld\n").await; + cx.simulate_shared_keystrokes("v j > g v").await; + cx.shared_state() + .await + .assert_eq("« hello\n ˇ» world\n"); + } +} diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 1d642e990f..b61cb405e1 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -72,6 +72,7 @@ pub enum Operator { Jump { line: bool }, Indent, Outdent, + Rewrap, Lowercase, Uppercase, OppositeCase, @@ -454,6 +455,7 @@ impl Operator { Operator::Jump { line: true } => "'", Operator::Jump { line: false } => "`", Operator::Indent => ">", + Operator::Rewrap => "gq", Operator::Outdent => "<", Operator::Uppercase => "gU", Operator::Lowercase => "gu", @@ -482,6 +484,7 @@ impl Operator { Operator::Change | Operator::Delete | Operator::Yank + | Operator::Rewrap | Operator::Indent | Operator::Outdent | Operator::Lowercase diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index a4b77b1a7a..701972c19b 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -13,6 +13,7 @@ mod motion; mod normal; mod object; mod replace; +mod rewrap; mod state; mod surrounds; mod visual; @@ -291,6 +292,7 @@ impl Vim { command::register(editor, cx); replace::register(editor, cx); indent::register(editor, cx); + rewrap::register(editor, cx); object::register(editor, cx); visual::register(editor, cx); change_list::register(editor, cx); diff --git a/crates/vim/test_data/test_gq.json 
b/crates/vim/test_data/test_gq.json new file mode 100644 index 0000000000..08cdb12315 --- /dev/null +++ b/crates/vim/test_data/test_gq.json @@ -0,0 +1,12 @@ +{"SetOption":{"value":"textwidth=5"}} +{"Put":{"state":"ˇth th th th th th\n"}} +{"Key":"g"} +{"Key":"q"} +{"Key":"q"} +{"Get":{"state":"th th\nth th\nˇth th\n","mode":"Normal"}} +{"Put":{"state":"ˇth th th th th th\nth th th th th th\n"}} +{"Key":"v"} +{"Key":"j"} +{"Key":"g"} +{"Key":"q"} +{"Get":{"state":"th th\nth th\nth th\nth th\nth th\nˇth th\n","mode":"Normal"}} From 7d62fda5a38d1199e79c30177828dfac2a1ce4b3 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Sat, 21 Sep 2024 03:49:40 +0800 Subject: [PATCH 260/270] file_finder: Notify user when picker an non-utf8 file (#18136) notify user when using file finder picker an file which cannot open. Release Notes: - N/A --- crates/file_finder/src/file_finder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index e1e0998f8a..4c3f92d3c1 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -31,7 +31,7 @@ use std::{ use text::Point; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::{paths::PathWithPosition, post_inc, ResultExt}; -use workspace::{item::PreviewTabsSettings, ModalView, Workspace}; +use workspace::{item::PreviewTabsSettings, notifications::NotifyResultExt, ModalView, Workspace}; actions!(file_finder, [SelectPrev]); @@ -1011,7 +1011,7 @@ impl PickerDelegate for FileFinderDelegate { let finder = self.file_finder.clone(); cx.spawn(|_, mut cx| async move { - let item = open_task.await.log_err()?; + let item = open_task.await.notify_async_err(&mut cx)?; if let Some(row) = row { if let Some(active_editor) = item.downcast::() { active_editor From 5905fbb9accdc5d34b7fec0fe021022a5b38420e Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Fri, 20 Sep 2024 16:59:12 -0400 Subject: [PATCH 
261/270] Allow Anthropic custom models to override temperature (#18160) Release Notes: - Allow Anthropic custom models to override "temperature" This also centralized the defaulting of "temperature" to be inside of each model's `into_x` call instead of being sprinkled around the code. --- crates/anthropic/src/anthropic.rs | 14 ++++++++++++++ crates/assistant/src/context.rs | 2 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/assistant/src/prompt_library.rs | 2 +- .../assistant/src/slash_command/auto_command.rs | 2 +- .../assistant/src/terminal_inline_assistant.rs | 2 +- crates/language_model/src/provider/anthropic.rs | 10 ++++++++-- crates/language_model/src/provider/cloud.rs | 16 +++++++++++++--- crates/language_model/src/provider/ollama.rs | 2 +- crates/language_model/src/request.rs | 15 ++++++++++----- crates/language_model/src/settings.rs | 2 ++ crates/semantic_index/src/summary_index.rs | 2 +- 12 files changed, 54 insertions(+), 17 deletions(-) diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index f960dc541a..91b6723e90 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -49,6 +49,7 @@ pub enum Model { /// Indicates whether this custom model supports caching. cache_configuration: Option, max_output_tokens: Option, + default_temperature: Option, }, } @@ -124,6 +125,19 @@ impl Model { } } + pub fn default_temperature(&self) -> f32 { + match self { + Self::Claude3_5Sonnet + | Self::Claude3Opus + | Self::Claude3Sonnet + | Self::Claude3Haiku => 1.0, + Self::Custom { + default_temperature, + .. 
+ } => default_temperature.unwrap_or(1.0), + } + } + pub fn tool_model_id(&self) -> &str { if let Self::Custom { tool_override: Some(tool_override), diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 830c098049..97a5b3ea98 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -2180,7 +2180,7 @@ impl Context { messages: Vec::new(), tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; for message in self.messages(cx) { if message.status != MessageStatus::Done { diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index d95b54d3c6..f2428c3a2e 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2732,7 +2732,7 @@ impl CodegenAlternative { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }) } diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs index 76ee95d507..24e20a18a7 100644 --- a/crates/assistant/src/prompt_library.rs +++ b/crates/assistant/src/prompt_library.rs @@ -796,7 +796,7 @@ impl PromptLibrary { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }, cx, ) diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index e1f20c311b..14cee29682 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -216,7 +216,7 @@ async fn commands_for_summaries( }], tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; while let Some(current_summaries) = stack.pop() { diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index caf819bae5..e1a26d8510 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ 
-284,7 +284,7 @@ impl TerminalInlineAssistant { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }) } diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 1e3d275094..86538bec49 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -51,6 +51,7 @@ pub struct AvailableModel { /// Configuration of Anthropic's caching API. pub cache_configuration: Option, pub max_output_tokens: Option, + pub default_temperature: Option, } pub struct AnthropicLanguageModelProvider { @@ -200,6 +201,7 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { } }), max_output_tokens: model.max_output_tokens, + default_temperature: model.default_temperature, }, ); } @@ -375,8 +377,11 @@ impl LanguageModel for AnthropicModel { request: LanguageModelRequest, cx: &AsyncAppContext, ) -> BoxFuture<'static, Result>>> { - let request = - request.into_anthropic(self.model.id().into(), self.model.max_output_tokens()); + let request = request.into_anthropic( + self.model.id().into(), + self.model.default_temperature(), + self.model.max_output_tokens(), + ); let request = self.stream_completion(request, cx); let future = self.request_limiter.stream(async move { let response = request.await.map_err(|err| anyhow!(err))?; @@ -405,6 +410,7 @@ impl LanguageModel for AnthropicModel { ) -> BoxFuture<'static, Result>>> { let mut request = request.into_anthropic( self.model.tool_model_id().into(), + self.model.default_temperature(), self.model.max_output_tokens(), ); request.tool_choice = Some(anthropic::ToolChoice::Tool { diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 58efb4cfe1..606a6fbace 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -87,6 +87,8 @@ pub struct AvailableModel { pub tool_override: Option, /// 
Indicates whether this custom model supports caching. pub cache_configuration: Option, + /// The default temperature to use for this model. + pub default_temperature: Option, } pub struct CloudLanguageModelProvider { @@ -255,6 +257,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { min_total_token: config.min_total_token, } }), + default_temperature: model.default_temperature, max_output_tokens: model.max_output_tokens, }), AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom { @@ -516,7 +519,11 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let request = request.into_anthropic(model.id().into(), model.max_output_tokens()); + let request = request.into_anthropic( + model.id().into(), + model.default_temperature(), + model.max_output_tokens(), + ); let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); let future = self.request_limiter.stream(async move { @@ -642,8 +649,11 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let mut request = - request.into_anthropic(model.tool_model_id().into(), model.max_output_tokens()); + let mut request = request.into_anthropic( + model.tool_model_id().into(), + model.default_temperature(), + model.max_output_tokens(), + ); request.tool_choice = Some(anthropic::ToolChoice::Tool { name: tool_name.clone(), }); diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs index 6a3190dee7..a29ff3cf6a 100644 --- a/crates/language_model/src/provider/ollama.rs +++ b/crates/language_model/src/provider/ollama.rs @@ -235,7 +235,7 @@ impl OllamaLanguageModel { options: Some(ChatOptions { num_ctx: Some(self.model.max_tokens), stop: Some(request.stop), - temperature: Some(request.temperature), + temperature: request.temperature.or(Some(1.0)), ..Default::default() }), tools: vec![], diff --git a/crates/language_model/src/request.rs 
b/crates/language_model/src/request.rs index dd480b8aaf..06dde1862a 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -236,7 +236,7 @@ pub struct LanguageModelRequest { pub messages: Vec, pub tools: Vec, pub stop: Vec, - pub temperature: f32, + pub temperature: Option, } impl LanguageModelRequest { @@ -262,7 +262,7 @@ impl LanguageModelRequest { .collect(), stream, stop: self.stop, - temperature: self.temperature, + temperature: self.temperature.unwrap_or(1.0), max_tokens: max_output_tokens, tools: Vec::new(), tool_choice: None, @@ -290,7 +290,7 @@ impl LanguageModelRequest { candidate_count: Some(1), stop_sequences: Some(self.stop), max_output_tokens: None, - temperature: Some(self.temperature as f64), + temperature: self.temperature.map(|t| t as f64).or(Some(1.0)), top_p: None, top_k: None, }), @@ -298,7 +298,12 @@ impl LanguageModelRequest { } } - pub fn into_anthropic(self, model: String, max_output_tokens: u32) -> anthropic::Request { + pub fn into_anthropic( + self, + model: String, + default_temperature: f32, + max_output_tokens: u32, + ) -> anthropic::Request { let mut new_messages: Vec = Vec::new(); let mut system_message = String::new(); @@ -400,7 +405,7 @@ impl LanguageModelRequest { tool_choice: None, metadata: None, stop_sequences: Vec::new(), - temperature: Some(self.temperature), + temperature: self.temperature.or(Some(default_temperature)), top_k: None, top_p: None, } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 8888d51e11..2bf8deb042 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -99,6 +99,7 @@ impl AnthropicSettingsContent { tool_override, cache_configuration, max_output_tokens, + default_temperature, } => Some(provider::anthropic::AvailableModel { name, display_name, @@ -112,6 +113,7 @@ impl AnthropicSettingsContent { }, ), max_output_tokens, + default_temperature, }), _ => None, }) diff --git 
a/crates/semantic_index/src/summary_index.rs b/crates/semantic_index/src/summary_index.rs index 08f25ae028..f4c6d4726c 100644 --- a/crates/semantic_index/src/summary_index.rs +++ b/crates/semantic_index/src/summary_index.rs @@ -562,7 +562,7 @@ impl SummaryIndex { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; let code_len = code.len(); From e309fbda2a95a55a043ad41ead97c568c7aeef19 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 20 Sep 2024 15:09:18 -0700 Subject: [PATCH 262/270] Add a slash command for automatically retrieving relevant context (#17972) * [x] put this slash command behind a feature flag until we release embedding access to the general population * [x] choose a name for this slash command and name the rust module to match Release Notes: - N/A --------- Co-authored-by: Jason Co-authored-by: Richard Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Richard Feldman --- assets/prompts/project_slash_command.hbs | 8 + crates/assistant/src/assistant.rs | 32 +- crates/assistant/src/context.rs | 5 +- crates/assistant/src/prompts.rs | 15 + crates/assistant/src/slash_command.rs | 2 +- .../slash_command/cargo_workspace_command.rs | 153 ++++++++++ .../src/slash_command/project_command.rs | 257 +++++++++------- .../src/slash_command/search_command.rs | 63 ++-- .../assistant/src/slash_command_settings.rs | 10 +- crates/evals/src/eval.rs | 2 +- crates/semantic_index/examples/index.rs | 2 +- crates/semantic_index/src/embedding.rs | 23 +- crates/semantic_index/src/project_index.rs | 59 ++-- crates/semantic_index/src/semantic_index.rs | 275 +++++++++++++++--- 14 files changed, 683 insertions(+), 223 deletions(-) create mode 100644 assets/prompts/project_slash_command.hbs create mode 100644 crates/assistant/src/slash_command/cargo_workspace_command.rs diff --git a/assets/prompts/project_slash_command.hbs b/assets/prompts/project_slash_command.hbs new file mode 100644 index 
0000000000..6c63f71d89 --- /dev/null +++ b/assets/prompts/project_slash_command.hbs @@ -0,0 +1,8 @@ +A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings. +Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets +that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently +distinct from previous ones. + +Here is the question that's been asked, together with context that the developer has added manually: + +{{{context_buffer}}} diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 8b9c66ee55..9cc63af5a1 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,9 +41,10 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - auto_command, context_server_command, default_command, delta_command, diagnostics_command, - docs_command, fetch_command, file_command, now_command, project_command, prompt_command, - search_command, symbols_command, tab_command, terminal_command, workflow_command, + auto_command, cargo_workspace_command, context_server_command, default_command, delta_command, + diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command, + prompt_command, search_command, symbols_command, tab_command, terminal_command, + workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -384,20 +385,33 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut slash_command_registry.register_command(delta_command::DeltaSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); 
slash_command_registry.register_command(tab_command::TabSlashCommand, true); - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); slash_command_registry.register_command(prompt_command::PromptSlashCommand, true); slash_command_registry.register_command(default_command::DefaultSlashCommand, false); slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true); slash_command_registry.register_command(now_command::NowSlashCommand, false); slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true); + slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); if let Some(prompt_builder) = prompt_builder { slash_command_registry.register_command( workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), true, ); + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + slash_command_registry.register_command( + project_command::ProjectSlashCommand::new(prompt_builder.clone()), + true, + ); + } + } + }) + .detach(); } - slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); cx.observe_flag::({ let slash_command_registry = slash_command_registry.clone(); @@ -435,10 +449,12 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) { slash_command_registry.unregister_command(docs_command::DocsSlashCommand); } - if settings.project.enabled { - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + if settings.cargo_workspace.enabled { + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); } else { - slash_command_registry.unregister_command(project_command::ProjectSlashCommand); + slash_command_registry + 
.unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand); } } diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 97a5b3ea98..1cac47831f 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -1967,8 +1967,9 @@ impl Context { } pub fn assist(&mut self, cx: &mut ModelContext) -> Option { - let provider = LanguageModelRegistry::read_global(cx).active_provider()?; - let model = LanguageModelRegistry::read_global(cx).active_model()?; + let model_registry = LanguageModelRegistry::read_global(cx); + let provider = model_registry.active_provider()?; + let model = model_registry.active_model()?; let last_message_id = self.get_last_valid_message_id(cx)?; if !provider.is_authenticated(cx) { diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 3b9f75bac9..106935cb88 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -40,6 +40,11 @@ pub struct TerminalAssistantPromptContext { pub user_prompt: String, } +#[derive(Serialize)] +pub struct ProjectSlashCommandPromptContext { + pub context_buffer: String, +} + /// Context required to generate a workflow step resolution prompt. 
#[derive(Debug, Serialize)] pub struct StepResolutionContext { @@ -317,4 +322,14 @@ impl PromptBuilder { pub fn generate_workflow_prompt(&self) -> Result { self.handlebars.lock().render("edit_workflow", &()) } + + pub fn generate_project_slash_command_prompt( + &self, + context_buffer: String, + ) -> Result { + self.handlebars.lock().render( + "project_slash_command", + &ProjectSlashCommandPromptContext { context_buffer }, + ) + } } diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index cf957a15c6..e430e35622 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -18,8 +18,8 @@ use std::{ }; use ui::ActiveTheme; use workspace::Workspace; - pub mod auto_command; +pub mod cargo_workspace_command; pub mod context_server_command; pub mod default_command; pub mod delta_command; diff --git a/crates/assistant/src/slash_command/cargo_workspace_command.rs b/crates/assistant/src/slash_command/cargo_workspace_command.rs new file mode 100644 index 0000000000..baf16d7f01 --- /dev/null +++ b/crates/assistant/src/slash_command/cargo_workspace_command.rs @@ -0,0 +1,153 @@ +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Context, Result}; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use fs::Fs; +use gpui::{AppContext, Model, Task, WeakView}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use project::{Project, ProjectPath}; +use std::{ + fmt::Write, + path::Path, + sync::{atomic::AtomicBool, Arc}, +}; +use ui::prelude::*; +use workspace::Workspace; + +pub(crate) struct CargoWorkspaceSlashCommand; + +impl CargoWorkspaceSlashCommand { + async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { + let buffer = fs.load(path_to_cargo_toml).await?; + let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; + + let mut message = String::new(); + writeln!(message, "You are in a Rust project.")?; + + if let Some(workspace) = 
cargo_toml.workspace { + writeln!( + message, + "The project is a Cargo workspace with the following members:" + )?; + for member in workspace.members { + writeln!(message, "- {member}")?; + } + + if !workspace.default_members.is_empty() { + writeln!(message, "The default members are:")?; + for member in workspace.default_members { + writeln!(message, "- {member}")?; + } + } + + if !workspace.dependencies.is_empty() { + writeln!( + message, + "The following workspace dependencies are installed:" + )?; + for dependency in workspace.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } else if let Some(package) = cargo_toml.package { + writeln!( + message, + "The project name is \"{name}\".", + name = package.name + )?; + + let description = package + .description + .as_ref() + .and_then(|description| description.get().ok().cloned()); + if let Some(description) = description.as_ref() { + writeln!(message, "It describes itself as \"{description}\".")?; + } + + if !cargo_toml.dependencies.is_empty() { + writeln!(message, "The following dependencies are installed:")?; + for dependency in cargo_toml.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } + + Ok(message) + } + + fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { + let worktree = project.read(cx).worktrees(cx).next()?; + let worktree = worktree.read(cx); + let entry = worktree.entry_for_path("Cargo.toml")?; + let path = ProjectPath { + worktree_id: worktree.id(), + path: entry.path.clone(), + }; + Some(Arc::from( + project.read(cx).absolute_path(&path, cx)?.as_path(), + )) + } +} + +impl SlashCommand for CargoWorkspaceSlashCommand { + fn name(&self) -> String { + "cargo-workspace".into() + } + + fn description(&self) -> String { + "insert project workspace metadata".into() + } + + fn menu_text(&self) -> String { + "Insert Project Workspace Metadata".into() + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + 
_workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + Task::ready(Err(anyhow!("this command does not require argument"))) + } + + fn requires_argument(&self) -> bool { + false + } + + fn run( + self: Arc, + _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let output = workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + let fs = workspace.project().read(cx).fs().clone(); + let path = Self::path_to_cargo_toml(project, cx); + let output = cx.background_executor().spawn(async move { + let path = path.with_context(|| "Cargo.toml not found")?; + Self::build_message(fs, &path).await + }); + + cx.foreground_executor().spawn(async move { + let text = output.await?; + let range = 0..text.len(); + Ok(SlashCommandOutput { + text, + sections: vec![SlashCommandOutputSection { + range, + icon: IconName::FileTree, + label: "Project".into(), + metadata: None, + }], + run_commands_in_text: false, + }) + }) + }); + output.unwrap_or_else(|error| Task::ready(Err(error))) + } +} diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 3e8596d942..197e91d91a 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -1,90 +1,39 @@ -use super::{SlashCommand, SlashCommandOutput}; -use anyhow::{anyhow, Context, Result}; +use super::{ + create_label_for_command, search_command::add_search_result_section, SlashCommand, + SlashCommandOutput, +}; +use crate::PromptBuilder; +use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; -use fs::Fs; -use gpui::{AppContext, Model, Task, WeakView}; -use language::{BufferSnapshot, LspAdapterDelegate}; -use project::{Project, ProjectPath}; +use 
feature_flags::FeatureFlag; +use gpui::{AppContext, Task, WeakView, WindowContext}; +use language::{Anchor, CodeLabel, LspAdapterDelegate}; +use language_model::{LanguageModelRegistry, LanguageModelTool}; +use schemars::JsonSchema; +use semantic_index::SemanticDb; +use serde::Deserialize; + +pub struct ProjectSlashCommandFeatureFlag; + +impl FeatureFlag for ProjectSlashCommandFeatureFlag { + const NAME: &'static str = "project-slash-command"; +} + use std::{ - fmt::Write, - path::Path, + fmt::Write as _, + ops::DerefMut, sync::{atomic::AtomicBool, Arc}, }; -use ui::prelude::*; +use ui::{BorrowAppContext as _, IconName}; use workspace::Workspace; -pub(crate) struct ProjectSlashCommand; +pub struct ProjectSlashCommand { + prompt_builder: Arc, +} impl ProjectSlashCommand { - async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { - let buffer = fs.load(path_to_cargo_toml).await?; - let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; - - let mut message = String::new(); - writeln!(message, "You are in a Rust project.")?; - - if let Some(workspace) = cargo_toml.workspace { - writeln!( - message, - "The project is a Cargo workspace with the following members:" - )?; - for member in workspace.members { - writeln!(message, "- {member}")?; - } - - if !workspace.default_members.is_empty() { - writeln!(message, "The default members are:")?; - for member in workspace.default_members { - writeln!(message, "- {member}")?; - } - } - - if !workspace.dependencies.is_empty() { - writeln!( - message, - "The following workspace dependencies are installed:" - )?; - for dependency in workspace.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } else if let Some(package) = cargo_toml.package { - writeln!( - message, - "The project name is \"{name}\".", - name = package.name - )?; - - let description = package - .description - .as_ref() - .and_then(|description| description.get().ok().cloned()); - if let Some(description) = 
description.as_ref() { - writeln!(message, "It describes itself as \"{description}\".")?; - } - - if !cargo_toml.dependencies.is_empty() { - writeln!(message, "The following dependencies are installed:")?; - for dependency in cargo_toml.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } - - Ok(message) - } - - fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { - let worktree = project.read(cx).worktrees(cx).next()?; - let worktree = worktree.read(cx); - let entry = worktree.entry_for_path("Cargo.toml")?; - let path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - Some(Arc::from( - project.read(cx).absolute_path(&path, cx)?.as_path(), - )) + pub fn new(prompt_builder: Arc) -> Self { + Self { prompt_builder } } } @@ -93,12 +42,20 @@ impl SlashCommand for ProjectSlashCommand { "project".into() } + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("project", &[], cx) + } + fn description(&self) -> String { - "insert project metadata".into() + "Generate semantic searches based on the current context".into() } fn menu_text(&self) -> String { - "Insert Project Metadata".into() + "Project Context".into() + } + + fn requires_argument(&self) -> bool { + false } fn complete_argument( @@ -108,46 +65,126 @@ impl SlashCommand for ProjectSlashCommand { _workspace: Option>, _cx: &mut WindowContext, ) -> Task>> { - Task::ready(Err(anyhow!("this command does not require argument"))) - } - - fn requires_argument(&self) -> bool { - false + Task::ready(Ok(Vec::new())) } fn run( self: Arc, _arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, + _context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, ) -> Task> { - let output = workspace.update(cx, |workspace, cx| { - let project = 
workspace.project().clone(); - let fs = workspace.project().read(cx).fs().clone(); - let path = Self::path_to_cargo_toml(project, cx); - let output = cx.background_executor().spawn(async move { - let path = path.with_context(|| "Cargo.toml not found")?; - Self::build_message(fs, &path).await - }); + let model_registry = LanguageModelRegistry::read_global(cx); + let current_model = model_registry.active_model(); + let prompt_builder = self.prompt_builder.clone(); - cx.foreground_executor().spawn(async move { - let text = output.await?; - let range = 0..text.len(); - Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::FileTree, - label: "Project".into(), + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + let project = workspace.read(cx).project().clone(); + let fs = project.read(cx).fs().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; + + cx.spawn(|mut cx| async move { + let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?; + + let prompt = + prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?; + + let search_queries = current_model + .use_tool::( + language_model::LanguageModelRequest { + messages: vec![language_model::LanguageModelRequestMessage { + role: language_model::Role::User, + content: vec![language_model::MessageContent::Text(prompt)], + cache: false, + }], + tools: vec![], + stop: vec![], + temperature: None, + }, + cx.deref_mut(), + ) + .await? + .search_queries; + + let results = project_index + .read_with(&cx, |project_index, cx| { + project_index.search(search_queries.clone(), 25, cx) + })? 
+ .await?; + + let results = SemanticDb::load_results(results, &fs, &cx).await?; + + cx.background_executor() + .spawn(async move { + let mut output = "Project context:\n".to_string(); + let mut sections = Vec::new(); + + for (ix, query) in search_queries.into_iter().enumerate() { + let start_ix = output.len(); + writeln!(&mut output, "Results for {query}:").unwrap(); + let mut has_results = false; + for result in &results { + if result.query_index == ix { + add_search_result_section(result, &mut output, &mut sections); + has_results = true; + } + } + if has_results { + sections.push(SlashCommandOutputSection { + range: start_ix..output.len(), + icon: IconName::MagnifyingGlass, + label: query.into(), + metadata: None, + }); + output.push('\n'); + } else { + output.truncate(start_ix); + } + } + + sections.push(SlashCommandOutputSection { + range: 0..output.len(), + icon: IconName::Book, + label: "Project context".into(), metadata: None, - }], - run_commands_in_text: false, + }); + + Ok(SlashCommandOutput { + text: output, + sections, + run_commands_in_text: true, + }) }) - }) - }); - output.unwrap_or_else(|error| Task::ready(Err(error))) + .await + }) + } +} + +#[derive(JsonSchema, Deserialize)] +struct SearchQueries { + /// An array of semantic search queries. + /// + /// These queries will be used to search the user's codebase. + /// The function can only accept 4 queries, otherwise it will error. + /// As such, it's important that you limit the length of the search_queries array to 5 queries or less. 
+ search_queries: Vec, +} + +impl LanguageModelTool for SearchQueries { + fn name() -> String { + "search_queries".to_string() + } + + fn description() -> String { + "Generate semantic search queries based on context".to_string() } } diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 7e408cad39..f0f3ee3d25 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -7,7 +7,7 @@ use anyhow::Result; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; -use language::{CodeLabel, LineEnding, LspAdapterDelegate}; +use language::{CodeLabel, LspAdapterDelegate}; use semantic_index::{LoadedSearchResult, SemanticDb}; use std::{ fmt::Write, @@ -101,7 +101,7 @@ impl SlashCommand for SearchSlashCommand { cx.spawn(|cx| async move { let results = project_index .read_with(&cx, |project_index, cx| { - project_index.search(query.clone(), limit.unwrap_or(5), cx) + project_index.search(vec![query.clone()], limit.unwrap_or(5), cx) })? 
.await?; @@ -112,31 +112,8 @@ impl SlashCommand for SearchSlashCommand { .spawn(async move { let mut text = format!("Search results for {query}:\n"); let mut sections = Vec::new(); - for LoadedSearchResult { - path, - range, - full_path, - file_content, - row_range, - } in loaded_results - { - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path( - Some(&path), - Some(row_range.clone()), - )); - - let mut excerpt = file_content[range].to_string(); - LineEnding::normalize(&mut excerpt); - text.push_str(&excerpt); - writeln!(text, "\n```\n").unwrap(); - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - Some(&full_path), - false, - Some(row_range.start() + 1..row_range.end() + 1), - )); + for loaded_result in &loaded_results { + add_search_result_section(loaded_result, &mut text, &mut sections); } let query = SharedString::from(query); @@ -159,3 +136,35 @@ impl SlashCommand for SearchSlashCommand { }) } } + +pub fn add_search_result_section( + loaded_result: &LoadedSearchResult, + text: &mut String, + sections: &mut Vec>, +) { + let LoadedSearchResult { + path, + full_path, + excerpt_content, + row_range, + .. 
+ } = loaded_result; + let section_start_ix = text.len(); + text.push_str(&codeblock_fence_for_path( + Some(&path), + Some(row_range.clone()), + )); + + text.push_str(&excerpt_content); + if !text.ends_with('\n') { + text.push('\n'); + } + writeln!(text, "```\n").unwrap(); + let section_end_ix = text.len() - 1; + sections.push(build_entry_output_section( + section_start_ix..section_end_ix, + Some(&full_path), + false, + Some(row_range.start() + 1..row_range.end() + 1), + )); +} diff --git a/crates/assistant/src/slash_command_settings.rs b/crates/assistant/src/slash_command_settings.rs index eda950b6a2..c524b37803 100644 --- a/crates/assistant/src/slash_command_settings.rs +++ b/crates/assistant/src/slash_command_settings.rs @@ -10,9 +10,9 @@ pub struct SlashCommandSettings { /// Settings for the `/docs` slash command. #[serde(default)] pub docs: DocsCommandSettings, - /// Settings for the `/project` slash command. + /// Settings for the `/cargo-workspace` slash command. #[serde(default)] - pub project: ProjectCommandSettings, + pub cargo_workspace: CargoWorkspaceCommandSettings, } /// Settings for the `/docs` slash command. @@ -23,10 +23,10 @@ pub struct DocsCommandSettings { pub enabled: bool, } -/// Settings for the `/project` slash command. +/// Settings for the `/cargo-workspace` slash command. #[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] -pub struct ProjectCommandSettings { - /// Whether `/project` is enabled. +pub struct CargoWorkspaceCommandSettings { + /// Whether `/cargo-workspace` is enabled. 
#[serde(default)] pub enabled: bool, } diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 0580053373..e2c8b42644 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -438,7 +438,7 @@ async fn run_eval_project( loop { match cx.update(|cx| { let project_index = project_index.read(cx); - project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) + project_index.search(vec![query.query.clone()], SEARCH_RESULT_LIMIT, cx) }) { Ok(task) => match task.await { Ok(answer) => { diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 0cc3f9f317..c5c2c633a1 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -98,7 +98,7 @@ fn main() { .update(|cx| { let project_index = project_index.read(cx); let query = "converting an anchor to a point"; - project_index.search(query.into(), 4, cx) + project_index.search(vec![query.into()], 4, cx) }) .unwrap() .await diff --git a/crates/semantic_index/src/embedding.rs b/crates/semantic_index/src/embedding.rs index b05c4ac9da..1e1e0f0be7 100644 --- a/crates/semantic_index/src/embedding.rs +++ b/crates/semantic_index/src/embedding.rs @@ -42,14 +42,23 @@ impl Embedding { self.0.len() } - pub fn similarity(self, other: &Embedding) -> f32 { - debug_assert_eq!(self.0.len(), other.0.len()); - self.0 + pub fn similarity(&self, others: &[Embedding]) -> (f32, usize) { + debug_assert!(others.iter().all(|other| self.0.len() == other.0.len())); + others .iter() - .copied() - .zip(other.0.iter().copied()) - .map(|(a, b)| a * b) - .sum() + .enumerate() + .map(|(index, other)| { + let dot_product: f32 = self + .0 + .iter() + .copied() + .zip(other.0.iter().copied()) + .map(|(a, b)| a * b) + .sum(); + (dot_product, index) + }) + .max_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal)) + .unwrap_or((0.0, 0)) } } diff --git a/crates/semantic_index/src/project_index.rs 
b/crates/semantic_index/src/project_index.rs index 5c35c93fa9..21c036d60a 100644 --- a/crates/semantic_index/src/project_index.rs +++ b/crates/semantic_index/src/project_index.rs @@ -31,20 +31,23 @@ pub struct SearchResult { pub path: Arc, pub range: Range, pub score: f32, + pub query_index: usize, } +#[derive(Debug, PartialEq, Eq)] pub struct LoadedSearchResult { pub path: Arc, - pub range: Range, pub full_path: PathBuf, - pub file_content: String, + pub excerpt_content: String, pub row_range: RangeInclusive, + pub query_index: usize, } pub struct WorktreeSearchResult { pub worktree_id: WorktreeId, pub path: Arc, pub range: Range, + pub query_index: usize, pub score: f32, } @@ -227,7 +230,7 @@ impl ProjectIndex { pub fn search( &self, - query: String, + queries: Vec, limit: usize, cx: &AppContext, ) -> Task>> { @@ -275,15 +278,18 @@ impl ProjectIndex { cx.spawn(|cx| async move { #[cfg(debug_assertions)] let embedding_query_start = std::time::Instant::now(); - log::info!("Searching for {query}"); + log::info!("Searching for {queries:?}"); + let queries: Vec = queries + .iter() + .map(|s| TextToEmbed::new(s.as_str())) + .collect(); - let query_embeddings = embedding_provider - .embed(&[TextToEmbed::new(&query)]) - .await?; - let query_embedding = query_embeddings - .into_iter() - .next() - .ok_or_else(|| anyhow!("no embedding for query"))?; + let query_embeddings = embedding_provider.embed(&queries[..]).await?; + if query_embeddings.len() != queries.len() { + return Err(anyhow!( + "The number of query embeddings does not match the number of queries" + )); + } let mut results_by_worker = Vec::new(); for _ in 0..cx.background_executor().num_cpus() { @@ -292,28 +298,34 @@ impl ProjectIndex { #[cfg(debug_assertions)] let search_start = std::time::Instant::now(); - cx.background_executor() .scoped(|cx| { for results in results_by_worker.iter_mut() { cx.spawn(async { while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { - let score = 
chunk.embedding.similarity(&query_embedding); + let (score, query_index) = + chunk.embedding.similarity(&query_embeddings); + let ix = match results.binary_search_by(|probe| { score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) }) { Ok(ix) | Err(ix) => ix, }; - results.insert( - ix, - WorktreeSearchResult { - worktree_id, - path: path.clone(), - range: chunk.chunk.range.clone(), - score, - }, - ); - results.truncate(limit); + if ix < limit { + results.insert( + ix, + WorktreeSearchResult { + worktree_id, + path: path.clone(), + range: chunk.chunk.range.clone(), + query_index, + score, + }, + ); + if results.len() > limit { + results.pop(); + } + } } }); } @@ -333,6 +345,7 @@ impl ProjectIndex { path: result.path, range: result.range, score: result.score, + query_index: result.query_index, }) })); } diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 6c97ece024..332b4271a0 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -12,8 +12,13 @@ use anyhow::{Context as _, Result}; use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; -use project::Project; -use std::{path::PathBuf, sync::Arc}; +use language::LineEnding; +use project::{Project, Worktree}; +use std::{ + cmp::Ordering, + path::{Path, PathBuf}, + sync::Arc, +}; use ui::ViewContext; use util::ResultExt as _; use workspace::Workspace; @@ -77,46 +82,127 @@ impl SemanticDb { } pub async fn load_results( - results: Vec, + mut results: Vec, fs: &Arc, cx: &AsyncAppContext, ) -> Result> { - let mut loaded_results = Vec::new(); - for result in results { - let (full_path, file_content) = result.worktree.read_with(cx, |worktree, _cx| { - let entry_abs_path = worktree.abs_path().join(&result.path); - let mut entry_full_path = PathBuf::from(worktree.root_name()); - entry_full_path.push(&result.path); - let file_content = 
async { - let entry_abs_path = entry_abs_path; - fs.load(&entry_abs_path).await - }; - (entry_full_path, file_content) - })?; - if let Some(file_content) = file_content.await.log_err() { - let range_start = result.range.start.min(file_content.len()); - let range_end = result.range.end.min(file_content.len()); - - let start_row = file_content[0..range_start].matches('\n').count() as u32; - let end_row = file_content[0..range_end].matches('\n').count() as u32; - let start_line_byte_offset = file_content[0..range_start] - .rfind('\n') - .map(|pos| pos + 1) - .unwrap_or_default(); - let end_line_byte_offset = file_content[range_end..] - .find('\n') - .map(|pos| range_end + pos) - .unwrap_or_else(|| file_content.len()); - - loaded_results.push(LoadedSearchResult { - path: result.path, - range: start_line_byte_offset..end_line_byte_offset, - full_path, - file_content, - row_range: start_row..=end_row, - }); + let mut max_scores_by_path = HashMap::<_, (f32, usize)>::default(); + for result in &results { + let (score, query_index) = max_scores_by_path + .entry((result.worktree.clone(), result.path.clone())) + .or_default(); + if result.score > *score { + *score = result.score; + *query_index = result.query_index; } } + + results.sort_by(|a, b| { + let max_score_a = max_scores_by_path[&(a.worktree.clone(), a.path.clone())].0; + let max_score_b = max_scores_by_path[&(b.worktree.clone(), b.path.clone())].0; + max_score_b + .partial_cmp(&max_score_a) + .unwrap_or(Ordering::Equal) + .then_with(|| a.worktree.entity_id().cmp(&b.worktree.entity_id())) + .then_with(|| a.path.cmp(&b.path)) + .then_with(|| a.range.start.cmp(&b.range.start)) + }); + + let mut last_loaded_file: Option<(Model, Arc, PathBuf, String)> = None; + let mut loaded_results = Vec::::new(); + for result in results { + let full_path; + let file_content; + if let Some(last_loaded_file) = + last_loaded_file + .as_ref() + .filter(|(last_worktree, last_path, _, _)| { + last_worktree == &result.worktree && last_path == 
&result.path + }) + { + full_path = last_loaded_file.2.clone(); + file_content = &last_loaded_file.3; + } else { + let output = result.worktree.read_with(cx, |worktree, _cx| { + let entry_abs_path = worktree.abs_path().join(&result.path); + let mut entry_full_path = PathBuf::from(worktree.root_name()); + entry_full_path.push(&result.path); + let file_content = async { + let entry_abs_path = entry_abs_path; + fs.load(&entry_abs_path).await + }; + (entry_full_path, file_content) + })?; + full_path = output.0; + let Some(content) = output.1.await.log_err() else { + continue; + }; + last_loaded_file = Some(( + result.worktree.clone(), + result.path.clone(), + full_path.clone(), + content, + )); + file_content = &last_loaded_file.as_ref().unwrap().3; + }; + + let query_index = max_scores_by_path[&(result.worktree.clone(), result.path.clone())].1; + + let mut range_start = result.range.start.min(file_content.len()); + let mut range_end = result.range.end.min(file_content.len()); + while !file_content.is_char_boundary(range_start) { + range_start += 1; + } + while !file_content.is_char_boundary(range_end) { + range_end += 1; + } + + let start_row = file_content[0..range_start].matches('\n').count() as u32; + let mut end_row = file_content[0..range_end].matches('\n').count() as u32; + let start_line_byte_offset = file_content[0..range_start] + .rfind('\n') + .map(|pos| pos + 1) + .unwrap_or_default(); + let mut end_line_byte_offset = range_end; + if file_content[..end_line_byte_offset].ends_with('\n') { + end_row -= 1; + } else { + end_line_byte_offset = file_content[range_end..] 
+ .find('\n') + .map(|pos| range_end + pos + 1) + .unwrap_or_else(|| file_content.len()); + } + let mut excerpt_content = + file_content[start_line_byte_offset..end_line_byte_offset].to_string(); + LineEnding::normalize(&mut excerpt_content); + + if let Some(prev_result) = loaded_results.last_mut() { + if prev_result.full_path == full_path { + if *prev_result.row_range.end() + 1 == start_row { + prev_result.row_range = *prev_result.row_range.start()..=end_row; + prev_result.excerpt_content.push_str(&excerpt_content); + continue; + } + } + } + + loaded_results.push(LoadedSearchResult { + path: result.path, + full_path, + excerpt_content, + row_range: start_row..=end_row, + query_index, + }); + } + + for result in &mut loaded_results { + while result.excerpt_content.ends_with("\n\n") { + result.excerpt_content.pop(); + result.row_range = + *result.row_range.start()..=result.row_range.end().saturating_sub(1) + } + } + Ok(loaded_results) } @@ -312,7 +398,7 @@ mod tests { .update(|cx| { let project_index = project_index.read(cx); let query = "garbage in, garbage out"; - project_index.search(query.into(), 4, cx) + project_index.search(vec![query.into()], 4, cx) }) .await .unwrap(); @@ -426,4 +512,117 @@ mod tests { ], ); } + + #[gpui::test] + async fn test_load_search_results(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project_path = Path::new("/fake_project"); + + let file1_content = "one\ntwo\nthree\nfour\nfive\n"; + let file2_content = "aaa\nbbb\nccc\nddd\neee\n"; + + fs.insert_tree( + project_path, + json!({ + "file1.txt": file1_content, + "file2.txt": file2_content, + }), + ) + .await; + + let fs = fs as Arc; + let project = Project::test(fs.clone(), [project_path], cx).await; + let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + + // chunk that is already newline-aligned + let search_results = vec![SearchResult { + worktree: worktree.clone(), + path: 
Path::new("file1.txt").into(), + range: 0..file1_content.find("four").unwrap(), + score: 0.5, + query_index: 0, + }]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: "one\ntwo\nthree\n".into(), + row_range: 0..=2, + query_index: 0, + }] + ); + + // chunk that is *not* newline-aligned + let search_results = vec![SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: file1_content.find("two").unwrap() + 1..file1_content.find("four").unwrap() + 2, + score: 0.5, + query_index: 0, + }]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: "two\nthree\nfour\n".into(), + row_range: 1..=3, + query_index: 0, + }] + ); + + // chunks that are adjacent + + let search_results = vec![ + SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: file1_content.find("two").unwrap()..file1_content.len(), + score: 0.6, + query_index: 0, + }, + SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: 0..file1_content.find("two").unwrap(), + score: 0.5, + query_index: 1, + }, + SearchResult { + worktree: worktree.clone(), + path: Path::new("file2.txt").into(), + range: 0..file2_content.len(), + score: 0.8, + query_index: 1, + }, + ]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[ + LoadedSearchResult { + path: Path::new("file2.txt").into(), + full_path: "fake_project/file2.txt".into(), + excerpt_content: file2_content.into(), + row_range: 0..=4, + query_index: 1, + }, + LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: 
"fake_project/file1.txt".into(), + excerpt_content: file1_content.into(), + row_range: 0..=4, + query_index: 0, + } + ] + ); + } } From 743feb98bcae8e00c8399be03fb27dc2b925bcdb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 20 Sep 2024 15:28:50 -0700 Subject: [PATCH 263/270] Add the ability to propose changes to a set of buffers (#18170) This PR introduces functionality for creating *branches* of buffers that can be used to preview and edit change sets that haven't yet been applied to the buffers themselves. Release Notes: - N/A --------- Co-authored-by: Marshall Bowers Co-authored-by: Marshall --- Cargo.lock | 1 - crates/assistant/src/context.rs | 9 +- crates/channel/src/channel_buffer.rs | 5 +- crates/clock/src/clock.rs | 83 ++++++---- crates/editor/src/actions.rs | 1 + crates/editor/src/editor.rs | 78 ++++++++-- crates/editor/src/element.rs | 5 +- crates/editor/src/git.rs | 24 +-- crates/editor/src/hunk_diff.rs | 24 +-- crates/editor/src/proposed_changes_editor.rs | 125 +++++++++++++++ crates/editor/src/test.rs | 6 +- crates/git/src/diff.rs | 70 ++++----- crates/language/src/buffer.rs | 154 ++++++++++++++----- crates/language/src/buffer_tests.rs | 146 ++++++++++++++++-- crates/multi_buffer/Cargo.toml | 1 - crates/multi_buffer/src/multi_buffer.rs | 46 +++--- crates/project/src/project.rs | 7 +- crates/project/src/project_tests.rs | 2 +- crates/remote_server/src/headless_project.rs | 7 +- crates/text/src/text.rs | 14 ++ 20 files changed, 622 insertions(+), 186 deletions(-) create mode 100644 crates/editor/src/proposed_changes_editor.rs diff --git a/Cargo.lock b/Cargo.lock index dd07dfa1cf..c0f6751b89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7055,7 +7055,6 @@ dependencies = [ "ctor", "env_logger", "futures 0.3.30", - "git", "gpui", "itertools 0.13.0", "language", diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 1cac47831f..4f1f885b33 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs 
@@ -1006,9 +1006,12 @@ impl Context { cx: &mut ModelContext, ) { match event { - language::BufferEvent::Operation(operation) => cx.emit(ContextEvent::Operation( - ContextOperation::BufferOperation(operation.clone()), - )), + language::BufferEvent::Operation { + operation, + is_local: true, + } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation( + operation.clone(), + ))), language::BufferEvent::Edited => { self.count_remaining_tokens(cx); self.reparse(cx); diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 755e7400e1..0a4a259648 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -175,7 +175,10 @@ impl ChannelBuffer { cx: &mut ModelContext, ) { match event { - language::BufferEvent::Operation(operation) => { + language::BufferEvent::Operation { + operation, + is_local: true, + } => { if *ZED_ALWAYS_ACTIVE { if let language::Operation::UpdateSelections { selections, .. } = operation { if selections.is_empty() { diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index f7d36ed4a8..2b45e4a8fa 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -9,6 +9,8 @@ use std::{ pub use system_clock::*; +pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX; + /// A unique identifier for each distributed node. pub type ReplicaId = u16; @@ -25,7 +27,10 @@ pub struct Lamport { /// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock). 
#[derive(Clone, Default, Hash, Eq, PartialEq)] -pub struct Global(SmallVec<[u32; 8]>); +pub struct Global { + values: SmallVec<[u32; 8]>, + local_branch_value: u32, +} impl Global { pub fn new() -> Self { @@ -33,41 +38,51 @@ impl Global { } pub fn get(&self, replica_id: ReplicaId) -> Seq { - self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq + if replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value + } else { + self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq + } } pub fn observe(&mut self, timestamp: Lamport) { if timestamp.value > 0 { - let new_len = timestamp.replica_id as usize + 1; - if new_len > self.0.len() { - self.0.resize(new_len, 0); - } + if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value); + } else { + let new_len = timestamp.replica_id as usize + 1; + if new_len > self.values.len() { + self.values.resize(new_len, 0); + } - let entry = &mut self.0[timestamp.replica_id as usize]; - *entry = cmp::max(*entry, timestamp.value); + let entry = &mut self.values[timestamp.replica_id as usize]; + *entry = cmp::max(*entry, timestamp.value); + } } } pub fn join(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } - for (left, right) in self.0.iter_mut().zip(&other.0) { + for (left, right) in self.values.iter_mut().zip(&other.values) { *left = cmp::max(*left, *right); } + + self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value); } pub fn meet(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } let mut new_len = 0; for (ix, (left, right)) in self - .0 + .values .iter_mut() - .zip(other.0.iter().chain(iter::repeat(&0))) + 
.zip(other.values.iter().chain(iter::repeat(&0))) .enumerate() { if *left == 0 { @@ -80,7 +95,8 @@ impl Global { new_len = ix + 1; } } - self.0.resize(new_len, 0); + self.values.resize(new_len, 0); + self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value); } pub fn observed(&self, timestamp: Lamport) -> bool { @@ -88,34 +104,44 @@ impl Global { } pub fn observed_any(&self, other: &Self) -> bool { - self.0 + self.values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| *right > 0 && left >= right) + || (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value) } pub fn observed_all(&self, other: &Self) -> bool { - let mut rhs = other.0.iter(); - self.0.iter().all(|left| match rhs.next() { + let mut rhs = other.values.iter(); + self.values.iter().all(|left| match rhs.next() { Some(right) => left >= right, None => true, }) && rhs.next().is_none() + && self.local_branch_value >= other.local_branch_value } pub fn changed_since(&self, other: &Self) -> bool { - self.0.len() > other.0.len() + self.values.len() > other.values.len() || self - .0 + .values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| left > right) + || self.local_branch_value > other.local_branch_value } pub fn iter(&self) -> impl Iterator + '_ { - self.0.iter().enumerate().map(|(replica_id, seq)| Lamport { - replica_id: replica_id as ReplicaId, - value: *seq, - }) + self.values + .iter() + .enumerate() + .map(|(replica_id, seq)| Lamport { + replica_id: replica_id as ReplicaId, + value: *seq, + }) + .chain((self.local_branch_value > 0).then_some(Lamport { + replica_id: LOCAL_BRANCH_REPLICA_ID, + value: self.local_branch_value, + })) } } @@ -192,6 +218,9 @@ impl fmt::Debug for Global { } write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; } + if self.local_branch_value > 0 { + write!(f, ": {}", self.local_branch_value)?; + } write!(f, "}}") } } diff --git 
a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 93c83af195..2383c7f71a 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -273,6 +273,7 @@ gpui::actions!( NextScreen, OpenExcerpts, OpenExcerptsSplit, + OpenProposedChangesEditor, OpenFile, OpenPermalinkToLine, OpenUrl, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 1f4a9376d2..b1a3d95a0d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -35,6 +35,7 @@ mod lsp_ext; mod mouse_context_menu; pub mod movement; mod persistence; +mod proposed_changes_editor; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; @@ -46,7 +47,7 @@ mod signature_help; #[cfg(any(test, feature = "test-support"))] pub mod test; -use ::git::diff::{DiffHunk, DiffHunkStatus}; +use ::git::diff::DiffHunkStatus; use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; pub(crate) use actions::*; use aho_corasick::AhoCorasick; @@ -98,6 +99,7 @@ use language::{ }; use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; use linked_editing_ranges::refresh_linked_ranges; +use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor}; use similar::{ChangeTag, TextDiff}; use task::{ResolvedTask, TaskTemplate, TaskVariables}; @@ -113,7 +115,9 @@ pub use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, }; -use multi_buffer::{ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16}; +use multi_buffer::{ + ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16, +}; use ordered_float::OrderedFloat; use parking_lot::{Mutex, RwLock}; use project::project_settings::{GitGutterSetting, ProjectSettings}; @@ -6152,7 +6156,7 @@ impl Editor { pub fn prepare_revert_change( revert_changes: &mut HashMap, Rope)>>, multi_buffer: &Model, - hunk: &DiffHunk, + hunk: 
&MultiBufferDiffHunk, cx: &AppContext, ) -> Option<()> { let buffer = multi_buffer.read(cx).buffer(hunk.buffer_id)?; @@ -9338,7 +9342,7 @@ impl Editor { snapshot: &DisplaySnapshot, initial_point: Point, is_wrapped: bool, - hunks: impl Iterator>, + hunks: impl Iterator, cx: &mut ViewContext, ) -> bool { let display_point = initial_point.to_display_point(snapshot); @@ -11885,6 +11889,52 @@ impl Editor { self.searchable } + fn open_proposed_changes_editor( + &mut self, + _: &OpenProposedChangesEditor, + cx: &mut ViewContext, + ) { + let Some(workspace) = self.workspace() else { + cx.propagate(); + return; + }; + + let buffer = self.buffer.read(cx); + let mut new_selections_by_buffer = HashMap::default(); + for selection in self.selections.all::(cx) { + for (buffer, mut range, _) in + buffer.range_to_buffer_ranges(selection.start..selection.end, cx) + { + if selection.reversed { + mem::swap(&mut range.start, &mut range.end); + } + let mut range = range.to_point(buffer.read(cx)); + range.start.column = 0; + range.end.column = buffer.read(cx).line_len(range.end.row); + new_selections_by_buffer + .entry(buffer) + .or_insert(Vec::new()) + .push(range) + } + } + + let proposed_changes_buffers = new_selections_by_buffer + .into_iter() + .map(|(buffer, ranges)| ProposedChangesBuffer { buffer, ranges }) + .collect::>(); + let proposed_changes_editor = cx.new_view(|cx| { + ProposedChangesEditor::new(proposed_changes_buffers, self.project.clone(), cx) + }); + + cx.window_context().defer(move |cx| { + workspace.update(cx, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(proposed_changes_editor), true, true, None, cx); + }); + }); + }); + } + fn open_excerpts_in_split(&mut self, _: &OpenExcerptsSplit, cx: &mut ViewContext) { self.open_excerpts_common(true, cx) } @@ -12399,7 +12449,7 @@ impl Editor { fn hunks_for_selections( multi_buffer_snapshot: &MultiBufferSnapshot, selections: &[Selection], -) -> Vec> { +) -> Vec { let 
buffer_rows_for_selections = selections.iter().map(|selection| { let head = selection.head(); let tail = selection.tail(); @@ -12418,7 +12468,7 @@ fn hunks_for_selections( pub fn hunks_for_rows( rows: impl Iterator>, multi_buffer_snapshot: &MultiBufferSnapshot, -) -> Vec> { +) -> Vec { let mut hunks = Vec::new(); let mut processed_buffer_rows: HashMap>> = HashMap::default(); @@ -12430,14 +12480,14 @@ pub fn hunks_for_rows( // when the caret is just above or just below the deleted hunk. let allow_adjacent = hunk_status(&hunk) == DiffHunkStatus::Removed; let related_to_selection = if allow_adjacent { - hunk.associated_range.overlaps(&query_rows) - || hunk.associated_range.start == query_rows.end - || hunk.associated_range.end == query_rows.start + hunk.row_range.overlaps(&query_rows) + || hunk.row_range.start == query_rows.end + || hunk.row_range.end == query_rows.start } else { // `selected_multi_buffer_rows` are inclusive (e.g. [2..2] means 2nd row is selected) - // `hunk.associated_range` is exclusive (e.g. [2..3] means 2nd row is selected) - hunk.associated_range.overlaps(&selected_multi_buffer_rows) - || selected_multi_buffer_rows.end == hunk.associated_range.start + // `hunk.row_range` is exclusive (e.g. 
[2..3] means 2nd row is selected) + hunk.row_range.overlaps(&selected_multi_buffer_rows) + || selected_multi_buffer_rows.end == hunk.row_range.start }; if related_to_selection { if !processed_buffer_rows @@ -13738,10 +13788,10 @@ impl RowRangeExt for Range { } } -fn hunk_status(hunk: &DiffHunk) -> DiffHunkStatus { +fn hunk_status(hunk: &MultiBufferDiffHunk) -> DiffHunkStatus { if hunk.diff_base_byte_range.is_empty() { DiffHunkStatus::Added - } else if hunk.associated_range.is_empty() { + } else if hunk.row_range.is_empty() { DiffHunkStatus::Removed } else { DiffHunkStatus::Modified diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 47107b9754..d4075431ff 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -346,6 +346,7 @@ impl EditorElement { register_action(view, cx, Editor::toggle_code_actions); register_action(view, cx, Editor::open_excerpts); register_action(view, cx, Editor::open_excerpts_in_split); + register_action(view, cx, Editor::open_proposed_changes_editor); register_action(view, cx, Editor::toggle_soft_wrap); register_action(view, cx, Editor::toggle_tab_bar); register_action(view, cx, Editor::toggle_line_numbers); @@ -3710,11 +3711,11 @@ impl EditorElement { ) .map(|hunk| { let start_display_row = - MultiBufferPoint::new(hunk.associated_range.start.0, 0) + MultiBufferPoint::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) .row(); let mut end_display_row = - MultiBufferPoint::new(hunk.associated_range.end.0, 0) + MultiBufferPoint::new(hunk.row_range.end.0, 0) .to_display_point(&snapshot.display_snapshot) .row(); if end_display_row != start_display_row { diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 63b083faa8..79b78d5d14 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -2,9 +2,9 @@ pub mod blame; use std::ops::Range; -use git::diff::{DiffHunk, DiffHunkStatus}; +use git::diff::DiffHunkStatus; use language::Point; -use 
multi_buffer::{Anchor, MultiBufferRow}; +use multi_buffer::{Anchor, MultiBufferDiffHunk}; use crate::{ display_map::{DisplaySnapshot, ToDisplayPoint}, @@ -49,25 +49,25 @@ impl DisplayDiffHunk { } pub fn diff_hunk_to_display( - hunk: &DiffHunk, + hunk: &MultiBufferDiffHunk, snapshot: &DisplaySnapshot, ) -> DisplayDiffHunk { - let hunk_start_point = Point::new(hunk.associated_range.start.0, 0); - let hunk_start_point_sub = Point::new(hunk.associated_range.start.0.saturating_sub(1), 0); + let hunk_start_point = Point::new(hunk.row_range.start.0, 0); + let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); let hunk_end_point_sub = Point::new( - hunk.associated_range + hunk.row_range .end .0 .saturating_sub(1) - .max(hunk.associated_range.start.0), + .max(hunk.row_range.start.0), 0, ); let status = hunk_status(hunk); let is_removal = status == DiffHunkStatus::Removed; - let folds_start = Point::new(hunk.associated_range.start.0.saturating_sub(2), 0); - let folds_end = Point::new(hunk.associated_range.end.0 + 2, 0); + let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); + let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); let folds_range = folds_start..folds_end; let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { @@ -87,7 +87,7 @@ pub fn diff_hunk_to_display( } else { let start = hunk_start_point.to_display_point(snapshot).row(); - let hunk_end_row = hunk.associated_range.end.max(hunk.associated_range.start); + let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); let hunk_end_point = Point::new(hunk_end_row.0, 0); let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point); @@ -288,7 +288,7 @@ mod tests { assert_eq!( snapshot .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), &expected, ); @@ -296,7 +296,7 @@ mod tests { 
assert_eq!( snapshot .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), expected .iter() diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 361ea6246e..917d07ec4e 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -4,11 +4,12 @@ use std::{ }; use collections::{hash_map, HashMap, HashSet}; -use git::diff::{DiffHunk, DiffHunkStatus}; +use git::diff::DiffHunkStatus; use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; use language::Buffer; use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, ToPoint, + Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, + MultiBufferSnapshot, ToPoint, }; use settings::SettingsStore; use text::{BufferId, Point}; @@ -190,9 +191,9 @@ impl Editor { .buffer_snapshot .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) .filter(|hunk| { - let hunk_display_row_range = Point::new(hunk.associated_range.start.0, 0) + let hunk_display_row_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) - ..Point::new(hunk.associated_range.end.0, 0) + ..Point::new(hunk.row_range.end.0, 0) .to_display_point(&snapshot.display_snapshot); let row_range_end = display_rows_with_expanded_hunks.get(&hunk_display_row_range.start.row()); @@ -203,7 +204,7 @@ impl Editor { fn toggle_hunks_expanded( &mut self, - hunks_to_toggle: Vec>, + hunks_to_toggle: Vec, cx: &mut ViewContext, ) { let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); @@ -274,8 +275,8 @@ impl Editor { }); for remaining_hunk in hunks_to_toggle { let remaining_hunk_point_range = - Point::new(remaining_hunk.associated_range.start.0, 0) - ..Point::new(remaining_hunk.associated_range.end.0, 
0); + Point::new(remaining_hunk.row_range.start.0, 0) + ..Point::new(remaining_hunk.row_range.end.0, 0); hunks_to_expand.push(HoveredHunk { status: hunk_status(&remaining_hunk), multi_buffer_range: remaining_hunk_point_range @@ -705,7 +706,7 @@ impl Editor { fn to_diff_hunk( hovered_hunk: &HoveredHunk, multi_buffer_snapshot: &MultiBufferSnapshot, -) -> Option> { +) -> Option { let buffer_id = hovered_hunk .multi_buffer_range .start @@ -716,9 +717,8 @@ fn to_diff_hunk( let point_range = hovered_hunk .multi_buffer_range .to_point(multi_buffer_snapshot); - Some(DiffHunk { - associated_range: MultiBufferRow(point_range.start.row) - ..MultiBufferRow(point_range.end.row), + Some(MultiBufferDiffHunk { + row_range: MultiBufferRow(point_range.start.row)..MultiBufferRow(point_range.end.row), buffer_id, buffer_range, diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(), @@ -868,7 +868,7 @@ fn editor_with_deleted_text( fn buffer_diff_hunk( buffer_snapshot: &MultiBufferSnapshot, row_range: Range, -) -> Option> { +) -> Option { let mut hunks = buffer_snapshot.git_diff_hunks_in_range( MultiBufferRow(row_range.start.row)..MultiBufferRow(row_range.end.row), ); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs new file mode 100644 index 0000000000..3979e558a4 --- /dev/null +++ b/crates/editor/src/proposed_changes_editor.rs @@ -0,0 +1,125 @@ +use crate::{Editor, EditorEvent}; +use collections::HashSet; +use futures::{channel::mpsc, future::join_all}; +use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View}; +use language::{Buffer, BufferEvent, Capability}; +use multi_buffer::{ExcerptRange, MultiBuffer}; +use project::Project; +use smol::stream::StreamExt; +use std::{ops::Range, time::Duration}; +use text::ToOffset; +use ui::prelude::*; +use workspace::Item; + +pub struct ProposedChangesEditor { + editor: View, + _subscriptions: Vec, + _recalculate_diffs_task: Task>, + 
recalculate_diffs_tx: mpsc::UnboundedSender>, +} + +pub struct ProposedChangesBuffer { + pub buffer: Model, + pub ranges: Vec>, +} + +impl ProposedChangesEditor { + pub fn new( + buffers: Vec>, + project: Option>, + cx: &mut ViewContext, + ) -> Self { + let mut subscriptions = Vec::new(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + for buffer in buffers { + let branch_buffer = buffer.buffer.update(cx, |buffer, cx| buffer.branch(cx)); + subscriptions.push(cx.subscribe(&branch_buffer, Self::on_buffer_event)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + branch_buffer, + buffer.ranges.into_iter().map(|range| ExcerptRange { + context: range, + primary: None, + }), + cx, + ); + }); + } + + let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded(); + + Self { + editor: cx + .new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)), + recalculate_diffs_tx, + _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { + let mut buffers_to_diff = HashSet::default(); + while let Some(buffer) = recalculate_diffs_rx.next().await { + buffers_to_diff.insert(buffer); + + loop { + cx.background_executor() + .timer(Duration::from_millis(250)) + .await; + let mut had_further_changes = false; + while let Ok(next_buffer) = recalculate_diffs_rx.try_next() { + buffers_to_diff.insert(next_buffer?); + had_further_changes = true; + } + if !had_further_changes { + break; + } + } + + join_all(buffers_to_diff.drain().filter_map(|buffer| { + buffer + .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) + .ok()? 
+ })) + .await; + } + None + }), + _subscriptions: subscriptions, + } + } + + fn on_buffer_event( + &mut self, + buffer: Model, + event: &BufferEvent, + _cx: &mut ViewContext, + ) { + if let BufferEvent::Edited = event { + self.recalculate_diffs_tx.unbounded_send(buffer).ok(); + } + } +} + +impl Render for ProposedChangesEditor { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + self.editor.clone() + } +} + +impl FocusableView for ProposedChangesEditor { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.editor.focus_handle(cx) + } +} + +impl EventEmitter for ProposedChangesEditor {} + +impl Item for ProposedChangesEditor { + type Event = EditorEvent; + + fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + Some(Icon::new(IconName::Pencil)) + } + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some("Proposed changes".into()) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index fcbd3bd423..50214cd723 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -108,16 +108,16 @@ pub fn editor_hunks( .buffer_snapshot .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) .map(|hunk| { - let display_range = Point::new(hunk.associated_range.start.0, 0) + let display_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(snapshot) .row() - ..Point::new(hunk.associated_range.end.0, 0) + ..Point::new(hunk.row_range.end.0, 0) .to_display_point(snapshot) .row(); let (_, buffer, _) = editor .buffer() .read(cx) - .excerpt_containing(Point::new(hunk.associated_range.start.0, 0), cx) + .excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx) .expect("no excerpt for expanded buffer's hunk start"); let diff_base = buffer .read(cx) diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 8cc7ee1863..1f7930ce14 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -1,7 +1,7 @@ use rope::Rope; use std::{iter, ops::Range}; use 
sum_tree::SumTree; -use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; pub use git2 as libgit; use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; @@ -13,29 +13,30 @@ pub enum DiffHunkStatus { Removed, } -/// A diff hunk, representing a range of consequent lines in a singleton buffer, associated with a generic range. +/// A diff hunk resolved to rows in the buffer. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct DiffHunk { - /// E.g. a range in multibuffer, that has an excerpt added, singleton buffer for which has this diff hunk. - /// Consider a singleton buffer with 10 lines, all of them are modified — so a corresponding diff hunk would have a range 0..10. - /// And a multibuffer with the excerpt of lines 2-6 from the singleton buffer. - /// If the multibuffer is searched for diff hunks, the associated range would be multibuffer rows, corresponding to rows 2..6 from the singleton buffer. - /// But the hunk range would be 0..10, same for any other excerpts from the same singleton buffer. - pub associated_range: Range, - /// Singleton buffer ID this hunk belongs to. - pub buffer_id: BufferId, - /// A consequent range of lines in the singleton buffer, that were changed and produced this diff hunk. +pub struct DiffHunk { + /// The buffer range, expressed in terms of rows. + pub row_range: Range, + /// The range in the buffer to which this hunk corresponds. pub buffer_range: Range, - /// Original singleton buffer text before the change, that was instead of the `buffer_range`. + /// The range in the buffer's diff base text to which this hunk corresponds. pub diff_base_byte_range: Range, } -impl sum_tree::Item for DiffHunk { +/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range. 
+#[derive(Debug, Clone)] +struct InternalDiffHunk { + buffer_range: Range, + diff_base_byte_range: Range, +} + +impl sum_tree::Item for InternalDiffHunk { type Summary = DiffHunkSummary; fn summary(&self) -> Self::Summary { DiffHunkSummary { - buffer_range: self.associated_range.clone(), + buffer_range: self.buffer_range.clone(), } } } @@ -64,7 +65,7 @@ impl sum_tree::Summary for DiffHunkSummary { #[derive(Debug, Clone)] pub struct BufferDiff { last_buffer_version: Option, - tree: SumTree>, + tree: SumTree, } impl BufferDiff { @@ -79,11 +80,12 @@ impl BufferDiff { self.tree.is_empty() } + #[cfg(any(test, feature = "test-support"))] pub fn hunks_in_row_range<'a>( &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let start = buffer.anchor_before(Point::new(range.start, 0)); let end = buffer.anchor_after(Point::new(range.end, 0)); @@ -94,7 +96,7 @@ impl BufferDiff { &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let mut cursor = self .tree .filter::<_, DiffHunkSummary>(buffer, move |summary| { @@ -109,11 +111,8 @@ impl BufferDiff { }) .flat_map(move |hunk| { [ - ( - &hunk.associated_range.start, - hunk.diff_base_byte_range.start, - ), - (&hunk.associated_range.end, hunk.diff_base_byte_range.end), + (&hunk.buffer_range.start, hunk.diff_base_byte_range.start), + (&hunk.buffer_range.end, hunk.diff_base_byte_range.end), ] .into_iter() }); @@ -129,10 +128,9 @@ impl BufferDiff { } Some(DiffHunk { - associated_range: start_point.row..end_point.row, + row_range: start_point.row..end_point.row, diff_base_byte_range: start_base..end_base, buffer_range: buffer.anchor_before(start_point)..buffer.anchor_after(end_point), - buffer_id: buffer.remote_id(), }) }) } @@ -141,7 +139,7 @@ impl BufferDiff { &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let mut cursor = self .tree .filter::<_, 
DiffHunkSummary>(buffer, move |summary| { @@ -154,7 +152,7 @@ impl BufferDiff { cursor.prev(buffer); let hunk = cursor.item()?; - let range = hunk.associated_range.to_point(buffer); + let range = hunk.buffer_range.to_point(buffer); let end_row = if range.end.column > 0 { range.end.row + 1 } else { @@ -162,10 +160,9 @@ impl BufferDiff { }; Some(DiffHunk { - associated_range: range.start.row..end_row, + row_range: range.start.row..end_row, diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, }) }) } @@ -196,7 +193,7 @@ impl BufferDiff { } #[cfg(test)] - fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { + fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator { let start = text.anchor_before(Point::new(0, 0)); let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); self.hunks_intersecting_range(start..end, text) @@ -229,7 +226,7 @@ impl BufferDiff { hunk_index: usize, buffer: &text::BufferSnapshot, buffer_row_divergence: &mut i64, - ) -> DiffHunk { + ) -> InternalDiffHunk { let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); assert!(line_item_count > 0); @@ -284,11 +281,9 @@ impl BufferDiff { let start = Point::new(buffer_row_range.start, 0); let end = Point::new(buffer_row_range.end, 0); let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); - DiffHunk { - associated_range: buffer_range.clone(), + InternalDiffHunk { buffer_range, diff_base_byte_range, - buffer_id: buffer.remote_id(), } } } @@ -302,17 +297,16 @@ pub fn assert_hunks( diff_base: &str, expected_hunks: &[(Range, &str, &str)], ) where - Iter: Iterator>, + Iter: Iterator, { let actual_hunks = diff_hunks .map(|hunk| { ( - hunk.associated_range.clone(), + hunk.row_range.clone(), &diff_base[hunk.diff_base_byte_range], buffer .text_for_range( - Point::new(hunk.associated_range.start, 0) - ..Point::new(hunk.associated_range.end, 0), + 
Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0), ) .collect::(), ) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index acb57273e3..5735ee9616 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -21,8 +21,8 @@ use async_watch as watch; pub use clock::ReplicaId; use futures::channel::oneshot; use gpui::{ - AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Pixels, Task, TaskLabel, - WindowContext, + AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext, + Pixels, Task, TaskLabel, WindowContext, }; use lsp::LanguageServerId; use parking_lot::Mutex; @@ -84,11 +84,17 @@ pub enum Capability { pub type BufferRow = u32; +#[derive(Clone)] +enum BufferDiffBase { + Git(Rope), + PastBufferVersion(Model, BufferSnapshot), +} + /// An in-memory representation of a source code file, including its text, /// syntax trees, git status, and diagnostics. pub struct Buffer { text: TextBuffer, - diff_base: Option, + diff_base: Option, git_diff: git::diff::BufferDiff, file: Option>, /// The mtime of the file when this buffer was last loaded from @@ -121,6 +127,7 @@ pub struct Buffer { /// Memoize calls to has_changes_since(saved_version). /// The contents of a cell are (self.version, has_changes) at the time of a last call. has_unsaved_edits: Cell<(clock::Global, bool)>, + _subscriptions: Vec, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -308,7 +315,10 @@ pub enum Operation { pub enum BufferEvent { /// The buffer was changed in a way that must be /// propagated to its other replicas. - Operation(Operation), + Operation { + operation: Operation, + is_local: bool, + }, /// The buffer was edited. Edited, /// The buffer's `dirty` bit changed. 
@@ -644,7 +654,7 @@ impl Buffer { id: self.remote_id().into(), file: self.file.as_ref().map(|f| f.to_proto(cx)), base_text: self.base_text().to_string(), - diff_base: self.diff_base.as_ref().map(|h| h.to_string()), + diff_base: self.diff_base().as_ref().map(|h| h.to_string()), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, saved_version: proto::serialize_version(&self.saved_version), saved_mtime: self.saved_mtime.map(|time| time.into()), @@ -734,12 +744,10 @@ impl Buffer { was_dirty_before_starting_transaction: None, has_unsaved_edits: Cell::new((buffer.version(), false)), text: buffer, - diff_base: diff_base - .map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - raw_diff_base - }) - .map(Rope::from), + diff_base: diff_base.map(|mut raw_diff_base| { + LineEnding::normalize(&mut raw_diff_base); + BufferDiffBase::Git(Rope::from(raw_diff_base)) + }), diff_base_version: 0, git_diff, file, @@ -759,6 +767,7 @@ impl Buffer { completion_triggers_timestamp: Default::default(), deferred_ops: OperationQueue::new(), has_conflict: false, + _subscriptions: Vec::new(), } } @@ -782,6 +791,52 @@ impl Buffer { } } + pub fn branch(&mut self, cx: &mut ModelContext) -> Model { + let this = cx.handle(); + cx.new_model(|cx| { + let mut branch = Self { + diff_base: Some(BufferDiffBase::PastBufferVersion( + this.clone(), + self.snapshot(), + )), + language: self.language.clone(), + has_conflict: self.has_conflict, + has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()), + _subscriptions: vec![cx.subscribe(&this, |branch: &mut Self, _, event, cx| { + if let BufferEvent::Operation { operation, .. 
} = event { + branch.apply_ops([operation.clone()], cx); + branch.diff_base_version += 1; + } + })], + ..Self::build( + self.text.branch(), + None, + self.file.clone(), + self.capability(), + ) + }; + if let Some(language_registry) = self.language_registry() { + branch.set_language_registry(language_registry); + } + + branch + }) + } + + pub fn merge(&mut self, branch: &Model, cx: &mut ModelContext) { + let branch = branch.read(cx); + let edits = branch + .edits_since::(&self.version) + .map(|edit| { + ( + edit.old, + branch.text_for_range(edit.new).collect::(), + ) + }) + .collect::>(); + self.edit(edits, None, cx); + } + #[cfg(test)] pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot { &self.text @@ -961,20 +1016,23 @@ impl Buffer { /// Returns the current diff base, see [Buffer::set_diff_base]. pub fn diff_base(&self) -> Option<&Rope> { - self.diff_base.as_ref() + match self.diff_base.as_ref()? { + BufferDiffBase::Git(rope) => Some(rope), + BufferDiffBase::PastBufferVersion(_, buffer_snapshot) => { + Some(buffer_snapshot.as_rope()) + } + } } /// Sets the text that will be used to compute a Git diff /// against the buffer text. pub fn set_diff_base(&mut self, diff_base: Option, cx: &mut ModelContext) { - self.diff_base = diff_base - .map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - raw_diff_base - }) - .map(Rope::from); + self.diff_base = diff_base.map(|mut raw_diff_base| { + LineEnding::normalize(&mut raw_diff_base); + BufferDiffBase::Git(Rope::from(raw_diff_base)) + }); self.diff_base_version += 1; - if let Some(recalc_task) = self.git_diff_recalc(cx) { + if let Some(recalc_task) = self.recalculate_diff(cx) { cx.spawn(|buffer, mut cx| async move { recalc_task.await; buffer @@ -992,14 +1050,21 @@ impl Buffer { self.diff_base_version } - /// Recomputes the Git diff status. - pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) -> Option> { - let diff_base = self.diff_base.clone()?; + /// Recomputes the diff. 
+ pub fn recalculate_diff(&mut self, cx: &mut ModelContext) -> Option> { + let diff_base_rope = match self.diff_base.as_mut()? { + BufferDiffBase::Git(rope) => rope.clone(), + BufferDiffBase::PastBufferVersion(base_buffer, base_buffer_snapshot) => { + let new_base_snapshot = base_buffer.read(cx).snapshot(); + *base_buffer_snapshot = new_base_snapshot; + base_buffer_snapshot.as_rope().clone() + } + }; let snapshot = self.snapshot(); let mut diff = self.git_diff.clone(); let diff = cx.background_executor().spawn(async move { - diff.update(&diff_base, &snapshot).await; + diff.update(&diff_base_rope, &snapshot).await; diff }); @@ -1169,7 +1234,7 @@ impl Buffer { lamport_timestamp, }; self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx); - self.send_operation(op, cx); + self.send_operation(op, true, cx); } fn request_autoindent(&mut self, cx: &mut ModelContext) { @@ -1743,6 +1808,7 @@ impl Buffer { lamport_timestamp, cursor_shape, }, + true, cx, ); self.non_text_state_update_count += 1; @@ -1889,7 +1955,7 @@ impl Buffer { } self.end_transaction(cx); - self.send_operation(Operation::Buffer(edit_operation), cx); + self.send_operation(Operation::Buffer(edit_operation), true, cx); Some(edit_id) } @@ -1991,6 +2057,9 @@ impl Buffer { } }) .collect::>(); + for operation in buffer_ops.iter() { + self.send_operation(Operation::Buffer(operation.clone()), false, cx); + } self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); @@ -2114,8 +2183,16 @@ impl Buffer { } } - fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { - cx.emit(BufferEvent::Operation(operation)); + fn send_operation( + &mut self, + operation: Operation, + is_local: bool, + cx: &mut ModelContext, + ) { + cx.emit(BufferEvent::Operation { + operation, + is_local, + }); } /// Removes the selections for a given peer. 
@@ -2130,7 +2207,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some((transaction_id, operation)) = self.text.undo() { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -2147,7 +2224,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); if let Some(operation) = self.text.undo_transaction(transaction_id) { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); true } else { @@ -2167,7 +2244,7 @@ impl Buffer { let operations = self.text.undo_to_transaction(transaction_id); let undone = !operations.is_empty(); for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); } if undone { self.did_edit(&old_version, was_dirty, cx) @@ -2181,7 +2258,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some((transaction_id, operation)) = self.text.redo() { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -2201,7 +2278,7 @@ impl Buffer { let operations = self.text.redo_to_transaction(transaction_id); let redone = !operations.is_empty(); for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); } if redone { self.did_edit(&old_version, was_dirty, cx) @@ -2218,6 +2295,7 @@ impl Buffer { triggers, lamport_timestamp: self.completion_triggers_timestamp, }, + true, cx, ); cx.notify(); @@ -2297,7 +2375,7 @@ impl Buffer { let ops = self.text.randomly_undo_redo(rng); if !ops.is_empty() { for op in ops { - 
self.send_operation(Operation::Buffer(op), cx); + self.send_operation(Operation::Buffer(op), true, cx); self.did_edit(&old_version, was_dirty, cx); } } @@ -3638,12 +3716,12 @@ impl BufferSnapshot { !self.git_diff.is_empty() } - /// Returns all the Git diff hunks intersecting the given - /// row range. + /// Returns all the Git diff hunks intersecting the given row range. + #[cfg(any(test, feature = "test-support"))] pub fn git_diff_hunks_in_row_range( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_in_row_range(range, self) } @@ -3652,7 +3730,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_intersecting_range( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_intersecting_range(range, self) } @@ -3661,7 +3739,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_intersecting_range_rev( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_intersecting_range_rev(range, self) } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 23faa33316..1335a94dd0 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -6,6 +6,7 @@ use crate::Buffer; use clock::ReplicaId; use collections::BTreeMap; use futures::FutureExt as _; +use git::diff::assert_hunks; use gpui::{AppContext, BorrowAppContext, Model}; use gpui::{Context, TestAppContext}; use indoc::indoc; @@ -275,13 +276,19 @@ fn test_edit_events(cx: &mut gpui::AppContext) { |buffer, cx| { let buffer_1_events = buffer_1_events.clone(); cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() { - BufferEvent::Operation(op) => buffer1_ops.lock().push(op), + BufferEvent::Operation { + operation, + is_local: true, + } => buffer1_ops.lock().push(operation), event => buffer_1_events.lock().push(event), }) .detach(); let buffer_2_events = buffer_2_events.clone(); - cx.subscribe(&buffer2, move |_, _, event, _| { - 
buffer_2_events.lock().push(event.clone()) + cx.subscribe(&buffer2, move |_, _, event, _| match event.clone() { + BufferEvent::Operation { + is_local: false, .. + } => {} + event => buffer_2_events.lock().push(event), }) .detach(); @@ -2370,6 +2377,118 @@ async fn test_find_matching_indent(cx: &mut TestAppContext) { ); } +#[gpui::test] +fn test_branch_and_merge(cx: &mut TestAppContext) { + cx.update(|cx| init_settings(cx, |_| {})); + + let base_buffer = cx.new_model(|cx| Buffer::local("one\ntwo\nthree\n", cx)); + + // Create a remote replica of the base buffer. + let base_buffer_replica = cx.new_model(|cx| { + Buffer::from_proto( + 1, + Capability::ReadWrite, + base_buffer.read(cx).to_proto(cx), + None, + ) + .unwrap() + }); + base_buffer.update(cx, |_buffer, cx| { + cx.subscribe(&base_buffer_replica, |this, _, event, cx| { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + this.apply_ops([operation.clone()], cx); + } + }) + .detach(); + }); + + // Create a branch, which initially has the same state as the base buffer. + let branch_buffer = base_buffer.update(cx, |buffer, cx| buffer.branch(cx)); + branch_buffer.read_with(cx, |buffer, _| { + assert_eq!(buffer.text(), "one\ntwo\nthree\n"); + }); + + // Edits to the branch are not applied to the base. + branch_buffer.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(1, 0)..Point::new(1, 0), "ONE_POINT_FIVE\n")], + None, + cx, + ) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!(base_buffer.read(cx).text(), "one\ntwo\nthree\n"); + assert_eq!(branch_buffer.text(), "one\nONE_POINT_FIVE\ntwo\nthree\n"); + }); + + // Edits to the base are applied to the branch. 
+ base_buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\nthree\n"); + assert_eq!( + branch_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n" + ); + }); + + assert_diff_hunks(&branch_buffer, cx, &[(2..3, "", "ONE_POINT_FIVE\n")]); + + // Edits to any replica of the base are applied to the branch. + base_buffer_replica.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(2, 0)..Point::new(2, 0), "TWO_POINT_FIVE\n")], + None, + cx, + ) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!( + base_buffer.read(cx).text(), + "ZERO\none\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + assert_eq!( + branch_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + }); + + // Merging the branch applies all of its changes to the base. + base_buffer.update(cx, |base_buffer, cx| { + base_buffer.merge(&branch_buffer, cx); + assert_eq!( + base_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + }); +} + +fn assert_diff_hunks( + buffer: &Model, + cx: &mut TestAppContext, + expected_hunks: &[(Range, &str, &str)], +) { + buffer + .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) + .detach(); + cx.executor().run_until_parked(); + + buffer.read_with(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + assert_hunks( + snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), + &snapshot, + &buffer.diff_base().unwrap().to_string(), + expected_hunks, + ); + }); +} + #[gpui::test(iterations = 100)] fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { let min_peers = env::var("MIN_PEERS") @@ -2407,10 +2526,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); 
cx.subscribe(&cx.handle(), move |buffer, _, event, _| { - if let BufferEvent::Operation(op) = event { - network - .lock() - .broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]); + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + network.lock().broadcast( + buffer.replica_id(), + vec![proto::serialize_operation(operation)], + ); } }) .detach(); @@ -2533,10 +2657,14 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.handle(), move |buffer, _, event, _| { - if let BufferEvent::Operation(op) = event { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { network.lock().broadcast( buffer.replica_id(), - vec![proto::serialize_operation(op)], + vec![proto::serialize_operation(operation)], ); } }) diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index acd0c89f8e..444fe3c75c 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -27,7 +27,6 @@ collections.workspace = true ctor.workspace = true env_logger.workspace = true futures.workspace = true -git.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index f6a61f562a..d406f9bfaf 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5,7 +5,6 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; use futures::{channel::mpsc, SinkExt}; -use git::diff::DiffHunk; use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext}; use itertools::Itertools; use language::{ @@ -110,6 +109,19 @@ pub enum Event { DiagnosticsUpdated, } +/// A diff hunk, representing a range of consequent lines in a multibuffer. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MultiBufferDiffHunk { + /// The row range in the multibuffer where this diff hunk appears. + pub row_range: Range, + /// The buffer ID that this hunk belongs to. + pub buffer_id: BufferId, + /// The range of the underlying buffer that this hunk corresponds to. + pub buffer_range: Range, + /// The range within the buffer's diff base that this hunk corresponds to. + pub diff_base_byte_range: Range, +} + pub type MultiBufferPoint = Point; #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, serde::Deserialize)] @@ -1711,7 +1723,7 @@ impl MultiBuffer { } // - language::BufferEvent::Operation(_) => return, + language::BufferEvent::Operation { .. } => return, }); } @@ -3561,7 +3573,7 @@ impl MultiBufferSnapshot { pub fn git_diff_hunks_in_range_rev( &self, row_range: Range, - ) -> impl Iterator> + '_ { + ) -> impl Iterator + '_ { let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &()); @@ -3599,22 +3611,19 @@ impl MultiBufferSnapshot { .git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end) .map(move |hunk| { let start = multibuffer_start.row - + hunk - .associated_range - .start - .saturating_sub(excerpt_start_point.row); + + hunk.row_range.start.saturating_sub(excerpt_start_point.row); let end = multibuffer_start.row + hunk - .associated_range + .row_range .end .min(excerpt_end_point.row + 1) .saturating_sub(excerpt_start_point.row); - DiffHunk { - associated_range: MultiBufferRow(start)..MultiBufferRow(end), + MultiBufferDiffHunk { + row_range: MultiBufferRow(start)..MultiBufferRow(end), diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, + buffer_id: excerpt.buffer_id, } }); @@ -3628,7 +3637,7 @@ impl MultiBufferSnapshot { pub fn git_diff_hunks_in_range( &self, row_range: Range, - ) -> impl Iterator> + '_ { + ) -> impl Iterator + '_ { let mut cursor = 
self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &()); @@ -3673,23 +3682,20 @@ impl MultiBufferSnapshot { MultiBufferRow(0)..MultiBufferRow(1) } else { let start = multibuffer_start.row - + hunk - .associated_range - .start - .saturating_sub(excerpt_rows.start); + + hunk.row_range.start.saturating_sub(excerpt_rows.start); let end = multibuffer_start.row + hunk - .associated_range + .row_range .end .min(excerpt_rows.end + 1) .saturating_sub(excerpt_rows.start); MultiBufferRow(start)..MultiBufferRow(end) }; - DiffHunk { - associated_range: buffer_range, + MultiBufferDiffHunk { + row_range: buffer_range, diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, + buffer_id: excerpt.buffer_id, } }); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 435c143024..bd9c17ecb2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2182,7 +2182,10 @@ impl Project { let buffer_id = buffer.read(cx).remote_id(); match event { - BufferEvent::Operation(operation) => { + BufferEvent::Operation { + operation, + is_local: true, + } => { let operation = language::proto::serialize_operation(operation); if let Some(ssh) = &self.ssh_session { @@ -2267,7 +2270,7 @@ impl Project { .filter_map(|buffer| { let buffer = buffer.upgrade()?; buffer - .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx)) + .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) .ok() .flatten() }) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 72a38ccba7..d0d67f0cda 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -3288,7 +3288,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { cx.subscribe(&buffer1, { let events = events.clone(); move |_, _, event, _| match event { - BufferEvent::Operation(_) => {} + BufferEvent::Operation { .. 
} => {} _ => events.lock().push(event.clone()), } }) diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 54f48e3626..9d5c26d6c7 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -146,12 +146,15 @@ impl HeadlessProject { cx: &mut ModelContext, ) { match event { - BufferEvent::Operation(op) => cx + BufferEvent::Operation { + operation, + is_local: true, + } => cx .background_executor() .spawn(self.session.request(proto::UpdateBuffer { project_id: SSH_PROJECT_ID, buffer_id: buffer.read(cx).remote_id().to_proto(), - operations: vec![serialize_operation(op)], + operations: vec![serialize_operation(operation)], })) .detach(), _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 8d2cd97aac..8bdc9fdb03 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -13,6 +13,7 @@ mod undo_map; pub use anchor::*; use anyhow::{anyhow, Context as _, Result}; pub use clock::ReplicaId; +use clock::LOCAL_BRANCH_REPLICA_ID; use collections::{HashMap, HashSet}; use locator::Locator; use operation_queue::OperationQueue; @@ -715,6 +716,19 @@ impl Buffer { self.snapshot.clone() } + pub fn branch(&self) -> Self { + Self { + snapshot: self.snapshot.clone(), + history: History::new(self.base_text().clone()), + deferred_ops: OperationQueue::new(), + deferred_replicas: HashSet::default(), + lamport_clock: clock::Lamport::new(LOCAL_BRANCH_REPLICA_ID), + subscriptions: Default::default(), + edit_id_resolvers: Default::default(), + wait_for_version_txs: Default::default(), + } + } + pub fn replica_id(&self) -> ReplicaId { self.lamport_clock.replica_id } From 4f227fd3bf19fe7393d278545edfa06343dc5958 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 20 Sep 2024 18:51:34 -0600 Subject: [PATCH 264/270] Use LanguageServerName in more places (#18167) This pushes the new LanguageServerName type to more places. 
As both languages and language servers were identified by Arc, it was sometimes hard to tell which was intended. Release Notes: - N/A --- .../src/activity_indicator.rs | 7 ++- .../remote_editing_collaboration_tests.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 4 +- .../src/wasm_host/wit/since_v0_2_0.rs | 4 +- crates/gpui/src/shared_string.rs | 7 +++ crates/language/src/language.rs | 57 ++++++++++++++++--- crates/language/src/language_settings.rs | 32 ++++++----- crates/language_tools/src/lsp_log.rs | 2 +- crates/languages/src/c.rs | 7 ++- crates/languages/src/go.rs | 7 ++- crates/languages/src/python.rs | 21 ++++--- crates/languages/src/rust.rs | 8 +-- crates/languages/src/tailwind.rs | 9 +-- crates/languages/src/typescript.rs | 13 +++-- crates/languages/src/vtsls.rs | 8 +-- crates/languages/src/yaml.rs | 8 +-- crates/project/src/lsp_store.rs | 24 ++++---- crates/project/src/prettier_store.rs | 4 +- crates/project/src/project_settings.rs | 3 +- .../remote_server/src/remote_editing_tests.rs | 8 +-- 20 files changed, 150 insertions(+), 85 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 3f567c9e80..a9ae7d075d 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -19,7 +19,10 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace}; actions!(activity_indicator, [ShowErrorMessage]); pub enum Event { - ShowError { lsp_name: Arc, error: String }, + ShowError { + lsp_name: LanguageServerName, + error: String, + }, } pub struct ActivityIndicator { @@ -123,7 +126,7 @@ impl ActivityIndicator { self.statuses.retain(|status| { if let LanguageServerBinaryStatus::Failed { error } = &status.status { cx.emit(Event::ShowError { - lsp_name: status.name.0.clone(), + lsp_name: status.name.clone(), error: error.clone(), }); false diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs 
b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index c4410fd776..cdcf69cf7e 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -102,7 +102,7 @@ async fn test_sharing_an_ssh_remote_project( all_language_settings(file, cx) .language(Some(&("Rust".into()))) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 50547b6371..3835f58f88 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; -use language::LanguageName; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; +use language::{LanguageName, LanguageServerName}; use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ @@ -366,7 +366,7 @@ impl ExtensionImports for WasmState { .and_then(|key| { ProjectSettings::get(location, cx) .lsp - .get(&Arc::::from(key)) + .get(&LanguageServerName(key.into())) }) .cloned() .unwrap_or_default(); diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs index 7fa79c2544..eb6e1a09a2 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; -use language::LanguageName; use language::{ language_settings::AllLanguageSettings, 
LanguageServerBinaryStatus, LspAdapterDelegate, }; +use language::{LanguageName, LanguageServerName}; use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ @@ -412,7 +412,7 @@ impl ExtensionImports for WasmState { .and_then(|key| { ProjectSettings::get(location, cx) .lsp - .get(&Arc::::from(key)) + .get(&LanguageServerName::from_proto(key)) }) .cloned() .unwrap_or_default(); diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index a4ed36ec21..f5aef6adf8 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -9,6 +9,13 @@ use util::arc_cow::ArcCow; #[derive(Deref, DerefMut, Eq, PartialEq, PartialOrd, Ord, Hash, Clone)] pub struct SharedString(ArcCow<'static, str>); +impl SharedString { + /// creates a static SharedString + pub const fn new_static(s: &'static str) -> Self { + Self(ArcCow::Borrowed(s)) + } +} + impl Default for SharedString { fn default() -> Self { Self(ArcCow::Owned(Arc::default())) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 309a67a1a9..29a7ac1860 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -139,11 +139,52 @@ pub trait ToLspPosition { /// A name of a language server. 
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] -pub struct LanguageServerName(pub Arc); +pub struct LanguageServerName(pub SharedString); +impl std::fmt::Display for LanguageServerName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(&self.0, f) + } +} + +impl AsRef for LanguageServerName { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} + +impl AsRef for LanguageServerName { + fn as_ref(&self) -> &OsStr { + self.0.as_ref().as_ref() + } +} + +impl JsonSchema for LanguageServerName { + fn schema_name() -> String { + "LanguageServerName".into() + } + + fn json_schema(_: &mut SchemaGenerator) -> Schema { + SchemaObject { + instance_type: Some(InstanceType::String.into()), + ..Default::default() + } + .into() + } +} impl LanguageServerName { + pub const fn new_static(s: &'static str) -> Self { + Self(SharedString::new_static(s)) + } + pub fn from_proto(s: String) -> Self { - Self(Arc::from(s)) + Self(s.into()) + } +} + +impl<'a> From<&'a str> for LanguageServerName { + fn from(str: &'a str) -> LanguageServerName { + LanguageServerName(str.to_string().into()) } } @@ -202,8 +243,8 @@ impl CachedLspAdapter { }) } - pub fn name(&self) -> Arc { - self.adapter.name().0.clone() + pub fn name(&self) -> LanguageServerName { + self.adapter.name().clone() } pub async fn get_language_server_command( @@ -594,7 +635,7 @@ pub struct LanguageConfig { pub block_comment: Option<(Arc, Arc)>, /// A list of language servers that are allowed to run on subranges of a given language. 
#[serde(default)] - pub scope_opt_in_language_servers: Vec, + pub scope_opt_in_language_servers: Vec, #[serde(default)] pub overrides: HashMap, /// A list of characters that Zed should treat as word characters for the @@ -658,7 +699,7 @@ pub struct LanguageConfigOverride { #[serde(default)] pub word_characters: Override>, #[serde(default)] - pub opt_into_language_servers: Vec, + pub opt_into_language_servers: Vec, } #[derive(Clone, Deserialize, Debug, Serialize, JsonSchema)] @@ -1479,9 +1520,9 @@ impl LanguageScope { pub fn language_allowed(&self, name: &LanguageServerName) -> bool { let config = &self.language.config; let opt_in_servers = &config.scope_opt_in_language_servers; - if opt_in_servers.iter().any(|o| *o == *name.0) { + if opt_in_servers.iter().any(|o| *o == *name) { if let Some(over) = self.config_override() { - over.opt_into_language_servers.iter().any(|o| *o == *name.0) + over.opt_into_language_servers.iter().any(|o| *o == *name) } else { false } diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 6121cb6a39..82d4208aae 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -99,7 +99,7 @@ pub struct LanguageSettings { /// special tokens: /// - `"!"` - A language server ID prefixed with a `!` will be disabled. /// - `"..."` - A placeholder to refer to the **rest** of the registered language servers for this language. - pub language_servers: Vec>, + pub language_servers: Vec, /// Controls whether inline completions are shown immediately (true) /// or manually by triggering `editor::ShowInlineCompletion` (false). 
pub show_inline_completions: bool, @@ -137,22 +137,24 @@ impl LanguageSettings { } pub(crate) fn resolve_language_servers( - configured_language_servers: &[Arc], + configured_language_servers: &[String], available_language_servers: &[LanguageServerName], ) -> Vec { - let (disabled_language_servers, enabled_language_servers): (Vec>, Vec>) = - configured_language_servers.iter().partition_map( - |language_server| match language_server.strip_prefix('!') { - Some(disabled) => Either::Left(disabled.into()), - None => Either::Right(language_server.clone()), - }, - ); + let (disabled_language_servers, enabled_language_servers): ( + Vec, + Vec, + ) = configured_language_servers.iter().partition_map( + |language_server| match language_server.strip_prefix('!') { + Some(disabled) => Either::Left(LanguageServerName(disabled.to_string().into())), + None => Either::Right(LanguageServerName(language_server.clone().into())), + }, + ); let rest = available_language_servers .iter() .filter(|&available_language_server| { - !disabled_language_servers.contains(&available_language_server.0) - && !enabled_language_servers.contains(&available_language_server.0) + !disabled_language_servers.contains(&available_language_server) + && !enabled_language_servers.contains(&available_language_server) }) .cloned() .collect::>(); @@ -160,10 +162,10 @@ impl LanguageSettings { enabled_language_servers .into_iter() .flat_map(|language_server| { - if language_server.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { + if language_server.0.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { rest.clone() } else { - vec![LanguageServerName(language_server.clone())] + vec![language_server.clone()] } }) .collect::>() @@ -295,7 +297,7 @@ pub struct LanguageSettingsContent { /// /// Default: ["..."] #[serde(default)] - pub language_servers: Option>>, + pub language_servers: Option>, /// Controls whether inline completions are shown immediately (true) /// or manually by triggering `editor::ShowInlineCompletion` (false). 
/// @@ -1165,7 +1167,7 @@ mod tests { names .iter() .copied() - .map(|name| LanguageServerName(name.into())) + .map(|name| LanguageServerName(name.to_string().into())) .collect::>() } diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 53def5eb2a..bde5fe9b19 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -236,7 +236,7 @@ impl LogStore { )); this.add_language_server( LanguageServerKind::Global { - name: LanguageServerName(Arc::from("copilot")), + name: LanguageServerName::new_static("copilot"), }, server.server_id(), Some(server.clone()), diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 4ebb4569ef..8a04e0aae6 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -13,13 +13,13 @@ use util::{fs::remove_matching, maybe, ResultExt}; pub struct CLspAdapter; impl CLspAdapter { - const SERVER_NAME: &'static str = "clangd"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("clangd"); } #[async_trait(?Send)] impl super::LspAdapter for CLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -28,7 +28,8 @@ impl super::LspAdapter for CLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a103c4783c..a1a996c066 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -33,7 +33,7 @@ fn server_binary_arguments() -> Vec { pub struct GoLspAdapter; impl GoLspAdapter { - const SERVER_NAME: &'static str = "gopls"; + const SERVER_NAME: LanguageServerName = 
LanguageServerName::new_static("gopls"); } static GOPLS_VERSION_REGEX: LazyLock = @@ -46,7 +46,7 @@ static GO_ESCAPE_SUBTEST_NAME_REGEX: LazyLock = LazyLock::new(|| { #[async_trait(?Send)] impl super::LspAdapter for GoLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -71,7 +71,8 @@ impl super::LspAdapter for GoLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index ee127c00cc..0dce8fb661 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -30,7 +30,7 @@ pub struct PythonLspAdapter { } impl PythonLspAdapter { - const SERVER_NAME: &'static str = "pyright"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright"); pub fn new(node: Arc) -> Self { PythonLspAdapter { node } @@ -40,7 +40,7 @@ impl PythonLspAdapter { #[async_trait(?Send)] impl LspAdapter for PythonLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -49,7 +49,7 @@ impl LspAdapter for PythonLspAdapter { ) -> Result> { Ok(Box::new( self.node - .npm_package_latest_version(Self::SERVER_NAME) + .npm_package_latest_version(Self::SERVER_NAME.as_ref()) .await?, ) as Box<_>) } @@ -62,16 +62,23 @@ impl LspAdapter for PythonLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = Self::SERVER_NAME; let should_install_language_server = self .node - .should_install_npm_package(package_name, 
&server_path, &container_dir, &latest_version) + .should_install_npm_package( + Self::SERVER_NAME.as_ref(), + &server_path, + &container_dir, + &latest_version, + ) .await; if should_install_language_server { self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) + .npm_install_packages( + &container_dir, + &[(Self::SERVER_NAME.as_ref(), latest_version.as_str())], + ) .await?; } @@ -182,7 +189,7 @@ impl LspAdapter for PythonLspAdapter { cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { - language_server_settings(adapter.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() }) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index a32ffe50f5..eebd573a7e 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -25,13 +25,13 @@ use util::{fs::remove_matching, maybe, ResultExt}; pub struct RustLspAdapter; impl RustLspAdapter { - const SERVER_NAME: &'static str = "rust-analyzer"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer"); } #[async_trait(?Send)] impl LspAdapter for RustLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -41,7 +41,7 @@ impl LspAdapter for RustLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()?; @@ -60,7 +60,7 @@ impl LspAdapter for RustLspAdapter { path_lookup: None, .. 
}) => { - let path = delegate.which(Self::SERVER_NAME.as_ref()).await; + let path = delegate.which("rust-analyzer".as_ref()).await; let env = delegate.shell_env().await; if let Some(path) = path { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 9a053dbd87..e3e17a8fa7 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -32,7 +32,8 @@ pub struct TailwindLspAdapter { } impl TailwindLspAdapter { - const SERVER_NAME: &'static str = "tailwindcss-language-server"; + const SERVER_NAME: LanguageServerName = + LanguageServerName::new_static("tailwindcss-language-server"); pub fn new(node: Arc) -> Self { TailwindLspAdapter { node } @@ -42,7 +43,7 @@ impl TailwindLspAdapter { #[async_trait(?Send)] impl LspAdapter for TailwindLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -52,7 +53,7 @@ impl LspAdapter for TailwindLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -152,7 +153,7 @@ impl LspAdapter for TailwindLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index c65b74aa9b..b09216c970 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -71,7 +71,8 @@ pub struct TypeScriptLspAdapter { impl TypeScriptLspAdapter { const OLD_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.js"; const NEW_SERVER_PATH: &'static str = 
"node_modules/typescript-language-server/lib/cli.mjs"; - const SERVER_NAME: &'static str = "typescript-language-server"; + const SERVER_NAME: LanguageServerName = + LanguageServerName::new_static("typescript-language-server"); pub fn new(node: Arc) -> Self { TypeScriptLspAdapter { node } } @@ -97,7 +98,7 @@ struct TypeScriptVersions { #[async_trait(?Send)] impl LspAdapter for TypeScriptLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -239,7 +240,7 @@ impl LspAdapter for TypeScriptLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; if let Some(options) = override_options { @@ -304,7 +305,7 @@ impl EsLintLspAdapter { const GITHUB_ASSET_KIND: AssetKind = AssetKind::Zip; const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; - const SERVER_NAME: &'static str = "eslint"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("eslint"); const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; @@ -331,7 +332,7 @@ impl LspAdapter for EsLintLspAdapter { let workspace_root = delegate.worktree_root_path(); let eslint_user_settings = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; @@ -403,7 +404,7 @@ impl LspAdapter for EsLintLspAdapter { } fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 
9499b5c54f..5ec3121384 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -48,11 +48,11 @@ struct TypeScriptVersions { server_version: String, } -const SERVER_NAME: &str = "vtsls"; +const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("vtsls"); #[async_trait(?Send)] impl LspAdapter for VtslsLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(SERVER_NAME.into()) + SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -74,7 +74,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { @@ -267,7 +267,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 06360847ac..583961f4b1 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -30,7 +30,7 @@ pub struct YamlLspAdapter { } impl YamlLspAdapter { - const SERVER_NAME: &'static str = "yaml-language-server"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server"); pub fn new(node: Arc) -> Self { YamlLspAdapter { node } } @@ -39,7 +39,7 @@ impl YamlLspAdapter { #[async_trait(?Send)] impl LspAdapter for YamlLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -49,7 +49,7 @@ impl LspAdapter for YamlLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, 
Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -145,7 +145,7 @@ impl LspAdapter for YamlLspAdapter { let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); let project_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; if let Some(override_options) = project_options { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 92f37f87af..6a3788c879 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -103,7 +103,7 @@ pub struct LocalLspStore { supplementary_language_servers: HashMap)>, prettier_store: Model, - current_lsp_settings: HashMap, LspSettings>, + current_lsp_settings: HashMap, _subscription: gpui::Subscription, } @@ -138,7 +138,7 @@ impl RemoteLspStore {} pub struct SshLspStore { upstream_client: AnyProtoClient, - current_lsp_settings: HashMap, LspSettings>, + current_lsp_settings: HashMap, } #[allow(clippy::large_enum_variant)] @@ -316,8 +316,8 @@ impl LspStore { pub fn swap_current_lsp_settings( &mut self, - new_settings: HashMap, LspSettings>, - ) -> Option, LspSettings>> { + new_settings: HashMap, + ) -> Option> { match &mut self.mode { LspStoreMode::Ssh(SshLspStore { current_lsp_settings, @@ -933,7 +933,7 @@ impl LspStore { if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); } else if let Some(worktree) = worktree { - let server_name = &adapter.name.0; + let server_name = &adapter.name; match ( current_lsp_settings.get(server_name), new_lsp_settings.get(server_name), @@ -4765,7 +4765,7 @@ impl LspStore { let project_id = self.project_id; let worktree_id = worktree.read(cx).id().to_proto(); let upstream_client = 
ssh.upstream_client.clone(); - let name = adapter.name().to_string(); + let name = adapter.name(); let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); @@ -4783,7 +4783,7 @@ impl LspStore { } }; - let name = adapter.name().to_string(); + let name = adapter.name(); let code_action_kinds = adapter .adapter .code_action_kinds() @@ -4809,7 +4809,7 @@ impl LspStore { .request(proto::CreateLanguageServer { project_id, worktree_id, - name, + name: name.0.to_string(), binary: Some(language_server_command), initialization_options, code_action_kinds, @@ -4892,7 +4892,7 @@ impl LspStore { ); // We need some on the SSH client, and some on SSH host - let lsp = project_settings.lsp.get(&adapter.name.0); + let lsp = project_settings.lsp.get(&adapter.name); let override_options = lsp.and_then(|s| s.initialization_options.clone()); let server_id = pending_server.server_id; @@ -5078,7 +5078,7 @@ impl LspStore { async fn shutdown_language_server( server_state: Option, - name: Arc, + name: LanguageServerName, cx: AsyncAppContext, ) { let server = match server_state { @@ -5123,7 +5123,7 @@ impl LspStore { let key = (worktree_id, adapter_name); if self.mode.is_local() { if let Some(server_id) = self.language_server_ids.remove(&key) { - let name = key.1 .0; + let name = key.1; log::info!("stopping language server {name}"); // Remove other entries for this language server as well @@ -7168,7 +7168,7 @@ impl LspAdapter for SshLspAdapter { } pub fn language_server_settings<'a, 'b: 'a>( delegate: &'a dyn LspAdapterDelegate, - language: &str, + language: &LanguageServerName, cx: &'b AppContext, ) -> Option<&'a LspSettings> { ProjectSettings::get( diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 29101917fb..75d70c1d3f 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -338,7 +338,7 @@ impl PrettierStore { 
prettier_store .update(cx, |prettier_store, cx| { let name = if is_default { - LanguageServerName(Arc::from("prettier (default)")) + LanguageServerName("prettier (default)".to_string().into()) } else { let worktree_path = worktree_id .and_then(|id| { @@ -366,7 +366,7 @@ impl PrettierStore { } None => format!("prettier ({})", prettier_dir.display()), }; - LanguageServerName(Arc::from(name)) + LanguageServerName(name.into()) }; cx.emit(PrettierStoreEvent::LanguageServerAdded { new_server_id, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 9a7c80703c..904efe0a6b 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,6 +1,7 @@ use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; +use language::LanguageServerName; use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; @@ -27,7 +28,7 @@ pub struct ProjectSettings { /// name to the lsp value. 
/// Default: null #[serde(default)] - pub lsp: HashMap, LspSettings>, + pub lsp: HashMap, /// Configuration for Git-related features #[serde(default)] diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index b7fc56d3c6..b5ab1c4007 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -205,7 +205,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo AllLanguageSettings::get_global(cx) .language(Some(&"Rust".into())) .language_servers, - ["custom-rust-analyzer".into()] + ["custom-rust-analyzer".to_string()] ) }); @@ -264,7 +264,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo ) .language(Some(&"Rust".into())) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); @@ -274,7 +274,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo all_language_settings(file, cx) .language(Some(&"Rust".into())) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); } @@ -357,7 +357,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext all_language_settings(file, cx) .language(Some(&"Rust".into())) .language_servers, - ["rust-analyzer".into()] + ["rust-analyzer".to_string()] ) }); From 3ca18af40b8a7cb83d8303a8131e90ca997f09ca Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sat, 21 Sep 2024 21:01:29 +0900 Subject: [PATCH 265/270] docs: Fix typo in `configuring-zed.md` (#18178) Fix typo in `configuring-zed.md` Release Notes: - N/A --- docs/src/configuring-zed.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index de7433bf5d..7cc6a4a8cb 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -313,10 +313,10 @@ List of `string` values 
"cursor_shape": "block" ``` -3. An underline that runs along the following character: +3. An underscore that runs along the following character: ```json -"cursor_shape": "underline" +"cursor_shape": "underscore" ``` 4. An box drawn around the following character: From 9612b60ccb5c271bbe641d1acdfc5d721c0bf7ad Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Sat, 21 Sep 2024 17:31:50 +0200 Subject: [PATCH 266/270] Refactor: Move types to the correct place and move specific request code to the dapstore (#39) * Make dap store global * Partially move initialize & capability code to dap store * Reuse shutdown for shutdown clients * Rename merge_capabilities * Correctly fallback to current thread id for checking to skip event * Move mthod * Move terminate threads to dap store * Move disconnect and restart to dap store * Update dap-types to the correct version This includes the capabilities::merge method * Change dap store to WeakModel in debug panels * Make clippy happy * Move pause thread to dap store * WIP refactor out thread state and capbilities * Move update thread status to debug panel method * Remove double notify * Move continue thread to dap store * Change WeakModel dapStore to Model dapStore * Move step over to dap store * Move step in to dap store * Move step out to dap store * Remove step back we don't support this yet * Move threadState type to debugPanel * Change to background task * Fix panic when debugSession stopped * Remove capabilities when debug client stops * Add missing notify * Remove drain that causes panic * Remove Arc from debug_panel_item instead use the id * Reset stack_frame_list to prevent crash * Refactor ThreadState to model to prevent recursion dependency in variable_list * WIP * WIP move set_variable_value and get_variables to dap store * Remove unused method * Fix correctly insert updated variables Before this changes you would see the variables * scopes. Because it did not insert the the variables per scope. 
* Correctly update current stack frame on variable list * Only allow building variable list entries for current stack frame * Make toggle variables & scopes work again * Fix clippy * Pass around id instead of entire client * Move set breakpoints to dap store * Show thread status again in tooltip text * Move stack frames and scope requests to dap store * Move terminate request to dap store * Remove gap that is not doing anything * Add retain back to remove also threads that belong to the client * Add debug kind back to tab content --- Cargo.lock | 2 +- crates/dap/src/adapters.rs | 4 +- crates/dap/src/client.rs | 321 +--------- crates/debugger_ui/src/debugger_panel.rs | 535 +++++++++------- crates/debugger_ui/src/debugger_panel_item.rs | 280 ++++---- crates/debugger_ui/src/lib.rs | 2 + crates/debugger_ui/src/variable_list.rs | 398 ++++++------ crates/project/src/dap_store.rs | 596 ++++++++++++++++-- crates/project/src/project.rs | 46 +- crates/remote_server/src/headless_project.rs | 2 +- 10 files changed, 1216 insertions(+), 970 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9d0cc6fcd1..b4c694974b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3346,7 +3346,7 @@ dependencies = [ [[package]] name = "dap-types" version = "0.0.1" -source = "git+https://github.com/zed-industries/dap-types#d4e23edcf7c8ded91a3bdfd32a216bcab68b710c" +source = "git+https://github.com/zed-industries/dap-types#b7404edcd158d7d3ed8a7e81cf6cb3145ff3eb19" dependencies = [ "serde", "serde_json", diff --git a/crates/dap/src/adapters.rs b/crates/dap/src/adapters.rs index f82b0a56e4..cfb4731916 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -194,9 +194,9 @@ impl DebugAdapter for CustomDebugAdapter { async fn connect(&self, cx: &mut AsyncAppContext) -> Result { match &self.connection { - DebugConnectionType::STDIO => create_stdio_client(&self.start_command, &vec![].into()), + DebugConnectionType::STDIO => create_stdio_client(&self.start_command, &vec![]), 
DebugConnectionType::TCP(tcp_host) => { - create_tcp_client(tcp_host.clone(), &self.start_command, &vec![].into(), cx).await + create_tcp_client(tcp_host.clone(), &self.start_command, &vec![], cx).await } } } diff --git a/crates/dap/src/client.rs b/crates/dap/src/client.rs index 067c24a0df..3f2875a6c9 100644 --- a/crates/dap/src/client.rs +++ b/crates/dap/src/client.rs @@ -4,29 +4,18 @@ use anyhow::{anyhow, Context, Result}; use crate::adapters::{build_adapter, DebugAdapter}; use dap_types::{ messages::{Message, Response}, - requests::{ - Attach, ConfigurationDone, Continue, Disconnect, Initialize, Launch, Next, Pause, Request, - Restart, SetBreakpoints, StepBack, StepIn, StepOut, Terminate, TerminateThreads, Variables, - }, - AttachRequestArguments, ConfigurationDoneArguments, ContinueArguments, ContinueResponse, - DisconnectArguments, InitializeRequestArgumentsPathFormat, LaunchRequestArguments, - NextArguments, PauseArguments, RestartArguments, Scope, SetBreakpointsArguments, - SetBreakpointsResponse, Source, SourceBreakpoint, StackFrame, StepBackArguments, - StepInArguments, StepOutArguments, SteppingGranularity, TerminateArguments, - TerminateThreadsArguments, Variable, VariablesArguments, + requests::Request, }; use futures::{AsyncBufRead, AsyncWrite}; use gpui::{AppContext, AsyncAppContext}; -use parking_lot::{Mutex, MutexGuard}; +use parking_lot::Mutex; use serde_json::Value; use smol::{ channel::{bounded, Receiver, Sender}, process::Child, }; use std::{ - collections::{BTreeMap, HashMap, HashSet}, hash::Hash, - path::Path, sync::{ atomic::{AtomicU64, Ordering}, Arc, @@ -47,27 +36,6 @@ pub enum ThreadStatus { #[repr(transparent)] pub struct DebugAdapterClientId(pub usize); -#[derive(Debug, Clone)] -pub struct VariableContainer { - pub container_reference: u64, - pub variable: Variable, - pub depth: usize, -} - -#[derive(Debug, Default, Clone)] -pub struct ThreadState { - pub status: ThreadStatus, - pub stack_frames: Vec, - /// HashMap> - pub scopes: 
HashMap>, - /// BTreeMap> - pub variables: BTreeMap>, - pub fetched_variable_ids: HashSet, - // we update this value only once we stopped, - // we will use this to indicated if we should show a warning when debugger thread was exited - pub stopped: bool, -} - pub struct DebugAdapterClient { id: DebugAdapterClientId, adapter: Arc>, @@ -75,9 +43,6 @@ pub struct DebugAdapterClient { _process: Arc>>, sequence_count: AtomicU64, config: DebugAdapterConfig, - /// thread_id -> thread_state - thread_states: Arc>>, - capabilities: Arc>>, } pub struct TransportParams { @@ -129,8 +94,6 @@ impl DebugAdapterClient { config, adapter, transport, - capabilities: Default::default(), - thread_states: Default::default(), sequence_count: AtomicU64::new(1), _process: Arc::new(Mutex::new(transport_params.process)), })) @@ -226,248 +189,24 @@ impl DebugAdapterClient { self.config.clone() } - pub fn request_args(&self) -> Option { - // TODO Debugger: Get request args from adapter - Some(self.adapter.request_args()) + pub fn adapter(&self) -> Arc> { + self.adapter.clone() + } + + pub fn request_args(&self) -> Value { + self.adapter.request_args() } pub fn request_type(&self) -> DebugRequestType { self.config.request.clone() } - pub fn capabilities(&self) -> dap_types::Capabilities { - self.capabilities.lock().clone().unwrap_or_default() - } - /// Get the next sequence id to be used in a request pub fn next_sequence_id(&self) -> u64 { self.sequence_count.fetch_add(1, Ordering::Relaxed) } - pub fn update_thread_state_status(&self, thread_id: u64, status: ThreadStatus) { - if let Some(thread_state) = self.thread_states().get_mut(&thread_id) { - thread_state.status = status; - }; - } - - pub fn thread_states(&self) -> MutexGuard> { - self.thread_states.lock() - } - - pub fn thread_state_by_id(&self, thread_id: u64) -> ThreadState { - self.thread_states.lock().get(&thread_id).cloned().unwrap() - } - - pub async fn initialize(&self) -> Result { - let args = dap_types::InitializeRequestArguments { 
- client_id: Some("zed".to_owned()), - client_name: Some("Zed".to_owned()), - adapter_id: self.adapter.id(), - locale: Some("en-us".to_owned()), - path_format: Some(InitializeRequestArgumentsPathFormat::Path), - supports_variable_type: Some(true), - supports_variable_paging: Some(false), - supports_run_in_terminal_request: Some(true), - supports_memory_references: Some(true), - supports_progress_reporting: Some(true), - supports_invalidated_event: Some(true), - lines_start_at1: Some(true), - columns_start_at1: Some(true), - supports_memory_event: Some(true), - supports_args_can_be_interpreted_by_shell: Some(true), - supports_start_debugging_request: Some(true), - }; - - let capabilities = self.request::(args).await?; - - *self.capabilities.lock() = Some(capabilities.clone()); - - Ok(capabilities) - } - - pub async fn launch(&self, args: Option) -> Result<()> { - self.request::(LaunchRequestArguments { - raw: args.unwrap_or(Value::Null), - }) - .await - } - - pub async fn attach(&self, args: Option) -> Result<()> { - self.request::(AttachRequestArguments { - raw: args.unwrap_or(Value::Null), - }) - .await - } - - pub async fn resume(&self, thread_id: u64) -> Result { - let supports_single_thread_execution_requests = self - .capabilities() - .supports_single_thread_execution_requests - .unwrap_or_default(); - - self.request::(ContinueArguments { - thread_id, - single_thread: supports_single_thread_execution_requests.then(|| true), - }) - .await - } - - pub async fn step_over(&self, thread_id: u64, granularity: SteppingGranularity) -> Result<()> { - let capabilities = self.capabilities(); - - let supports_single_thread_execution_requests = capabilities - .supports_single_thread_execution_requests - .unwrap_or_default(); - let supports_stepping_granularity = capabilities - .supports_stepping_granularity - .unwrap_or_default(); - - self.request::(NextArguments { - thread_id, - granularity: supports_stepping_granularity.then(|| granularity), - single_thread: 
supports_single_thread_execution_requests.then(|| true), - }) - .await - } - - pub async fn step_in(&self, thread_id: u64, granularity: SteppingGranularity) -> Result<()> { - let capabilities = self.capabilities(); - - let supports_single_thread_execution_requests = capabilities - .supports_single_thread_execution_requests - .unwrap_or_default(); - let supports_stepping_granularity = capabilities - .supports_stepping_granularity - .unwrap_or_default(); - - self.request::(StepInArguments { - thread_id, - target_id: None, - granularity: supports_stepping_granularity.then(|| granularity), - single_thread: supports_single_thread_execution_requests.then(|| true), - }) - .await - } - - pub async fn step_out(&self, thread_id: u64, granularity: SteppingGranularity) -> Result<()> { - let capabilities = self.capabilities(); - - let supports_single_thread_execution_requests = capabilities - .supports_single_thread_execution_requests - .unwrap_or_default(); - let supports_stepping_granularity = capabilities - .supports_stepping_granularity - .unwrap_or_default(); - - self.request::(StepOutArguments { - thread_id, - granularity: supports_stepping_granularity.then(|| granularity), - single_thread: supports_single_thread_execution_requests.then(|| true), - }) - .await - } - - pub async fn step_back(&self, thread_id: u64, granularity: SteppingGranularity) -> Result<()> { - let capabilities = self.capabilities(); - - let supports_single_thread_execution_requests = capabilities - .supports_single_thread_execution_requests - .unwrap_or_default(); - let supports_stepping_granularity = capabilities - .supports_stepping_granularity - .unwrap_or_default(); - - self.request::(StepBackArguments { - thread_id, - granularity: supports_stepping_granularity.then(|| granularity), - single_thread: supports_single_thread_execution_requests.then(|| true), - }) - .await - } - - pub async fn restart(&self) -> Result<()> { - self.request::(RestartArguments { - raw: self.adapter.request_args(), - }) - 
.await - } - - pub async fn pause(&self, thread_id: u64) -> Result<()> { - self.request::(PauseArguments { thread_id }).await - } - - pub async fn disconnect( - &self, - restart: Option, - terminate: Option, - suspend: Option, - ) -> Result<()> { - let supports_terminate_debuggee = self - .capabilities() - .support_terminate_debuggee - .unwrap_or_default(); - - let supports_suspend_debuggee = self - .capabilities() - .support_terminate_debuggee - .unwrap_or_default(); - - self.request::(DisconnectArguments { - restart, - terminate_debuggee: if supports_terminate_debuggee { - terminate - } else { - None - }, - suspend_debuggee: if supports_suspend_debuggee { - suspend - } else { - None - }, - }) - .await - } - - pub async fn set_breakpoints( - &self, - absolute_file_path: Arc, - breakpoints: Vec, - ) -> Result { - self.request::(SetBreakpointsArguments { - source: Source { - path: Some(String::from(absolute_file_path.to_string_lossy())), - name: None, - source_reference: None, - presentation_hint: None, - origin: None, - sources: None, - adapter_data: None, - checksums: None, - }, - breakpoints: Some(breakpoints), - source_modified: None, - lines: None, - }) - .await - } - - pub async fn configuration_done(&self) -> Result<()> { - let support_configuration_done_request = self - .capabilities() - .supports_configuration_done_request - .unwrap_or_default(); - - if support_configuration_done_request { - self.request::(ConfigurationDoneArguments) - .await - } else { - Ok(()) - } - } - pub async fn shutdown(&self) -> Result<()> { - let _ = self.terminate().await; - self.transport.server_tx.close(); self.transport.server_rx.close(); @@ -492,48 +231,4 @@ impl DebugAdapterClient { } .await } - - pub async fn terminate(&self) -> Result<()> { - let support_terminate_request = self - .capabilities() - .supports_terminate_request - .unwrap_or_default(); - - if support_terminate_request { - self.request::(TerminateArguments { - restart: Some(false), - }) - .await - } else { - 
self.disconnect(None, Some(true), None).await - } - } - - pub async fn terminate_threads(&self, thread_ids: Option>) -> Result<()> { - let support_terminate_threads = self - .capabilities() - .supports_terminate_threads_request - .unwrap_or_default(); - - if support_terminate_threads { - self.request::(TerminateThreadsArguments { thread_ids }) - .await - } else { - self.terminate().await - } - } - - pub async fn variables(&self, variables_reference: u64) -> Result> { - anyhow::Ok( - self.request::(VariablesArguments { - variables_reference, - filter: None, - start: None, - count: None, - format: None, - }) - .await? - .variables, - ) - } } diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 64dc2a48dd..62b254825e 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -1,25 +1,26 @@ use crate::debugger_panel_item::DebugPanelItem; use anyhow::Result; use dap::client::DebugAdapterClient; -use dap::client::{DebugAdapterClientId, ThreadState, ThreadStatus, VariableContainer}; +use dap::client::{DebugAdapterClientId, ThreadStatus}; use dap::debugger_settings::DebuggerSettings; use dap::messages::{Events, Message}; -use dap::requests::{Request, Scopes, StackTrace, StartDebugging}; +use dap::requests::{Request, StartDebugging}; use dap::{ - Capabilities, ContinuedEvent, ExitedEvent, OutputEvent, ScopesArguments, StackFrame, - StackTraceArguments, StoppedEvent, TerminatedEvent, ThreadEvent, ThreadEventReason, + Capabilities, ContinuedEvent, ExitedEvent, OutputEvent, Scope, StackFrame, StoppedEvent, + TerminatedEvent, ThreadEvent, ThreadEventReason, Variable, }; use editor::Editor; use futures::future::try_join_all; use gpui::{ actions, Action, AppContext, AsyncWindowContext, EventEmitter, FocusHandle, FocusableView, - FontWeight, Subscription, Task, View, ViewContext, WeakView, + FontWeight, Model, Subscription, Task, View, ViewContext, WeakView, }; +use 
project::dap_store::DapStore; use settings::Settings; -use std::collections::HashSet; +use std::collections::{BTreeMap, HashMap, HashSet}; use std::path::Path; use std::sync::Arc; -use task::DebugRequestType; +use std::u64; use ui::prelude::*; use util::ResultExt; use workspace::{ @@ -39,13 +40,36 @@ pub enum DebugPanelEvent { actions!(debug_panel, [ToggleFocus]); +#[derive(Debug, Clone)] +pub struct VariableContainer { + pub container_reference: u64, + pub variable: Variable, + pub depth: usize, +} + +#[derive(Debug, Default, Clone)] +pub struct ThreadState { + pub status: ThreadStatus, + pub stack_frames: Vec, + /// HashMap> + pub scopes: HashMap>, + /// BTreeMap> + pub variables: BTreeMap>, + pub fetched_variable_ids: HashSet, + // we update this value only once we stopped, + // we will use this to indicated if we should show a warning when debugger thread was exited + pub stopped: bool, +} + pub struct DebugPanel { size: Pixels, pane: View, focus_handle: FocusHandle, + dap_store: Model, workspace: WeakView, - _subscriptions: Vec, show_did_not_stop_warning: bool, + _subscriptions: Vec, + thread_states: BTreeMap<(DebugAdapterClientId, u64), Model>, } impl DebugPanel { @@ -77,13 +101,13 @@ impl DebugPanel { cx.subscribe(&project, { move |this: &mut Self, _, event, cx| match event { project::Event::DebugClientEvent { message, client_id } => { - let Some(client) = this.debug_client_by_id(*client_id, cx) else { + let Some(client) = this.debug_client_by_id(client_id, cx) else { return cx.emit(DebugPanelEvent::ClientStopped(*client_id)); }; match message { Message::Event(event) => { - this.handle_debug_client_events(client, event, cx); + this.handle_debug_client_events(client_id, event, cx); } Message::Request(request) => { if StartDebugging::COMMAND == request.command { @@ -93,29 +117,13 @@ impl DebugPanel { _ => unreachable!(), } } - project::Event::DebugClientStarted(client_id) => { - let Some(client) = this.debug_client_by_id(*client_id, cx) else { - return 
cx.emit(DebugPanelEvent::ClientStopped(*client_id)); - }; - - cx.background_executor() - .spawn(async move { - client.initialize().await?; - - // send correct request based on adapter config - match client.config().request { - DebugRequestType::Launch => { - client.launch(client.request_args()).await - } - DebugRequestType::Attach => { - client.attach(client.request_args()).await - } - } - }) - .detach_and_log_err(cx); - } project::Event::DebugClientStopped(client_id) => { cx.emit(DebugPanelEvent::ClientStopped(*client_id)); + + this.thread_states + .retain(|&(client_id_, _), _| client_id_ != *client_id); + + cx.notify(); } _ => {} } @@ -126,8 +134,10 @@ impl DebugPanel { pane, size: px(300.), _subscriptions, + dap_store: DapStore::global(cx), focus_handle: cx.focus_handle(), show_did_not_stop_warning: false, + thread_states: Default::default(), workspace: workspace.weak_handle(), } }) @@ -142,9 +152,41 @@ impl DebugPanel { }) } + pub fn update_thread_state_status( + &mut self, + client_id: &DebugAdapterClientId, + thread_id: Option, + status: ThreadStatus, + all_threads_continued: Option, + cx: &mut ViewContext, + ) { + if all_threads_continued.unwrap_or(false) { + for (_, thread_state) in self + .thread_states + .range_mut((*client_id, u64::MIN)..(*client_id, u64::MAX)) + { + thread_state.update(cx, |thread_state, cx| { + thread_state.status = status; + + cx.notify(); + }); + } + } else if let Some(thread_state) = + thread_id.and_then(|thread_id| self.thread_states.get_mut(&(*client_id, thread_id))) + { + thread_state.update(cx, |thread_state, cx| { + thread_state.status = ThreadStatus::Running; + + cx.notify(); + }); + } + + cx.notify(); + } + fn debug_client_by_id( &self, - client_id: DebugAdapterClientId, + client_id: &DebugAdapterClientId, cx: &mut ViewContext, ) -> Option> { self.workspace @@ -169,21 +211,17 @@ impl DebugPanel { pane::Event::RemovedItem { item } => { let thread_panel = item.downcast::().unwrap(); - thread_panel.update(cx, |pane, cx| { - let 
thread_id = pane.thread_id(); - let client = pane.client(); - let thread_status = client.thread_state_by_id(thread_id).status; + let thread_id = thread_panel.read(cx).thread_id(); + let client_id = thread_panel.read(cx).client_id(); - // only terminate thread if the thread has not yet ended - if thread_status != ThreadStatus::Ended && thread_status != ThreadStatus::Exited - { - let client = client.clone(); - cx.background_executor() - .spawn(async move { - client.terminate_threads(Some(vec![thread_id; 1])).await - }) - .detach_and_log_err(cx); - } + self.thread_states.remove(&(client_id, thread_id)); + + cx.notify(); + + self.dap_store.update(cx, |store, cx| { + store + .terminate_threads(&client_id, Some(vec![thread_id; 1]), cx) + .detach() }); } pane::Event::Remove { .. } => cx.emit(PanelEvent::Close), @@ -216,18 +254,18 @@ impl DebugPanel { fn handle_debug_client_events( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, event: &Events, cx: &mut ViewContext, ) { match event { - Events::Initialized(event) => self.handle_initialized_event(client, event, cx), - Events::Stopped(event) => self.handle_stopped_event(client, event, cx), - Events::Continued(event) => self.handle_continued_event(client, event, cx), - Events::Exited(event) => self.handle_exited_event(client, event, cx), - Events::Terminated(event) => self.handle_terminated_event(client, event, cx), - Events::Thread(event) => self.handle_thread_event(client, event, cx), - Events::Output(event) => self.handle_output_event(client, event, cx), + Events::Initialized(event) => self.handle_initialized_event(&client_id, event, cx), + Events::Stopped(event) => self.handle_stopped_event(&client_id, event, cx), + Events::Continued(event) => self.handle_continued_event(&client_id, event, cx), + Events::Exited(event) => self.handle_exited_event(&client_id, event, cx), + Events::Terminated(event) => self.handle_terminated_event(&client_id, event, cx), + Events::Thread(event) => 
self.handle_thread_event(&client_id, event, cx), + Events::Output(event) => self.handle_output_event(&client_id, event, cx), Events::Breakpoint(_) => {} Events::Module(_) => {} Events::LoadedSource(_) => {} @@ -304,111 +342,116 @@ impl DebugPanel { }) } - async fn remove_highlights_for_thread( - workspace: WeakView, - client: Arc, - thread_id: u64, - cx: AsyncWindowContext, - ) -> Result<()> { - let mut tasks = Vec::new(); - let mut paths: HashSet = HashSet::new(); - let thread_state = client.thread_state_by_id(thread_id); + // async fn remove_highlights_for_thread( + // workspace: WeakView, + // client: Arc, + // thread_id: u64, + // cx: AsyncWindowContext, + // ) -> Result<()> { + // let mut tasks = Vec::new(); + // let mut paths: HashSet = HashSet::new(); + // let thread_state = client.thread_state_by_id(thread_id); - for stack_frame in thread_state.stack_frames.into_iter() { - let Some(path) = stack_frame.source.clone().and_then(|s| s.path.clone()) else { - continue; - }; + // for stack_frame in thread_state.stack_frames.into_iter() { + // let Some(path) = stack_frame.source.clone().and_then(|s| s.path.clone()) else { + // continue; + // }; - if paths.contains(&path) { - continue; - } + // if paths.contains(&path) { + // continue; + // } - paths.insert(path.clone()); - tasks.push(Self::remove_editor_highlight( - workspace.clone(), - path, - cx.clone(), - )); - } + // paths.insert(path.clone()); + // tasks.push(Self::remove_editor_highlight( + // workspace.clone(), + // path, + // cx.clone(), + // )); + // } - if !tasks.is_empty() { - try_join_all(tasks).await?; - } + // if !tasks.is_empty() { + // try_join_all(tasks).await?; + // } - anyhow::Ok(()) - } + // anyhow::Ok(()) + // } - async fn remove_editor_highlight( - workspace: WeakView, - path: String, - mut cx: AsyncWindowContext, - ) -> Result<()> { - let task = workspace.update(&mut cx, |workspace, cx| { - let project_path = workspace.project().read_with(cx, |project, cx| { - 
project.project_path_for_absolute_path(&Path::new(&path), cx) - }); + // async fn remove_editor_highlight( + // workspace: WeakView, + // path: String, + // mut cx: AsyncWindowContext, + // ) -> Result<()> { + // let task = workspace.update(&mut cx, |workspace, cx| { + // let project_path = workspace.project().read_with(cx, |project, cx| { + // project.project_path_for_absolute_path(&Path::new(&path), cx) + // }); - if let Some(project_path) = project_path { - workspace.open_path(project_path, None, false, cx) - } else { - Task::ready(Err(anyhow::anyhow!( - "No project path found for path: {}", - path - ))) - } - })?; + // if let Some(project_path) = project_path { + // workspace.open_path(project_path, None, false, cx) + // } else { + // Task::ready(Err(anyhow::anyhow!( + // "No project path found for path: {}", + // path + // ))) + // } + // })?; - let editor = task.await?.downcast::().unwrap(); + // let editor = task.await?.downcast::().unwrap(); - editor.update(&mut cx, |editor, _| { - editor.clear_row_highlights::(); - }) - } + // editor.update(&mut cx, |editor, _| { + // editor.clear_row_highlights::(); + // }) + // } fn handle_initialized_event( &mut self, - client: Arc, - _: &Option, + client_id: &DebugAdapterClientId, + capabilities: &Option, cx: &mut ViewContext, ) { - cx.spawn(|this, mut cx| async move { - let task = this.update(&mut cx, |this, cx| { - this.workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |project, cx| { - project.send_breakpoints(client.clone(), cx) - }) - }) - })??; + if let Some(capabilities) = capabilities { + self.dap_store.update(cx, |store, cx| { + store.merge_capabilities_for_client(&client_id, capabilities, cx); + }); + } - task.await?; + let send_breakpoints_task = self.workspace.update(cx, |workspace, cx| { + workspace + .project() + .update(cx, |project, cx| project.send_breakpoints(&client_id, cx)) + }); - client.configuration_done().await - }) - .detach_and_log_err(cx); + let configuration_done_task = 
self.dap_store.update(cx, |store, cx| { + store.send_configuration_done(&client_id, cx) + }); + + cx.background_executor() + .spawn(async move { + send_breakpoints_task?.await; + + configuration_done_task.await + }) + .detach_and_log_err(cx); } fn handle_continued_event( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, event: &ContinuedEvent, cx: &mut ViewContext, ) { - let all_threads = event.all_threads_continued.unwrap_or(false); - - if all_threads { - for thread in client.thread_states().values_mut() { - thread.status = ThreadStatus::Running; - } - } else { - client.update_thread_state_status(event.thread_id, ThreadStatus::Running); - } - - cx.notify(); + self.update_thread_state_status( + client_id, + Some(event.thread_id), + ThreadStatus::Running, + event.all_threads_continued, + cx, + ); } fn handle_stopped_event( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, event: &StoppedEvent, cx: &mut ViewContext, ) { @@ -416,87 +459,106 @@ impl DebugPanel { return; }; - let client_id = client.id(); + let Some(client_kind) = self + .dap_store + .read(cx) + .client_by_id(client_id) + .map(|c| c.config().kind) + else { + return; // this can never happen + }; + + let client_id = *client_id; + cx.spawn({ let event = event.clone(); |this, mut cx| async move { - let stack_trace_response = client - .request::(StackTraceArguments { - thread_id, - start_frame: None, - levels: None, - format: None, + let stack_frames_task = this.update(&mut cx, |this, cx| { + this.dap_store.update(cx, |store, cx| { + store.stack_frames(&client_id, thread_id, cx) }) - .await?; + })?; - let mut thread_state = ThreadState::default(); + let stack_frames = stack_frames_task.await?; + + let current_stack_frame = stack_frames.first().unwrap().clone(); - let current_stack_frame = - stack_trace_response.stack_frames.first().unwrap().clone(); let mut scope_tasks = Vec::new(); - for stack_frame in stack_trace_response.stack_frames.clone().into_iter() { - let client = 
client.clone(); + for stack_frame in stack_frames.clone().into_iter() { + let stack_frame_scopes_task = this.update(&mut cx, |this, cx| { + this.dap_store + .update(cx, |store, cx| store.scopes(&client_id, stack_frame.id, cx)) + }); + scope_tasks.push(async move { - anyhow::Ok(( - stack_frame.id, - client - .request::(ScopesArguments { - frame_id: stack_frame.id, - }) - .await?, - )) + anyhow::Ok((stack_frame.id, stack_frame_scopes_task?.await?)) }); } let mut stack_frame_tasks = Vec::new(); - for (stack_frame_id, response) in try_join_all(scope_tasks).await? { - let client = client.clone(); + for (stack_frame_id, scopes) in try_join_all(scope_tasks).await? { + let variable_tasks = this.update(&mut cx, |this, cx| { + this.dap_store.update(cx, |store, cx| { + let mut tasks = Vec::new(); + + for scope in scopes { + let variables_task = + store.variables(&client_id, scope.variables_reference, cx); + tasks.push( + async move { anyhow::Ok((scope, variables_task.await?)) }, + ); + } + + tasks + }) + })?; + stack_frame_tasks.push(async move { - let mut variable_tasks = Vec::new(); - - for scope in response.scopes { - let scope_reference = scope.variables_reference; - - let client = client.clone(); - variable_tasks.push(async move { - anyhow::Ok((scope, client.variables(scope_reference).await?)) - }); - } - anyhow::Ok((stack_frame_id, try_join_all(variable_tasks).await?)) }); } + let thread_state = this.update(&mut cx, |this, cx| { + this.thread_states + .entry((client_id, thread_id)) + .or_insert(cx.new_model(|_| ThreadState::default())) + .clone() + })?; + for (stack_frame_id, scopes) in try_join_all(stack_frame_tasks).await? 
{ - thread_state - .scopes - .insert(stack_frame_id, scopes.iter().map(|s| s.0.clone()).collect()); - - for (scope, variables) in scopes { + thread_state.update(&mut cx, |thread_state, _| { thread_state - .fetched_variable_ids - .insert(scope.variables_reference); + .scopes + .insert(stack_frame_id, scopes.iter().map(|s| s.0.clone()).collect()); - thread_state.variables.insert( - scope.variables_reference, - variables - .into_iter() - .map(|v| VariableContainer { - container_reference: scope.variables_reference, - variable: v, - depth: 1, - }) - .collect::>(), - ); - } + for (scope, variables) in scopes { + thread_state + .fetched_variable_ids + .insert(scope.variables_reference); + + thread_state.variables.insert( + scope.variables_reference, + variables + .into_iter() + .map(|v| VariableContainer { + container_reference: scope.variables_reference, + variable: v, + depth: 1, + }) + .collect::>(), + ); + } + })?; } this.update(&mut cx, |this, cx| { - thread_state.stack_frames = stack_trace_response.stack_frames; - thread_state.status = ThreadStatus::Stopped; - thread_state.stopped = true; + thread_state.update(cx, |thread_state, cx| { + thread_state.stack_frames = stack_frames; + thread_state.status = ThreadStatus::Stopped; + thread_state.stopped = true; - client.thread_states().insert(thread_id, thread_state); + cx.notify(); + }); let existing_item = this .pane @@ -506,7 +568,7 @@ impl DebugPanel { .any(|item| { let item = item.read(cx); - item.client().id() == client_id && item.thread_id() == thread_id + item.client_id() == client_id && item.thread_id() == thread_id }); if !existing_item { @@ -516,7 +578,10 @@ impl DebugPanel { DebugPanelItem::new( debug_panel, this.workspace.clone(), - client.clone(), + this.dap_store.clone(), + thread_state.clone(), + &client_id, + &client_kind, thread_id, current_stack_frame.clone().id, cx, @@ -534,7 +599,7 @@ impl DebugPanel { if let Some(item) = this.pane.read(cx).active_item() { if let Some(pane) = item.downcast::() { let 
pane = pane.read(cx); - if pane.thread_id() == thread_id && pane.client().id() == client_id { + if pane.thread_id() == thread_id && pane.client_id() == client_id { let workspace = this.workspace.clone(); return cx.spawn(|_, cx| async move { Self::go_to_stack_frame( @@ -559,99 +624,87 @@ impl DebugPanel { fn handle_thread_event( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, event: &ThreadEvent, cx: &mut ViewContext, ) { let thread_id = event.thread_id; - if let Some(thread_state) = client.thread_states().get(&thread_id) { - if !thread_state.stopped && event.reason == ThreadEventReason::Exited { + if let Some(thread_state) = self.thread_states.get(&(*client_id, thread_id)) { + if !thread_state.read(cx).stopped && event.reason == ThreadEventReason::Exited { self.show_did_not_stop_warning = true; cx.notify(); }; } if event.reason == ThreadEventReason::Started { - client - .thread_states() - .insert(thread_id, ThreadState::default()); + self.thread_states.insert( + (*client_id, thread_id), + cx.new_model(|_| ThreadState::default()), + ); } else { - client.update_thread_state_status(thread_id, ThreadStatus::Ended); + self.update_thread_state_status( + client_id, + Some(thread_id), + ThreadStatus::Ended, + None, + cx, + ); - cx.notify(); + // TODO debugger: we want to figure out for witch clients/threads we should remove the highlights + // cx.spawn({ + // let client = client.clone(); + // |this, mut cx| async move { + // let workspace = this.update(&mut cx, |this, _| this.workspace.clone())?; - // TODO: we want to figure out for witch clients/threads we should remove the highlights - cx.spawn({ - let client = client.clone(); - |this, mut cx| async move { - let workspace = this.update(&mut cx, |this, _| this.workspace.clone())?; + // Self::remove_highlights_for_thread(workspace, client, thread_id, cx).await?; - Self::remove_highlights_for_thread(workspace, client, thread_id, cx).await?; - - anyhow::Ok(()) - } - }) - .detach_and_log_err(cx); + // 
anyhow::Ok(()) + // } + // }) + // .detach_and_log_err(cx); } - cx.emit(DebugPanelEvent::Thread((client.id(), event.clone()))); + cx.emit(DebugPanelEvent::Thread((*client_id, event.clone()))); } fn handle_exited_event( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, _: &ExitedEvent, cx: &mut ViewContext, ) { - for thread_state in client.thread_states().values_mut() { - thread_state.status = ThreadStatus::Exited; - } - - cx.notify(); + self.update_thread_state_status(client_id, None, ThreadStatus::Exited, Some(true), cx); } fn handle_terminated_event( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, event: &Option, cx: &mut ViewContext, ) { let restart_args = event.clone().and_then(|e| e.restart); - let workspace = self.workspace.clone(); - cx.spawn(|_, mut cx| async move { - Self::remove_highlights(workspace.clone(), cx.clone())?; + // TODO debugger: remove current hightlights + self.dap_store.update(cx, |store, cx| { if restart_args.is_some() { - client.disconnect(Some(true), None, None).await?; - - match client.request_type() { - DebugRequestType::Launch => client.launch(restart_args).await, - DebugRequestType::Attach => client.attach(restart_args).await, - } + store + .restart(&client_id, restart_args, cx) + .detach_and_log_err(cx); } else { - cx.update(|cx| { - workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |project, cx| { - project - .dap_store() - .update(cx, |store, cx| store.shutdown_client(client.id(), cx)) - }) - }) - })? 
+ store.shutdown_client(&client_id, cx).detach_and_log_err(cx); } - }) - .detach_and_log_err(cx); + }); } fn handle_output_event( &mut self, - client: Arc, + client_id: &DebugAdapterClientId, event: &OutputEvent, cx: &mut ViewContext, ) { - cx.emit(DebugPanelEvent::Output((client.id(), event.clone()))); + cx.emit(DebugPanelEvent::Output((*client_id, event.clone()))); } fn render_did_not_stop_warning(&self, cx: &mut ViewContext) -> impl IntoElement { diff --git a/crates/debugger_ui/src/debugger_panel_item.rs b/crates/debugger_ui/src/debugger_panel_item.rs index 0acb8ec295..e53983ffdb 100644 --- a/crates/debugger_ui/src/debugger_panel_item.rs +++ b/crates/debugger_ui/src/debugger_panel_item.rs @@ -1,19 +1,19 @@ use crate::console::Console; -use crate::debugger_panel::{DebugPanel, DebugPanelEvent}; +use crate::debugger_panel::{DebugPanel, DebugPanelEvent, ThreadState}; use crate::variable_list::VariableList; -use anyhow::Result; -use dap::client::{DebugAdapterClient, DebugAdapterClientId, ThreadState, ThreadStatus}; +use dap::client::{DebugAdapterClientId, ThreadStatus}; use dap::debugger_settings::DebuggerSettings; -use dap::{OutputEvent, OutputEventCategory, StackFrame, StoppedEvent, ThreadEvent}; +use dap::{Capabilities, OutputEvent, OutputEventCategory, StackFrame, StoppedEvent, ThreadEvent}; use editor::Editor; use gpui::{ - impl_actions, list, AnyElement, AppContext, AsyncWindowContext, EventEmitter, FocusHandle, - FocusableView, ListState, Subscription, View, WeakView, + impl_actions, list, AnyElement, AppContext, EventEmitter, FocusHandle, FocusableView, + ListState, Model, Subscription, View, WeakView, }; +use project::dap_store::DapStore; use serde::Deserialize; use settings::Settings; -use std::sync::Arc; +use task::DebugAdapterKind; use ui::WindowContext; use ui::{prelude::*, Tooltip}; use workspace::dock::Panel; @@ -35,12 +35,16 @@ pub struct DebugPanelItem { thread_id: u64, console: View, focus_handle: FocusHandle, + dap_store: Model, stack_frame_list: 
ListState, output_editor: View, current_stack_frame_id: u64, + client_kind: DebugAdapterKind, + debug_panel: View, active_thread_item: ThreadItem, workspace: WeakView, - client: Arc, + client_id: DebugAdapterClientId, + thread_state: Model, variable_list: View, _subscriptions: Vec, } @@ -70,18 +74,32 @@ enum DebugPanelItemActionKind { } impl DebugPanelItem { + #[allow(clippy::too_many_arguments)] pub fn new( debug_panel: View, workspace: WeakView, - client: Arc, + dap_store: Model, + thread_state: Model, + client_id: &DebugAdapterClientId, + client_kind: &DebugAdapterKind, thread_id: u64, current_stack_frame_id: u64, cx: &mut ViewContext, ) -> Self { let focus_handle = cx.focus_handle(); - let model = cx.model().clone(); - let variable_list = cx.new_view(|cx| VariableList::new(model, cx)); + let capabilities = dap_store.read(cx).capabilities_by_id(&client_id); + + let variable_list = cx.new_view(|cx| { + VariableList::new( + dap_store.clone(), + &client_id, + &thread_state, + &capabilities, + current_stack_frame_id, + cx, + ) + }); let console = cx.new_view(Console::new); let weakview = cx.view().downgrade(); @@ -130,16 +148,20 @@ impl DebugPanelItem { }); Self { - client, - thread_id, - workspace, - focus_handle, - variable_list, console, + thread_id, + dap_store, + workspace, + debug_panel, + thread_state, + focus_handle, output_editor, + variable_list, _subscriptions, stack_frame_list, + client_id: *client_id, current_stack_frame_id, + client_kind: client_kind.clone(), active_thread_item: ThreadItem::Variables, } } @@ -149,7 +171,7 @@ impl DebugPanelItem { client_id: &DebugAdapterClientId, thread_id: u64, ) -> bool { - thread_id != this.thread_id || *client_id != this.client.id() + thread_id != this.thread_id || *client_id != this.client_id } fn handle_stopped_event( @@ -158,11 +180,11 @@ impl DebugPanelItem { event: &StoppedEvent, cx: &mut ViewContext, ) { - if Self::should_skip_event(this, client_id, event.thread_id.unwrap_or_default()) { + if 
Self::should_skip_event(this, client_id, event.thread_id.unwrap_or(this.thread_id)) { return; } - let thread_state = this.current_thread_state(); + let thread_state = this.thread_state.read(cx); this.stack_frame_list.reset(thread_state.stack_frames.len()); if let Some(stack_frame) = thread_state.stack_frames.first() { @@ -182,7 +204,7 @@ impl DebugPanelItem { return; } - // TODO: handle thread event + // TODO debugger: handle thread event } fn handle_output_event( @@ -244,58 +266,55 @@ impl DebugPanelItem { return; } + this.stack_frame_list.reset(0); + + cx.notify(); + cx.emit(Event::Close); } - pub fn client(&self) -> Arc { - self.client.clone() + pub fn client_id(&self) -> DebugAdapterClientId { + self.client_id } pub fn thread_id(&self) -> u64 { self.thread_id } - pub fn current_stack_frame_id(&self) -> u64 { - self.current_stack_frame_id + pub fn capabilities(&self, cx: &mut ViewContext) -> Capabilities { + self.dap_store + .read_with(cx, |store, _| store.capabilities_by_id(&self.client_id)) } - fn stack_frame_for_index(&self, ix: usize) -> StackFrame { - self.client - .thread_state_by_id(self.thread_id) + fn stack_frame_for_index(&self, ix: usize, cx: &mut ViewContext) -> StackFrame { + self.thread_state + .read(cx) .stack_frames .get(ix) .cloned() .unwrap() } - pub fn current_thread_state(&self) -> ThreadState { - self.client - .thread_states() - .get(&self.thread_id) - .cloned() - .unwrap() - } - fn update_stack_frame_id(&mut self, stack_frame_id: u64, cx: &mut ViewContext) { self.current_stack_frame_id = stack_frame_id; - let thread_state = self.current_thread_state(); - - self.variable_list.update(cx, |variable_list, _| { - variable_list.build_entries(thread_state, stack_frame_id, true, false); + self.variable_list.update(cx, |variable_list, cx| { + variable_list.update_stack_frame_id(stack_frame_id, cx); + variable_list.build_entries(true, false, cx); }); + + cx.notify(); } fn render_stack_frames(&self, _cx: &mut ViewContext) -> impl IntoElement { 
v_flex() - .gap_3() .size_full() .child(list(self.stack_frame_list.clone()).size_full()) .into_any() } fn render_stack_frame(&self, ix: usize, cx: &mut ViewContext) -> impl IntoElement { - let stack_frame = self.stack_frame_for_index(ix); + let stack_frame = self.stack_frame_for_index(ix, cx); let source = stack_frame.source.clone(); let is_selected_frame = stack_frame.id == self.current_stack_frame_id; @@ -352,31 +371,6 @@ impl DebugPanelItem { .into_any() } - // if the debug adapter does not send the continued event, - // and the status of the thread did not change we have to assume the thread is running - // so we have to update the thread state status to running - fn update_thread_state( - this: WeakView, - previous_status: ThreadStatus, - all_threads_continued: Option, - mut cx: AsyncWindowContext, - ) -> Result<()> { - this.update(&mut cx, |this, cx| { - if previous_status == this.current_thread_state().status { - if all_threads_continued.unwrap_or(false) { - for thread in this.client.thread_states().values_mut() { - thread.status = ThreadStatus::Running; - } - } else { - this.client - .update_thread_state_status(this.thread_id, ThreadStatus::Running); - } - - cx.notify(); - } - }) - } - /// Actions that should be handled even when Debug Panel is not in focus pub fn workspace_action_handler( workspace: &mut Workspace, @@ -417,90 +411,113 @@ impl DebugPanelItem { } fn handle_continue_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - let thread_id = self.thread_id; - let previous_status = self.current_thread_state().status; + self.debug_panel.update(cx, |panel, cx| { + panel.update_thread_state_status( + &self.client_id, + Some(self.thread_id), + ThreadStatus::Running, + None, + cx, + ); + }); - cx.spawn(|this, cx| async move { - let response = client.resume(thread_id).await?; - - Self::update_thread_state(this, previous_status, response.all_threads_continued, cx) - }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, 
cx| { + store + .continue_thread(&self.client_id, self.thread_id, cx) + .detach_and_log_err(cx); + }); } fn handle_step_over_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - let thread_id = self.thread_id; - let previous_status = self.current_thread_state().status; + self.debug_panel.update(cx, |panel, cx| { + panel.update_thread_state_status( + &self.client_id, + Some(self.thread_id), + ThreadStatus::Running, + None, + cx, + ); + }); + let granularity = DebuggerSettings::get_global(cx).stepping_granularity(); - cx.spawn(|this, cx| async move { - client.step_over(thread_id, granularity).await?; - - Self::update_thread_state(this, previous_status, None, cx) - }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .step_over(&self.client_id, self.thread_id, granularity, cx) + .detach_and_log_err(cx); + }); } fn handle_step_in_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - let thread_id = self.thread_id; - let previous_status = self.current_thread_state().status; + self.debug_panel.update(cx, |panel, cx| { + panel.update_thread_state_status( + &self.client_id, + Some(self.thread_id), + ThreadStatus::Running, + None, + cx, + ); + }); + let granularity = DebuggerSettings::get_global(cx).stepping_granularity(); - cx.spawn(|this, cx| async move { - client.step_in(thread_id, granularity).await?; - - Self::update_thread_state(this, previous_status, None, cx) - }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .step_in(&self.client_id, self.thread_id, granularity, cx) + .detach_and_log_err(cx); + }); } fn handle_step_out_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - let thread_id = self.thread_id; - let previous_status = self.current_thread_state().status; + self.debug_panel.update(cx, |panel, cx| { + panel.update_thread_state_status( + &self.client_id, + Some(self.thread_id), + ThreadStatus::Running, + None, + cx, + ); 
+ }); + let granularity = DebuggerSettings::get_global(cx).stepping_granularity(); - cx.spawn(|this, cx| async move { - client.step_out(thread_id, granularity).await?; - - Self::update_thread_state(this, previous_status, None, cx) - }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .step_out(&self.client_id, self.thread_id, granularity, cx) + .detach_and_log_err(cx); + }); } fn handle_restart_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - - cx.background_executor() - .spawn(async move { client.restart().await }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .restart(&self.client_id, None, cx) + .detach_and_log_err(cx); + }); } fn handle_pause_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - let thread_id = self.thread_id; - cx.background_executor() - .spawn(async move { client.pause(thread_id).await }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .pause_thread(&self.client_id, self.thread_id, cx) + .detach_and_log_err(cx) + }); } fn handle_stop_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - let thread_ids = vec![self.thread_id; 1]; - - cx.background_executor() - .spawn(async move { client.terminate_threads(Some(thread_ids)).await }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .terminate_threads(&self.client_id, Some(vec![self.thread_id; 1]), cx) + .detach_and_log_err(cx) + }); } fn handle_disconnect_action(&mut self, cx: &mut ViewContext) { - let client = self.client.clone(); - cx.background_executor() - .spawn(async move { client.disconnect(None, Some(true), None).await }) - .detach_and_log_err(cx); + self.dap_store.update(cx, |store, cx| { + store + .disconnect_client(&self.client_id, cx) + .detach_and_log_err(cx); + }); } } @@ -522,8 +539,7 @@ impl Item for DebugPanelItem { ) -> AnyElement { Label::new(format!( "{:?} - Thread {}", - 
self.client.config().kind, - self.thread_id + self.client_kind, self.thread_id )) .color(if params.selected { Color::Default @@ -533,12 +549,12 @@ impl Item for DebugPanelItem { .into_any_element() } - fn tab_tooltip_text(&self, _: &AppContext) -> Option { + fn tab_tooltip_text(&self, cx: &AppContext) -> Option { Some(SharedString::from(format!( "{:?} Thread {} - {:?}", - self.client.config().kind, + self.client_kind, self.thread_id, - self.current_thread_state().status + self.thread_state.read(cx).status, ))) } @@ -551,9 +567,11 @@ impl Item for DebugPanelItem { impl Render for DebugPanelItem { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let thread_status = self.current_thread_state().status; + let thread_status = self.thread_state.read(cx).status; let active_thread_item = &self.active_thread_item; + let capabilities = self.capabilities(cx); + h_flex() .key_context("DebugPanelItem") .track_focus(&self.focus_handle) @@ -640,11 +658,7 @@ impl Render for DebugPanelItem { })) })) .disabled( - !self - .client - .capabilities() - .supports_restart_request - .unwrap_or_default(), + !capabilities.supports_restart_request.unwrap_or_default(), ) .tooltip(move |cx| Tooltip::text("Restart", cx)), ) diff --git a/crates/debugger_ui/src/lib.rs b/crates/debugger_ui/src/lib.rs index b35bdf0d64..6aecbc72d9 100644 --- a/crates/debugger_ui/src/lib.rs +++ b/crates/debugger_ui/src/lib.rs @@ -2,6 +2,7 @@ use dap::debugger_settings::DebuggerSettings; use debugger_panel::{DebugPanel, ToggleFocus}; use debugger_panel_item::DebugPanelItem; use gpui::AppContext; +use project::dap_store::{self}; use settings::Settings; use ui::ViewContext; use workspace::{StartDebugger, Workspace}; @@ -13,6 +14,7 @@ mod variable_list; pub fn init(cx: &mut AppContext) { DebuggerSettings::register(cx); + dap_store::init(cx); cx.observe_new_views( |workspace: &mut Workspace, _cx: &mut ViewContext| { diff --git a/crates/debugger_ui/src/variable_list.rs 
b/crates/debugger_ui/src/variable_list.rs index 5bc4601b95..2ef9220dcf 100644 --- a/crates/debugger_ui/src/variable_list.rs +++ b/crates/debugger_ui/src/variable_list.rs @@ -1,10 +1,5 @@ -use crate::debugger_panel_item::DebugPanelItem; -use dap::{ - client::{ThreadState, VariableContainer}, - requests::{SetExpression, SetVariable, Variables}, - Scope, SetExpressionArguments, SetVariableArguments, Variable, VariablesArguments, -}; - +use crate::debugger_panel::{ThreadState, VariableContainer}; +use dap::{client::DebugAdapterClientId, Capabilities, Scope, Variable}; use editor::{ actions::{self, SelectAll}, Editor, EditorEvent, @@ -15,6 +10,7 @@ use gpui::{ ListState, Model, MouseDownEvent, Point, Subscription, View, }; use menu::Confirm; +use project::dap_store::DapStore; use std::{collections::HashMap, sync::Arc}; use ui::{prelude::*, ContextMenu, ListItem}; @@ -46,17 +42,28 @@ pub enum VariableListEntry { pub struct VariableList { list: ListState, + stack_frame_id: u64, + dap_store: Model, focus_handle: FocusHandle, + capabilities: Capabilities, + client_id: DebugAdapterClientId, open_entries: Vec, + thread_state: Model, set_variable_editor: View, - debug_panel_item: Model, set_variable_state: Option, - stack_frame_entries: HashMap>, + entries: HashMap>, open_context_menu: Option<(View, Point, Subscription)>, } impl VariableList { - pub fn new(debug_panel_item: Model, cx: &mut ViewContext) -> Self { + pub fn new( + dap_store: Model, + client_id: &DebugAdapterClientId, + thread_state: &Model, + capabilities: &Capabilities, + stack_frame_id: u64, + cx: &mut ViewContext, + ) -> Self { let weakview = cx.view().downgrade(); let focus_handle = cx.focus_handle(); @@ -81,22 +88,22 @@ impl VariableList { Self { list, + dap_store, focus_handle, - debug_panel_item, + stack_frame_id, set_variable_editor, + client_id: *client_id, open_context_menu: None, set_variable_state: None, + entries: Default::default(), open_entries: Default::default(), - stack_frame_entries: 
Default::default(), + thread_state: thread_state.clone(), + capabilities: capabilities.clone(), } } fn render_entry(&mut self, ix: usize, cx: &mut ViewContext) -> AnyElement { - let debug_item = self.debug_panel_item.read(cx); - let Some(entries) = self - .stack_frame_entries - .get(&debug_item.current_stack_frame_id()) - else { + let Some(entries) = self.entries.get(&self.stack_frame_id) else { return div().into_any_element(); }; @@ -133,22 +140,24 @@ impl VariableList { } }; - let (stack_frame_id, thread_state) = self.debug_panel_item.read_with(cx, |panel, _| { - (panel.current_stack_frame_id(), panel.current_thread_state()) - }); + self.build_entries(false, true, cx); + } + + pub fn update_stack_frame_id(&mut self, stack_frame_id: u64, cx: &mut ViewContext) { + self.stack_frame_id = stack_frame_id; - self.build_entries(thread_state, stack_frame_id, false, true); cx.notify(); } pub fn build_entries( &mut self, - thread_state: ThreadState, - stack_frame_id: u64, open_first_scope: bool, keep_open_entries: bool, + cx: &mut ViewContext, ) { - let Some(scopes) = thread_state.scopes.get(&stack_frame_id) else { + let thread_state = self.thread_state.read(cx); + + let Some(scopes) = thread_state.scopes.get(&self.stack_frame_id) else { return; }; @@ -226,8 +235,10 @@ impl VariableList { } let len = entries.len(); - self.stack_frame_entries.insert(stack_frame_id, entries); + self.entries.insert(self.stack_frame_id, entries); self.list.reset(len); + + cx.notify(); } fn deploy_variable_context_menu( @@ -240,13 +251,8 @@ impl VariableList { ) { let this = cx.view().clone(); - let (stack_frame_id, client) = self - .debug_panel_item - .read_with(cx, |p, _| (p.current_stack_frame_id(), p.client())); - let support_set_variable = client - .capabilities() - .supports_set_variable - .unwrap_or_default(); + let stack_frame_id = self.stack_frame_id; + let support_set_variable = self.capabilities.supports_set_variable.unwrap_or_default(); let context_menu = ContextMenu::build(cx, 
|menu, cx| { menu.entry( @@ -292,12 +298,7 @@ impl VariableList { editor.focus(cx); }); - let thread_state = this - .debug_panel_item - .read_with(cx, |panel, _| panel.current_thread_state()); - this.build_entries(thread_state, stack_frame_id, false, true); - - cx.notify(); + this.build_entries(false, true, cx); }), ) }) @@ -323,12 +324,7 @@ impl VariableList { return; }; - let (stack_frame_id, thread_state) = self.debug_panel_item.read_with(cx, |panel, _| { - (panel.current_stack_frame_id(), panel.current_thread_state()) - }); - - self.build_entries(thread_state, stack_frame_id, false, true); - cx.notify(); + self.build_entries(false, true, cx); } fn set_variable_value(&mut self, _: &Confirm, cx: &mut ViewContext) { @@ -341,111 +337,89 @@ impl VariableList { }); let Some(state) = self.set_variable_state.take() else { - cx.notify(); - return; + return cx.notify(); }; - if new_variable_value == state.value { - cx.notify(); - return; + if new_variable_value == state.value || state.stack_frame_id != self.stack_frame_id { + return cx.notify(); } - let (mut thread_state, client) = self - .debug_panel_item - .read_with(cx, |p, _| (p.current_thread_state(), p.client())); + let client_id = self.client_id; let variables_reference = state.parent_variables_reference; let scope = state.scope; let name = state.name; let evaluate_name = state.evaluate_name; let stack_frame_id = state.stack_frame_id; - let supports_set_expression = client - .capabilities() - .supports_set_expression - .unwrap_or_default(); cx.spawn(|this, mut cx| async move { - if let Some(evaluate_name) = supports_set_expression.then(|| evaluate_name).flatten() { - client - .request::(SetExpressionArguments { - expression: evaluate_name, - value: new_variable_value, - frame_id: Some(stack_frame_id), - format: None, - }) - .await?; - } else { - client - .request::(SetVariableArguments { + let set_value_task = this.update(&mut cx, |this, cx| { + this.dap_store.update(cx, |store, cx| { + store.set_variable_value( + 
&client_id, + stack_frame_id, variables_reference, name, - value: new_variable_value, - format: None, - }) - .await?; - } + new_variable_value, + evaluate_name, + cx, + ) + }) + }); - let Some(scope_variables) = thread_state.variables.remove(&scope.variables_reference) + set_value_task?.await?; + + let Some(scope_variables) = this.update(&mut cx, |this, cx| { + this.thread_state.update(cx, |thread_state, _| { + thread_state.variables.remove(&scope.variables_reference) + }) + })? else { - return anyhow::Ok(()); + return Ok(()); }; - let mut tasks = Vec::new(); + let tasks = this.update(&mut cx, |this, cx| { + let mut tasks = Vec::new(); - for variable_container in scope_variables { - let client = client.clone(); - tasks.push(async move { - let variables = client - .request::(VariablesArguments { - variables_reference: variable_container.container_reference, - filter: None, - start: None, - count: None, - format: None, - }) - .await? - .variables; + for variable_container in scope_variables { + let fetch_variables_task = this.dap_store.update(cx, |store, cx| { + store.variables(&client_id, variable_container.container_reference, cx) + }); - let depth = variable_container.depth; - let container_reference = variable_container.container_reference; + tasks.push(async move { + let depth = variable_container.depth; + let container_reference = variable_container.container_reference; - anyhow::Ok( - variables - .into_iter() - .map(move |variable| VariableContainer { - container_reference, - variable, - depth, - }), - ) - }); - } + anyhow::Ok( + fetch_variables_task + .await? 
+ .into_iter() + .map(move |variable| VariableContainer { + container_reference, + variable, + depth, + }) + .collect::>(), + ) + }); + } + + tasks + })?; let updated_variables = try_join_all(tasks).await?; this.update(&mut cx, |this, cx| { - let (thread_id, stack_frame_id, client) = - this.debug_panel_item.read_with(cx, |panel, _| { - ( - panel.thread_id(), - panel.current_stack_frame_id(), - panel.client(), - ) - }); + this.thread_state.update(cx, |thread_state, cx| { + for variables in updated_variables { + thread_state + .variables + .insert(scope.variables_reference, variables); + } - let mut thread_states = client.thread_states(); + cx.notify(); + }); - let Some(thread_state) = thread_states.get_mut(&thread_id) else { - return; - }; - - for variables in updated_variables { - thread_state - .variables - .insert(scope.variables_reference, variables.collect::<_>()); - } - - this.build_entries(thread_state.clone(), stack_frame_id, false, true); - cx.notify(); + this.build_entries(false, true, cx); }) }) .detach_and_log_err(cx); @@ -470,6 +444,88 @@ impl VariableList { .into_any_element() } + fn on_toggle_variable( + &mut self, + ix: usize, + variable_id: &SharedString, + variable_reference: u64, + has_children: bool, + disclosed: Option, + cx: &mut ViewContext, + ) { + if !has_children { + return; + } + + // if we already opened the variable/we already fetched it + // we can just toggle it because we already have the nested variable + if disclosed.unwrap_or(true) + || self + .thread_state + .read(cx) + .fetched_variable_ids + .contains(&variable_reference) + { + return self.toggle_entry_collapsed(&variable_id, cx); + } + + let Some(entries) = self.entries.get(&self.stack_frame_id) else { + return; + }; + + let Some(entry) = entries.get(ix) else { + return; + }; + + if let VariableListEntry::Variable { scope, depth, .. 
} = entry { + let variable_id = variable_id.clone(); + let scope = scope.clone(); + let depth = *depth; + + let fetch_variables_task = self.dap_store.update(cx, |store, cx| { + store.variables(&self.client_id, variable_reference, cx) + }); + + cx.spawn(|this, mut cx| async move { + let new_variables = fetch_variables_task.await?; + + this.update(&mut cx, |this, cx| { + this.thread_state.update(cx, |thread_state, cx| { + let Some(variables) = + thread_state.variables.get_mut(&scope.variables_reference) + else { + return; + }; + + let position = variables.iter().position(|v| { + variable_entry_id(&scope, &v.variable, v.depth) == variable_id + }); + + if let Some(position) = position { + variables.splice( + position + 1..position + 1, + new_variables.clone().into_iter().map(|variable| { + VariableContainer { + container_reference: variable_reference, + variable, + depth: depth + 1, + } + }), + ); + + thread_state.fetched_variable_ids.insert(variable_reference); + } + + cx.notify(); + }); + + this.toggle_entry_collapsed(&variable_id, cx); + }) + }) + .detach_and_log_err(cx); + } + } + #[allow(clippy::too_many_arguments)] fn render_variable( &self, @@ -501,93 +557,15 @@ impl VariableList { .indent_step_size(px(20.)) .always_show_disclosure_icon(true) .toggle(disclosed) - .on_toggle(cx.listener({ - let variable_id = variable_id.clone(); - move |this, _, cx| { - if !has_children { - return; - } - - let debug_item = this.debug_panel_item.read(cx); - - // if we already opened the variable/we already fetched it - // we can just toggle it because we already have the nested variable - if disclosed.unwrap_or(true) - || debug_item - .current_thread_state() - .fetched_variable_ids - .contains(&variable_reference) - { - return this.toggle_entry_collapsed(&variable_id, cx); - } - - let Some(entries) = this - .stack_frame_entries - .get(&debug_item.current_stack_frame_id()) - else { - return; - }; - - let Some(entry) = entries.get(ix) else { - return; - }; - - if let 
VariableListEntry::Variable { scope, depth, .. } = entry { - let variable_id = variable_id.clone(); - let client = debug_item.client(); - let scope = scope.clone(); - let depth = *depth; - - cx.spawn(|this, mut cx| async move { - let new_variables = - client.variables(variable_reference).await?; - - this.update(&mut cx, |this, cx| { - let client = client.clone(); - let mut thread_states = client.thread_states(); - let Some(thread_state) = thread_states - .get_mut(&this.debug_panel_item.read(cx).thread_id()) - else { - return; - }; - - let Some(variables) = thread_state - .variables - .get_mut(&scope.variables_reference) - else { - return; - }; - - let position = variables.iter().position(|v| { - variable_entry_id(&scope, &v.variable, v.depth) - == variable_id - }); - - if let Some(position) = position { - variables.splice( - position + 1..position + 1, - new_variables.clone().into_iter().map(|variable| { - VariableContainer { - container_reference: variable_reference, - variable, - depth: depth + 1, - } - }), - ); - - thread_state - .fetched_variable_ids - .insert(variable_reference); - } - - drop(thread_states); - this.toggle_entry_collapsed(&variable_id, cx); - cx.notify(); - }) - }) - .detach_and_log_err(cx); - } - } + .on_toggle(cx.listener(move |this, _, cx| { + this.on_toggle_variable( + ix, + &variable_id, + variable_reference, + has_children, + disclosed, + cx, + ) })) .on_secondary_mouse_down(cx.listener({ let scope = scope.clone(); diff --git a/crates/project/src/dap_store.rs b/crates/project/src/dap_store.rs index 89a43b08e7..2924648671 100644 --- a/crates/project/src/dap_store.rs +++ b/crates/project/src/dap_store.rs @@ -1,10 +1,24 @@ -use anyhow::Context as _; +use anyhow::{anyhow, Context as _, Result}; use collections::{HashMap, HashSet}; use dap::client::{DebugAdapterClient, DebugAdapterClientId}; use dap::messages::Message; -use dap::SourceBreakpoint; -use gpui::{EventEmitter, ModelContext, Task}; +use dap::requests::{ + Attach, ConfigurationDone, 
Continue, Disconnect, Initialize, Launch, Next, Pause, Scopes, + SetBreakpoints, SetExpression, SetVariable, StackTrace, StepIn, StepOut, Terminate, + TerminateThreads, Variables, +}; +use dap::{ + AttachRequestArguments, Capabilities, ConfigurationDoneArguments, ContinueArguments, + DisconnectArguments, InitializeRequestArguments, InitializeRequestArgumentsPathFormat, + LaunchRequestArguments, NextArguments, PauseArguments, Scope, ScopesArguments, + SetBreakpointsArguments, SetExpressionArguments, SetVariableArguments, Source, + SourceBreakpoint, StackFrame, StackTraceArguments, StepInArguments, StepOutArguments, + SteppingGranularity, TerminateArguments, TerminateThreadsArguments, Variable, + VariablesArguments, +}; +use gpui::{AppContext, Context, EventEmitter, Global, Model, ModelContext, Task}; use language::{Buffer, BufferSnapshot}; +use serde_json::Value; use settings::WorktreeId; use std::{ collections::BTreeMap, @@ -15,7 +29,7 @@ use std::{ Arc, }, }; -use task::DebugAdapterConfig; +use task::{DebugAdapterConfig, DebugRequestType}; use text::Point; use util::ResultExt as _; @@ -39,18 +53,33 @@ pub struct DapStore { next_client_id: AtomicUsize, clients: HashMap, breakpoints: BTreeMap>, + capabilities: HashMap, } impl EventEmitter for DapStore {} +struct GlobalDapStore(Model); + +impl Global for GlobalDapStore {} + +pub fn init(cx: &mut AppContext) { + let store = GlobalDapStore(cx.new_model(DapStore::new)); + cx.set_global(store); +} + impl DapStore { + pub fn global(cx: &AppContext) -> Model { + cx.global::().0.clone() + } + pub fn new(cx: &mut ModelContext) -> Self { cx.on_app_quit(Self::shutdown_clients).detach(); Self { - next_client_id: Default::default(), clients: Default::default(), + capabilities: HashMap::default(), breakpoints: Default::default(), + next_client_id: Default::default(), } } @@ -65,13 +94,33 @@ impl DapStore { }) } - pub fn client_by_id(&self, id: DebugAdapterClientId) -> Option> { - self.clients.get(&id).and_then(|state| match state 
{ + pub fn client_by_id(&self, id: &DebugAdapterClientId) -> Option> { + self.clients.get(id).and_then(|state| match state { DebugAdapterClientState::Starting(_) => None, DebugAdapterClientState::Running(client) => Some(client.clone()), }) } + pub fn capabilities_by_id(&self, client_id: &DebugAdapterClientId) -> Capabilities { + self.capabilities + .get(client_id) + .cloned() + .unwrap_or_default() + } + + pub fn merge_capabilities_for_client( + &mut self, + client_id: &DebugAdapterClientId, + other: &Capabilities, + cx: &mut ModelContext, + ) { + if let Some(capabilities) = self.capabilities.get_mut(client_id) { + *capabilities = capabilities.merge(other.clone()); + + cx.notify(); + } + } + pub fn breakpoints(&self) -> &BTreeMap> { &self.breakpoints } @@ -169,42 +218,467 @@ impl DapStore { ); } - fn shutdown_clients(&mut self, _: &mut ModelContext) -> impl Future { - let shutdown_futures = self - .clients - .drain() - .map(|(_, client_state)| async { - match client_state { - DebugAdapterClientState::Starting(task) => task.await?.shutdown().await.ok(), - DebugAdapterClientState::Running(client) => client.shutdown().await.ok(), + pub fn initialize( + &mut self, + client_id: &DebugAdapterClientId, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + cx.spawn(|this, mut cx| async move { + let capabilities = client + .request::(InitializeRequestArguments { + client_id: Some("zed".to_owned()), + client_name: Some("Zed".to_owned()), + adapter_id: client.adapter().id(), + locale: Some("en-US".to_owned()), + path_format: Some(InitializeRequestArgumentsPathFormat::Path), + supports_variable_type: Some(true), + supports_variable_paging: Some(false), + supports_run_in_terminal_request: Some(false), + supports_memory_references: Some(true), + supports_progress_reporting: Some(false), + supports_invalidated_event: Some(false), + lines_start_at1: Some(true), + 
columns_start_at1: Some(true), + supports_memory_event: Some(false), + supports_args_can_be_interpreted_by_shell: Some(true), + supports_start_debugging_request: Some(true), + }) + .await?; + + this.update(&mut cx, |store, cx| { + store.capabilities.insert(client.id(), capabilities); + + cx.notify(); + })?; + + // send correct request based on adapter config + match client.config().request { + DebugRequestType::Launch => { + client + .request::(LaunchRequestArguments { + raw: client.request_args(), + }) + .await? } + DebugRequestType::Attach => { + client + .request::(AttachRequestArguments { + raw: client.request_args(), + }) + .await? + } + } + + Ok(()) + }) + } + + pub fn stack_frames( + &mut self, + client_id: &DebugAdapterClientId, + thread_id: u64, + cx: &mut ModelContext, + ) -> Task>> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Client was not found"))); + }; + + cx.spawn(|_, _| async move { + Ok(client + .request::(StackTraceArguments { + thread_id, + start_frame: None, + levels: None, + format: None, + }) + .await? + .stack_frames) + }) + } + + pub fn scopes( + &mut self, + client_id: &DebugAdapterClientId, + stack_frame_id: u64, + cx: &mut ModelContext, + ) -> Task>> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Client was not found"))); + }; + + cx.spawn(|_, _| async move { + Ok(client + .request::(ScopesArguments { + frame_id: stack_frame_id, + }) + .await? 
+ .scopes) + }) + } + + pub fn send_configuration_done( + &self, + client_id: &DebugAdapterClientId, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let capabilities = self.capabilities_by_id(client_id); + + cx.spawn(|_, _| async move { + let support_configuration_done_request = capabilities + .supports_configuration_done_request + .unwrap_or_default(); + + if support_configuration_done_request { + client + .request::(ConfigurationDoneArguments) + .await + } else { + Ok(()) + } + }) + } + + pub fn continue_thread( + &self, + client_id: &DebugAdapterClientId, + thread_id: u64, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + cx.spawn(|_, _| async move { + client + .request::(ContinueArguments { + thread_id, + single_thread: Some(true), + }) + .await?; + + Ok(()) + }) + } + + pub fn step_over( + &self, + client_id: &DebugAdapterClientId, + thread_id: u64, + granularity: SteppingGranularity, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let capabilities = self.capabilities_by_id(client_id); + + let supports_single_thread_execution_requests = capabilities + .supports_single_thread_execution_requests + .unwrap_or_default(); + let supports_stepping_granularity = capabilities + .supports_stepping_granularity + .unwrap_or_default(); + + cx.spawn(|_, _| async move { + client + .request::(NextArguments { + thread_id, + granularity: supports_stepping_granularity.then(|| granularity), + single_thread: supports_single_thread_execution_requests.then(|| true), + }) + .await + }) + } + + pub fn step_in( + &self, + client_id: &DebugAdapterClientId, + thread_id: u64, + granularity: SteppingGranularity, + cx: &mut ModelContext, + 
) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let capabilities = self.capabilities_by_id(client_id); + + let supports_single_thread_execution_requests = capabilities + .supports_single_thread_execution_requests + .unwrap_or_default(); + let supports_stepping_granularity = capabilities + .supports_stepping_granularity + .unwrap_or_default(); + + cx.spawn(|_, _| async move { + client + .request::(StepInArguments { + thread_id, + granularity: supports_stepping_granularity.then(|| granularity), + single_thread: supports_single_thread_execution_requests.then(|| true), + target_id: None, + }) + .await + }) + } + + pub fn step_out( + &self, + client_id: &DebugAdapterClientId, + thread_id: u64, + granularity: SteppingGranularity, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let capabilities = self.capabilities_by_id(client_id); + + let supports_single_thread_execution_requests = capabilities + .supports_single_thread_execution_requests + .unwrap_or_default(); + let supports_stepping_granularity = capabilities + .supports_stepping_granularity + .unwrap_or_default(); + + cx.spawn(|_, _| async move { + client + .request::(StepOutArguments { + thread_id, + granularity: supports_stepping_granularity.then(|| granularity), + single_thread: supports_single_thread_execution_requests.then(|| true), + }) + .await + }) + } + + pub fn variables( + &self, + client_id: &DebugAdapterClientId, + variables_reference: u64, + cx: &mut ModelContext, + ) -> Task>> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + cx.spawn(|_, _| async move { + Ok(client + .request::(VariablesArguments { + variables_reference, + filter: None, + start: None, + count: None, + format: None, + }) + .await? 
+ .variables) + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn set_variable_value( + &self, + client_id: &DebugAdapterClientId, + stack_frame_id: u64, + variables_reference: u64, + name: String, + value: String, + evaluate_name: Option, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let supports_set_expression = self + .capabilities_by_id(client_id) + .supports_set_expression + .unwrap_or_default(); + + cx.spawn(|_, _| async move { + if let Some(evaluate_name) = supports_set_expression.then(|| evaluate_name).flatten() { + client + .request::(SetExpressionArguments { + expression: evaluate_name, + value, + frame_id: Some(stack_frame_id), + format: None, + }) + .await?; + } else { + client + .request::(SetVariableArguments { + variables_reference, + name, + value, + format: None, + }) + .await?; + } + + Ok(()) + }) + } + + pub fn pause_thread( + &mut self, + client_id: &DebugAdapterClientId, + thread_id: u64, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + cx.spawn(|_, _| async move { client.request::(PauseArguments { thread_id }).await }) + } + + pub fn terminate_threads( + &mut self, + client_id: &DebugAdapterClientId, + thread_ids: Option>, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let capabilities = self.capabilities_by_id(client_id); + + if capabilities + .supports_terminate_threads_request + .unwrap_or_default() + { + cx.spawn(|_, _| async move { + client + .request::(TerminateThreadsArguments { thread_ids }) + .await }) - .collect::>(); + } else { + self.shutdown_client(client_id, cx) + } + } + + pub fn disconnect_client( + &mut self, + client_id: &DebugAdapterClientId, + cx: &mut 
ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + cx.spawn(|_, _| async move { + client + .request::(DisconnectArguments { + restart: Some(false), + terminate_debuggee: Some(true), + suspend_debuggee: Some(false), + }) + .await + }) + } + + pub fn restart( + &mut self, + client_id: &DebugAdapterClientId, + args: Option, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + let restart_args = args.unwrap_or(Value::Null); + + cx.spawn(|_, _| async move { + client + .request::(DisconnectArguments { + restart: Some(true), + terminate_debuggee: Some(false), + suspend_debuggee: Some(false), + }) + .await?; + + match client.request_type() { + DebugRequestType::Launch => { + client + .request::(LaunchRequestArguments { raw: restart_args }) + .await? + } + DebugRequestType::Attach => { + client + .request::(AttachRequestArguments { raw: restart_args }) + .await? 
+ } + } + + Ok(()) + }) + } + + fn shutdown_clients(&mut self, cx: &mut ModelContext) -> impl Future { + let mut tasks = Vec::new(); + + let client_ids = self.clients.keys().cloned().collect::>(); + for client_id in client_ids { + tasks.push(self.shutdown_client(&client_id, cx)); + } async move { - futures::future::join_all(shutdown_futures).await; + futures::future::join_all(tasks).await; } } pub fn shutdown_client( &mut self, - client_id: DebugAdapterClientId, + client_id: &DebugAdapterClientId, cx: &mut ModelContext, - ) { - let Some(debug_client) = self.clients.remove(&client_id) else { - return; + ) -> Task> { + let Some(client) = self.clients.remove(&client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); }; - cx.emit(DapStoreEvent::DebugClientStopped(client_id)); + cx.emit(DapStoreEvent::DebugClientStopped(*client_id)); - cx.background_executor() - .spawn(async move { - match debug_client { - DebugAdapterClientState::Starting(task) => task.await?.shutdown().await.ok(), - DebugAdapterClientState::Running(client) => client.shutdown().await.ok(), - } - }) - .detach(); + let capabilities = self.capabilities.remove(client_id); + + cx.notify(); + + cx.spawn(|_, _| async move { + let client = match client { + DebugAdapterClientState::Starting(task) => task.await, + DebugAdapterClientState::Running(client) => Some(client), + }; + + let Some(client) = client else { + return Ok(()); + }; + + if capabilities + .and_then(|c| c.supports_terminate_request) + .unwrap_or_default() + { + let _ = client + .request::(TerminateArguments { + restart: Some(false), + }) + .await; + } + + client.shutdown().await + }) } pub fn toggle_breakpoint_for_buffer( @@ -221,7 +695,42 @@ impl DapStore { breakpoint_set.insert(breakpoint); } - self.send_changed_breakpoints(project_path, buffer_path, buffer_snapshot, cx); + self.send_changed_breakpoints(project_path, buffer_path, buffer_snapshot, cx) + .detach(); + } + + pub fn send_breakpoints( + &self, + client_id: 
&DebugAdapterClientId, + absolute_file_path: Arc, + breakpoints: Vec, + cx: &mut ModelContext, + ) -> Task> { + let Some(client) = self.client_by_id(client_id) else { + return Task::ready(Err(anyhow!("Could not found client"))); + }; + + cx.spawn(|_, _| async move { + client + .request::(SetBreakpointsArguments { + source: Source { + path: Some(String::from(absolute_file_path.to_string_lossy())), + name: None, + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }, + breakpoints: Some(breakpoints), + source_modified: None, + lines: None, + }) + .await?; + + Ok(()) + }) } pub fn send_changed_breakpoints( @@ -230,15 +739,15 @@ impl DapStore { buffer_path: PathBuf, buffer_snapshot: BufferSnapshot, cx: &mut ModelContext, - ) { + ) -> Task<()> { let clients = self.running_clients().collect::>(); if clients.is_empty() { - return; + return Task::ready(()); } let Some(breakpoints) = self.breakpoints.get(project_path) else { - return; + return Task::ready(()); }; let source_breakpoints = breakpoints @@ -248,18 +757,17 @@ impl DapStore { let mut tasks = Vec::new(); for client in clients { - let buffer_path = buffer_path.clone(); - let source_breakpoints = source_breakpoints.clone(); - tasks.push(async move { - client - .set_breakpoints(Arc::from(buffer_path), source_breakpoints) - .await - }); + tasks.push(self.send_breakpoints( + &client.id(), + Arc::from(buffer_path.clone()), + source_breakpoints.clone(), + cx, + )) } - cx.background_executor() - .spawn(async move { futures::future::join_all(tasks).await }) - .detach() + cx.background_executor().spawn(async move { + futures::future::join_all(tasks).await; + }) } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index af01bc7765..b999780899 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -39,7 +39,7 @@ use debounced_delay::DebouncedDelay; pub use environment::ProjectEnvironment; use 
futures::{ channel::mpsc::{self, UnboundedReceiver}, - future::try_join_all, + future::{join_all, try_join_all}, stream::FuturesUnordered, AsyncWriteExt, FutureExt, StreamExt, }; @@ -252,7 +252,6 @@ pub enum Event { Notification(String), LanguageServerPrompt(LanguageServerPromptRequest), LanguageNotFound(Model), - DebugClientStarted(DebugAdapterClientId), DebugClientStopped(DebugAdapterClientId), DebugClientEvent { client_id: DebugAdapterClientId, @@ -644,6 +643,8 @@ impl Project { env: Option>, cx: &mut AppContext, ) -> Model { + let dap_store = DapStore::global(cx); + cx.new_model(|cx: &mut ModelContext| { let (tx, rx) = mpsc::unbounded(); cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) @@ -657,8 +658,6 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - let dap_store = cx.new_model(DapStore::new); - let buffer_store = cx.new_model(|cx| { BufferStore::new(worktree_store.clone(), None, dap_store.clone(), cx) }); @@ -849,7 +848,7 @@ impl Project { store })?; - let dap_store = cx.new_model(DapStore::new)?; + let dap_store = cx.update(|cx| DapStore::global(cx))?; let buffer_store = cx.new_model(|cx| { BufferStore::new( @@ -1106,29 +1105,24 @@ impl Project { pub fn send_breakpoints( &self, - client: Arc, + client_id: &DebugAdapterClientId, cx: &mut ModelContext, - ) -> Task> { - cx.spawn(|project, mut cx| async move { - let task = project.update(&mut cx, |project, cx| { - let mut tasks = Vec::new(); + ) -> Task<()> { + let mut tasks = Vec::new(); - for (abs_path, serialized_breakpoints) in project.all_breakpoints(true, cx) { - let source_breakpoints = serialized_breakpoints - .iter() - .map(|bp| bp.to_source_breakpoint()) - .collect::>(); + for (abs_path, serialized_breakpoints) in self.all_breakpoints(true, cx) { + let source_breakpoints = serialized_breakpoints + .iter() + .map(|bp| bp.to_source_breakpoint()) + .collect::>(); - tasks - .push(client.set_breakpoints(abs_path.clone(), 
source_breakpoints.clone())); - } + tasks.push(self.dap_store.update(cx, |store, cx| { + store.send_breakpoints(client_id, abs_path, source_breakpoints, cx) + })); + } - try_join_all(tasks) - })?; - - task.await?; - - Ok(()) + cx.background_executor().spawn(async move { + join_all(tasks).await; }) } @@ -2314,7 +2308,9 @@ impl Project { ) { match event { DapStoreEvent::DebugClientStarted(client_id) => { - cx.emit(Event::DebugClientStarted(*client_id)); + self.dap_store.update(cx, |store, cx| { + store.initialize(client_id, cx).detach_and_log_err(cx) + }); } DapStoreEvent::DebugClientStopped(client_id) => { cx.emit(Event::DebugClientStopped(*client_id)); diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 3b2631ca8a..a65b4b8493 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -43,7 +43,7 @@ impl HeadlessProject { Task::ready(()), cx.background_executor().clone(), )); - let dap_store = cx.new_model(DapStore::new); + let dap_store = DapStore::global(cx); let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); let buffer_store = cx.new_model(|cx| { From f1f14266356f9ebbc0c4e24656a352d86751b56e Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Sat, 21 Sep 2024 18:38:40 +0200 Subject: [PATCH 267/270] Make CI pass --- crates/debugger_ui/src/debugger_panel.rs | 2 +- crates/remote_server/src/headless_project.rs | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 2e07039573..37bcdd8c84 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -685,7 +685,7 @@ impl DebugPanel { ) { let restart_args = event.clone().and_then(|e| e.restart); - // TODO debugger: remove current hightlights + // TODO debugger: remove current highlights self.dap_store.update(cx, |store, cx| { if 
restart_args.is_some() { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 973ceaba10..fd63dc2614 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -4,9 +4,12 @@ use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext}; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; use node_runtime::DummyNodeRuntime; use project::{ - buffer_store::{BufferStore, BufferStoreEvent}, dap_store::DapStore, project_settings::SettingsObserver, - search::SearchQuery, worktree_store::WorktreeStore, LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, - WorktreeSettings, + buffer_store::{BufferStore, BufferStoreEvent}, + dap_store::DapStore, + project_settings::SettingsObserver, + search::SearchQuery, + worktree_store::WorktreeStore, + LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, }; use remote::SshSession; use rpc::{ From c26a8f1537835114b030b27501f143362bc588ee Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Sat, 21 Sep 2024 18:42:34 +0200 Subject: [PATCH 268/270] Remove unused dep --- Cargo.lock | 4 ---- crates/dap/Cargo.toml | 1 - crates/editor/Cargo.toml | 1 - crates/project/Cargo.toml | 1 - crates/workspace/Cargo.toml | 1 - 5 files changed, 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 570fa63a22..cce0f6eaf4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3388,7 +3388,6 @@ dependencies = [ "dap-types", "futures 0.3.30", "gpui", - "log", "parking_lot", "schemars", "serde", @@ -3762,7 +3761,6 @@ dependencies = [ "collections", "convert_case 0.6.0", "ctor", - "dap", "db", "emojis", "env_logger", @@ -8510,7 +8508,6 @@ dependencies = [ "language", "log", "lsp", - "multi_buffer", "node_runtime", "parking_lot", "pathdiff", @@ -14144,7 +14141,6 @@ dependencies = [ "client", "clock", "collections", - "dap", "db", "derive_more", "dev_server_projects", diff --git a/crates/dap/Cargo.toml 
b/crates/dap/Cargo.toml index 63c65ce4a3..3e73958f80 100644 --- a/crates/dap/Cargo.toml +++ b/crates/dap/Cargo.toml @@ -14,7 +14,6 @@ async-trait.workspace = true dap-types = { git = "https://github.com/zed-industries/dap-types" } futures.workspace = true gpui.workspace = true -log.workspace = true parking_lot.workspace = true schemars.workspace = true serde.workspace = true diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index d797924419..b6b22ef64d 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -37,7 +37,6 @@ clock.workspace = true collections.workspace = true convert_case.workspace = true db.workspace = true -dap.workspace = true emojis.workspace = true file_icons.workspace = true futures.workspace = true diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index ad67d483d5..84a221f617 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -71,7 +71,6 @@ terminal.workspace = true text.workspace = true util.workspace = true which.workspace = true -multi_buffer.workspace = true [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 988ddcedaa..1b998eeabe 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -35,7 +35,6 @@ call.workspace = true client.workspace = true clock.workspace = true collections.workspace = true -dap.workspace = true db.workspace = true derive_more.workspace = true fs.workspace = true From 4ddb65bdaa74f3e94459bb86dc4eed3e5db3d2eb Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Sat, 21 Sep 2024 19:25:11 +0200 Subject: [PATCH 269/270] Make test pass again --- crates/debugger_ui/src/debugger_panel.rs | 2 +- crates/debugger_ui/src/lib.rs | 2 -- crates/project/src/dap_store.rs | 15 +-------------- crates/project/src/project.rs | 10 +++++----- 4 files changed, 7 insertions(+), 22 deletions(-) diff --git a/crates/debugger_ui/src/debugger_panel.rs 
b/crates/debugger_ui/src/debugger_panel.rs index 37bcdd8c84..4eecfcd840 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -134,7 +134,7 @@ impl DebugPanel { pane, size: px(300.), _subscriptions, - dap_store: DapStore::global(cx), + dap_store: project.read(cx).dap_store(), focus_handle: cx.focus_handle(), show_did_not_stop_warning: false, thread_states: Default::default(), diff --git a/crates/debugger_ui/src/lib.rs b/crates/debugger_ui/src/lib.rs index 6aecbc72d9..b35bdf0d64 100644 --- a/crates/debugger_ui/src/lib.rs +++ b/crates/debugger_ui/src/lib.rs @@ -2,7 +2,6 @@ use dap::debugger_settings::DebuggerSettings; use debugger_panel::{DebugPanel, ToggleFocus}; use debugger_panel_item::DebugPanelItem; use gpui::AppContext; -use project::dap_store::{self}; use settings::Settings; use ui::ViewContext; use workspace::{StartDebugger, Workspace}; @@ -14,7 +13,6 @@ mod variable_list; pub fn init(cx: &mut AppContext) { DebuggerSettings::register(cx); - dap_store::init(cx); cx.observe_new_views( |workspace: &mut Workspace, _cx: &mut ViewContext| { diff --git a/crates/project/src/dap_store.rs b/crates/project/src/dap_store.rs index 2924648671..52cac5a891 100644 --- a/crates/project/src/dap_store.rs +++ b/crates/project/src/dap_store.rs @@ -16,7 +16,7 @@ use dap::{ SteppingGranularity, TerminateArguments, TerminateThreadsArguments, Variable, VariablesArguments, }; -use gpui::{AppContext, Context, EventEmitter, Global, Model, ModelContext, Task}; +use gpui::{EventEmitter, ModelContext, Task}; use language::{Buffer, BufferSnapshot}; use serde_json::Value; use settings::WorktreeId; @@ -58,20 +58,7 @@ pub struct DapStore { impl EventEmitter for DapStore {} -struct GlobalDapStore(Model); - -impl Global for GlobalDapStore {} - -pub fn init(cx: &mut AppContext) { - let store = GlobalDapStore(cx.new_model(DapStore::new)); - cx.set_global(store); -} - impl DapStore { - pub fn global(cx: &AppContext) -> Model { - 
cx.global::().0.clone() - } - pub fn new(cx: &mut ModelContext) -> Self { cx.on_app_quit(Self::shutdown_clients).detach(); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index e147b12b29..419fdbdb2c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -640,8 +640,6 @@ impl Project { env: Option>, cx: &mut AppContext, ) -> Model { - let dap_store = DapStore::global(cx); - cx.new_model(|cx: &mut ModelContext| { let (tx, rx) = mpsc::unbounded(); cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) @@ -655,6 +653,8 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + let dap_store = cx.new_model(|cx| DapStore::new(cx)); + let buffer_store = cx.new_model(|cx| { BufferStore::new(worktree_store.clone(), None, dap_store.clone(), cx) }); @@ -743,8 +743,6 @@ impl Project { fs: Arc, cx: &mut AppContext, ) -> Model { - let dap_store = DapStore::global(cx); - cx.new_model(|cx: &mut ModelContext| { let (tx, rx) = mpsc::unbounded(); cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) @@ -759,6 +757,8 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + let dap_store = cx.new_model(|cx| DapStore::new(cx)); + let buffer_store = cx.new_model(|cx| { BufferStore::new(worktree_store.clone(), None, dap_store.clone(), cx) }); @@ -916,7 +916,7 @@ impl Project { store })?; - let dap_store = cx.update(|cx| DapStore::global(cx))?; + let dap_store = cx.new_model(|cx| DapStore::new(cx))?; let buffer_store = cx.new_model(|cx| { BufferStore::new( From 8b96ac81380cebf431b27614319f53bce9c8358c Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Sat, 21 Sep 2024 19:39:55 +0200 Subject: [PATCH 270/270] Make clippy pass --- crates/project/src/project.rs | 6 +++--- crates/remote_server/src/headless_project.rs | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/project/src/project.rs 
b/crates/project/src/project.rs index 419fdbdb2c..851922e5ee 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -653,7 +653,7 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - let dap_store = cx.new_model(|cx| DapStore::new(cx)); + let dap_store = cx.new_model(DapStore::new); let buffer_store = cx.new_model(|cx| { BufferStore::new(worktree_store.clone(), None, dap_store.clone(), cx) @@ -757,7 +757,7 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - let dap_store = cx.new_model(|cx| DapStore::new(cx)); + let dap_store = cx.new_model(DapStore::new); let buffer_store = cx.new_model(|cx| { BufferStore::new(worktree_store.clone(), None, dap_store.clone(), cx) @@ -916,7 +916,7 @@ impl Project { store })?; - let dap_store = cx.new_model(|cx| DapStore::new(cx))?; + let dap_store = cx.new_model(DapStore::new)?; let buffer_store = cx.new_model(|cx| { BufferStore::new( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index fd63dc2614..f47dbd9f67 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -43,7 +43,6 @@ impl HeadlessProject { } pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { - let dap_store = DapStore::global(cx); let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let worktree_store = cx.new_model(|cx| { @@ -51,6 +50,8 @@ impl HeadlessProject { store.shared(SSH_PROJECT_ID, session.clone().into(), cx); store }); + + let dap_store = cx.new_model(DapStore::new); let buffer_store = cx.new_model(|cx| { let mut buffer_store = BufferStore::new( worktree_store.clone(),