Compare commits

..

2 Commits

Author SHA1 Message Date
Mikayla Maki
4ac098ce0a wip 2025-12-04 14:20:55 -08:00
Mikayla Maki
8ff95c153e Refactor the inline prompt editor to use a component
co-authored-by: Zed Agent
2025-12-04 11:28:23 -08:00
61 changed files with 1147 additions and 2673 deletions

89
Cargo.lock generated
View File

@@ -2130,15 +2130,30 @@ dependencies = [
"syn 2.0.106",
]
[[package]]
name = "bit-set"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
dependencies = [
"bit-vec 0.6.3",
]
[[package]]
name = "bit-set"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
"bit-vec",
"bit-vec 0.8.0",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
[[package]]
name = "bit-vec"
version = "0.8.0"
@@ -2317,9 +2332,9 @@ dependencies = [
[[package]]
name = "borrow-or-share"
version = "0.2.4"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc0b364ead1874514c8c2855ab558056ebfeb775653e7ae45ff72f28f8f3166c"
checksum = "3eeab4423108c5d7c744f4d234de88d18d636100093ae04caf4825134b9c3a32"
[[package]]
name = "borsh"
@@ -5405,7 +5420,6 @@ dependencies = [
"tree-sitter-bash",
"tree-sitter-c",
"tree-sitter-html",
"tree-sitter-md",
"tree-sitter-python",
"tree-sitter-rust",
"tree-sitter-typescript",
@@ -5994,11 +6008,22 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]]
name = "fancy-regex"
version = "0.16.2"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f"
checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
dependencies = [
"bit-set",
"bit-set 0.5.3",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set 0.8.0",
"regex-automata",
"regex-syntax",
]
@@ -6220,9 +6245,9 @@ checksum = "8bf7cc16383c4b8d58b9905a8509f02926ce3058053c056376248d958c9df1e8"
[[package]]
name = "fluent-uri"
version = "0.4.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc74ac4d8359ae70623506d512209619e5cf8f347124910440dbc221714b328e"
checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
dependencies = [
"borrow-or-share",
"ref-cast",
@@ -7518,17 +7543,6 @@ dependencies = [
"serde",
]
[[package]]
name = "hashbrown"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash 0.2.0",
]
[[package]]
name = "hashlink"
version = "0.8.4"
@@ -8618,21 +8632,21 @@ dependencies = [
[[package]]
name = "jsonschema"
version = "0.37.4"
version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73c9ffb2b5c56d58030e1b532d8e8389da94590515f118cf35b5cb68e4764a7e"
checksum = "f1b46a0365a611fbf1d2143104dcf910aada96fafd295bab16c60b802bf6fa1d"
dependencies = [
"ahash 0.8.12",
"base64 0.22.1",
"bytecount",
"data-encoding",
"email_address",
"fancy-regex",
"fancy-regex 0.14.0",
"fraction",
"getrandom 0.3.4",
"idna",
"itoa",
"num-cmp",
"num-traits",
"once_cell",
"percent-encoding",
"referencing",
"regex",
@@ -8640,7 +8654,6 @@ dependencies = [
"reqwest 0.12.24",
"serde",
"serde_json",
"unicode-general-category",
"uuid-simd",
]
@@ -10189,7 +10202,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b977c445f26e49757f9aca3631c3b8b836942cb278d69a92e7b80d3b24da632"
dependencies = [
"arrayvec",
"bit-set",
"bit-set 0.8.0",
"bitflags 2.9.4",
"cfg_aliases 0.2.1",
"codespan-reporting 0.12.0",
@@ -13045,7 +13058,7 @@ dependencies = [
"dap",
"dap_adapters",
"extension",
"fancy-regex",
"fancy-regex 0.14.0",
"fs",
"futures 0.3.31",
"fuzzy",
@@ -13916,14 +13929,13 @@ dependencies = [
[[package]]
name = "referencing"
version = "0.37.4"
version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4283168a506f0dcbdce31c9f9cce3129c924da4c6bca46e46707fcb746d2d70c"
checksum = "c8eff4fa778b5c2a57e85c5f2fe3a709c52f0e60d23146e2151cbef5893f420e"
dependencies = [
"ahash 0.8.12",
"fluent-uri",
"getrandom 0.3.4",
"hashbrown 0.16.1",
"once_cell",
"parking_lot",
"percent-encoding",
"serde_json",
@@ -17117,7 +17129,7 @@ dependencies = [
"alacritty_terminal",
"anyhow",
"collections",
"fancy-regex",
"fancy-regex 0.14.0",
"futures 0.3.31",
"gpui",
"itertools 0.14.0",
@@ -17351,12 +17363,12 @@ dependencies = [
[[package]]
name = "tiktoken-rs"
version = "0.9.1"
source = "git+https://github.com/zed-industries/tiktoken-rs?rev=2570c4387a8505fb8f1d3f3557454b474f1e8271#2570c4387a8505fb8f1d3f3557454b474f1e8271"
source = "git+https://github.com/zed-industries/tiktoken-rs?rev=7249f999c5fdf9bf3cc5c288c964454e4dac0c00#7249f999c5fdf9bf3cc5c288c964454e4dac0c00"
dependencies = [
"anyhow",
"base64 0.22.1",
"bstr",
"fancy-regex",
"fancy-regex 0.13.0",
"lazy_static",
"regex",
"rustc-hash 1.1.0",
@@ -18488,12 +18500,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce61d488bcdc9bc8b5d1772c404828b17fc481c0a582b5581e95fb233aef503e"
[[package]]
name = "unicode-general-category"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b993bddc193ae5bd0d623b49ec06ac3e9312875fdae725a975c51db1cc1677f"
[[package]]
name = "unicode-ident"
version = "1.0.19"
@@ -18728,6 +18734,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8"
dependencies = [
"outref",
"uuid",
"vsimd",
]

View File

@@ -505,7 +505,7 @@ ec4rs = "1.1"
emojis = "0.6.1"
env_logger = "0.11"
exec = "0.3.1"
fancy-regex = "0.16.0"
fancy-regex = "0.14.0"
fork = "0.4.0"
futures = "0.3"
futures-batch = "0.6.1"
@@ -531,7 +531,7 @@ indoc = "2"
inventory = "0.3.19"
itertools = "0.14.0"
json_dotpath = "1.1"
jsonschema = "0.37.0"
jsonschema = "0.30.0"
jsonwebtoken = "9.3"
jupyter-protocol = "0.10.0"
jupyter-websocket-client = "0.15.0"
@@ -658,7 +658,7 @@ sysinfo = "0.37.0"
take-until = "0.2.0"
tempfile = "3.20.0"
thiserror = "2.0.12"
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "2570c4387a8505fb8f1d3f3557454b474f1e8271" }
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "7249f999c5fdf9bf3cc5c288c964454e4dac0c00" }
time = { version = "0.3", features = [
"macros",
"parsing",

View File

@@ -60,8 +60,7 @@ impl<T: 'static> EventEmitter<PromptEditorEvent> for PromptEditor<T> {}
impl<T: 'static> Render for PromptEditor<T> {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let ui_font_size = ThemeSettings::get_global(cx).ui_font_size(cx);
let mut buttons = Vec::new();
let mut action_buttons = Vec::new();
const RIGHT_PADDING: Pixels = px(9.);
@@ -74,7 +73,7 @@ impl<T: 'static> Render for PromptEditor<T> {
let codegen = codegen.read(cx);
if codegen.alternative_count(cx) > 1 {
buttons.push(self.render_cycle_controls(codegen, cx));
action_buttons.push(self.render_cycle_controls(codegen, cx));
}
let editor_margins = editor_margins.lock();
@@ -85,10 +84,7 @@ impl<T: 'static> Render for PromptEditor<T> {
(left_gutter_width, right_padding)
}
PromptEditorMode::Terminal { .. } => {
// Give the equivalent of the same left-padding that we're using on the right
(Pixels::from(40.0), Pixels::from(24.))
}
PromptEditorMode::Terminal { .. } => (Pixels::from(40.0), Pixels::from(24.)),
};
let bottom_padding = match &self.mode {
@@ -96,7 +92,7 @@ impl<T: 'static> Render for PromptEditor<T> {
PromptEditorMode::Terminal { .. } => rems_from_px(8.0),
};
buttons.extend(self.render_buttons(window, cx));
action_buttons.extend(self.render_buttons(window, cx));
let menu_visible = self.is_completions_menu_visible(cx);
let add_context_button = IconButton::new("add-context", IconName::AtSign)
@@ -109,93 +105,48 @@ impl<T: 'static> Render for PromptEditor<T> {
})
.on_click(cx.listener(move |this, _, window, cx| {
this.trigger_completion_menu(window, cx);
}));
}))
.into_any_element();
let close_button = self.render_close_button(cx);
let error_message = if let CodegenStatus::Error(error) = self.codegen_status(cx) {
Some(SharedString::from(error.to_string()))
} else {
None
};
let editor = self.render_editor(window, cx);
let model_selector = self.model_selector.clone().into_any_element();
v_flex()
.key_context("PromptEditor")
.capture_action(cx.listener(Self::paste))
.bg(cx.theme().colors().editor_background)
.block_mouse_except_scroll()
.gap_0p5()
.border_y_1()
.border_color(cx.theme().status().info_border)
.size_full()
.pt_0p5()
.pb(bottom_padding)
.pr(right_padding)
.cursor(CursorStyle::Arrow)
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
this.model_selector
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
}))
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::cancel))
.on_action(cx.listener(Self::move_up))
.on_action(cx.listener(Self::move_down))
.capture_action(cx.listener(Self::cycle_prev))
.capture_action(cx.listener(Self::cycle_next))
.child(
h_flex()
.items_start()
.cursor(CursorStyle::Arrow)
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
this.model_selector
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
}))
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::cancel))
.on_action(cx.listener(Self::move_up))
.on_action(cx.listener(Self::move_down))
.capture_action(cx.listener(Self::cycle_prev))
.capture_action(cx.listener(Self::cycle_next))
.child(
WithRemSize::new(ui_font_size)
.flex()
.flex_row()
.flex_shrink_0()
.items_center()
.h_full()
.w(left_gutter_width)
.justify_center()
.gap_2()
.child(self.render_close_button(cx))
.map(|el| {
let CodegenStatus::Error(error) = self.codegen_status(cx) else {
return el;
};
let error_message = SharedString::from(error.to_string());
el.child(
div()
.id("error")
.tooltip(Tooltip::text(error_message))
.child(
Icon::new(IconName::XCircle)
.size(IconSize::Small)
.color(Color::Error),
),
)
}),
)
.child(
h_flex()
.w_full()
.justify_between()
.child(div().flex_1().child(self.render_editor(window, cx)))
.child(
WithRemSize::new(ui_font_size)
.flex()
.flex_row()
.items_center()
.gap_1()
.children(buttons),
),
),
)
.child(
WithRemSize::new(ui_font_size)
.flex()
.flex_row()
.items_center()
.child(h_flex().flex_shrink_0().w(left_gutter_width))
.child(
h_flex()
.w_full()
.pl_1()
.items_start()
.justify_between()
.child(add_context_button)
.child(self.model_selector.clone()),
),
PromptEditorLayout::new(
editor,
close_button,
action_buttons,
add_context_button,
model_selector,
)
.error_message(error_message)
.left_gutter_width(left_gutter_width)
.right_padding(right_padding)
.bottom_padding(bottom_padding),
)
}
}
@@ -1174,6 +1125,237 @@ impl GenerationMode {
}
}
/// A stateless layout component for the inline prompt editor.
///
/// This component handles the visual layout of the prompt editor UI without
/// any behavior. It's used by both `PromptEditor` (with interactive elements)
/// and the component preview (with static elements).
#[derive(IntoElement)]
pub struct PromptEditorLayout {
/// The editor element to display
editor: AnyElement,
/// Close button element (left gutter)
close_button: AnyElement,
/// Optional error message to display (left gutter, shown as error icon when present)
error_message: Option<SharedString>,
/// Action buttons (right side: start/stop/accept/restart + cycle controls)
action_buttons: Vec<AnyElement>,
/// Add context button (bottom left, @ button)
add_context_button: AnyElement,
/// Model selector element (bottom right)
model_selector: AnyElement,
/// Left gutter width for alignment
left_gutter_width: Pixels,
/// Right padding
right_padding: Pixels,
/// Bottom padding
bottom_padding: Rems,
}
impl PromptEditorLayout {
pub fn new(
editor: AnyElement,
close_button: AnyElement,
action_buttons: Vec<AnyElement>,
add_context_button: AnyElement,
model_selector: AnyElement,
) -> Self {
Self {
editor,
close_button,
error_message: None,
action_buttons,
add_context_button,
model_selector,
left_gutter_width: px(40.0),
right_padding: px(9.0),
bottom_padding: rems_from_px(2.0),
}
}
pub fn error_message(mut self, error_message: impl Into<Option<SharedString>>) -> Self {
self.error_message = error_message.into();
self
}
pub fn left_gutter_width(mut self, width: Pixels) -> Self {
self.left_gutter_width = width;
self
}
pub fn right_padding(mut self, padding: Pixels) -> Self {
self.right_padding = padding;
self
}
pub fn bottom_padding(mut self, padding: Rems) -> Self {
self.bottom_padding = padding;
self
}
/// Creates a PromptEditorLayout for preview/static rendering.
///
/// This constructor handles creating all the static (non-interactive) buttons
/// based on the codegen status, mode, and other parameters. It's used by the
/// component preview system.
pub fn preview(
editor: AnyElement,
codegen_status: CodegenStatus,
mode: GenerationMode,
edited_since_done: bool,
cx: &App,
) -> Self {
// Create action buttons based on status
let action_buttons = match codegen_status {
CodegenStatus::Idle => {
vec![
Button::new("start", mode.start_label())
.label_size(LabelSize::Small)
.icon(IconName::Return)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.into_any_element(),
]
}
CodegenStatus::Pending => vec![
IconButton::new("stop", IconName::Stop)
.icon_color(Color::Error)
.shape(IconButtonShape::Square)
.into_any_element(),
],
CodegenStatus::Done => {
if edited_since_done {
vec![
IconButton::new("restart", IconName::RotateCw)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
.into_any_element(),
]
} else {
vec![
IconButton::new("accept", IconName::Check)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
.into_any_element(),
]
}
}
CodegenStatus::Error(_) => {
vec![
IconButton::new("restart", IconName::RotateCw)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
.into_any_element(),
]
}
};
let close_button = IconButton::new("cancel", IconName::Close)
.icon_color(Color::Muted)
.shape(IconButtonShape::Square)
.into_any_element();
let add_context_button = IconButton::new("add-context", IconName::AtSign)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.into_any_element();
let model_selector = div()
.text_color(cx.theme().colors().text_muted)
.child("Model Selector")
.into_any_element();
let error_message = if let CodegenStatus::Error(error) = codegen_status {
Some(SharedString::from(error.to_string()))
} else {
None
};
Self::new(
editor,
close_button,
action_buttons,
add_context_button,
model_selector,
)
.error_message(error_message)
}
}
impl RenderOnce for PromptEditorLayout {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let ui_font_size = ThemeSettings::get_global(cx).ui_font_size(cx);
v_flex()
.bg(cx.theme().colors().editor_background)
.gap_0p5()
.border_y_1()
.border_color(cx.theme().status().info_border)
.size_full()
.pt_0p5()
.pb(self.bottom_padding)
.pr(self.right_padding)
.child(
h_flex()
.items_start()
.child(
WithRemSize::new(ui_font_size)
.flex()
.flex_row()
.flex_shrink_0()
.items_center()
.h_full()
.w(self.left_gutter_width)
.justify_center()
.gap_2()
.child(self.close_button)
.when_some(self.error_message, |el, error_message| {
el.child(
div()
.id("error")
.tooltip(Tooltip::text(error_message))
.child(
Icon::new(IconName::XCircle)
.size(IconSize::Small)
.color(Color::Error),
),
)
}),
)
.child(
h_flex()
.w_full()
.justify_between()
.child(div().flex_1().child(self.editor))
.child(
WithRemSize::new(ui_font_size)
.flex()
.flex_row()
.items_center()
.gap_1()
.children(self.action_buttons),
),
),
)
.child(
WithRemSize::new(ui_font_size)
.flex()
.flex_row()
.items_center()
.child(h_flex().flex_shrink_0().w(self.left_gutter_width))
.child(
h_flex()
.w_full()
.pl_1()
.items_start()
.justify_between()
.child(self.add_context_button)
.child(self.model_selector),
),
)
}
}
/// Stored information that can be used to resurrect a context crease when creating an editor for a past message.
#[derive(Clone, Debug)]
struct MessageCrease {
@@ -1229,3 +1411,144 @@ fn insert_message_creases(
editor.fold_creases(creases, false, window, cx);
ids
}
mod preview {
use component::{Component, ComponentScope, example_group_with_title, single_example};
use editor::Editor;
use gpui::{AnyElement, App, Window};
use ui::prelude::*;
use super::{CodegenStatus, GenerationMode, PromptEditorLayout};
// View this component preview using `workspace: open component-preview`
#[derive(IntoElement, RegisterComponent)]
struct PromptEditorPreview;
impl Component for PromptEditorPreview {
fn scope() -> ComponentScope {
ComponentScope::Agent
}
fn name() -> &'static str {
"Inline Prompt Editor"
}
fn sort_name() -> &'static str {
"AgentInlinePromptEditor"
}
fn preview(window: &mut Window, cx: &mut App) -> Option<AnyElement> {
let editor = window.use_state(cx, |window, cx| {
let mut editor = Editor::single_line(window, cx);
editor.set_placeholder_text("How can I help?", window, cx);
editor
});
Some(
v_flex()
.gap_6()
.child(example_group_with_title(
"Idle State",
vec![
single_example(
"Generate",
div()
.w(px(600.))
.child(PromptEditorLayout::preview(
editor.clone().into_any_element(),
CodegenStatus::Idle,
GenerationMode::Generate,
false,
cx,
))
.into_any_element(),
),
single_example(
"Transform",
div()
.w(px(600.))
.child(PromptEditorLayout::preview(
editor.clone().into_any_element(),
CodegenStatus::Idle,
GenerationMode::Transform,
false,
cx,
))
.into_any_element(),
),
],
))
.child(example_group_with_title(
"Pending State",
vec![single_example(
"Stop Button",
div()
.w(px(600.))
.child(PromptEditorLayout::preview(
editor.clone().into_any_element(),
CodegenStatus::Pending,
GenerationMode::Generate,
false,
cx,
))
.into_any_element(),
)],
))
.child(example_group_with_title(
"Done State",
vec![
single_example(
"Accept Button",
div()
.w(px(600.))
.child(PromptEditorLayout::preview(
editor.clone().into_any_element(),
CodegenStatus::Done,
GenerationMode::Generate,
false,
cx,
))
.into_any_element(),
),
single_example(
"Edited Since Done (Restart)",
div()
.w(px(600.))
.child(PromptEditorLayout::preview(
editor.clone().into_any_element(),
CodegenStatus::Done,
GenerationMode::Generate,
true,
cx,
))
.into_any_element(),
),
],
))
.child(example_group_with_title(
"Error State",
vec![single_example(
"Error Indicator with Restart",
div()
.w(px(600.))
.child(PromptEditorLayout::preview(
editor.into_any_element(),
CodegenStatus::Error(anyhow::anyhow!("Example error message")),
GenerationMode::Generate,
false,
cx,
))
.into_any_element(),
)],
))
.into_any_element(),
)
}
}
impl RenderOnce for PromptEditorPreview {
fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
div().child("Inline Prompt Editor Preview")
}
}
}

View File

@@ -584,100 +584,41 @@ impl Model {
}
}
pub fn cross_region_inference_id(
&self,
region: &str,
allow_global: bool,
) -> anyhow::Result<String> {
// List derived from here:
// https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system
let model_id = self.request_id();
let supports_global = matches!(
self,
Model::ClaudeOpus4_5
| Model::ClaudeOpus4_5Thinking
| Model::ClaudeHaiku4_5
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking
);
pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result<String> {
let region_group = if region.starts_with("us-gov-") {
"us-gov"
} else if region.starts_with("us-")
|| region.starts_with("ca-")
|| region.starts_with("sa-")
{
if allow_global && supports_global {
"global"
} else {
"us"
}
} else if region.starts_with("us-") {
"us"
} else if region.starts_with("eu-") {
if allow_global && supports_global {
"global"
} else {
"eu"
}
"eu"
} else if region.starts_with("ap-") || region == "me-central-1" || region == "me-south-1" {
if allow_global && supports_global {
"global"
} else {
"apac"
}
"apac"
} else if region.starts_with("ca-") || region.starts_with("sa-") {
// Canada and South America regions - default to US profiles
"us"
} else {
anyhow::bail!("Unsupported Region {region}");
};
match (self, region_group, region) {
(Model::Custom { .. }, _, _) => Ok(self.request_id().into()),
let model_id = self.request_id();
(
Model::ClaudeOpus4_5
| Model::ClaudeOpus4_5Thinking
| Model::ClaudeHaiku4_5
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking,
"global",
_,
) => Ok(format!("{}.{}", region_group, model_id)),
match (self, region_group) {
// Custom models can't have CRI IDs
(Model::Custom { .. }, _) => Ok(self.request_id().into()),
(
Model::Claude3Haiku
| Model::Claude3_5Sonnet
| Model::Claude3_7Sonnet
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking,
"us-gov",
_,
) => Ok(format!("{}.{}", region_group, model_id)),
(
Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking,
"apac",
"ap-southeast-2" | "ap-southeast-4",
) => Ok(format!("au.{}", model_id)),
(
Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking,
"apac",
"ap-northeast-1" | "ap-northeast-3",
) => Ok(format!("jp.{}", model_id)),
(Model::AmazonNovaLite, "us", r) if r.starts_with("ca-") => {
Ok(format!("ca.{}", model_id))
// Models with US Gov only
(Model::Claude3_5Sonnet, "us-gov") | (Model::Claude3Haiku, "us-gov") => {
Ok(format!("{}.{}", region_group, model_id))
}
// Available everywhere
(Model::AmazonNovaLite | Model::AmazonNovaMicro | Model::AmazonNovaPro, _) => {
Ok(format!("{}.{}", region_group, model_id))
}
// Models in US
(
Model::AmazonNovaPremier
| Model::AmazonNovaLite
| Model::AmazonNovaMicro
| Model::AmazonNovaPro
| Model::Claude3_5Haiku
| Model::ClaudeHaiku4_5
| Model::Claude3_5Sonnet
@@ -714,18 +655,16 @@ impl Model {
| Model::PalmyraWriterX4
| Model::PalmyraWriterX5,
"us",
_,
) => Ok(format!("{}.{}", region_group, model_id)),
// Models available in EU
(
Model::AmazonNovaLite
| Model::AmazonNovaMicro
| Model::AmazonNovaPro
| Model::Claude3_5Sonnet
Model::Claude3_5Sonnet
| Model::ClaudeHaiku4_5
| Model::Claude3_7Sonnet
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking
| Model::Claude3Haiku
@@ -734,26 +673,26 @@ impl Model {
| Model::MetaLlama323BInstructV1
| Model::MistralPixtralLarge2502V1,
"eu",
_,
) => Ok(format!("{}.{}", region_group, model_id)),
// Models available in APAC
(
Model::AmazonNovaLite
| Model::AmazonNovaMicro
| Model::AmazonNovaPro
| Model::Claude3_5Sonnet
Model::Claude3_5Sonnet
| Model::Claude3_5SonnetV2
| Model::ClaudeHaiku4_5
| Model::Claude3Haiku
| Model::Claude3Sonnet
| Model::Claude3_7Sonnet
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4
| Model::Claude3Haiku
| Model::Claude3Sonnet,
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking,
"apac",
_,
) => Ok(format!("{}.{}", region_group, model_id)),
_ => Ok(model_id.into()),
// Any other combination is not supported
_ => Ok(self.request_id().into()),
}
}
}
@@ -766,15 +705,15 @@ mod tests {
fn test_us_region_inference_ids() -> anyhow::Result<()> {
// Test US regions
assert_eq!(
Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1", false)?,
Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1")?,
"us.anthropic.claude-3-5-sonnet-20241022-v2:0"
);
assert_eq!(
Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2", false)?,
Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2")?,
"us.anthropic.claude-3-5-sonnet-20241022-v2:0"
);
assert_eq!(
Model::AmazonNovaPro.cross_region_inference_id("us-east-2", false)?,
Model::AmazonNovaPro.cross_region_inference_id("us-east-2")?,
"us.amazon.nova-pro-v1:0"
);
Ok(())
@@ -784,19 +723,19 @@ mod tests {
fn test_eu_region_inference_ids() -> anyhow::Result<()> {
// Test European regions
assert_eq!(
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1", false)?,
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1")?,
"eu.anthropic.claude-sonnet-4-20250514-v1:0"
);
assert_eq!(
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", false)?,
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1")?,
"eu.anthropic.claude-sonnet-4-5-20250929-v1:0"
);
assert_eq!(
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1", false)?,
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?,
"eu.anthropic.claude-3-sonnet-20240229-v1:0"
);
assert_eq!(
Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1", false)?,
Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1")?,
"eu.amazon.nova-micro-v1:0"
);
Ok(())
@@ -806,15 +745,15 @@ mod tests {
fn test_apac_region_inference_ids() -> anyhow::Result<()> {
// Test Asia-Pacific regions
assert_eq!(
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1", false)?,
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1")?,
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
);
assert_eq!(
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2", false)?,
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2")?,
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
);
assert_eq!(
Model::AmazonNovaLite.cross_region_inference_id("ap-south-1", false)?,
Model::AmazonNovaLite.cross_region_inference_id("ap-south-1")?,
"apac.amazon.nova-lite-v1:0"
);
Ok(())
@@ -824,11 +763,11 @@ mod tests {
fn test_gov_region_inference_ids() -> anyhow::Result<()> {
// Test Government regions
assert_eq!(
Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1", false)?,
Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1")?,
"us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0"
);
assert_eq!(
Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1", false)?,
Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1")?,
"us-gov.anthropic.claude-3-haiku-20240307-v1:0"
);
Ok(())
@@ -838,15 +777,15 @@ mod tests {
fn test_meta_models_inference_ids() -> anyhow::Result<()> {
// Test Meta models
assert_eq!(
Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1", false)?,
Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1")?,
"meta.llama3-70b-instruct-v1:0"
);
assert_eq!(
Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1", false)?,
Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1")?,
"us.meta.llama3-1-70b-instruct-v1:0"
);
assert_eq!(
Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1", false)?,
Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1")?,
"eu.meta.llama3-2-1b-instruct-v1:0"
);
Ok(())
@@ -857,11 +796,11 @@ mod tests {
// Mistral models don't follow the regional prefix pattern,
// so they should return their original IDs
assert_eq!(
Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1", false)?,
Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1")?,
"mistral.mistral-large-2402-v1:0"
);
assert_eq!(
Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1", false)?,
Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1")?,
"mistral.mixtral-8x7b-instruct-v0:1"
);
Ok(())
@@ -872,11 +811,11 @@ mod tests {
// AI21 models don't follow the regional prefix pattern,
// so they should return their original IDs
assert_eq!(
Model::AI21J2UltraV1.cross_region_inference_id("us-east-1", false)?,
Model::AI21J2UltraV1.cross_region_inference_id("us-east-1")?,
"ai21.j2-ultra-v1"
);
assert_eq!(
Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1", false)?,
Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1")?,
"ai21.jamba-instruct-v1:0"
);
Ok(())
@@ -887,11 +826,11 @@ mod tests {
// Cohere models don't follow the regional prefix pattern,
// so they should return their original IDs
assert_eq!(
Model::CohereCommandRV1.cross_region_inference_id("us-east-1", false)?,
Model::CohereCommandRV1.cross_region_inference_id("us-east-1")?,
"cohere.command-r-v1:0"
);
assert_eq!(
Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1", false)?,
Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1")?,
"cohere.command-text-v14:7:4k"
);
Ok(())
@@ -911,17 +850,10 @@ mod tests {
// Custom model should return its name unchanged
assert_eq!(
custom_model.cross_region_inference_id("us-east-1", false)?,
custom_model.cross_region_inference_id("us-east-1")?,
"custom.my-model-v1:0"
);
// Test that models without global support fall back to regional when allow_global is true
assert_eq!(
Model::AmazonNovaPro.cross_region_inference_id("us-east-1", true)?,
"us.amazon.nova-pro-v1:0",
"Nova Pro should fall back to regional profile even when allow_global is true"
);
Ok(())
}
@@ -960,28 +892,3 @@ mod tests {
);
}
}
#[test]
fn test_global_inference_ids() -> anyhow::Result<()> {
// Test global inference for models that support it when allow_global is true
assert_eq!(
Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", true)?,
"global.anthropic.claude-sonnet-4-20250514-v1:0"
);
assert_eq!(
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", true)?,
"global.anthropic.claude-sonnet-4-5-20250929-v1:0"
);
assert_eq!(
Model::ClaudeHaiku4_5.cross_region_inference_id("ap-south-1", true)?,
"global.anthropic.claude-haiku-4-5-20251001-v1:0"
);
// Test that regional prefix is used when allow_global is false
assert_eq!(
Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", false)?,
"us.anthropic.claude-sonnet-4-20250514-v1:0"
);
Ok(())
}

View File

@@ -1723,10 +1723,6 @@ impl ProtoClient for Client {
fn is_via_collab(&self) -> bool {
true
}
fn has_wsl_interop(&self) -> bool {
false
}
}
/// prefix for the zed:// url scheme

View File

@@ -121,8 +121,6 @@ CREATE TABLE "project_repositories" (
"merge_message" VARCHAR,
"branch_summary" VARCHAR,
"head_commit_details" VARCHAR,
"remote_upstream_url" VARCHAR,
"remote_origin_url" VARCHAR,
PRIMARY KEY (project_id, id)
);

View File

@@ -1,2 +0,0 @@
ALTER TABLE "project_repositories" ADD COLUMN "remote_upstream_url" VARCHAR;
ALTER TABLE "project_repositories" ADD COLUMN "remote_origin_url" VARCHAR;

View File

@@ -362,8 +362,6 @@ impl Database {
entry_ids: ActiveValue::set("[]".into()),
head_commit_details: ActiveValue::set(None),
merge_message: ActiveValue::set(None),
remote_upstream_url: ActiveValue::set(None),
remote_origin_url: ActiveValue::set(None),
}
}),
)
@@ -513,8 +511,6 @@ impl Database {
serde_json::to_string(&update.current_merge_conflicts).unwrap(),
)),
merge_message: ActiveValue::set(update.merge_message.clone()),
remote_upstream_url: ActiveValue::set(update.remote_upstream_url.clone()),
remote_origin_url: ActiveValue::set(update.remote_origin_url.clone()),
})
.on_conflict(
OnConflict::columns([
@@ -1009,8 +1005,6 @@ impl Database {
is_last_update: true,
merge_message: db_repository_entry.merge_message,
stash_entries: Vec::new(),
remote_upstream_url: db_repository_entry.remote_upstream_url.clone(),
remote_origin_url: db_repository_entry.remote_origin_url.clone(),
});
}
}

View File

@@ -796,8 +796,6 @@ impl Database {
is_last_update: true,
merge_message: db_repository.merge_message,
stash_entries: Vec::new(),
remote_upstream_url: db_repository.remote_upstream_url.clone(),
remote_origin_url: db_repository.remote_origin_url.clone(),
});
}
}

View File

@@ -22,8 +22,6 @@ pub struct Model {
pub branch_summary: Option<String>,
// A JSON object representing the current Head commit values
pub head_commit_details: Option<String>,
pub remote_upstream_url: Option<String>,
pub remote_origin_url: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -469,8 +469,6 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::GetBlobContent>)
.add_request_handler(forward_mutating_project_request::<proto::GitCreateBranch>)
.add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
.add_request_handler(forward_mutating_project_request::<proto::GitCreateRemote>)
.add_request_handler(forward_mutating_project_request::<proto::GitRemoveRemote>)
.add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
.add_message_handler(update_context)

View File

@@ -3518,6 +3518,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.into_iter()
.map(|(sha, message)| (sha.parse().unwrap(), message.into()))
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
client_a.fs().set_blame_for_repo(
Path::new(path!("/my-repo/.git")),
@@ -3602,6 +3603,10 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
for (idx, (buffer, entry)) in entries.iter().flatten().enumerate() {
let details = blame.details_for_entry(*buffer, entry).unwrap();
assert_eq!(details.message, format!("message for idx-{}", idx));
assert_eq!(
details.permalink.unwrap().to_string(),
format!("https://github.com/zed-industries/zed/commit/{}", entry.sha)
);
}
});
});

View File

@@ -118,7 +118,6 @@ tree-sitter-rust.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-bash.workspace = true
tree-sitter-md.workspace = true
unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }

View File

@@ -182,7 +182,7 @@ use std::{
iter::{self, Peekable},
mem,
num::NonZeroU32,
ops::{ControlFlow, Deref, DerefMut, Not, Range, RangeInclusive},
ops::{Deref, DerefMut, Not, Range, RangeInclusive},
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
@@ -191,7 +191,7 @@ use std::{
use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
use text::{BufferId, FromAnchor, OffsetUtf16, Rope, ToOffset as _};
use theme::{
AccentColors, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, ThemeSettings,
ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, ThemeSettings,
observe_buffer_font_size_adjustment,
};
use ui::{
@@ -1079,7 +1079,6 @@ pub struct Editor {
show_breakpoints: Option<bool>,
show_wrap_guides: Option<bool>,
show_indent_guides: Option<bool>,
buffers_with_disabled_indent_guides: HashSet<BufferId>,
highlight_order: usize,
highlighted_rows: HashMap<TypeId, Vec<RowHighlight>>,
background_highlights: HashMap<HighlightKey, BackgroundHighlight>,
@@ -1207,17 +1206,11 @@ pub struct Editor {
select_next_is_case_sensitive: Option<bool>,
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
accent_data: Option<AccentData>,
accent_overrides: Vec<SharedString>,
fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
use_base_text_line_numbers: bool,
}
#[derive(Debug, PartialEq)]
struct AccentData {
colors: AccentColors,
overrides: Vec<SharedString>,
}
fn debounce_value(debounce_ms: u64) -> Option<Duration> {
if debounce_ms > 0 {
Some(Duration::from_millis(debounce_ms))
@@ -2205,7 +2198,6 @@ impl Editor {
show_breakpoints: None,
show_wrap_guides: None,
show_indent_guides,
buffers_with_disabled_indent_guides: HashSet::default(),
highlight_order: 0,
highlighted_rows: HashMap::default(),
background_highlights: HashMap::default(),
@@ -2362,7 +2354,7 @@ impl Editor {
lookup_key: None,
select_next_is_case_sensitive: None,
applicable_language_settings: HashMap::default(),
accent_data: None,
accent_overrides: Vec::new(),
fetched_tree_sitter_chunks: HashMap::default(),
use_base_text_line_numbers: false,
};
@@ -2372,7 +2364,7 @@ impl Editor {
}
editor.applicable_language_settings = editor.fetch_applicable_language_settings(cx);
editor.accent_data = editor.fetch_accent_data(cx);
editor.accent_overrides = editor.fetch_accent_overrides(cx);
if let Some(breakpoints) = editor.breakpoint_store.as_ref() {
editor
@@ -8075,17 +8067,10 @@ impl Editor {
if self.edit_prediction_indent_conflict {
let cursor_point = cursor.to_point(&multibuffer);
let mut suggested_indent = None;
multibuffer.suggested_indents_callback(
cursor_point.row..cursor_point.row + 1,
|_, indent| {
suggested_indent = Some(indent);
ControlFlow::Break(())
},
cx,
);
if let Some(indent) = suggested_indent
let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx);
if let Some((_, indent)) = indents.iter().next()
&& indent.len == cursor_point.column
{
self.edit_prediction_indent_conflict = false;
@@ -20092,20 +20077,6 @@ impl Editor {
self.show_indent_guides
}
pub fn disable_indent_guides_for_buffer(
&mut self,
buffer_id: BufferId,
cx: &mut Context<Self>,
) {
self.buffers_with_disabled_indent_guides.insert(buffer_id);
cx.notify();
}
pub fn has_indent_guides_disabled_for_buffer(&self, buffer_id: BufferId) -> bool {
self.buffers_with_disabled_indent_guides
.contains(&buffer_id)
}
pub fn toggle_line_numbers(
&mut self,
_: &ToggleLineNumbers,
@@ -21735,18 +21706,16 @@ impl Editor {
cx.notify();
}
fn fetch_accent_data(&self, cx: &App) -> Option<AccentData> {
fn fetch_accent_overrides(&self, cx: &App) -> Vec<SharedString> {
if !self.mode.is_full() {
return None;
return Vec::new();
}
let theme_settings = theme::ThemeSettings::get_global(cx);
let theme = cx.theme();
let accent_colors = theme.accents().clone();
let accent_overrides = theme_settings
theme_settings
.theme_overrides
.get(theme.name.as_ref())
.get(cx.theme().name.as_ref())
.map(|theme_style| &theme_style.accents)
.into_iter()
.flatten()
@@ -21759,12 +21728,7 @@ impl Editor {
.flatten(),
)
.flat_map(|accent| accent.0.clone())
.collect();
Some(AccentData {
colors: accent_colors,
overrides: accent_overrides,
})
.collect()
}
fn fetch_applicable_language_settings(
@@ -21794,9 +21758,9 @@ impl Editor {
let language_settings_changed = new_language_settings != self.applicable_language_settings;
self.applicable_language_settings = new_language_settings;
let new_accents = self.fetch_accent_data(cx);
let accents_changed = new_accents != self.accent_data;
self.accent_data = new_accents;
let new_accent_overrides = self.fetch_accent_overrides(cx);
let accent_overrides_changed = new_accent_overrides != self.accent_overrides;
self.accent_overrides = new_accent_overrides;
if self.diagnostics_enabled() {
let new_severity = EditorSettings::get_global(cx)
@@ -21870,7 +21834,7 @@ impl Editor {
}
}
if language_settings_changed || accents_changed {
if language_settings_changed || accent_overrides_changed {
self.colorize_brackets(true, cx);
}

View File

@@ -19095,109 +19095,6 @@ async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
);
}
#[gpui::test]
async fn test_document_format_with_prettier_explicit_language(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.formatter = Some(FormatterList::Single(Formatter::Prettier))
});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.settings"), Default::default())
.await;
let project = Project::test(fs, [path!("/file.settings").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let ts_lang = Arc::new(Language::new(
LanguageConfig {
name: "TypeScript".into(),
matcher: LanguageMatcher {
path_suffixes: vec!["ts".to_string()],
..LanguageMatcher::default()
},
prettier_parser_name: Some("typescript".to_string()),
..LanguageConfig::default()
},
Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
));
language_registry.add(ts_lang.clone());
update_test_language_settings(cx, |settings| {
settings.defaults.prettier.get_or_insert_default().allowed = Some(true);
});
let test_plugin = "test_plugin";
let _ = language_registry.register_fake_lsp(
"TypeScript",
FakeLspAdapter {
prettier_plugins: vec![test_plugin],
..Default::default()
},
);
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.settings"), cx)
})
.await
.unwrap();
project.update(cx, |project, cx| {
project.set_language_for_buffer(&buffer, ts_lang, cx)
});
let buffer_text = "one\ntwo\nthree\n";
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx));
editor.update_in(cx, |editor, window, cx| {
editor.set_text(buffer_text, window, cx)
});
editor
.update_in(cx, |editor, window, cx| {
editor.perform_format(
project.clone(),
FormatTrigger::Manual,
FormatTarget::Buffers(editor.buffer().read(cx).all_buffers()),
window,
cx,
)
})
.unwrap()
.await;
assert_eq!(
editor.update(cx, |editor, cx| editor.text(cx)),
buffer_text.to_string() + prettier_format_suffix + "\ntypescript",
"Test prettier formatting was not applied to the original buffer text",
);
update_test_language_settings(cx, |settings| {
settings.defaults.formatter = Some(FormatterList::default())
});
let format = editor.update_in(cx, |editor, window, cx| {
editor.perform_format(
project.clone(),
FormatTrigger::Manual,
FormatTarget::Buffers(editor.buffer().read(cx).all_buffers()),
window,
cx,
)
});
format.await.unwrap();
assert_eq!(
editor.update(cx, |editor, cx| editor.text(cx)),
buffer_text.to_string()
+ prettier_format_suffix
+ "\ntypescript\n"
+ prettier_format_suffix
+ "\ntypescript",
"Autoformatting (via test prettier) was not applied to the original buffer text",
);
}
#[gpui::test]
async fn test_addition_reverts(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -27498,65 +27395,6 @@ async fn test_paste_url_from_other_app_creates_markdown_link_over_selected_text(
));
}
#[gpui::test]
async fn test_markdown_list_indent_with_multi_cursor(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into());
let mut cx = EditorTestContext::new(cx).await;
cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx));
cx.set_state(&indoc! {"
- [ ] Item 1
- [ ] Item 1.a
- [ˇ] Item 2
- [ˇ] Item 2.a
- [ˇ] Item 2.b
"
});
cx.update_editor(|editor, window, cx| {
editor.handle_input("X", window, cx);
});
cx.assert_editor_state(indoc! {"
- [ ] Item 1
- [ ] Item 1.a
- [Xˇ] Item 2
- [Xˇ] Item 2.a
- [Xˇ] Item 2.b
"
});
}
#[gpui::test]
async fn test_markdown_list_indent_with_newline(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into());
let mut cx = EditorTestContext::new(cx).await;
cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx));
cx.set_state(indoc! {"
- [x] list item
- [x] sub list itemˇ
"
});
cx.update_editor(|editor, window, cx| {
editor.newline(&Newline, window, cx);
});
cx.assert_editor_state(indoc! {"
- [x] list item
- [x] sub list item
ˇ
"
});
}
#[gpui::test]
async fn test_paste_url_from_zed_copy_creates_markdown_link_over_selected_text(
cx: &mut gpui::TestAppContext,

View File

@@ -3915,8 +3915,6 @@ impl EditorElement {
) -> impl IntoElement {
let editor = self.editor.read(cx);
let multi_buffer = editor.buffer.read(cx);
let is_read_only = self.editor.read(cx).read_only(cx);
let file_status = multi_buffer
.all_diff_hunks_expanded()
.then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx))
@@ -3969,7 +3967,7 @@ impl EditorElement {
.gap_1p5()
.when(is_sticky, |el| el.shadow_md())
.border_1()
.map(|border| {
.map(|div| {
let border_color = if is_selected
&& is_folded
&& focus_handle.contains_focused(window, cx)
@@ -3978,7 +3976,7 @@ impl EditorElement {
} else {
colors.border
};
border.border_color(border_color)
div.border_color(border_color)
})
.bg(colors.editor_subheader_background)
.hover(|style| style.bg(colors.element_hover))
@@ -4058,15 +4056,13 @@ impl EditorElement {
})
.take(1),
)
.when(!is_read_only, |this| {
this.child(
h_flex()
.size_3()
.justify_center()
.flex_shrink_0()
.children(indicator),
)
})
.child(
h_flex()
.size_3()
.justify_center()
.flex_shrink_0()
.children(indicator),
)
.child(
h_flex()
.cursor_pointer()

View File

@@ -508,19 +508,7 @@ impl GitBlame {
let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe());
let blame_buffer = project.blame_buffer(&buffer, None, cx);
let remote_url = project
.git_store()
.read(cx)
.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
.and_then(|(repo, _)| {
repo.read(cx)
.remote_upstream_url
.clone()
.or(repo.read(cx).remote_origin_url.clone())
});
Some(
async move { (id, snapshot, buffer_edits, blame_buffer.await, remote_url) },
)
Some(async move { (id, snapshot, buffer_edits, blame_buffer.await) })
})
.collect::<Vec<_>>()
});
@@ -536,9 +524,13 @@ impl GitBlame {
.await;
let mut res = vec![];
let mut errors = vec![];
for (id, snapshot, buffer_edits, blame, remote_url) in blame {
for (id, snapshot, buffer_edits, blame) in blame {
match blame {
Ok(Some(Blame { entries, messages })) => {
Ok(Some(Blame {
entries,
messages,
remote_url,
})) => {
let entries = build_blame_entry_sum_tree(
entries,
snapshot.max_point().row,

View File

@@ -181,10 +181,6 @@ pub fn indent_guides_in_range(
.buffer_snapshot()
.indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx)
.filter(|indent_guide| {
if editor.has_indent_guides_disabled_for_buffer(indent_guide.buffer_id) {
return false;
}
if editor.is_buffer_folded(indent_guide.buffer_id, cx) {
return false;
}

View File

@@ -6,7 +6,6 @@ pub mod settings;
use core::fmt;
use crate::settings::LspSettings;
use wit::*;
pub use serde_json;
@@ -86,21 +85,19 @@ pub trait Extension: Send + Sync {
/// Returns the initialization options to pass to the specified language server.
fn language_server_initialization_options(
&mut self,
language_server_id: &LanguageServerId,
worktree: &Worktree,
_language_server_id: &LanguageServerId,
_worktree: &Worktree,
) -> Result<Option<serde_json::Value>> {
LspSettings::for_worktree(language_server_id.as_ref(), worktree)
.map(|settings| settings.initialization_options)
Ok(None)
}
/// Returns the workspace configuration options to pass to the language server.
fn language_server_workspace_configuration(
&mut self,
language_server_id: &LanguageServerId,
worktree: &Worktree,
_language_server_id: &LanguageServerId,
_worktree: &Worktree,
) -> Result<Option<serde_json::Value>> {
LspSettings::for_worktree(language_server_id.as_ref(), worktree)
.map(|settings| settings.settings)
Ok(None)
}
/// Returns the initialization options to pass to the other language server.

View File

@@ -50,8 +50,6 @@ pub struct FakeGitRepositoryState {
pub blames: HashMap<RepoPath, Blame>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
/// List of remotes, keys are names and values are URLs
pub remotes: HashMap<String, String>,
pub simulated_index_write_error_message: Option<String>,
pub refs: HashMap<String, String>,
}
@@ -70,7 +68,6 @@ impl FakeGitRepositoryState {
refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
merge_base_contents: Default::default(),
oids: Default::default(),
remotes: HashMap::default(),
}
}
}
@@ -435,13 +432,8 @@ impl GitRepository for FakeGitRepository {
})
}
fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
self.with_state_async(true, move |state| {
if !state.branches.remove(&name) {
bail!("no such branch: {name}");
}
Ok(())
})
fn delete_branch(&self, _name: String) -> BoxFuture<'_, Result<()>> {
unimplemented!()
}
fn blame(&self, path: RepoPath, _content: Rope) -> BoxFuture<'_, Result<git::blame::Blame>> {
@@ -606,19 +598,6 @@ impl GitRepository for FakeGitRepository {
unimplemented!()
}
fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
self.with_state_async(false, move |state| {
let remotes = state
.remotes
.keys()
.map(|r| Remote {
name: r.clone().into(),
})
.collect::<Vec<_>>();
Ok(remotes)
})
}
fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
unimplemented!()
}
@@ -627,6 +606,10 @@ impl GitRepository for FakeGitRepository {
unimplemented!()
}
fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
unimplemented!()
}
fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
future::ready(Ok(Vec::new())).boxed()
}
@@ -700,20 +683,6 @@ impl GitRepository for FakeGitRepository {
fn default_branch(&self) -> BoxFuture<'_, Result<Option<SharedString>>> {
async { Ok(Some("main".into())) }.boxed()
}
fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
self.with_state_async(true, move |state| {
state.remotes.insert(name, url);
Ok(())
})
}
fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
self.with_state_async(true, move |state| {
state.remotes.remove(&name);
Ok(())
})
}
}
#[cfg(test)]

View File

@@ -19,6 +19,7 @@ pub use git2 as libgit;
pub struct Blame {
pub entries: Vec<BlameEntry>,
pub messages: HashMap<Oid, String>,
pub remote_url: Option<String>,
}
#[derive(Clone, Debug, Default)]
@@ -35,6 +36,7 @@ impl Blame {
working_directory: &Path,
path: &RepoPath,
content: &Rope,
remote_url: Option<String>,
) -> Result<Self> {
let output = run_git_blame(git_binary, working_directory, path, content).await?;
let mut entries = parse_git_blame(&output)?;
@@ -51,7 +53,11 @@ impl Blame {
.await
.context("failed to get commit messages")?;
Ok(Self { entries, messages })
Ok(Self {
entries,
messages,
remote_url,
})
}
}

View File

@@ -1,4 +1,3 @@
use std::str::FromStr;
use std::sync::LazyLock;
use derive_more::Deref;
@@ -12,7 +11,7 @@ pub struct RemoteUrl(Url);
static USERNAME_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^[0-9a-zA-Z\-_]+@").expect("Failed to create USERNAME_REGEX"));
impl FromStr for RemoteUrl {
impl std::str::FromStr for RemoteUrl {
type Err = url::ParseError;
fn from_str(input: &str) -> Result<Self, Self::Err> {

View File

@@ -7,15 +7,13 @@ use collections::HashMap;
use futures::future::BoxFuture;
use futures::io::BufWriter;
use futures::{AsyncWriteExt, FutureExt as _, select_biased};
use git2::{BranchType, ErrorCode};
use git2::BranchType;
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString, Task};
use parking_lot::Mutex;
use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use std::collections::HashSet;
use std::ffi::{OsStr, OsString};
use std::process::{ExitStatus, Stdio};
use std::{
@@ -57,12 +55,6 @@ impl Branch {
self.ref_name.starts_with("refs/remotes/")
}
pub fn remote_name(&self) -> Option<&str> {
self.ref_name
.strip_prefix("refs/remotes/")
.and_then(|stripped| stripped.split("/").next())
}
pub fn tracking_status(&self) -> Option<UpstreamTrackingStatus> {
self.upstream
.as_ref()
@@ -598,10 +590,6 @@ pub trait GitRepository: Send + Sync {
fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>>;
fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>>;
fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>>;
/// returns a list of remote branches that contain HEAD
fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<SharedString>>>;
@@ -1397,19 +1385,9 @@ impl GitRepository for RealGitRepository {
branch
} else if let Ok(revision) = repo.find_branch(&name, BranchType::Remote) {
let (_, branch_name) = name.split_once("/").context("Unexpected branch format")?;
let revision = revision.get();
let branch_commit = revision.peel_to_commit()?;
let mut branch = match repo.branch(&branch_name, &branch_commit, false) {
Ok(branch) => branch,
Err(err) if err.code() == ErrorCode::Exists => {
repo.find_branch(&branch_name, BranchType::Local)?
}
Err(err) => {
return Err(err.into());
}
};
let mut branch = repo.branch(&branch_name, &branch_commit, false)?;
branch.set_upstream(Some(&name))?;
branch
} else {
@@ -1425,6 +1403,7 @@ impl GitRepository for RealGitRepository {
self.executor
.spawn(async move {
let branch = branch.await?;
GitBinary::new(git_binary_path, working_directory?, executor)
.run(&["checkout", &branch])
.await?;
@@ -1494,17 +1473,28 @@ impl GitRepository for RealGitRepository {
let git_binary_path = self.any_git_binary_path.clone();
let executor = self.executor.clone();
executor
.spawn(async move {
crate::blame::Blame::for_path(
&git_binary_path,
&working_directory?,
&path,
&content,
)
async move {
let remote_url = if let Some(remote_url) = self.remote_url("upstream").await {
Some(remote_url)
} else if let Some(remote_url) = self.remote_url("origin").await {
Some(remote_url)
} else {
None
};
executor
.spawn(async move {
crate::blame::Blame::for_path(
&git_binary_path,
&working_directory?,
&path,
&content,
remote_url,
)
.await
})
.await
})
.boxed()
}
.boxed()
}
fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<FileHistory>> {
@@ -2003,7 +1993,7 @@ impl GitRepository for RealGitRepository {
let working_directory = working_directory?;
let output = new_smol_command(&git_binary_path)
.current_dir(&working_directory)
.args(["remote", "-v"])
.args(["remote"])
.output()
.await?;
@@ -2012,43 +2002,14 @@ impl GitRepository for RealGitRepository {
"Failed to get all remotes:\n{}",
String::from_utf8_lossy(&output.stderr)
);
let remote_names: HashSet<Remote> = String::from_utf8_lossy(&output.stdout)
.lines()
.filter(|line| !line.is_empty())
.filter_map(|line| {
let mut split_line = line.split_whitespace();
let remote_name = split_line.next()?;
Some(Remote {
name: remote_name.trim().to_string().into(),
})
let remote_names = String::from_utf8_lossy(&output.stdout)
.split('\n')
.filter(|name| !name.is_empty())
.map(|name| Remote {
name: name.trim().to_string().into(),
})
.collect();
Ok(remote_names.into_iter().collect())
})
.boxed()
}
fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
let repo = self.repository.clone();
self.executor
.spawn(async move {
let repo = repo.lock();
repo.remote_delete(&name)?;
Ok(())
})
.boxed()
}
fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
let repo = self.repository.clone();
self.executor
.spawn(async move {
let repo = repo.lock();
repo.remote(&name, url.as_ref())?;
Ok(())
Ok(remote_names)
})
.boxed()
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
use anyhow::{Context as _, Result};
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle};
use editor::{Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer};
use editor::{Addon, Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer};
use git::repository::{CommitDetails, CommitDiff, RepoPath};
use git::{GitHostingProviderRegistry, GitRemote, parse_git_remote_url};
use gpui::{
@@ -11,8 +11,9 @@ use gpui::{
};
use language::{
Anchor, Buffer, Capability, DiskState, File, LanguageRegistry, LineEnding, ReplicaId, Rope,
TextBuffer,
TextBuffer, ToPoint,
};
use multi_buffer::ExcerptInfo;
use multi_buffer::PathKey;
use project::{Project, WorktreeId, git_store::Repository};
use std::{
@@ -21,9 +22,11 @@ use std::{
sync::Arc,
};
use theme::ActiveTheme;
use ui::{Avatar, DiffStat, Tooltip, prelude::*};
use ui::{
Avatar, Button, ButtonCommon, Clickable, Color, Icon, IconName, IconSize, Label,
LabelCommon as _, LabelSize, SharedString, div, h_flex, v_flex,
};
use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff};
use workspace::item::TabTooltipContent;
use workspace::{
Item, ItemHandle, ItemNavHistory, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
Workspace,
@@ -148,10 +151,11 @@ impl CommitView {
let editor = cx.new(|cx| {
let mut editor =
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
editor.disable_inline_diagnostics();
editor.set_expand_all_diff_hunks(cx);
editor.register_addon(CommitViewAddon {
multibuffer: multibuffer.downgrade(),
});
editor
});
let commit_sha = Arc::<str>::from(commit.sha.as_ref());
@@ -258,8 +262,6 @@ impl CommitView {
this.editor.update(cx, |editor, cx| {
editor.disable_header_for_buffer(message_buffer.read(cx).remote_id(), cx);
editor
.disable_indent_guides_for_buffer(message_buffer.read(cx).remote_id(), cx);
editor.insert_blocks(
[BlockProperties {
@@ -355,41 +357,6 @@ impl CommitView {
.into_any()
}
fn calculate_changed_lines(&self, cx: &App) -> (u32, u32) {
let snapshot = self.multibuffer.read(cx).snapshot(cx);
let mut total_additions = 0u32;
let mut total_deletions = 0u32;
let mut seen_buffers = std::collections::HashSet::new();
for (_, buffer, _) in snapshot.excerpts() {
let buffer_id = buffer.remote_id();
if !seen_buffers.insert(buffer_id) {
continue;
}
let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else {
continue;
};
let base_text = diff.base_text();
for hunk in diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) {
let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
total_additions += added_rows;
let base_start = base_text
.offset_to_point(hunk.diff_base_byte_range.start)
.row;
let base_end = base_text.offset_to_point(hunk.diff_base_byte_range.end).row;
let deleted_rows = base_end.saturating_sub(base_start);
total_deletions += deleted_rows;
}
}
(total_additions, total_deletions)
}
fn render_header(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let commit = &self.commit;
let author_name = commit.author_name.clone();
@@ -413,72 +380,46 @@ impl CommitView {
)
});
let (additions, deletions) = self.calculate_changed_lines(cx);
let commit_diff_stat = if additions > 0 || deletions > 0 {
Some(DiffStat::new(
"commit-diff-stat",
additions as usize,
deletions as usize,
))
} else {
None
};
h_flex()
v_flex()
.p_4()
.pl_0()
.gap_4()
.border_b_1()
.border_color(cx.theme().colors().border_variant)
.border_color(cx.theme().colors().border)
.child(
h_flex()
.w(self.editor.read(cx).last_gutter_dimensions().full_width())
.justify_center()
.child(self.render_commit_avatar(&commit.sha, rems_from_px(48.), window, cx)),
)
.child(
h_flex()
.py_4()
.pl_1()
.pr_4()
.w_full()
.items_start()
.justify_between()
.flex_wrap()
.child(
h_flex()
.w(self.editor.read(cx).last_gutter_dimensions().full_width())
.justify_center()
.child(self.render_commit_avatar(
&commit.sha,
gpui::rems(3.0),
window,
cx,
)),
)
.child(
v_flex()
.gap_1()
.child(
h_flex()
.gap_1()
.gap_3()
.items_baseline()
.child(Label::new(author_name).color(Color::Default))
.child(
Label::new(format!("Commit:{}", commit.sha))
.color(Color::Muted)
.size(LabelSize::Small)
.truncate()
.buffer_font(cx),
Label::new(format!("commit {}", commit.sha))
.color(Color::Muted),
),
)
.child(
h_flex()
.gap_1p5()
.child(
Label::new(date_string)
.color(Color::Muted)
.size(LabelSize::Small),
)
.child(
Label::new("")
.color(Color::Ignored)
.size(LabelSize::Small),
)
.children(commit_diff_stat),
),
.child(Label::new(date_string).color(Color::Muted)),
)
.child(div().flex_grow())
.children(github_url.map(|url| {
Button::new("view_on_github", "View on GitHub")
.icon(IconName::Github)
.icon_color(Color::Muted)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.style(ui::ButtonStyle::Subtle)
.on_click(move |_, _, cx| cx.open_url(&url))
})),
)
@@ -773,6 +714,55 @@ impl language::File for GitBlob {
// }
// }
struct CommitViewAddon {
multibuffer: WeakEntity<MultiBuffer>,
}
impl Addon for CommitViewAddon {
fn render_buffer_header_controls(
&self,
excerpt: &ExcerptInfo,
_window: &Window,
cx: &App,
) -> Option<AnyElement> {
let multibuffer = self.multibuffer.upgrade()?;
let snapshot = multibuffer.read(cx).snapshot(cx);
let excerpts = snapshot.excerpts().collect::<Vec<_>>();
let current_idx = excerpts.iter().position(|(id, _, _)| *id == excerpt.id)?;
let (_, _, current_range) = &excerpts[current_idx];
let start_row = current_range.context.start.to_point(&excerpt.buffer).row;
let prev_end_row = if current_idx > 0 {
let (_, prev_buffer, prev_range) = &excerpts[current_idx - 1];
if prev_buffer.remote_id() == excerpt.buffer_id {
prev_range.context.end.to_point(&excerpt.buffer).row
} else {
0
}
} else {
0
};
let skipped_lines = start_row.saturating_sub(prev_end_row);
if skipped_lines > 0 {
Some(
Label::new(format!("{} unchanged lines", skipped_lines))
.color(Color::Muted)
.size(LabelSize::Small)
.into_any_element(),
)
} else {
None
}
}
fn to_any(&self) -> &dyn Any {
self
}
}
async fn build_buffer(
mut text: String,
blob: Arc<dyn File>,
@@ -875,28 +865,13 @@ impl Item for CommitView {
fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
let short_sha = self.commit.sha.get(0..7).unwrap_or(&*self.commit.sha);
let subject = truncate_and_trailoff(self.commit.message.split('\n').next().unwrap(), 20);
format!("{short_sha} {subject}").into()
format!("{short_sha} - {subject}").into()
}
fn tab_tooltip_content(&self, _: &App) -> Option<TabTooltipContent> {
fn tab_tooltip_text(&self, _: &App) -> Option<ui::SharedString> {
let short_sha = self.commit.sha.get(0..16).unwrap_or(&*self.commit.sha);
let subject = self.commit.message.split('\n').next().unwrap();
Some(TabTooltipContent::Custom(Box::new(Tooltip::element({
let subject = subject.to_string();
let short_sha = short_sha.to_string();
move |_, _| {
v_flex()
.child(Label::new(subject.clone()))
.child(
Label::new(short_sha.clone())
.color(Color::Muted)
.size(LabelSize::Small),
)
.into_any_element()
}
}))))
Some(format!("{short_sha} - {subject}").into())
}
fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
@@ -1013,11 +988,12 @@ impl Item for CommitView {
impl Render for CommitView {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let is_stash = self.stash.is_some();
v_flex()
div()
.key_context(if is_stash { "StashDiff" } else { "CommitDiff" })
.size_full()
.bg(cx.theme().colors().editor_background)
.flex()
.flex_col()
.size_full()
.child(self.render_header(window, cx))
.child(div().flex_grow().child(self.editor.clone()))
}
@@ -1037,7 +1013,7 @@ impl EventEmitter<ToolbarItemEvent> for CommitViewToolbar {}
impl Render for CommitViewToolbar {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
div().hidden()
div()
}
}

View File

@@ -3463,6 +3463,7 @@ impl GitPanel {
) -> Option<impl IntoElement> {
let active_repository = self.active_repository.clone()?;
let panel_editor_style = panel_editor_style(true, window, cx);
let enable_coauthors = self.render_co_authors(cx);
let editor_focus_handle = self.commit_editor.focus_handle(cx);
@@ -4771,6 +4772,7 @@ impl RenderOnce for PanelRepoFooter {
const MAX_REPO_LEN: usize = 16;
const LABEL_CHARACTER_BUDGET: usize = MAX_BRANCH_LEN + MAX_REPO_LEN;
const MAX_SHORT_SHA_LEN: usize = 8;
let branch_name = self
.branch
.as_ref()

View File

@@ -1,5 +1,4 @@
use anyhow::Context as _;
use git::repository::{Remote, RemoteCommandOutput};
use linkify::{LinkFinder, LinkKind};
use ui::SharedString;

View File

@@ -26,13 +26,12 @@ pub(crate) struct LinuxDispatcher {
main_thread_id: thread::ThreadId,
}
const MIN_THREADS: usize = 2;
impl LinuxDispatcher {
pub fn new(main_sender: Sender<RunnableVariant>) -> Self {
let (background_sender, background_receiver) = flume::unbounded::<RunnableVariant>();
let thread_count =
std::thread::available_parallelism().map_or(MIN_THREADS, |i| i.get().max(MIN_THREADS));
let thread_count = std::thread::available_parallelism()
.map(|i| i.get())
.unwrap_or(1);
let mut background_threads = (0..thread_count)
.map(|i| {

View File

@@ -1419,7 +1419,7 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
state.repeat.current_keycode = Some(keycode);
let rate = state.repeat.characters_per_second;
let repeat_interval = Duration::from_secs(1) / rate.max(1);
let repeat_interval = Duration::from_secs(1) / rate;
let id = state.repeat.current_id;
state
.loop_handle

View File

@@ -7,7 +7,9 @@ use std::{
use flume::Sender;
use util::ResultExt;
use windows::{
System::Threading::{ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler},
System::Threading::{
ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemPriority,
},
Win32::{
Foundation::{LPARAM, WPARAM},
UI::WindowsAndMessaging::PostMessageW,
@@ -53,7 +55,7 @@ impl WindowsDispatcher {
Ok(())
})
};
ThreadPool::RunAsync(&handler).log_err();
ThreadPool::RunWithPriorityAsync(&handler, WorkItemPriority::High).log_err();
}
fn dispatch_on_threadpool_after(&self, runnable: RunnableVariant, duration: Duration) {

View File

@@ -71,7 +71,6 @@ pub struct AmazonBedrockSettings {
pub profile_name: Option<String>,
pub role_arn: Option<String>,
pub authentication_method: Option<BedrockAuthMethod>,
pub allow_global: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, EnumIter, IntoStaticStr, JsonSchema)]
@@ -240,13 +239,6 @@ impl State {
.or(settings_region)
.unwrap_or(String::from("us-east-1"))
}
fn get_allow_global(&self) -> bool {
self.settings
.as_ref()
.and_then(|s| s.allow_global)
.unwrap_or(false)
}
}
pub struct BedrockLanguageModelProvider {
@@ -553,13 +545,11 @@ impl LanguageModel for BedrockModel {
LanguageModelCompletionError,
>,
> {
let Ok((region, allow_global)) = cx.read_entity(&self.state, |state, _cx| {
(state.get_region(), state.get_allow_global())
}) else {
let Ok(region) = cx.read_entity(&self.state, |state, _cx| state.get_region()) else {
return async move { Err(anyhow::anyhow!("App State Dropped").into()) }.boxed();
};
let model_id = match self.model.cross_region_inference_id(&region, allow_global) {
let model_id = match self.model.cross_region_inference_id(&region) {
Ok(s) => s,
Err(e) => {
return async move { Err(e.into()) }.boxed();

View File

@@ -58,7 +58,6 @@ impl settings::Settings for AllLanguageModelSettings {
profile_name: bedrock.profile,
role_arn: None, // todo(was never a setting for this...)
authentication_method: bedrock.authentication_method.map(Into::into),
allow_global: bedrock.allow_global,
},
deepseek: DeepSeekSettings {
api_url: deepseek.api_url.unwrap(),

View File

@@ -24,5 +24,4 @@ rewrap_prefixes = [
auto_indent_on_paste = false
auto_indent_using_last_non_empty_line = false
tab_size = 2
decrease_indent_pattern = "^\\s*$"
prettier_parser_name = "markdown"

View File

@@ -1,3 +0,0 @@
(list (list_item) @indent)
(list_item (list) @indent)

View File

@@ -23,7 +23,7 @@ use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use settings::Settings;
use smol::lock::OnceCell;
use std::cmp::{Ordering, Reverse};
use std::cmp::Ordering;
use std::env::consts;
use terminal::terminal_settings::TerminalSettings;
use util::command::new_smol_command;
@@ -1101,33 +1101,13 @@ fn get_venv_parent_dir(env: &PythonEnvironment) -> Option<PathBuf> {
venv.parent().map(|parent| parent.to_path_buf())
}
// How far is this venv from the root of our current project?
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum SubprojectDistance {
WithinSubproject(Reverse<usize>),
WithinWorktree(Reverse<usize>),
NotInWorktree,
}
fn wr_distance(
wr: &PathBuf,
subroot_relative_path: &RelPath,
venv: Option<&PathBuf>,
) -> SubprojectDistance {
fn wr_distance(wr: &PathBuf, venv: Option<&PathBuf>) -> usize {
if let Some(venv) = venv
&& let Ok(p) = venv.strip_prefix(wr)
{
if subroot_relative_path.components().next().is_some()
&& let Ok(distance) = p
.strip_prefix(subroot_relative_path.as_std_path())
.map(|p| p.components().count())
{
SubprojectDistance::WithinSubproject(Reverse(distance))
} else {
SubprojectDistance::WithinWorktree(Reverse(p.components().count()))
}
p.components().count()
} else {
SubprojectDistance::NotInWorktree
usize::MAX
}
}
@@ -1190,14 +1170,11 @@ impl ToolchainLister for PythonToolchainProvider {
});
// Compare project paths against worktree root
let proj_ordering =
|| {
let lhs_project = lhs.project.clone().or_else(|| get_venv_parent_dir(lhs));
let rhs_project = rhs.project.clone().or_else(|| get_venv_parent_dir(rhs));
wr_distance(&wr, &subroot_relative_path, lhs_project.as_ref()).cmp(
&wr_distance(&wr, &subroot_relative_path, rhs_project.as_ref()),
)
};
let proj_ordering = || {
let lhs_project = lhs.project.clone().or_else(|| get_venv_parent_dir(lhs));
let rhs_project = rhs.project.clone().or_else(|| get_venv_parent_dir(rhs));
wr_distance(&wr, lhs_project.as_ref()).cmp(&wr_distance(&wr, rhs_project.as_ref()))
};
// Compare environment priorities
let priority_ordering = || env_priority(lhs.kind).cmp(&env_priority(rhs.kind));

View File

@@ -43,7 +43,7 @@ use std::{
io,
iter::{self, FromIterator},
mem,
ops::{self, AddAssign, ControlFlow, Range, RangeBounds, Sub, SubAssign},
ops::{self, AddAssign, Range, RangeBounds, Sub, SubAssign},
rc::Rc,
str,
sync::Arc,
@@ -4618,24 +4618,7 @@ impl MultiBufferSnapshot {
cx: &App,
) -> BTreeMap<MultiBufferRow, IndentSize> {
let mut result = BTreeMap::new();
self.suggested_indents_callback(
rows,
|row, indent| {
result.insert(row, indent);
ControlFlow::Continue(())
},
cx,
);
result
}
// move this to be a generator once those are a thing
pub fn suggested_indents_callback(
&self,
rows: impl IntoIterator<Item = u32>,
mut cb: impl FnMut(MultiBufferRow, IndentSize) -> ControlFlow<()>,
cx: &App,
) {
let mut rows_for_excerpt = Vec::new();
let mut cursor = self.cursor::<Point, Point>();
let mut rows = rows.into_iter().peekable();
@@ -4679,17 +4662,16 @@ impl MultiBufferSnapshot {
let buffer_indents = region
.buffer
.suggested_indents(buffer_rows, single_indent_size);
for (row, indent) in buffer_indents {
if cb(
let multibuffer_indents = buffer_indents.into_iter().map(|(row, indent)| {
(
MultiBufferRow(start_multibuffer_row + row - start_buffer_row),
indent,
)
.is_break()
{
return;
}
}
});
result.extend(multibuffer_indents);
}
result
}
pub fn indent_size_for_line(&self, row: MultiBufferRow) -> IndentSize {

View File

@@ -2,8 +2,7 @@ use anyhow::Context as _;
use collections::{HashMap, HashSet};
use fs::Fs;
use gpui::{AsyncApp, Entity};
use language::language_settings::PrettierSettings;
use language::{Buffer, Diff, Language, language_settings::language_settings};
use language::{Buffer, Diff, language_settings::language_settings};
use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime;
use paths::default_prettier_dir;
@@ -350,7 +349,7 @@ impl Prettier {
Self::Real(local) => {
let params = buffer
.update(cx, |buffer, cx| {
let buffer_language = buffer.language().map(|language| language.as_ref());
let buffer_language = buffer.language();
let language_settings = language_settings(buffer_language.map(|l| l.name()), buffer.file(), cx);
let prettier_settings = &language_settings.prettier;
anyhow::ensure!(
@@ -450,7 +449,15 @@ impl Prettier {
})
.collect();
let parser = prettier_parser_name(buffer_path.as_deref(), buffer_language, prettier_settings).context("getting prettier parser")?;
let mut prettier_parser = prettier_settings.parser.as_deref();
if buffer_path.is_none() {
prettier_parser = prettier_parser.or_else(|| buffer_language.and_then(|language| language.prettier_parser_name()));
if prettier_parser.is_none() {
log::error!("Formatting unsaved file with prettier failed. No prettier parser configured for language {buffer_language:?}");
anyhow::bail!("Cannot determine prettier parser for unsaved file");
}
}
let ignore_path = ignore_dir.and_then(|dir| {
let ignore_file = dir.join(".prettierignore");
@@ -468,15 +475,15 @@ impl Prettier {
anyhow::Ok(FormatParams {
text: buffer.text(),
options: FormatOptions {
path: buffer_path,
parser,
parser: prettier_parser.map(ToOwned::to_owned),
plugins,
path: buffer_path,
prettier_options,
ignore_path,
},
})
})?
.context("building prettier request")?;
})?
.context("building prettier request")?;
let response = local
.server
@@ -496,26 +503,7 @@ impl Prettier {
{
Some("rust") => anyhow::bail!("prettier does not support Rust"),
Some(_other) => {
let mut formatted_text = buffer.text() + FORMAT_SUFFIX;
let buffer_language =
buffer.language().map(|language| language.as_ref());
let language_settings = language_settings(
buffer_language.map(|l| l.name()),
buffer.file(),
cx,
);
let prettier_settings = &language_settings.prettier;
let parser = prettier_parser_name(
buffer_path.as_deref(),
buffer_language,
prettier_settings,
)?;
if let Some(parser) = parser {
formatted_text = format!("{formatted_text}\n{parser}");
}
let formatted_text = buffer.text() + FORMAT_SUFFIX;
Ok(buffer.diff(formatted_text, cx))
}
None => panic!("Should not format buffer without a language with prettier"),
@@ -563,40 +551,6 @@ impl Prettier {
}
}
fn prettier_parser_name(
buffer_path: Option<&Path>,
buffer_language: Option<&Language>,
prettier_settings: &PrettierSettings,
) -> anyhow::Result<Option<String>> {
let parser = if buffer_path.is_none() {
let parser = prettier_settings
.parser
.as_deref()
.or_else(|| buffer_language.and_then(|language| language.prettier_parser_name()));
if parser.is_none() {
log::error!(
"Formatting unsaved file with prettier failed. No prettier parser configured for language {buffer_language:?}"
);
anyhow::bail!("Cannot determine prettier parser for unsaved file");
}
parser
} else if let (Some(buffer_language), Some(buffer_path)) = (buffer_language, buffer_path)
&& buffer_path.extension().is_some_and(|extension| {
!buffer_language
.config()
.matcher
.path_suffixes
.contains(&extension.to_string_lossy().into_owned())
})
{
buffer_language.prettier_parser_name()
} else {
prettier_settings.parser.as_deref()
};
Ok(parser.map(ToOwned::to_owned))
}
async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
if let Some(node_modules_location_metadata) = fs

View File

@@ -453,9 +453,7 @@ impl AgentServerStore {
.clone()
.and_then(|settings| settings.custom_command()),
http_client: http_client.clone(),
no_browser: downstream_client
.as_ref()
.is_some_and(|(_, client)| !client.has_wsl_interop()),
is_remote: downstream_client.is_some(),
}),
);
self.external_agents.insert(
@@ -1357,7 +1355,7 @@ struct LocalCodex {
project_environment: Entity<ProjectEnvironment>,
http_client: Arc<dyn HttpClient>,
custom_command: Option<AgentServerCommand>,
no_browser: bool,
is_remote: bool,
}
impl ExternalAgentServer for LocalCodex {
@@ -1377,7 +1375,7 @@ impl ExternalAgentServer for LocalCodex {
.map(|root_dir| Path::new(root_dir))
.unwrap_or(paths::home_dir())
.into();
let no_browser = self.no_browser;
let is_remote = self.is_remote;
cx.spawn(async move |cx| {
let mut env = project_environment
@@ -1390,7 +1388,7 @@ impl ExternalAgentServer for LocalCodex {
})?
.await
.unwrap_or_default();
if no_browser {
if is_remote {
env.insert("NO_BROWSER".to_owned(), "1".to_owned());
}

View File

@@ -472,8 +472,6 @@ impl GitStore {
client.add_entity_request_handler(Self::handle_change_branch);
client.add_entity_request_handler(Self::handle_create_branch);
client.add_entity_request_handler(Self::handle_rename_branch);
client.add_entity_request_handler(Self::handle_create_remote);
client.add_entity_request_handler(Self::handle_remove_remote);
client.add_entity_request_handler(Self::handle_delete_branch);
client.add_entity_request_handler(Self::handle_git_init);
client.add_entity_request_handler(Self::handle_push);
@@ -2276,25 +2274,6 @@ impl GitStore {
Ok(proto::Ack {})
}
async fn handle_create_remote(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GitCreateRemote>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
let remote_name = envelope.payload.remote_name;
let remote_url = envelope.payload.remote_url;
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.create_remote(remote_name, remote_url)
})?
.await??;
Ok(proto::Ack {})
}
async fn handle_delete_branch(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GitDeleteBranch>,
@@ -2313,24 +2292,6 @@ impl GitStore {
Ok(proto::Ack {})
}
async fn handle_remove_remote(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GitRemoveRemote>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
let remote_name = envelope.payload.remote_name;
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.remove_remote(remote_name)
})?
.await??;
Ok(proto::Ack {})
}
async fn handle_show(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GitShow>,
@@ -3296,8 +3257,6 @@ impl RepositorySnapshot {
.iter()
.map(stash_to_proto)
.collect(),
remote_upstream_url: self.remote_upstream_url.clone(),
remote_origin_url: self.remote_origin_url.clone(),
}
}
@@ -3367,8 +3326,6 @@ impl RepositorySnapshot {
.iter()
.map(stash_to_proto)
.collect(),
remote_upstream_url: self.remote_upstream_url.clone(),
remote_origin_url: self.remote_origin_url.clone(),
}
}
@@ -4908,61 +4865,6 @@ impl Repository {
)
}
pub fn create_remote(
&mut self,
remote_name: String,
remote_url: String,
) -> oneshot::Receiver<Result<()>> {
let id = self.id;
self.send_job(
Some(format!("git remote add {remote_name} {remote_url}").into()),
move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
backend.create_remote(remote_name, remote_url).await
}
RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
client
.request(proto::GitCreateRemote {
project_id: project_id.0,
repository_id: id.to_proto(),
remote_name,
remote_url,
})
.await?;
Ok(())
}
}
},
)
}
pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
let id = self.id;
self.send_job(
Some(format!("git remove remote {remote_name}").into()),
move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
backend.remove_remote(remote_name).await
}
RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
client
.request(proto::GitRemoveRemote {
project_id: project_id.0,
repository_id: id.to_proto(),
remote_name,
})
.await?;
Ok(())
}
}
},
)
}
pub fn get_remotes(
&mut self,
branch_name: Option<String>,
@@ -5000,7 +4902,7 @@ impl Repository {
let remotes = response
.remotes
.into_iter()
.map(|remotes| Remote {
.map(|remotes| git::repository::Remote {
name: remotes.name.into(),
})
.collect();
@@ -5399,8 +5301,6 @@ impl Repository {
cx.emit(RepositoryEvent::StashEntriesChanged)
}
self.snapshot.stash_entries = new_stash_entries;
self.snapshot.remote_upstream_url = update.remote_upstream_url;
self.snapshot.remote_origin_url = update.remote_origin_url;
let edits = update
.removed_statuses
@@ -5960,7 +5860,11 @@ fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::B
.collect::<Vec<_>>();
proto::BlameBufferResponse {
blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
blame_response: Some(proto::blame_buffer_response::BlameResponse {
entries,
messages,
remote_url: blame.remote_url,
}),
}
}
@@ -5997,7 +5901,11 @@ fn deserialize_blame_buffer_response(
.filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
.collect::<HashMap<_, _>>();
Some(Blame { entries, messages })
Some(Blame {
entries,
messages,
remote_url: response.remote_url,
})
}
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
@@ -6145,6 +6053,7 @@ async fn compute_snapshot(
events.push(RepositoryEvent::BranchChanged);
}
// Used by edit prediction data collection
let remote_origin_url = backend.remote_url("origin").await;
let remote_upstream_url = backend.remote_url("upstream").await;

View File

@@ -93,6 +93,9 @@ enum FindSearchCandidates {
/// based on disk contents of a buffer. This step is not performed for buffers we already have in memory.
confirm_contents_will_match_tx: Sender<MatchingEntry>,
confirm_contents_will_match_rx: Receiver<MatchingEntry>,
/// Of those that contain at least one match (or are already in memory), look for rest of matches (and figure out their ranges).
/// But wait - first, we need to go back to the main thread to open a buffer (& create an entity for it).
get_buffer_for_full_scan_tx: Sender<ProjectPath>,
},
Remote,
OpenBuffersOnly,
@@ -223,7 +226,7 @@ impl Search {
.boxed_local(),
cx.background_spawn(Self::maintain_sorted_search_results(
sorted_search_results_rx,
get_buffer_for_full_scan_tx,
get_buffer_for_full_scan_tx.clone(),
self.limit,
))
.boxed_local(),
@@ -231,6 +234,7 @@ impl Search {
(
FindSearchCandidates::Local {
fs,
get_buffer_for_full_scan_tx,
confirm_contents_will_match_tx,
confirm_contents_will_match_rx,
input_paths_rx,
@@ -589,6 +593,7 @@ impl Worker<'_> {
input_paths_rx,
confirm_contents_will_match_rx,
mut confirm_contents_will_match_tx,
mut get_buffer_for_full_scan_tx,
fs,
) = match self.candidates {
FindSearchCandidates::Local {
@@ -596,15 +601,21 @@ impl Worker<'_> {
input_paths_rx,
confirm_contents_will_match_rx,
confirm_contents_will_match_tx,
get_buffer_for_full_scan_tx,
} => (
input_paths_rx,
confirm_contents_will_match_rx,
confirm_contents_will_match_tx,
get_buffer_for_full_scan_tx,
Some(fs),
),
FindSearchCandidates::Remote | FindSearchCandidates::OpenBuffersOnly => {
(unbounded().1, unbounded().1, unbounded().0, None)
}
FindSearchCandidates::Remote | FindSearchCandidates::OpenBuffersOnly => (
unbounded().1,
unbounded().1,
unbounded().0,
unbounded().0,
None,
),
};
// WorkerA: grabs a request for "find all matches in file/a" <- takes 5 minutes
// right after: WorkerB: grabs a request for "find all matches in file/b" <- takes 5 seconds
@@ -618,6 +629,7 @@ impl Worker<'_> {
open_entries: &self.open_buffers,
fs: fs.as_deref(),
confirm_contents_will_match_tx: &confirm_contents_will_match_tx,
get_buffer_for_full_scan_tx: &get_buffer_for_full_scan_tx,
};
// Whenever we notice that some step of a pipeline is closed, we don't want to close subsequent
// steps straight away. Another worker might be about to produce a value that will
@@ -633,7 +645,10 @@ impl Worker<'_> {
find_first_match = find_first_match.next() => {
if let Some(buffer_with_at_least_one_match) = find_first_match {
handler.handle_find_first_match(buffer_with_at_least_one_match).await;
} else {
get_buffer_for_full_scan_tx = bounded(1).0;
}
},
scan_path = scan_path.next() => {
if let Some(path_to_scan) = scan_path {
@@ -658,6 +673,7 @@ struct RequestHandler<'worker> {
fs: Option<&'worker dyn Fs>,
open_entries: &'worker HashSet<ProjectEntryId>,
confirm_contents_will_match_tx: &'worker Sender<MatchingEntry>,
get_buffer_for_full_scan_tx: &'worker Sender<ProjectPath>,
}
impl RequestHandler<'_> {
@@ -713,8 +729,9 @@ impl RequestHandler<'_> {
_ = maybe!(async move {
let InputPath {
entry,
snapshot,
mut should_scan_tx,
should_scan_tx,
} = req;
if entry.is_fifo || !entry.is_file() {
@@ -737,7 +754,7 @@ impl RequestHandler<'_> {
if self.open_entries.contains(&entry.id) {
// The buffer is already in memory and that's the version we want to scan;
// hence skip the dilly-dally and look for all matches straight away.
should_scan_tx
self.get_buffer_for_full_scan_tx
.send(ProjectPath {
worktree_id: snapshot.id(),
path: entry.path.clone(),

View File

@@ -124,8 +124,6 @@ message UpdateRepository {
optional GitCommitDetails head_commit_details = 11;
optional string merge_message = 12;
repeated StashEntry stash_entries = 13;
optional string remote_upstream_url = 14;
optional string remote_origin_url = 15;
}
message RemoveRepository {
@@ -192,19 +190,6 @@ message GitRenameBranch {
string new_name = 4;
}
message GitCreateRemote {
uint64 project_id = 1;
uint64 repository_id = 2;
string remote_name = 3;
string remote_url = 4;
}
message GitRemoveRemote {
uint64 project_id = 1;
uint64 repository_id = 2;
string remote_name = 3;
}
message GitDeleteBranch {
uint64 project_id = 1;
uint64 repository_id = 2;
@@ -502,8 +487,8 @@ message BlameBufferResponse {
message BlameResponse {
repeated BlameEntry entries = 1;
repeated CommitMessage messages = 2;
optional string remote_url = 4;
reserved 3;
reserved 4;
}
optional BlameResponse blame_response = 5;

View File

@@ -437,18 +437,13 @@ message Envelope {
OpenImageResponse open_image_response = 392;
CreateImageForPeer create_image_for_peer = 393;
GitFileHistory git_file_history = 397;
GitFileHistoryResponse git_file_history_response = 398;
RunGitHook run_git_hook = 399;
GitDeleteBranch git_delete_branch = 400;
ExternalExtensionAgentsUpdated external_extension_agents_updated = 401;
GitCreateRemote git_create_remote = 402;
GitRemoveRemote git_remove_remote = 403;// current max
ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; // current max
}
reserved 87 to 88, 396;

View File

@@ -305,8 +305,6 @@ messages!(
(RemoteMessageResponse, Background),
(AskPassRequest, Background),
(AskPassResponse, Background),
(GitCreateRemote, Background),
(GitRemoveRemote, Background),
(GitCreateBranch, Background),
(GitChangeBranch, Background),
(GitRenameBranch, Background),
@@ -506,8 +504,6 @@ request_messages!(
(GetRemotes, GetRemotesResponse),
(Pull, RemoteMessageResponse),
(AskPassRequest, AskPassResponse),
(GitCreateRemote, Ack),
(GitRemoveRemote, Ack),
(GitCreateBranch, Ack),
(GitChangeBranch, Ack),
(GitRenameBranch, Ack),
@@ -680,8 +676,6 @@ entity_messages!(
GitChangeBranch,
GitRenameBranch,
GitCreateBranch,
GitCreateRemote,
GitRemoveRemote,
CheckForPushedCommits,
GitDiff,
GitInit,

View File

@@ -43,6 +43,7 @@ urlencoding.workspace = true
util.workspace = true
which.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }

View File

@@ -328,15 +328,8 @@ impl RemoteClient {
let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
let client = cx.update(|cx| {
ChannelClient::new(
incoming_rx,
outgoing_tx,
cx,
"client",
remote_connection.has_wsl_interop(),
)
})?;
let client =
cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "client"))?;
let path_style = remote_connection.path_style();
let this = cx.new(|_| Self {
@@ -427,9 +420,8 @@ impl RemoteClient {
outgoing_tx: mpsc::UnboundedSender<Envelope>,
cx: &App,
name: &'static str,
has_wsl_interop: bool,
) -> AnyProtoClient {
ChannelClient::new(incoming_rx, outgoing_tx, cx, name, has_wsl_interop).into()
ChannelClient::new(incoming_rx, outgoing_tx, cx, name).into()
}
pub fn shutdown_processes<T: RequestMessage>(
@@ -929,8 +921,8 @@ impl RemoteClient {
});
let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
let server_client = server_cx
.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server", false));
let server_client =
server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server"));
let connection: Arc<dyn RemoteConnection> = Arc::new(fake::FakeRemoteConnection {
connection_options: opts.clone(),
server_cx: fake::SendableCx::new(server_cx),
@@ -1148,7 +1140,6 @@ pub trait RemoteConnection: Send + Sync {
fn path_style(&self) -> PathStyle;
fn shell(&self) -> String;
fn default_system_shell(&self) -> String;
fn has_wsl_interop(&self) -> bool;
#[cfg(any(test, feature = "test-support"))]
fn simulate_disconnect(&self, _: &AsyncApp) {}
@@ -1197,7 +1188,6 @@ struct ChannelClient {
name: &'static str,
task: Mutex<Task<Result<()>>>,
remote_started: Signal<()>,
has_wsl_interop: bool,
}
impl ChannelClient {
@@ -1206,7 +1196,6 @@ impl ChannelClient {
outgoing_tx: mpsc::UnboundedSender<Envelope>,
cx: &App,
name: &'static str,
has_wsl_interop: bool,
) -> Arc<Self> {
Arc::new_cyclic(|this| Self {
outgoing_tx: Mutex::new(outgoing_tx),
@@ -1222,7 +1211,6 @@ impl ChannelClient {
&cx.to_async(),
)),
remote_started: Signal::new(cx),
has_wsl_interop,
})
}
@@ -1501,10 +1489,6 @@ impl ProtoClient for ChannelClient {
fn is_via_collab(&self) -> bool {
false
}
fn has_wsl_interop(&self) -> bool {
self.has_wsl_interop
}
}
#[cfg(any(test, feature = "test-support"))]
@@ -1668,10 +1652,6 @@ mod fake {
fn default_system_shell(&self) -> String {
"sh".to_owned()
}
fn has_wsl_interop(&self) -> bool {
false
}
}
pub(super) struct Delegate;

View File

@@ -131,7 +131,11 @@ async fn build_remote_server_from_source(
let build_remote_server =
std::env::var("ZED_BUILD_REMOTE_SERVER").unwrap_or("nocompress".into());
if let "false" | "no" | "off" | "0" = &*build_remote_server {
if build_remote_server == "false"
|| build_remote_server == "no"
|| build_remote_server == "off"
|| build_remote_server == "0"
{
return Ok(None);
}

View File

@@ -394,10 +394,6 @@ impl RemoteConnection for SshRemoteConnection {
fn path_style(&self) -> PathStyle {
self.ssh_path_style
}
fn has_wsl_interop(&self) -> bool {
false
}
}
impl SshRemoteConnection {

View File

@@ -47,7 +47,6 @@ pub(crate) struct WslRemoteConnection {
shell: String,
shell_kind: ShellKind,
default_system_shell: String,
has_wsl_interop: bool,
connection_options: WslConnectionOptions,
}
@@ -72,7 +71,6 @@ impl WslRemoteConnection {
shell: String::new(),
shell_kind: ShellKind::Posix,
default_system_shell: String::from("/bin/sh"),
has_wsl_interop: false,
};
delegate.set_status(Some("Detecting WSL environment"), cx);
this.shell = this
@@ -81,15 +79,6 @@ impl WslRemoteConnection {
.context("failed detecting shell")?;
log::info!("Remote shell discovered: {}", this.shell);
this.shell_kind = ShellKind::new(&this.shell, false);
this.has_wsl_interop = this.detect_has_wsl_interop().await.unwrap_or_default();
log::info!(
"Remote has wsl interop {}",
if this.has_wsl_interop {
"enabled"
} else {
"disabled"
}
);
this.platform = this
.detect_platform()
.await
@@ -126,14 +115,6 @@ impl WslRemoteConnection {
.unwrap_or_else(|| "/bin/sh".to_string()))
}
async fn detect_has_wsl_interop(&self) -> Result<bool> {
Ok(self
.run_wsl_command_with_output("cat", &["/proc/sys/fs/binfmt_misc/WSLInterop"])
.await
.inspect_err(|err| log::error!("Failed to detect wsl interop: {err}"))?
.contains("enabled"))
}
async fn windows_path_to_wsl_path(&self, source: &Path) -> Result<String> {
windows_path_to_wsl_path_impl(&self.connection_options, source).await
}
@@ -336,7 +317,6 @@ impl RemoteConnection for WslRemoteConnection {
proxy_args.push(format!("{}={}", env_var, value));
}
}
proxy_args.push(remote_binary_path.display(PathStyle::Posix).into_owned());
proxy_args.push("proxy".to_owned());
proxy_args.push("--identifier".to_owned());
@@ -509,10 +489,6 @@ impl RemoteConnection for WslRemoteConnection {
fn default_system_shell(&self) -> String {
self.default_system_shell.clone()
}
fn has_wsl_interop(&self) -> bool {
self.has_wsl_interop
}
}
/// `wslpath` is a executable available in WSL, it's a linux binary.

View File

@@ -199,7 +199,6 @@ fn start_server(
listeners: ServerListeners,
log_rx: Receiver<Vec<u8>>,
cx: &mut App,
is_wsl_interop: bool,
) -> AnyProtoClient {
// This is the server idle timeout. If no connection comes in this timeout, the server will shut down.
const IDLE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10 * 60);
@@ -319,7 +318,7 @@ fn start_server(
})
.detach();
RemoteClient::proto_client_from_channels(incoming_rx, outgoing_tx, cx, "server", is_wsl_interop)
RemoteClient::proto_client_from_channels(incoming_rx, outgoing_tx, cx, "server")
}
fn init_paths() -> anyhow::Result<()> {
@@ -408,15 +407,8 @@ pub fn execute_run(
HeadlessProject::init(cx);
let is_wsl_interop = if cfg!(target_os = "linux") {
// See: https://learn.microsoft.com/en-us/windows/wsl/filesystems#disable-interoperability
matches!(std::fs::read_to_string("/proc/sys/fs/binfmt_misc/WSLInterop"), Ok(s) if s.contains("enabled"))
} else {
false
};
log::info!("gpui app started, initializing server");
let session = start_server(listeners, log_rx, cx, is_wsl_interop);
let session = start_server(listeners, log_rx, cx);
GitHostingProviderRegistry::set_global(git_hosting_provider_registry, cx);
git_hosting_providers::init(cx);

View File

@@ -59,7 +59,6 @@ pub trait ProtoClient: Send + Sync {
fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet>;
fn is_via_collab(&self) -> bool;
fn has_wsl_interop(&self) -> bool;
}
#[derive(Default)]
@@ -511,10 +510,6 @@ impl AnyProtoClient {
},
);
}
pub fn has_wsl_interop(&self) -> bool {
self.0.client.has_wsl_interop()
}
}
fn to_any_envelope<T: EnvelopedMessage>(

View File

@@ -61,7 +61,6 @@ pub struct AmazonBedrockSettingsContent {
pub region: Option<String>,
pub profile: Option<String>,
pub authentication_method: Option<BedrockAuthMethodContent>,
pub allow_global: Option<bool>,
}
#[with_fallible_options]

View File

@@ -0,0 +1,36 @@
use gpui::{IntoElement, Render};
use ui::{Divider, prelude::*, tooltip_container};
pub struct TerminalTooltip {
title: SharedString,
pid: u32,
}
impl TerminalTooltip {
pub fn new(title: impl Into<SharedString>, pid: u32) -> Self {
Self {
title: title.into(),
pid,
}
}
}
impl Render for TerminalTooltip {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
tooltip_container(cx, move |this, _cx| {
this.occlude()
.on_mouse_move(|_, _window, cx| cx.stop_propagation())
.child(
v_flex()
.gap_1()
.child(Label::new(self.title.clone()))
.child(Divider::horizontal())
.child(
Label::new(format!("Process ID (PID): {}", self.pid))
.color(Color::Muted)
.size(LabelSize::Small),
),
)
})
}
}

View File

@@ -4,6 +4,7 @@ pub mod terminal_panel;
mod terminal_path_like_target;
pub mod terminal_scrollbar;
mod terminal_slash_command;
pub mod terminal_tab_tooltip;
use assistant_slash_command::SlashCommandRegistry;
use editor::{EditorSettings, actions::SelectAll, blink_manager::BlinkManager};
@@ -31,8 +32,9 @@ use terminal_panel::TerminalPanel;
use terminal_path_like_target::{hover_path_like_target, open_path_like_target};
use terminal_scrollbar::TerminalScrollHandle;
use terminal_slash_command::TerminalSlashCommand;
use terminal_tab_tooltip::TerminalTooltip;
use ui::{
ContextMenu, Divider, ScrollAxes, Scrollbars, Tooltip, WithScrollbar,
ContextMenu, Icon, IconName, Label, ScrollAxes, Scrollbars, Tooltip, WithScrollbar, h_flex,
prelude::*,
scrollbars::{self, GlobalSetting, ScrollbarVisibility},
};
@@ -1138,24 +1140,14 @@ impl Item for TerminalView {
type Event = ItemEvent;
fn tab_tooltip_content(&self, cx: &App) -> Option<TabTooltipContent> {
Some(TabTooltipContent::Custom(Box::new(Tooltip::element({
let terminal = self.terminal().read(cx);
let title = terminal.title(false);
let pid = terminal.pid_getter()?.fallback_pid();
let terminal = self.terminal().read(cx);
let title = terminal.title(false);
let pid = terminal.pid_getter()?.fallback_pid();
move |_, _| {
v_flex()
.gap_1()
.child(Label::new(title.clone()))
.child(h_flex().flex_grow().child(Divider::horizontal()))
.child(
Label::new(format!("Process ID (PID): {}", pid))
.color(Color::Muted)
.size(LabelSize::Small),
)
.into_any_element()
}
}))))
Some(TabTooltipContent::Custom(Box::new(move |_window, cx| {
cx.new(|_| TerminalTooltip::new(title.clone(), pid.as_u32()))
.into()
})))
}
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {

View File

@@ -588,20 +588,19 @@ impl ToolchainSelector {
.worktree_for_id(worktree_id, cx)?
.read(cx)
.abs_path();
let workspace_id = workspace.database_id()?;
let weak = workspace.weak_handle();
cx.spawn_in(window, async move |workspace, cx| {
let active_toolchain = project
.read_with(cx, |this, cx| {
this.active_toolchain(
ProjectPath {
worktree_id,
path: relative_path.clone(),
},
language_name.clone(),
cx,
)
})?
.await;
let active_toolchain = workspace::WORKSPACE_DB
.toolchain(
workspace_id,
worktree_id,
relative_path.clone(),
language_name.clone(),
)
.await
.ok()
.flatten();
workspace
.update_in(cx, |this, window, cx| {
this.toggle_modal(window, cx, move |window, cx| {
@@ -619,7 +618,6 @@ impl ToolchainSelector {
});
})
.ok();
anyhow::Ok(())
})
.detach();

View File

@@ -1656,6 +1656,49 @@ impl WorkspaceDb {
}
}
/// Loads the toolchain previously persisted for the given worktree path and
/// language in this workspace, if any.
///
/// Returns `Ok(None)` when no row matches, or when the stored `raw_json`
/// column of the matching row fails to parse as JSON.
pub async fn toolchain(
&self,
workspace_id: WorkspaceId,
worktree_id: WorktreeId,
relative_worktree_path: Arc<RelPath>,
language_name: LanguageName,
) -> Result<Option<Toolchain>> {
// NOTE(review): this is a read-only SELECT but runs through `self.write`
// — presumably to serialize against toolchain writes on the same
// connection; confirm that is intentional.
self.write(move |this| {
let mut select = this
.select_bound(sql!(
SELECT
name, path, raw_json
FROM toolchains
WHERE
workspace_id = ? AND
language_name = ? AND
worktree_id = ? AND
relative_worktree_path = ?
))
.context("select toolchain")?;
let toolchain: Vec<(String, String, String)> = select((
workspace_id,
language_name.as_ref().to_string(),
worktree_id.to_usize(),
// Paths are bound in their unix string form to match how they
// are stored in the `relative_worktree_path` column.
relative_worktree_path.as_unix_str().to_string(),
))?;
// Only the first matching row (if any) is used.
Ok(toolchain
.into_iter()
.next()
.and_then(|(name, path, raw_json)| {
Some(Toolchain {
name: name.into(),
path: path.into(),
language_name,
// The `?` inside `and_then` turns unparsable JSON into `None`.
as_json: serde_json::Value::from_str(&raw_json).ok()?,
})
}))
})
.await
}
pub(crate) async fn toolchains(
&self,
workspace_id: WorkspaceId,

View File

@@ -52,7 +52,7 @@ use std::{
fmt,
future::Future,
mem::{self},
ops::{Deref, DerefMut, Range},
ops::{Deref, DerefMut},
path::{Path, PathBuf},
pin::Pin,
sync::{
@@ -428,7 +428,7 @@ impl Worktree {
let mut entry = Entry::new(
RelPath::empty().into(),
&metadata,
ProjectEntryId::new(&next_entry_id),
&next_entry_id,
snapshot.root_char_bag,
None,
);
@@ -2736,30 +2736,13 @@ impl BackgroundScannerState {
}
}
fn entry_id_for(
async fn insert_entry(
&mut self,
next_entry_id: &AtomicUsize,
path: &RelPath,
metadata: &fs::Metadata,
) -> ProjectEntryId {
// If an entry with the same inode was removed from the worktree during this scan,
// then it *might* represent the same file or directory. But the OS might also have
// re-used the inode for a completely different file or directory.
//
// Conditionally reuse the old entry's id:
// * if the mtime is the same, the file was probably renamed.
// * if the path is the same, the file may just have been updated
if let Some(removed_entry) = self.removed_entries.remove(&metadata.inode) {
if removed_entry.mtime == Some(metadata.mtime) || *removed_entry.path == *path {
return removed_entry.id;
}
} else if let Some(existing_entry) = self.snapshot.entry_for_path(path) {
return existing_entry.id;
}
ProjectEntryId::new(next_entry_id)
}
async fn insert_entry(&mut self, entry: Entry, fs: &dyn Fs, watcher: &dyn Watcher) -> Entry {
mut entry: Entry,
fs: &dyn Fs,
watcher: &dyn Watcher,
) -> Entry {
self.reuse_entry_id(&mut entry);
let entry = self.snapshot.insert_entry(entry, fs);
if entry.path.file_name() == Some(&DOT_GIT) {
self.insert_git_repository(entry.path.clone(), fs, watcher)
@@ -3406,13 +3389,13 @@ impl Entry {
fn new(
path: Arc<RelPath>,
metadata: &fs::Metadata,
id: ProjectEntryId,
next_entry_id: &AtomicUsize,
root_char_bag: CharBag,
canonical_path: Option<Arc<Path>>,
) -> Self {
let char_bag = char_bag_for_path(root_char_bag, &path);
Self {
id,
id: ProjectEntryId::new(next_entry_id),
kind: if metadata.is_dir {
EntryKind::PendingDir
} else {
@@ -3699,10 +3682,8 @@ impl BackgroundScanner {
.await;
if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) {
root_entry.is_ignored = true;
let mut root_entry = root_entry.clone();
state.reuse_entry_id(&mut root_entry);
state
.insert_entry(root_entry, self.fs.as_ref(), self.watcher.as_ref())
.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref())
.await;
}
if root_entry.is_dir() {
@@ -3896,35 +3877,29 @@ impl BackgroundScanner {
abs_paths.dedup_by(|a, b| a.starts_with(b));
{
let snapshot = &self.state.lock().await.snapshot;
abs_paths.retain(|abs_path| {
let abs_path = &SanitizedPath::new(abs_path);
let mut ranges_to_drop = SmallVec::<[Range<usize>; 4]>::new();
fn skip_ix(ranges: &mut SmallVec<[Range<usize>; 4]>, ix: usize) {
if let Some(last_range) = ranges.last_mut()
&& last_range.end == ix
{
last_range.end += 1;
} else {
ranges.push(ix..ix + 1);
}
}
for (ix, abs_path) in abs_paths.iter().enumerate() {
let abs_path = &SanitizedPath::new(&abs_path);
{
let mut is_git_related = false;
let mut dot_git_paths = None;
for ancestor in abs_path.as_path().ancestors() {
let dot_git_paths = self.executor.block(maybe!(async {
let mut path = None;
for ancestor in abs_path.as_path().ancestors() {
if is_git_dir(ancestor, self.fs.as_ref()).await {
let path_in_git_dir = abs_path
.as_path()
.strip_prefix(ancestor)
.expect("stripping off the ancestor");
dot_git_paths = Some((ancestor.to_owned(), path_in_git_dir.to_owned()));
break;
path = Some((ancestor.to_owned(), path_in_git_dir.to_owned()));
break;
}
}
}
path
}));
if let Some((dot_git_abs_path, path_in_git_dir)) = dot_git_paths {
if skipped_files_in_dot_git
@@ -3934,11 +3909,8 @@ impl BackgroundScanner {
path_in_git_dir.starts_with(skipped_git_subdir)
})
{
log::debug!(
"ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories"
);
skip_ix(&mut ranges_to_drop, ix);
continue;
log::debug!("ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories");
return false;
}
is_git_related = true;
@@ -3947,7 +3919,8 @@ impl BackgroundScanner {
}
}
let relative_path = if let Ok(path) = abs_path.strip_prefix(&root_canonical_path)
let relative_path = if let Ok(path) =
abs_path.strip_prefix(&root_canonical_path)
&& let Ok(path) = RelPath::new(path, PathStyle::local())
{
path
@@ -3958,11 +3931,10 @@ impl BackgroundScanner {
);
} else {
log::error!(
"ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
"ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
);
}
skip_ix(&mut ranges_to_drop, ix);
continue;
return false;
};
if abs_path.file_name() == Some(OsStr::new(GITIGNORE)) {
@@ -3986,26 +3958,21 @@ impl BackgroundScanner {
});
if !parent_dir_is_loaded {
log::debug!("ignoring event {relative_path:?} within unloaded directory");
skip_ix(&mut ranges_to_drop, ix);
continue;
return false;
}
if self.settings.is_path_excluded(&relative_path) {
if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}");
}
skip_ix(&mut ranges_to_drop, ix);
continue;
return false;
}
relative_paths.push(relative_path.into_arc());
true
}
for range_to_drop in ranges_to_drop.into_iter().rev() {
abs_paths.drain(range_to_drop);
}
});
}
if relative_paths.is_empty() && dot_git_abs_paths.is_empty() {
return;
}
@@ -4308,7 +4275,7 @@ impl BackgroundScanner {
let mut child_entry = Entry::new(
child_path.clone(),
&child_metadata,
ProjectEntryId::new(&next_entry_id),
&next_entry_id,
root_char_bag,
None,
);
@@ -4495,11 +4462,10 @@ impl BackgroundScanner {
.ignore_stack_for_abs_path(&abs_path, metadata.is_dir, self.fs.as_ref())
.await;
let is_external = !canonical_path.starts_with(&root_canonical_path);
let entry_id = state.entry_id_for(self.next_entry_id.as_ref(), path, &metadata);
let mut fs_entry = Entry::new(
path.clone(),
&metadata,
entry_id,
self.next_entry_id.as_ref(),
state.snapshot.root_char_bag,
if metadata.is_symlink {
Some(canonical_path.as_path().to_path_buf().into())

View File

@@ -1533,175 +1533,6 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
// Tests the behavior of our worktree refresh when a file in a gitignored directory
// is created.
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/root",
json!({
".gitignore": "ignored_dir\n",
"ignored_dir": {
"existing_file.txt": "existing content",
"another_file.txt": "another content",
},
}),
)
.await;
let tree = Worktree::local(
Path::new("/root"),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.read_with(cx, |tree, _| {
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
assert!(ignored_dir.is_ignored);
assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
});
tree.update(cx, |tree, cx| {
tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
})
.await
.unwrap();
tree.read_with(cx, |tree, _| {
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
assert!(ignored_dir.is_ignored);
assert_eq!(ignored_dir.kind, EntryKind::Dir);
assert!(
tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
.is_some()
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
.is_some()
);
});
let entry = tree
.update(cx, |tree, cx| {
tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
})
.await
.unwrap();
assert!(entry.into_included().is_some());
cx.executor().run_until_parked();
tree.read_with(cx, |tree, _| {
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
assert!(ignored_dir.is_ignored);
assert_eq!(
ignored_dir.kind,
EntryKind::Dir,
"ignored_dir should still be loaded, not UnloadedDir"
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
.is_some(),
"existing_file.txt should still be visible"
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
.is_some(),
"another_file.txt should still be visible"
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
.is_some(),
"new_file.txt should be visible"
);
});
}
#[gpui::test]
async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
// Tests the behavior of our worktree refresh when a directory modification for a gitignored directory
// is triggered.
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/root",
json!({
".gitignore": "ignored_dir\n",
"ignored_dir": {
"file1.txt": "content1",
"file2.txt": "content2",
},
}),
)
.await;
let tree = Worktree::local(
Path::new("/root"),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
// Load a file to expand the ignored directory
tree.update(cx, |tree, cx| {
tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
})
.await
.unwrap();
tree.read_with(cx, |tree, _| {
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
assert_eq!(ignored_dir.kind, EntryKind::Dir);
assert!(
tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
.is_some()
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
.is_some()
);
});
fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
assert_eq!(
ignored_dir.kind,
EntryKind::Dir,
"ignored_dir should still be loaded (Dir), not UnloadedDir"
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
.is_some(),
"file1.txt should still be visible after directory fs event"
);
assert!(
tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
.is_some(),
"file2.txt should still be visible after directory fs event"
);
});
}
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
cx: &mut TestAppContext,

View File

@@ -22,9 +22,5 @@
<true/>
<key>com.apple.security.personal-information.photos-library</key>
<true/>
<key>com.apple.security.files.user-selected.read-write</key>
<true/>
<key>com.apple.security.files.downloads.read-write</key>
<true/>
</dict>
</plist>

View File

@@ -215,10 +215,6 @@ pub mod git {
Switch,
/// Selects a different repository.
SelectRepo,
/// Filter remotes.
FilterRemotes,
/// Create a git remote.
CreateRemote,
/// Opens the git branch selector.
#[action(deprecated_aliases = ["branches::OpenRecent"])]
Branch,

View File

@@ -89,32 +89,12 @@ To do this:
#### Cross-Region Inference
The Zed implementation of Amazon Bedrock uses [Cross-Region inference](https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html) to improve availability and throughput.
The Zed implementation of Amazon Bedrock uses [Cross-Region inference](https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html) for all the models and region combinations that support it.
With Cross-Region inference, you can distribute traffic across multiple AWS Regions, enabling higher throughput.
##### Regional vs Global Inference Profiles
Bedrock supports two types of cross-region inference profiles:
- **Regional profiles** (default): Route requests within a specific geography (US, EU, APAC). For example, `us-east-1` uses the `us.*` profile which routes across `us-east-1`, `us-east-2`, and `us-west-2`.
- **Global profiles**: Route requests across all commercial AWS Regions for maximum availability and performance.
By default, Zed uses **regional profiles** which keep your data within the same geography. You can opt into global profiles by adding `"allow_global": true` to your Bedrock configuration:
```json [settings]
{
"language_models": {
"bedrock": {
"authentication_method": "named_profile",
"region": "your-aws-region",
"profile": "your-profile-name",
"allow_global": true
}
}
}
```
**Note:** Only select newer models support global inference profiles. See the [AWS Bedrock supported models documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system) for the current list of models that support global inference. If you encounter availability issues with a model in your region, enabling `allow_global` may resolve them.
For example, if you use `Claude 3.7 Sonnet Thinking` from `us-east-1`, it may be processed across the US regions, namely: `us-east-1`, `us-east-2`, or `us-west-2`.
Cross-Region inference requests are kept within the AWS Regions that are part of the geography where the data originally resides.
For example, a request made within the US is kept within the AWS Regions in the US.
Although the data remains stored only in the source Region, your input prompts and output results might move outside of your source Region during cross-Region inference.
All data will be transmitted encrypted across Amazon's secure network.

View File

@@ -2,44 +2,34 @@
PHP support is available through the [PHP extension](https://github.com/zed-extensions/php).
- Tree-sitter: [tree-sitter/tree-sitter-php](https://github.com/tree-sitter/tree-sitter-php)
- Language Server: [phpactor/phpactor](https://github.com/phpactor/phpactor)
- Alternate Language Server: [bmewburn/vscode-intelephense](https://github.com/bmewburn/vscode-intelephense/)
## Install PHP
The PHP extension requires PHP to be installed and available in your `PATH`:
```sh
# macOS via Homebrew
brew install php
# Debian/Ubuntu
sudo apt-get install php-cli
# CentOS 8+/RHEL
sudo dnf install php-cli
# Arch Linux
sudo pacman -S php
# check PHP path
## macOS and Linux
which php
## Windows
where php
```
- Tree-sitter: https://github.com/tree-sitter/tree-sitter-php
- Language Servers:
- [phpactor](https://github.com/phpactor/phpactor)
- [intelephense](https://github.com/bmewburn/vscode-intelephense/)
## Choosing a language server
The PHP extension uses [LSP language servers](https://microsoft.github.io/language-server-protocol) with Phpactor as the default. If you want to use other language servers that support Zed (e.g. Intelephense or PHP Tools), make sure to follow the documentation on how to implement it.
The PHP extension offers both `phpactor` and `intelephense` language server support.
`phpactor` is enabled by default.
### Phpactor
The Zed PHP Extension can install `phpactor` automatically but requires `php` to be installed and available in your path:
```sh
# brew install php # macOS
# sudo apt-get install php # Debian/Ubuntu
# yum install php # CentOS/RHEL
# pacman -S php # Arch Linux
which php
```
### Intelephense
[Intelephense](https://intelephense.com/) is a [proprietary](https://github.com/bmewburn/vscode-intelephense/blob/master/LICENSE.txt#L29) language server for PHP operating under a freemium model. Certain features require purchase of a [premium license](https://intelephense.com/buy).
[Intelephense](https://intelephense.com/) is a [proprietary](https://github.com/bmewburn/vscode-intelephense/blob/master/LICENSE.txt#L29) language server for PHP operating under a freemium model. Certain features require purchase of a [premium license](https://intelephense.com/).
To use Intelephense, add the following to your `settings.json`:
To switch to `intelephense`, add the following to your `settings.json`:
```json [settings]
{
@@ -51,9 +41,7 @@ To use Intelephense, add the following to your `settings.json`:
}
```
To use the premium features, you can place your license file inside your home directory at `~/intelephense/licence.txt` for macOS and Linux, or `%USERPROFILE%/intelephense/licence.txt` on Windows.
Alternatively, you can pass the licence key or a path to a file containing the licence key as an initialization option. To do this, add the following to your `settings.json`:
To use the premium features, you can place your [licence.txt file](https://intelephense.com/faq.html) at `~/intelephense/licence.txt` inside your home directory. Alternatively, you can pass the licence key or a path to a file containing the licence key as an initialization option for the `intelephense` language server. To do this, add the following to your `settings.json`:
```json [settings]
{
@@ -67,67 +55,15 @@ Alternatively, you can pass the licence key or a path to a file containing the l
}
```
### PHP Tools
[PHP Tools](https://www.devsense.com/) is a proprietary language server that offers free and premium features. You need to [purchase a license](https://www.devsense.com/en/purchase) to activate the premium features.
To use PHP Tools, add the following to your `settings.json`:
```json [settings]
{
"languages": {
"PHP": {
"language_servers": ["phptools", "!intelephense", "!phpactor", "..."]
}
}
}
```
To use the premium features, you can add your license in `initialization_options` in your `settings.json`:
```json [settings]
{
"lsp": {
"phptools": {
"initialization_options": {
"0": "your_license_key"
}
}
}
}
```
or, set environment variable `DEVSENSE_PHP_LS_LICENSE` on `.env` file in your project.
```env
DEVSENSE_PHP_LS_LICENSE="your_license_key"
```
Check out the documentation of [PHP Tools for Zed](https://docs.devsense.com/other/zed/) for more details.
### Phpactor
To use Phpactor instead of Intelephense or any other tools, add the following to your `settings.json`:
```json [settings]
{
"languages": {
"PHP": {
"language_servers": ["phpactor", "!intelephense", "!phptools", "..."]
}
}
}
```
## PHPDoc
Zed supports syntax highlighting for PHPDoc comments.
- Tree-sitter: [claytonrcarter/tree-sitter-phpdoc](https://github.com/claytonrcarter/tree-sitter-phpdoc)
## Debugging
## Setting up Xdebug
The PHP extension provides a debug adapter for PHP via Xdebug. There are several ways to use it:
Zed's PHP extension provides a debug adapter for PHP and Xdebug. The adapter name is `Xdebug`. Here are a couple of ways you can use it:
```json
[
@@ -147,10 +83,10 @@ The PHP extension provides a debug adapter for PHP via Xdebug. There are several
]
```
These are common troubleshooting tips, in case you run into issues:
In case you run into issues:
- Ensure that you have Xdebug installed for the version of PHP youre running.
- Ensure that Xdebug is configured to run in `debug` mode.
- Ensure that Xdebug is actually starting a debugging session.
- Ensure that the host and port matches between Xdebug and Zed.
- Look at the diagnostics log by using the `xdebug_info()` function in the page youre trying to debug.
- ensure that you have Xdebug installed for the version of PHP youre running
- ensure that Xdebug is configured to run in `debug` mode
- ensure that Xdebug is actually starting a debugging session
- check that the host and port matches between Xdebug and Zed
- look at the diagnostics log by using the `xdebug_info()` function in the page youre trying to debug