Compare commits

..

11 Commits

Author SHA1 Message Date
Conrad Irwin
a89156cbbe expr returns 1 when it calculates 0, who knew... 2025-10-24 20:22:58 -06:00
Conrad Irwin
892e8cddcc Why are you failing? 2025-10-24 20:14:01 -06:00
Conrad Irwin
4ed39540e7 re-run 2025-10-24 19:50:56 -06:00
Conrad Irwin
91c509ea5e Clippppppy 2025-10-24 16:55:18 -06:00
Conrad Irwin
54ca5e7c91 Oops 2025-10-24 16:44:40 -06:00
Conrad Irwin
38db09157e New approach to installing nextest? 2025-10-24 16:29:21 -06:00
Conrad Irwin
8a4d85eb82 Try gh_workflow generation 2025-10-24 00:16:12 -06:00
Conrad Irwin
6a983f657a Cache more aggressively 2025-10-23 23:44:51 -06:00
Conrad Irwin
6439ea7fb8 build again 2025-10-23 23:22:20 -06:00
Conrad Irwin
59d8458a24 New cache? 2025-10-23 22:30:18 -06:00
Conrad Irwin
be7c18234d Try new namespace builder 2025-10-23 22:14:17 -06:00
81 changed files with 1394 additions and 3112 deletions

View File

@@ -296,49 +296,53 @@ jobs:
github.repository_owner == 'zed-industries' &&
needs.job_spec.outputs.run_tests == 'true'
runs-on:
- namespace-profile-16x32-ubuntu-2204
- namespace-profile-ubuntu22-x86-16x32-custom
steps:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
run: |
echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
echo "$HOME/.cargo-nextest/bin" >> "$GITHUB_PATH"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
- name: Configure Go and Rust cache
uses: namespacelabs/nscloud-cache-action@v1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
# cache-provider: "buildjet"
- name: Install Linux dependencies
run: ./script/linux
path: |
/home/runner/.cargo-nextest
/home/runner/.rustup
./target
- name: Configure CI
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
- name: cargo clippy
run: ./script/clippy
- name: Install cargo nextest
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked --root ~/.cargo-nextest
- name: Limit target directory size
env:
MAX_SIZE: ${{ runner.os == 'macOS' && 300 || 100 }}
shell: bash -euxo pipefail {0}
# Use the variable in the run command
run: script/clear-target-dir-if-larger-than ${{ env.MAX_SIZE }}
- name: Run tests
uses: ./.github/actions/run_tests
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
- name: Build other binaries and features
run: |
cargo build -p zed
cargo check -p workspace
cargo check -p gpui --examples
# Even though the Linux runner is not stateful, in theory there is no need to do this cleanup.
# But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
# to clean up the config file, I've included the cleanup code here as a precaution.
# While it's not strictly necessary at this moment, I believe it's better to err on the side of caution.
- name: Clean CI config file
if: always()
run: rm -rf ./../.cargo
- name: cargo clippy
run: ./script/clippy
doctests:
# Nextest currently doesn't support doctests, so run them separately and in parallel.

113
Cargo.lock generated
View File

@@ -4532,6 +4532,7 @@ dependencies = [
"paths",
"serde",
"serde_json",
"shlex",
"smol",
"task",
"util",
@@ -4757,6 +4758,7 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
"shlex",
"sysinfo 0.37.2",
"task",
"tasks_ui",
@@ -4900,6 +4902,18 @@ dependencies = [
"syn 2.0.106",
]
[[package]]
name = "derive_setters"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9"
dependencies = [
"darling 0.20.11",
"proc-macro2",
"quote",
"syn 2.0.106",
]
[[package]]
name = "deunicode"
version = "1.6.2"
@@ -6940,6 +6954,35 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "gh-workflow"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fffeec7bd9dfa73ffe1db38979cca5716ec6ffd944f03fae65fee81f16082ae"
dependencies = [
"async-trait",
"derive_more 2.0.1",
"derive_setters",
"gh-workflow-macros",
"indexmap 2.11.4",
"merge",
"serde",
"serde_json",
"serde_yaml",
"strum_macros 0.27.2",
]
[[package]]
name = "gh-workflow-macros"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5eafb4d2a1005d4ac6d041ce929af10be1de1e1eae478795d9d634b84ccf8191"
dependencies = [
"heck 0.5.0",
"quote",
"syn 2.0.106",
]
[[package]]
name = "gif"
version = "0.13.3"
@@ -7069,6 +7112,7 @@ dependencies = [
"notifications",
"panel",
"picker",
"postage",
"pretty_assertions",
"project",
"schemars 1.0.4",
@@ -9809,6 +9853,28 @@ dependencies = [
"gpui",
]
[[package]]
name = "merge"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10bbef93abb1da61525bbc45eeaff6473a41907d19f8f9aa5168d214e10693e9"
dependencies = [
"merge_derive",
"num-traits",
]
[[package]]
name = "merge_derive"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "209d075476da2e63b4b29e72a2ef627b840589588e71400a25e3565c4f849d07"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "metal"
version = "0.29.0"
@@ -12798,6 +12864,30 @@ dependencies = [
"toml_edit 0.23.7",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn 1.0.109",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro-error-attr2"
version = "2.0.0"
@@ -12924,6 +13014,7 @@ dependencies = [
"settings",
"sha2",
"shellexpand 2.1.2",
"shlex",
"smallvec",
"smol",
"snippet",
@@ -13836,6 +13927,7 @@ dependencies = [
"serde",
"serde_json",
"settings",
"shlex",
"smol",
"tempfile",
"thiserror 2.0.17",
@@ -15221,6 +15313,19 @@ dependencies = [
"syn 2.0.106",
]
[[package]]
name = "serde_yaml"
version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [
"indexmap 2.11.4",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]
[[package]]
name = "serial2"
version = "0.2.33"
@@ -18392,6 +18497,12 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "untrusted"
version = "0.9.0"
@@ -20801,6 +20912,7 @@ dependencies = [
"cargo_metadata",
"cargo_toml",
"clap",
"gh-workflow",
"indoc",
"toml 0.8.23",
"toml_edit 0.22.27",
@@ -21032,6 +21144,7 @@ dependencies = [
"file_finder",
"fs",
"futures 0.3.31",
"gh-workflow",
"git",
"git_hosting_providers",
"git_ui",

View File

@@ -506,6 +506,7 @@ fork = "0.2.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
gh-workflow = "0.8.0"
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"

View File

@@ -1,106 +0,0 @@
; This file contains a list of people who're interested in reviewing pull requests
; to certain parts of the code-base.
;
; This is mostly used internally for PR assignment, and may change over time.
;
; If you have permission to merge PRs (mostly equivalent to "do you work at Zed Industries"),
; we strongly encourage you to put your name in the "all" bucket, but you can also add yourself
; to other areas too.
<all>
= @ConradIrwin
= @maxdeviant
= @SomeoneToIgnore
= @probably-neb
= @danilo-leal
= @Veykril
= @kubkon
= @p1n3appl3
= @dinocosta
= @smitbarmase
= @cole-miller
vim
= @ConradIrwin
= @probably-neb
= @p1n3appl3
= @dinocosta
gpui
= @mikayla-maki
git
= @cole-miller
= @danilo-leal
linux
= @dvdsk
= @smitbarmase
= @p1n3appl3
= @cole-miller
windows
= @reflectronic
= @localcc
pickers
= @p1n3appl3
= @dvdsk
= @SomeoneToIgnore
audio
= @dvdsk
helix
= @kubkon
terminal
= @kubkon
= @Veykril
debugger
= @kubkon
= @osiewicz
= @Anthony-Eid
extension
= @kubkon
settings_ui
= @probably-neb
= @danilo-leal
= @Anthony-Eid
crashes
= @p1n3appl3
= @Veykril
ai
= @danilo-leal
= @benbrandt
design
= @danilo-leal
multi_buffer
= @Veykril
= @SomeoneToIgnore
lsp
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore
languages
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore
project_panel
= @smitbarmase
tasks
= @SomeoneToIgnore
= @Veykril

View File

@@ -1772,9 +1772,6 @@
"allow_rewrap": "anywhere"
},
"Python": {
"code_actions_on_format": {
"source.organizeImports.ruff": true
},
"formatter": {
"language_server": {
"name": "ruff"

View File

@@ -1483,11 +1483,11 @@ impl EditAgentTest {
fs.insert_tree("/root", json!({})).await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let agent_model = SelectedModel::from_str(
&std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()),
&std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-4-sonnet-latest".into()),
)
.unwrap();
let judge_model = SelectedModel::from_str(
&std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()),
&std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-4-sonnet-latest".into()),
)
.unwrap();
@@ -1547,7 +1547,7 @@ impl EditAgentTest {
model.provider_id() == selected_model.provider
&& model.id() == selected_model.model
})
.unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0));
.expect("Model not found");
model
})
}

View File

@@ -6,11 +6,8 @@ use std::sync::Arc;
use acp_thread::AcpThread;
use agent::{ContextServerRegistry, DbThreadMetadata, HistoryEntry, HistoryStore};
use db::kvp::{Dismissable, KEY_VALUE_STORE};
use project::{
ExternalAgentServerName,
agent_server_store::{
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
},
use project::agent_server_store::{
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
};
use serde::{Deserialize, Serialize};
use settings::{
@@ -44,8 +41,6 @@ use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary};
use client::{UserStore, zed_urls};
use cloud_llm_client::{Plan, PlanV1, PlanV2, UsageLimit};
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use extension::ExtensionEvents;
use extension_host::ExtensionStore;
use fs::Fs;
use gpui::{
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
@@ -72,9 +67,7 @@ use workspace::{
};
use zed_actions::{
DecreaseBufferFontSize, IncreaseBufferFontSize, ResetBufferFontSize,
agent::{
OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetAgentZoom, ResetOnboarding,
},
agent::{OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetOnboarding},
assistant::{OpenRulesLibrary, ToggleFocus},
};
@@ -195,13 +188,6 @@ pub fn init(cx: &mut App) {
})
.register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| {
TrialEndUpsell::set_dismissed(false, cx);
})
.register_action(|workspace, _: &ResetAgentZoom, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
panel.update(cx, |panel, cx| {
panel.reset_agent_zoom(window, cx);
});
}
});
},
)
@@ -436,7 +422,6 @@ pub struct AgentPanel {
agent_panel_menu_handle: PopoverMenuHandle<ContextMenu>,
agent_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
agent_navigation_menu: Option<Entity<ContextMenu>>,
_extension_subscription: Option<Subscription>,
width: Option<Pixels>,
height: Option<Pixels>,
zoomed: bool,
@@ -647,24 +632,7 @@ impl AgentPanel {
)
});
// Subscribe to extension events to sync agent servers when extensions change
let extension_subscription = if let Some(extension_events) = ExtensionEvents::try_global(cx)
{
Some(
cx.subscribe(&extension_events, |this, _source, event, cx| match event {
extension::Event::ExtensionInstalled(_)
| extension::Event::ExtensionUninstalled(_)
| extension::Event::ExtensionsInstalledChanged => {
this.sync_agent_servers_from_extensions(cx);
}
_ => {}
}),
)
} else {
None
};
let mut panel = Self {
Self {
active_view,
workspace,
user_store,
@@ -682,7 +650,6 @@ impl AgentPanel {
agent_panel_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu: None,
_extension_subscription: extension_subscription,
width: None,
height: None,
zoomed: false,
@@ -692,11 +659,7 @@ impl AgentPanel {
history_store,
selected_agent: AgentType::default(),
loading: false,
};
// Initial sync of agent servers from extensions
panel.sync_agent_servers_from_extensions(cx);
panel
}
}
pub fn toggle_focus(
@@ -1068,21 +1031,13 @@ impl AgentPanel {
update_settings_file(self.fs.clone(), cx, move |settings, cx| {
let agent_ui_font_size =
ThemeSettings::get_global(cx).agent_ui_font_size(cx) + delta;
let agent_buffer_font_size =
ThemeSettings::get_global(cx).agent_buffer_font_size(cx) + delta;
let _ = settings
.theme
.agent_ui_font_size
.insert(theme::clamp_font_size(agent_ui_font_size).into());
let _ = settings
.theme
.agent_buffer_font_size
.insert(theme::clamp_font_size(agent_buffer_font_size).into());
});
} else {
theme::adjust_agent_ui_font_size(cx, |size| size + delta);
theme::adjust_agent_buffer_font_size(cx, |size| size + delta);
}
}
WhichFontSize::BufferFont => {
@@ -1103,19 +1058,12 @@ impl AgentPanel {
if action.persist {
update_settings_file(self.fs.clone(), cx, move |settings, _| {
settings.theme.agent_ui_font_size = None;
settings.theme.agent_buffer_font_size = None;
});
} else {
theme::reset_agent_ui_font_size(cx);
theme::reset_agent_buffer_font_size(cx);
}
}
pub fn reset_agent_zoom(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
theme::reset_agent_ui_font_size(cx);
theme::reset_agent_buffer_font_size(cx);
}
pub fn toggle_zoom(&mut self, _: &ToggleZoom, window: &mut Window, cx: &mut Context<Self>) {
if self.zoomed {
cx.emit(PanelEvent::ZoomOut);
@@ -1361,31 +1309,6 @@ impl AgentPanel {
self.selected_agent.clone()
}
fn sync_agent_servers_from_extensions(&mut self, cx: &mut Context<Self>) {
if let Some(extension_store) = ExtensionStore::try_global(cx) {
let (manifests, extensions_dir) = {
let store = extension_store.read(cx);
let installed = store.installed_extensions();
let manifests: Vec<_> = installed
.iter()
.map(|(id, entry)| (id.clone(), entry.manifest.clone()))
.collect();
let extensions_dir = paths::extensions_dir().join("installed");
(manifests, extensions_dir)
};
self.project.update(cx, |project, cx| {
project.agent_server_store().update(cx, |store, cx| {
let manifest_refs: Vec<_> = manifests
.iter()
.map(|(id, manifest)| (id.as_ref(), manifest.as_ref()))
.collect();
store.sync_extension_agents(manifest_refs, extensions_dir, cx);
});
});
}
}
pub fn new_agent_thread(
&mut self,
agent: AgentType,
@@ -1821,16 +1744,6 @@ impl AgentPanel {
let agent_server_store = self.project.read(cx).agent_server_store().clone();
let focus_handle = self.focus_handle(cx);
// Get custom icon path for selected agent before building menu (to avoid borrow issues)
let selected_agent_custom_icon =
if let AgentType::Custom { name, .. } = &self.selected_agent {
agent_server_store
.read(cx)
.agent_icon(&ExternalAgentServerName(name.clone()))
} else {
None
};
let active_thread = match &self.active_view {
ActiveView::ExternalAgentThread { thread_view } => {
thread_view.read(cx).as_native_thread(cx)
@@ -1844,7 +1757,12 @@ impl AgentPanel {
{
let focus_handle = focus_handle.clone();
move |_window, cx| {
Tooltip::for_action_in("New…", &ToggleNewThreadMenu, &focus_handle, cx)
Tooltip::for_action_in(
"New…",
&ToggleNewThreadMenu,
&focus_handle,
cx,
)
}
},
)
@@ -1863,7 +1781,8 @@ impl AgentPanel {
let active_thread = active_thread.clone();
Some(ContextMenu::build(window, cx, |menu, _window, cx| {
menu.context(focus_handle.clone())
menu
.context(focus_handle.clone())
.header("Zed Agent")
.when_some(active_thread, |this, active_thread| {
let thread = active_thread.read(cx);
@@ -2020,110 +1939,77 @@ impl AgentPanel {
}),
)
.map(|mut menu| {
let agent_server_store_read = agent_server_store.read(cx);
let agent_names = agent_server_store_read
let agent_names = agent_server_store
.read(cx)
.external_agents()
.filter(|name| {
name.0 != GEMINI_NAME
&& name.0 != CLAUDE_CODE_NAME
&& name.0 != CODEX_NAME
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
})
.cloned()
.collect::<Vec<_>>();
let custom_settings = cx
.global::<SettingsStore>()
.get::<AllAgentServersSettings>(None)
.custom
.clone();
let custom_settings = cx.global::<SettingsStore>().get::<AllAgentServersSettings>(None).custom.clone();
for agent_name in agent_names {
let icon_path = agent_server_store_read.agent_icon(&agent_name);
let mut entry =
ContextMenuEntry::new(format!("New {} Thread", agent_name));
if let Some(icon_path) = icon_path {
entry = entry.custom_icon_path(icon_path);
} else {
entry = entry.icon(IconName::Terminal);
}
entry = entry
.icon_color(Color::Muted)
.disabled(is_via_collab)
.handler({
let workspace = workspace.clone();
let agent_name = agent_name.clone();
let custom_settings = custom_settings.clone();
move |window, cx| {
if let Some(workspace) = workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
if let Some(panel) =
workspace.panel::<AgentPanel>(cx)
{
panel.update(cx, |panel, cx| {
panel.new_agent_thread(
AgentType::Custom {
name: agent_name
.clone()
.into(),
command: custom_settings
.get(&agent_name.0)
.map(|settings| {
settings
.command
.clone()
})
.unwrap_or(
placeholder_command(
),
),
},
window,
cx,
);
});
}
});
menu = menu.item(
ContextMenuEntry::new(format!("New {} Thread", agent_name))
.icon(IconName::Terminal)
.icon_color(Color::Muted)
.disabled(is_via_collab)
.handler({
let workspace = workspace.clone();
let agent_name = agent_name.clone();
let custom_settings = custom_settings.clone();
move |window, cx| {
if let Some(workspace) = workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
if let Some(panel) =
workspace.panel::<AgentPanel>(cx)
{
panel.update(cx, |panel, cx| {
panel.new_agent_thread(
AgentType::Custom {
name: agent_name.clone().into(),
command: custom_settings
.get(&agent_name.0)
.map(|settings| {
settings.command.clone()
})
.unwrap_or(placeholder_command()),
},
window,
cx,
);
});
}
});
}
}
}
});
menu = menu.item(entry);
}),
);
}
menu
})
.separator()
.link(
"Add Other Agents",
OpenBrowser {
url: zed_urls::external_agents_docs(cx),
}
.boxed_clone(),
)
.separator().link(
"Add Other Agents",
OpenBrowser {
url: zed_urls::external_agents_docs(cx),
}
.boxed_clone(),
)
}))
}
});
let selected_agent_label = self.selected_agent.label();
let has_custom_icon = selected_agent_custom_icon.is_some();
let selected_agent = div()
.id("selected_agent_icon")
.when_some(selected_agent_custom_icon, |this, icon_path| {
let label = selected_agent_label.clone();
.when_some(self.selected_agent.icon(), |this, icon| {
this.px(DynamicSpacing::Base02.rems(cx))
.child(Icon::from_path(icon_path).color(Color::Muted))
.child(Icon::new(icon).color(Color::Muted))
.tooltip(move |_window, cx| {
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
Tooltip::with_meta(selected_agent_label.clone(), None, "Selected Agent", cx)
})
})
.when(!has_custom_icon, |this| {
this.when_some(self.selected_agent.icon(), |this, icon| {
let label = selected_agent_label.clone();
this.px(DynamicSpacing::Base02.rems(cx))
.child(Icon::new(icon).color(Color::Muted))
.tooltip(move |_window, cx| {
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
})
})
})
.into_any_element();
h_flex()

View File

@@ -20,7 +20,7 @@ use futures::{
};
use gpui::{AsyncApp, BackgroundExecutor, Task};
use smol::fs;
use util::{ResultExt as _, debug_panic, maybe, paths::PathExt, shell::ShellKind};
use util::{ResultExt as _, debug_panic, maybe, paths::PathExt};
/// Path to the program used for askpass
///
@@ -199,15 +199,9 @@ impl PasswordProxy {
let current_exec =
std::env::current_exe().context("Failed to determine current zed executable path.")?;
// TODO: inferred from the use of powershell.exe in askpass_helper_script
let shell_kind = if cfg!(windows) {
ShellKind::PowerShell
} else {
ShellKind::Posix
};
let askpass_program = ASKPASS_PROGRAM
.get_or_init(|| current_exec)
.try_shell_safe(shell_kind)
.try_shell_safe()
.context("Failed to shell-escape Askpass program path.")?
.to_string();
// Create an askpass script that communicates back to this process.
@@ -349,7 +343,7 @@ fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Pa
format!(
r#"
$ErrorActionPreference = 'Stop';
($args -join [char]0) | & {askpass_program} --askpass={askpass_socket} 2> $null
($args -join [char]0) | & "{askpass_program}" --askpass={askpass_socket} 2> $null
"#,
askpass_socket = askpass_socket.display(),
)

View File

@@ -1162,22 +1162,34 @@ impl BufferDiff {
self.hunks_intersecting_range(start..end, buffer, cx)
}
pub fn set_base_text_buffer(
&mut self,
base_buffer: Entity<language::Buffer>,
buffer: text::BufferSnapshot,
cx: &mut Context<Self>,
) -> oneshot::Receiver<()> {
let base_buffer = base_buffer.read(cx);
let language_registry = base_buffer.language_registry();
let base_buffer = base_buffer.snapshot();
self.set_base_text(base_buffer, language_registry, buffer, cx)
}
/// Used in cases where the change set isn't derived from git.
pub fn set_base_text(
&mut self,
base_text: Option<Arc<String>>,
language: Option<Arc<Language>>,
base_buffer: language::BufferSnapshot,
language_registry: Option<Arc<LanguageRegistry>>,
buffer: text::BufferSnapshot,
cx: &mut Context<Self>,
) -> oneshot::Receiver<()> {
let (tx, rx) = oneshot::channel();
let this = cx.weak_entity();
let base_text = Arc::new(base_buffer.text());
let snapshot = BufferDiffSnapshot::new_with_base_text(
buffer.clone(),
base_text,
language,
Some(base_text),
base_buffer.language().cloned(),
language_registry,
cx,
);

View File

@@ -467,7 +467,6 @@ CREATE TABLE extension_versions (
provides_grammars BOOLEAN NOT NULL DEFAULT FALSE,
provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_agent_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE,
provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE,
provides_snippets BOOLEAN NOT NULL DEFAULT FALSE,

View File

@@ -1,2 +0,0 @@
alter table extension_versions
add column provides_agent_servers bool not null default false

View File

@@ -310,9 +310,6 @@ impl Database {
.provides
.contains(&ExtensionProvides::ContextServers),
),
provides_agent_servers: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::AgentServers),
),
provides_slash_commands: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::SlashCommands),
),
@@ -425,10 +422,6 @@ fn apply_provides_filter(
condition = condition.add(extension_version::Column::ProvidesContextServers.eq(true));
}
if provides_filter.contains(&ExtensionProvides::AgentServers) {
condition = condition.add(extension_version::Column::ProvidesAgentServers.eq(true));
}
if provides_filter.contains(&ExtensionProvides::SlashCommands) {
condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true));
}

View File

@@ -24,7 +24,6 @@ pub struct Model {
pub provides_grammars: bool,
pub provides_language_servers: bool,
pub provides_context_servers: bool,
pub provides_agent_servers: bool,
pub provides_slash_commands: bool,
pub provides_indexed_docs_providers: bool,
pub provides_snippets: bool,
@@ -58,10 +57,6 @@ impl Model {
provides.insert(ExtensionProvides::ContextServers);
}
if self.provides_agent_servers {
provides.insert(ExtensionProvides::AgentServers);
}
if self.provides_slash_commands {
provides.insert(ExtensionProvides::SlashCommands);
}

View File

@@ -16,72 +16,6 @@ test_both_dbs!(
test_extensions_sqlite
);
test_both_dbs!(
test_agent_servers_filter,
test_agent_servers_filter_postgres,
test_agent_servers_filter_sqlite
);
async fn test_agent_servers_filter(db: &Arc<Database>) {
// No extensions initially
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
// Shared timestamp
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time());
// Insert two extensions, only one provides AgentServers
db.insert_extension_versions(
&[
(
"ext_agent_servers",
vec![NewExtensionVersion {
name: "Agent Servers Provider".into(),
version: semver::Version::parse("1.0.0").unwrap(),
description: "has agent servers".into(),
authors: vec!["author".into()],
repository: "org/agent-servers".into(),
schema_version: 1,
wasm_api_version: None,
provides: BTreeSet::from_iter([ExtensionProvides::AgentServers]),
published_at: t0,
}],
),
(
"ext_plain",
vec![NewExtensionVersion {
name: "Plain Extension".into(),
version: semver::Version::parse("0.1.0").unwrap(),
description: "no agent servers".into(),
authors: vec!["author2".into()],
repository: "org/plain".into(),
schema_version: 1,
wasm_api_version: None,
provides: BTreeSet::default(),
published_at: t0,
}],
),
]
.into_iter()
.collect(),
)
.await
.unwrap();
// Filter by AgentServers provides
let provides_filter = BTreeSet::from_iter([ExtensionProvides::AgentServers]);
let filtered = db
.get_extensions(None, Some(&provides_filter), 1, 10)
.await
.unwrap();
// Expect only the extension that declared AgentServers
assert_eq!(filtered.len(), 1);
assert_eq!(filtered[0].id.as_ref(), "ext_agent_servers");
}
async fn test_extensions(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());

View File

@@ -347,7 +347,6 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::GetColorPresentation>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
.add_request_handler(forward_read_only_project_request::<proto::GetDefaultBranch>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::LspExtExpandMacro>)
@@ -462,8 +461,6 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::BreakpointsForFile>)
.add_request_handler(forward_mutating_project_request::<proto::OpenCommitMessageBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::GitDiff>)
.add_request_handler(forward_mutating_project_request::<proto::GetTreeDiff>)
.add_request_handler(forward_mutating_project_request::<proto::GetBlobContent>)
.add_request_handler(forward_mutating_project_request::<proto::GitCreateBranch>)
.add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
.add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)

View File

@@ -35,6 +35,7 @@ log.workspace = true
paths.workspace = true
serde.workspace = true
serde_json.workspace = true
shlex.workspace = true
smol.workspace = true
task.workspace = true
util.workspace = true

View File

@@ -6,7 +6,7 @@ use gpui::AsyncApp;
use serde_json::Value;
use std::{path::PathBuf, sync::OnceLock};
use task::DebugRequest;
use util::{ResultExt, maybe, shell::ShellKind};
use util::{ResultExt, maybe};
use crate::*;
@@ -67,7 +67,7 @@ impl JsDebugAdapter {
.get("type")
.filter(|value| value == &"node-terminal")?;
let command = configuration.get("command")?.as_str()?.to_owned();
let mut args = ShellKind::Posix.split(&command)?.into_iter();
let mut args = shlex::split(&command)?.into_iter();
let program = args.next()?;
configuration.insert("runtimeExecutable".to_owned(), program.into());
configuration.insert(

View File

@@ -60,6 +60,7 @@ serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
shlex.workspace = true
sysinfo.workspace = true
task.workspace = true
tasks_ui.workspace = true

View File

@@ -9,7 +9,7 @@ use task::ZedDebugConfig;
use util::debug_panic;
use std::sync::Arc;
use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind};
use sysinfo::System;
use ui::{Context, Tooltip, prelude::*};
use ui::{ListItem, ListItemSpacing};
use workspace::{ModalView, Workspace};
@@ -362,12 +362,7 @@ fn get_processes_for_project(project: &Entity<Project>, cx: &mut App) -> Task<Ar
Arc::from(processes.into_boxed_slice())
})
} else {
let refresh_kind = RefreshKind::nothing().with_processes(
ProcessRefreshKind::nothing()
.without_tasks()
.with_cmd(UpdateKind::Always),
);
let mut processes: Box<[_]> = System::new_with_specifics(refresh_kind)
let mut processes: Box<[_]> = System::new_all()
.processes()
.values()
.map(|process| {

View File

@@ -32,7 +32,7 @@ use ui::{
SharedString, Styled, StyledExt, ToggleButton, ToggleState, Toggleable, Tooltip, Window, div,
h_flex, relative, rems, v_flex,
};
use util::{ResultExt, rel_path::RelPath, shell::ShellKind};
use util::{ResultExt, rel_path::RelPath};
use workspace::{ModalView, Workspace, notifications::DetachAndPromptErr, pane};
use crate::{attach_modal::AttachModal, debugger_panel::DebugPanel};
@@ -839,11 +839,7 @@ impl ConfigureMode {
};
}
let command = self.program.read(cx).text(cx);
let mut args = ShellKind::Posix
.split(&command)
.into_iter()
.flatten()
.peekable();
let mut args = shlex::split(&command).into_iter().flatten().peekable();
let mut env = FxHashMap::default();
while args.peek().is_some_and(|arg| arg.contains('=')) {
let arg = args.next().unwrap();
@@ -1269,11 +1265,7 @@ impl PickerDelegate for DebugDelegate {
})
.unwrap_or_default();
let mut args = ShellKind::Posix
.split(&text)
.into_iter()
.flatten()
.peekable();
let mut args = shlex::split(&text).into_iter().flatten().peekable();
let mut env = HashMap::default();
while args.peek().is_some_and(|arg| arg.contains('=')) {
let arg = args.next().unwrap();

View File

@@ -32,6 +32,7 @@ mod lsp_ext;
mod mouse_context_menu;
pub mod movement;
mod persistence;
mod proposed_changes_editor;
mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
@@ -67,12 +68,14 @@ pub use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, PathKey,
RowInfo, ToOffset, ToPoint,
};
pub use proposed_changes_editor::{
ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar,
};
pub use text::Bias;
use ::git::{
Restore,
blame::{BlameEntry, ParsedCommitMessage},
status::FileStatus,
};
use aho_corasick::AhoCorasick;
use anyhow::{Context as _, Result, anyhow};
@@ -844,10 +847,6 @@ pub trait Addon: 'static {
None
}
fn override_status_for_buffer_id(&self, _: BufferId, _: &App) -> Option<FileStatus> {
None
}
fn to_any(&self) -> &dyn std::any::Any;
fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
@@ -10642,20 +10641,6 @@ impl Editor {
}
}
pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
if let Some(status) = self
.addons
.iter()
.find_map(|(_, addon)| addon.override_status_for_buffer_id(buffer_id, cx))
{
return Some(status);
}
self.project
.as_ref()?
.read(cx)
.status_for_buffer_id(buffer_id, cx)
}
pub fn open_active_item_in_terminal(
&mut self,
_: &OpenInTerminal,
@@ -21026,6 +21011,65 @@ impl Editor {
self.searchable
}
/// Opens a `ProposedChangesEditor` for the currently selected ranges,
/// expanded to whole lines and grouped by their underlying buffers.
///
/// Propagates the action when the editor is not attached to a workspace.
fn open_proposed_changes_editor(
    &mut self,
    _: &OpenProposedChangesEditor,
    window: &mut Window,
    cx: &mut Context<Self>,
) {
    let Some(workspace) = self.workspace() else {
        cx.propagate();
        return;
    };

    let selections = self.selections.all::<usize>(&self.display_snapshot(cx));
    let multi_buffer = self.buffer.read(cx);
    let multi_buffer_snapshot = multi_buffer.snapshot(cx);

    // Group the selected ranges by underlying buffer, widening each range to
    // cover complete lines.
    let mut new_selections_by_buffer = HashMap::default();
    for selection in selections {
        for (buffer, range, _) in
            multi_buffer_snapshot.range_to_buffer_ranges(selection.start..selection.end)
        {
            let mut range = range.to_point(buffer);
            range.start.column = 0;
            range.end.column = buffer.line_len(range.end.row);
            new_selections_by_buffer
                .entry(multi_buffer.buffer(buffer.remote_id()).unwrap())
                .or_default()
                .push(range)
        }
    }

    let proposed_changes_buffers = new_selections_by_buffer
        .into_iter()
        .map(|(buffer, ranges)| ProposedChangeLocation { buffer, ranges })
        .collect::<Vec<_>>();
    let proposed_changes_editor = cx.new(|cx| {
        ProposedChangesEditor::new(
            "Proposed changes",
            proposed_changes_buffers,
            self.project.clone(),
            window,
            cx,
        )
    });

    // Defer adding the item so we do not re-enter the workspace while this
    // update is still in progress.
    window.defer(cx, move |window, cx| {
        workspace.update(cx, |workspace, cx| {
            workspace.active_pane().update(cx, |pane, cx| {
                pane.add_item(
                    Box::new(proposed_changes_editor),
                    true,
                    true,
                    None,
                    window,
                    cx,
                );
            });
        });
    });
}
pub fn open_excerpts_in_split(
&mut self,
_: &OpenExcerptsSplit,

View File

@@ -458,6 +458,7 @@ impl EditorElement {
register_action(editor, window, Editor::toggle_code_actions);
register_action(editor, window, Editor::open_excerpts);
register_action(editor, window, Editor::open_excerpts_in_split);
register_action(editor, window, Editor::open_proposed_changes_editor);
register_action(editor, window, Editor::toggle_soft_wrap);
register_action(editor, window, Editor::toggle_tab_bar);
register_action(editor, window, Editor::toggle_line_numbers);
@@ -3827,7 +3828,13 @@ impl EditorElement {
let multi_buffer = editor.buffer.read(cx);
let file_status = multi_buffer
.all_diff_hunks_expanded()
.then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx))
.then(|| {
editor
.project
.as_ref()?
.read(cx)
.status_for_buffer_id(for_excerpt.buffer_id, cx)
})
.flatten();
let indicator = multi_buffer
.buffer(for_excerpt.buffer_id)

View File

@@ -0,0 +1,523 @@
use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SelectionEffects, SemanticsProvider};
use buffer_diff::BufferDiff;
use collections::{HashMap, HashSet};
use futures::{channel::mpsc, future::join_all};
use gpui::{App, Entity, EventEmitter, Focusable, Render, Subscription, Task};
use language::{Buffer, BufferEvent, BufferRow, Capability};
use multi_buffer::{ExcerptRange, MultiBuffer};
use project::{InvalidationStrategy, Project, lsp_store::CacheInlayHints};
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
use text::{BufferId, ToOffset};
use ui::{ButtonLike, KeyBinding, prelude::*};
use workspace::{
Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
item::SaveOptions, searchable::SearchableItemHandle,
};
/// An editor that shows proposed edits to one or more buffers as branch
/// buffers diffed against their base buffers.
pub struct ProposedChangesEditor {
// The editor entity rendering `multibuffer`.
editor: Entity<Editor>,
// Holds excerpts from each branch buffer.
multibuffer: Entity<MultiBuffer>,
// Tab title, returned from `Item::tab_content_text`.
title: SharedString,
// One entry per (base, branch) buffer pair currently shown.
buffer_entries: Vec<BufferEntry>,
// Background task that recalculates branch/base diffs on request.
_recalculate_diffs_task: Task<Option<()>>,
// Sends recalculation requests to `_recalculate_diffs_task`.
recalculate_diffs_tx: mpsc::UnboundedSender<RecalculateDiff>,
}
/// A buffer together with the ranges in it for which changes are proposed.
pub struct ProposedChangeLocation<T> {
pub buffer: Entity<Buffer>,
pub ranges: Vec<Range<T>>,
}
/// Pairs a base buffer with its editable branch copy.
struct BufferEntry {
base: Entity<Buffer>,
branch: Entity<Buffer>,
// Keeps the branch-buffer event subscription alive for this entry's lifetime.
_subscription: Subscription,
}
/// Toolbar item that offers an "Apply All" button while a
/// `ProposedChangesEditor` is the active pane item.
pub struct ProposedChangesEditorToolbar {
current_editor: Option<Entity<ProposedChangesEditor>>,
}
/// A request to recompute the diff for a single branch buffer.
struct RecalculateDiff {
buffer: Entity<Buffer>,
// When true, the recalculation may be coalesced with subsequent requests.
debounce: bool,
}
/// A provider of code semantics for branch buffers.
///
/// Requests in edited regions will return nothing, but requests in unchanged
/// regions will be translated into the base buffer's coordinates.
struct BranchBufferSemanticsProvider(Rc<dyn SemanticsProvider>);
impl ProposedChangesEditor {
pub fn new<T: Clone + ToOffset>(
title: impl Into<SharedString>,
locations: Vec<ProposedChangeLocation<T>>,
project: Option<Entity<Project>>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded();
let mut this = Self {
editor: cx.new(|cx| {
let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, window, cx);
editor.set_expand_all_diff_hunks(cx);
editor.set_completion_provider(None);
editor.clear_code_action_providers();
editor.set_semantics_provider(
editor
.semantics_provider()
.map(|provider| Rc::new(BranchBufferSemanticsProvider(provider)) as _),
);
editor
}),
multibuffer,
title: title.into(),
buffer_entries: Vec::new(),
recalculate_diffs_tx,
_recalculate_diffs_task: cx.spawn_in(window, async move |this, cx| {
let mut buffers_to_diff = HashSet::default();
while let Some(mut recalculate_diff) = recalculate_diffs_rx.next().await {
buffers_to_diff.insert(recalculate_diff.buffer);
while recalculate_diff.debounce {
cx.background_executor()
.timer(Duration::from_millis(50))
.await;
let mut had_further_changes = false;
while let Ok(next_recalculate_diff) = recalculate_diffs_rx.try_next() {
let next_recalculate_diff = next_recalculate_diff?;
recalculate_diff.debounce &= next_recalculate_diff.debounce;
buffers_to_diff.insert(next_recalculate_diff.buffer);
had_further_changes = true;
}
if !had_further_changes {
break;
}
}
let recalculate_diff_futures = this
.update(cx, |this, cx| {
buffers_to_diff
.drain()
.filter_map(|buffer| {
let buffer = buffer.read(cx);
let base_buffer = buffer.base_buffer()?;
let buffer = buffer.text_snapshot();
let diff =
this.multibuffer.read(cx).diff_for(buffer.remote_id())?;
Some(diff.update(cx, |diff, cx| {
diff.set_base_text_buffer(base_buffer.clone(), buffer, cx)
}))
})
.collect::<Vec<_>>()
})
.ok()?;
join_all(recalculate_diff_futures).await;
}
None
}),
};
this.reset_locations(locations, window, cx);
this
}
pub fn branch_buffer_for_base(&self, base_buffer: &Entity<Buffer>) -> Option<Entity<Buffer>> {
self.buffer_entries.iter().find_map(|entry| {
if &entry.base == base_buffer {
Some(entry.branch.clone())
} else {
None
}
})
}
pub fn set_title(&mut self, title: SharedString, cx: &mut Context<Self>) {
self.title = title;
cx.notify();
}
pub fn reset_locations<T: Clone + ToOffset>(
&mut self,
locations: Vec<ProposedChangeLocation<T>>,
window: &mut Window,
cx: &mut Context<Self>,
) {
// Undo all branch changes
for entry in &self.buffer_entries {
let base_version = entry.base.read(cx).version();
entry.branch.update(cx, |buffer, cx| {
let undo_counts = buffer
.operations()
.iter()
.filter_map(|(timestamp, _)| {
if !base_version.observed(*timestamp) {
Some((*timestamp, u32::MAX))
} else {
None
}
})
.collect();
buffer.undo_operations(undo_counts, cx);
});
}
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.clear(cx);
});
let mut buffer_entries = Vec::new();
let mut new_diffs = Vec::new();
for location in locations {
let branch_buffer;
if let Some(ix) = self
.buffer_entries
.iter()
.position(|entry| entry.base == location.buffer)
{
let entry = self.buffer_entries.remove(ix);
branch_buffer = entry.branch.clone();
buffer_entries.push(entry);
} else {
branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx));
new_diffs.push(cx.new(|cx| {
let mut diff = BufferDiff::new(&branch_buffer.read(cx).snapshot(), cx);
let _ = diff.set_base_text_buffer(
location.buffer.clone(),
branch_buffer.read(cx).text_snapshot(),
cx,
);
diff
}));
buffer_entries.push(BufferEntry {
branch: branch_buffer.clone(),
base: location.buffer.clone(),
_subscription: cx.subscribe(&branch_buffer, Self::on_buffer_event),
});
}
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.push_excerpts(
branch_buffer,
location
.ranges
.into_iter()
.map(|range| ExcerptRange::new(range)),
cx,
);
});
}
self.buffer_entries = buffer_entries;
self.editor.update(cx, |editor, cx| {
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| {
selections.refresh()
});
editor.buffer.update(cx, |buffer, cx| {
for diff in new_diffs {
buffer.add_diff(diff, cx)
}
})
});
}
pub fn recalculate_all_buffer_diffs(&self) {
for (ix, entry) in self.buffer_entries.iter().enumerate().rev() {
self.recalculate_diffs_tx
.unbounded_send(RecalculateDiff {
buffer: entry.branch.clone(),
debounce: ix > 0,
})
.ok();
}
}
fn on_buffer_event(
&mut self,
buffer: Entity<Buffer>,
event: &BufferEvent,
_cx: &mut Context<Self>,
) {
if let BufferEvent::Operation { .. } = event {
self.recalculate_diffs_tx
.unbounded_send(RecalculateDiff {
buffer,
debounce: true,
})
.ok();
}
}
}
impl Render for ProposedChangesEditor {
    /// Renders the wrapped editor filling the available space, tagged with a
    /// key context for keybinding resolution.
    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
        let inner_editor = self.editor.clone();
        let container = div().size_full().key_context("ProposedChangesEditor");
        container.child(inner_editor)
    }
}
impl Focusable for ProposedChangesEditor {
    /// Focus belongs to the inner editor; delegate to it.
    fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
        let inner = &self.editor;
        inner.focus_handle(cx)
    }
}
impl EventEmitter<EditorEvent> for ProposedChangesEditor {}
/// Workspace item integration. Most methods delegate to the inner editor so
/// the proposed-changes view behaves like a regular editor tab.
impl Item for ProposedChangesEditor {
type Event = EditorEvent;
// Tab icon: a diff glyph.
fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
Some(Icon::new(IconName::Diff))
}
// Tab label comes from the stored title.
fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
self.title.clone()
}
// Buffer search is handled by the inner editor.
fn as_searchable(&self, _: &Entity<Self>) -> Option<Box<dyn SearchableItemHandle>> {
Some(Box::new(self.editor.clone()))
}
// Allows callers to downcast this item either to itself or to the inner
// `Editor`, so editor-targeted features keep working.
fn act_as_type<'a>(
&'a self,
type_id: TypeId,
self_handle: &'a Entity<Self>,
_: &'a App,
) -> Option<gpui::AnyView> {
if type_id == TypeId::of::<Self>() {
Some(self_handle.to_any())
} else if type_id == TypeId::of::<Editor>() {
Some(self.editor.to_any())
} else {
None
}
}
// Lifecycle notifications are forwarded to the inner editor.
fn added_to_workspace(
&mut self,
workspace: &mut Workspace,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.editor.update(cx, |editor, cx| {
Item::added_to_workspace(editor, workspace, window, cx)
});
}
fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.editor
.update(cx, |editor, cx| editor.deactivated(window, cx));
}
// Navigation history entries are resolved by the inner editor.
fn navigate(
&mut self,
data: Box<dyn std::any::Any>,
window: &mut Window,
cx: &mut Context<Self>,
) -> bool {
self.editor
.update(cx, |editor, cx| Item::navigate(editor, data, window, cx))
}
fn set_nav_history(
&mut self,
nav_history: workspace::ItemNavHistory,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.editor.update(cx, |editor, cx| {
Item::set_nav_history(editor, nav_history, window, cx)
});
}
// Saving is delegated: the item is savable iff the inner editor is.
fn can_save(&self, cx: &App) -> bool {
self.editor.read(cx).can_save(cx)
}
fn save(
&mut self,
options: SaveOptions,
project: Entity<Project>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
self.editor.update(cx, |editor, cx| {
Item::save(editor, options, project, window, cx)
})
}
}
impl ProposedChangesEditorToolbar {
    /// Creates a toolbar that is not yet attached to any editor.
    pub fn new() -> Self {
        Self {
            current_editor: None,
        }
    }

    /// Shown on the primary right only while a proposed-changes editor is the
    /// active item; hidden otherwise.
    fn get_toolbar_item_location(&self) -> ToolbarItemLocation {
        match self.current_editor {
            Some(_) => ToolbarItemLocation::PrimaryRight,
            None => ToolbarItemLocation::Hidden,
        }
    }
}
impl Render for ProposedChangesEditorToolbar {
    /// Renders the "Apply All" button: enabled with its keybinding shown when
    /// an editor is attached, disabled otherwise.
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let apply_all = ButtonLike::new("apply-changes").child(Label::new("Apply All"));
        if let Some(editor) = &self.current_editor {
            let focus_handle = editor.focus_handle(cx);
            let binding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx);
            apply_all
                .child(binding)
                .on_click(move |_event, window, cx| {
                    focus_handle.dispatch_action(&ApplyAllDiffHunks, window, cx)
                })
        } else {
            apply_all.disabled(true)
        }
    }
}
impl EventEmitter<ToolbarItemEvent> for ProposedChangesEditorToolbar {}
impl ToolbarItemView for ProposedChangesEditorToolbar {
    /// Tracks the active pane item, keeping a handle only when it is a
    /// `ProposedChangesEditor`, and reports where the toolbar should appear.
    fn set_active_pane_item(
        &mut self,
        active_pane_item: Option<&dyn workspace::ItemHandle>,
        _window: &mut Window,
        _cx: &mut Context<Self>,
    ) -> workspace::ToolbarItemLocation {
        self.current_editor = match active_pane_item {
            Some(item) => item.downcast::<ProposedChangesEditor>(),
            None => None,
        };
        self.get_toolbar_item_location()
    }
}
impl BranchBufferSemanticsProvider {
    /// Returns the base buffer backing `buffer`, but only when the base has
    /// observed every anchor in `positions` — i.e. none of them fall in a
    /// region that exists only on the branch.
    fn to_base(
        &self,
        buffer: &Entity<Buffer>,
        positions: &[text::Anchor],
        cx: &App,
    ) -> Option<Entity<Buffer>> {
        let base_buffer = buffer.read(cx).base_buffer()?;
        let base_version = base_buffer.read(cx).version();
        let all_observed = positions
            .iter()
            .all(|position| base_version.observed(position.timestamp));
        all_observed.then_some(base_buffer)
    }
}
/// Delegates semantics requests to the wrapped provider, translated into the
/// base buffer's coordinates via `to_base`. Requests whose positions fall in
/// branch-only regions return `None`; rename and inline-value support is
/// deliberately disabled for branch buffers.
impl SemanticsProvider for BranchBufferSemanticsProvider {
fn hover(
&self,
buffer: &Entity<Buffer>,
position: text::Anchor,
cx: &mut App,
) -> Option<Task<Option<Vec<project::Hover>>>> {
let buffer = self.to_base(buffer, &[position], cx)?;
self.0.hover(&buffer, position, cx)
}
// Inlay chunk bookkeeping is forwarded unchanged.
fn applicable_inlay_chunks(
&self,
buffer: &Entity<Buffer>,
ranges: &[Range<text::Anchor>],
cx: &mut App,
) -> Vec<Range<BufferRow>> {
self.0.applicable_inlay_chunks(buffer, ranges, cx)
}
fn invalidate_inlay_hints(&self, for_buffers: &HashSet<BufferId>, cx: &mut App) {
self.0.invalidate_inlay_hints(for_buffers, cx);
}
// Inlay hints are served only when every requested range endpoint exists in
// the base buffer.
fn inlay_hints(
&self,
invalidate: InvalidationStrategy,
buffer: Entity<Buffer>,
ranges: Vec<Range<text::Anchor>>,
known_chunks: Option<(clock::Global, HashSet<Range<BufferRow>>)>,
cx: &mut App,
) -> Option<HashMap<Range<BufferRow>, Task<anyhow::Result<CacheInlayHints>>>> {
let positions = ranges
.iter()
.flat_map(|range| [range.start, range.end])
.collect::<Vec<_>>();
let buffer = self.to_base(&buffer, &positions, cx)?;
self.0
.inlay_hints(invalidate, buffer, ranges, known_chunks, cx)
}
// Debugger inline values are not provided for branch buffers.
fn inline_values(
&self,
_: Entity<Buffer>,
_: Range<text::Anchor>,
_: &mut App,
) -> Option<Task<anyhow::Result<Vec<project::InlayHint>>>> {
None
}
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
if let Some(buffer) = self.to_base(buffer, &[], cx) {
self.0.supports_inlay_hints(&buffer, cx)
} else {
false
}
}
fn document_highlights(
&self,
buffer: &Entity<Buffer>,
position: text::Anchor,
cx: &mut App,
) -> Option<Task<anyhow::Result<Vec<project::DocumentHighlight>>>> {
let buffer = self.to_base(buffer, &[position], cx)?;
self.0.document_highlights(&buffer, position, cx)
}
fn definitions(
&self,
buffer: &Entity<Buffer>,
position: text::Anchor,
kind: crate::GotoDefinitionKind,
cx: &mut App,
) -> Option<Task<anyhow::Result<Option<Vec<project::LocationLink>>>>> {
let buffer = self.to_base(buffer, &[position], cx)?;
self.0.definitions(&buffer, position, kind, cx)
}
// Renaming is disabled in branch buffers.
fn range_for_rename(
&self,
_: &Entity<Buffer>,
_: text::Anchor,
_: &mut App,
) -> Option<Task<anyhow::Result<Option<Range<text::Anchor>>>>> {
None
}
fn perform_rename(
&self,
_: &Entity<Buffer>,
_: text::Anchor,
_: String,
_: &mut App,
) -> Option<Task<anyhow::Result<project::ProjectTransaction>>> {
None
}
}

View File

@@ -82,8 +82,6 @@ pub struct ExtensionManifest {
#[serde(default)]
pub context_servers: BTreeMap<Arc<str>, ContextServerManifestEntry>,
#[serde(default)]
pub agent_servers: BTreeMap<Arc<str>, AgentServerManifestEntry>,
#[serde(default)]
pub slash_commands: BTreeMap<Arc<str>, SlashCommandManifestEntry>,
#[serde(default)]
pub snippets: Option<PathBuf>,
@@ -140,48 +138,6 @@ pub struct LibManifestEntry {
pub version: Option<SemanticVersion>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct AgentServerManifestEntry {
/// Display name for the agent (shown in menus).
pub name: String,
/// Environment variables to set when launching the agent server.
#[serde(default)]
pub env: HashMap<String, String>,
/// Optional icon path (relative to extension root, e.g., "ai.svg").
/// Should be a small SVG icon for display in menus.
#[serde(default)]
pub icon: Option<String>,
/// Per-target configuration for archive-based installation.
/// The key format is "{os}-{arch}" where:
/// - os: "darwin" (macOS), "linux", "windows"
/// - arch: "aarch64" (arm64), "x86_64"
///
/// Example:
/// ```toml
/// [agent_servers.myagent.targets.darwin-aarch64]
/// archive = "https://example.com/myagent-darwin-arm64.zip"
/// cmd = "./myagent"
/// args = ["--serve"]
/// sha256 = "abc123..." # optional
/// ```
pub targets: HashMap<String, TargetConfig>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct TargetConfig {
/// URL to download the archive from (e.g., "https://github.com/owner/repo/releases/download/v1.0.0/myagent-darwin-arm64.zip")
pub archive: String,
/// Command to run (e.g., "./myagent" or "./myagent.exe")
pub cmd: String,
/// Command-line arguments to pass to the agent server.
#[serde(default)]
pub args: Vec<String>,
/// Optional SHA-256 hash of the archive for verification.
/// If not provided and the URL is a GitHub release, we'll attempt to fetch it from GitHub.
#[serde(default)]
pub sha256: Option<String>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub enum ExtensionLibraryKind {
Rust,
@@ -310,7 +266,6 @@ fn manifest_from_old_manifest(
.collect(),
language_servers: Default::default(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -343,7 +298,6 @@ mod tests {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: vec![],
@@ -450,31 +404,4 @@ mod tests {
);
assert!(manifest.allow_exec("docker", &["ps"]).is_err()); // wrong first arg
}
#[test]
fn parse_manifest_with_agent_server_archive_launcher() {
let toml_src = r#"
id = "example.agent-server-ext"
name = "Agent Server Example"
version = "1.0.0"
schema_version = 0
[agent_servers.foo]
name = "Foo Agent"
[agent_servers.foo.targets.linux-x86_64]
archive = "https://example.com/agent-linux-x64.tar.gz"
cmd = "./agent"
args = ["--serve"]
"#;
let manifest: ExtensionManifest = toml::from_str(toml_src).expect("manifest should parse");
assert_eq!(manifest.id.as_ref(), "example.agent-server-ext");
assert!(manifest.agent_servers.contains_key("foo"));
let entry = manifest.agent_servers.get("foo").unwrap();
assert!(entry.targets.contains_key("linux-x86_64"));
let target = entry.targets.get("linux-x86_64").unwrap();
assert_eq!(target.archive, "https://example.com/agent-linux-x64.tar.gz");
assert_eq!(target.cmd, "./agent");
assert_eq!(target.args, vec!["--serve"]);
}
}

View File

@@ -235,21 +235,6 @@ async fn copy_extension_resources(
.with_context(|| "failed to copy icons")?;
}
for (_, agent_entry) in &manifest.agent_servers {
if let Some(icon_path) = &agent_entry.icon {
let source_icon = extension_path.join(icon_path);
let dest_icon = output_dir.join(icon_path);
// Create parent directory if needed
if let Some(parent) = dest_icon.parent() {
fs::create_dir_all(parent)?;
}
fs::copy(&source_icon, &dest_icon)
.with_context(|| format!("failed to copy agent server icon '{}'", icon_path))?;
}
}
if !manifest.languages.is_empty() {
let output_languages_dir = output_dir.join("languages");
fs::create_dir_all(&output_languages_dir)?;

View File

@@ -132,7 +132,6 @@ fn manifest() -> ExtensionManifest {
.into_iter()
.collect(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: vec![ExtensionCapability::ProcessExec(

View File

@@ -107,7 +107,6 @@ mod tests {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: vec![],

View File

@@ -159,7 +159,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
.collect(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -190,7 +189,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -370,7 +368,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),

View File

@@ -66,7 +66,6 @@ pub fn init(cx: &mut App) {
ExtensionCategoryFilter::ContextServers => {
ExtensionProvides::ContextServers
}
ExtensionCategoryFilter::AgentServers => ExtensionProvides::AgentServers,
ExtensionCategoryFilter::SlashCommands => ExtensionProvides::SlashCommands,
ExtensionCategoryFilter::IndexedDocsProviders => {
ExtensionProvides::IndexedDocsProviders
@@ -190,7 +189,6 @@ fn extension_provides_label(provides: ExtensionProvides) -> &'static str {
ExtensionProvides::Grammars => "Grammars",
ExtensionProvides::LanguageServers => "Language Servers",
ExtensionProvides::ContextServers => "MCP Servers",
ExtensionProvides::AgentServers => "Agent Servers",
ExtensionProvides::SlashCommands => "Slash Commands",
ExtensionProvides::IndexedDocsProviders => "Indexed Docs Providers",
ExtensionProvides::Snippets => "Snippets",

View File

@@ -9,10 +9,7 @@ use git::{
AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository,
GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode,
},
status::{
DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
UnmergedStatus,
},
status::{FileStatus, GitStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task, TaskLabel};
use ignore::gitignore::GitignoreBuilder;
@@ -44,9 +41,6 @@ pub struct FakeGitRepositoryState {
pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
pub head_contents: HashMap<RepoPath, String>,
pub index_contents: HashMap<RepoPath, String>,
// everything in commit contents is in oids
pub merge_base_contents: HashMap<RepoPath, Oid>,
pub oids: HashMap<Oid, String>,
pub blames: HashMap<RepoPath, Blame>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
@@ -66,8 +60,6 @@ impl FakeGitRepositoryState {
branches: Default::default(),
simulated_index_write_error_message: Default::default(),
refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
merge_base_contents: Default::default(),
oids: Default::default(),
}
}
}
@@ -118,13 +110,6 @@ impl GitRepository for FakeGitRepository {
.boxed()
}
fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
self.with_state_async(false, move |state| {
state.oids.get(&oid).cloned().context("oid does not exist")
})
.boxed()
}
fn load_commit(
&self,
_commit: String,
@@ -155,34 +140,6 @@ impl GitRepository for FakeGitRepository {
None
}
fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
let mut entries = HashMap::default();
self.with_state_async(false, |state| {
for (path, content) in &state.head_contents {
let status = if let Some((oid, original)) = state
.merge_base_contents
.get(path)
.map(|oid| (oid, &state.oids[oid]))
{
if original == content {
continue;
}
TreeDiffStatus::Modified { old: *oid }
} else {
TreeDiffStatus::Added
};
entries.insert(path.clone(), status);
}
for (path, oid) in &state.merge_base_contents {
if !entries.contains_key(path) {
entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
}
}
Ok(TreeDiff { entries })
})
.boxed()
}
fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
self.with_state_async(false, |state| {
Ok(revs
@@ -566,7 +523,7 @@ impl GitRepository for FakeGitRepository {
let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
async move {
executor.simulate_random_delay().await;
let oid = git::Oid::random(&mut executor.rng());
let oid = Oid::random(&mut executor.rng());
let entry = fs.entry(&repository_dir_path)?;
checkpoints.lock().insert(oid, entry);
Ok(GitRepositoryCheckpoint { commit_sha: oid })
@@ -622,7 +579,7 @@ impl GitRepository for FakeGitRepository {
}
fn default_branch(&self) -> BoxFuture<'_, Result<Option<SharedString>>> {
async { Ok(Some("main".into())) }.boxed()
unimplemented!()
}
}

View File

@@ -1752,26 +1752,6 @@ impl FakeFs {
.unwrap();
}
pub fn set_merge_base_content_for_repo(
&self,
dot_git: &Path,
contents_by_path: &[(&str, String)],
) {
self.with_git_state(dot_git, true, |state| {
use git::Oid;
state.merge_base_contents.clear();
let oids = (1..)
.map(|n| n.to_string())
.map(|n| Oid::from_bytes(n.repeat(20).as_bytes()).unwrap());
for ((path, content), oid) in contents_by_path.iter().zip(oids) {
state.merge_base_contents.insert(repo_path(path), oid);
state.oids.insert(oid, content.clone());
}
})
.unwrap();
}
pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) {
self.with_git_state(dot_git, true, |state| {
state.blames.clear();

View File

@@ -1,6 +1,6 @@
use crate::commit::parse_git_diff_name_status;
use crate::stash::GitStash;
use crate::status::{DiffTreeType, GitStatus, StatusCode, TreeDiff};
use crate::status::{GitStatus, StatusCode};
use crate::{Oid, SHORT_SHA_LENGTH};
use anyhow::{Context as _, Result, anyhow, bail};
use collections::HashMap;
@@ -350,7 +350,6 @@ pub trait GitRepository: Send + Sync {
///
/// Also returns `None` for symlinks.
fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>>;
fn load_blob_content(&self, oid: Oid) -> BoxFuture<'_, Result<String>>;
fn set_index_text(
&self,
@@ -380,7 +379,6 @@ pub trait GitRepository: Send + Sync {
fn merge_message(&self) -> BoxFuture<'_, Option<String>>;
fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>>;
fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>>;
fn stash_entries(&self) -> BoxFuture<'_, Result<GitStash>>;
@@ -910,17 +908,6 @@ impl GitRepository for RealGitRepository {
.boxed()
}
fn load_blob_content(&self, oid: Oid) -> BoxFuture<'_, Result<String>> {
let repo = self.repository.clone();
self.executor
.spawn(async move {
let repo = repo.lock();
let content = repo.find_blob(oid.0)?.content().to_owned();
Ok(String::from_utf8(content)?)
})
.boxed()
}
fn set_index_text(
&self,
path: RepoPath,
@@ -1073,50 +1060,6 @@ impl GitRepository for RealGitRepository {
})
}
/// Runs `git diff-tree` between the two revisions described by `request` and
/// parses the NUL-delimited output into a `TreeDiff`.
fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
    let git_binary_path = self.any_git_binary_path.clone();
    let working_directory = match self.working_directory() {
        Ok(working_directory) => working_directory,
        Err(e) => return Task::ready(Err(e)).boxed(),
    };
    // -r: recurse into subtrees; -z: NUL-separated output; --no-renames:
    // report renames as delete+add so statuses stay within A/M/D.
    let mut args = vec![
        OsString::from("--no-optional-locks"),
        OsString::from("diff-tree"),
        OsString::from("-r"),
        OsString::from("-z"),
        OsString::from("--no-renames"),
    ];
    match request {
        DiffTreeType::MergeBase { base, head } => {
            args.push("--merge-base".into());
            args.push(OsString::from(base.as_str()));
            args.push(OsString::from(head.as_str()));
        }
        DiffTreeType::Since { base, head } => {
            args.push(OsString::from(base.as_str()));
            args.push(OsString::from(head.as_str()));
        }
    }
    self.executor
        .spawn(async move {
            let output = new_smol_command(&git_binary_path)
                .current_dir(working_directory)
                .args(args)
                .output()
                .await?;
            if output.status.success() {
                let stdout = String::from_utf8_lossy(&output.stdout);
                stdout.parse()
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                // Name the subcommand that actually ran (was mislabeled
                // "git status" previously).
                anyhow::bail!("git diff-tree failed: {stderr}");
            }
        })
        .boxed()
}
fn stash_entries(&self) -> BoxFuture<'_, Result<GitStash>> {
let git_binary_path = self.any_git_binary_path.clone();
let working_directory = self.working_directory();
@@ -1884,23 +1827,13 @@ impl GitRepository for RealGitRepository {
return Ok(output);
}
if let Ok(output) = git.run(&["symbolic-ref", "refs/remotes/origin/HEAD"]).await {
return Ok(output
.strip_prefix("refs/remotes/origin/")
.map(|s| SharedString::from(s.to_owned())));
}
let output = git
.run(&["symbolic-ref", "refs/remotes/origin/HEAD"])
.await?;
if let Ok(default_branch) = git.run(&["config", "init.defaultBranch"]).await {
if git.run(&["rev-parse", &default_branch]).await.is_ok() {
return Ok(Some(default_branch.into()));
}
}
if git.run(&["rev-parse", "master"]).await.is_ok() {
return Ok(Some("master".into()));
}
Ok(None)
Ok(output
.strip_prefix("refs/remotes/origin/")
.map(|s| SharedString::from(s.to_owned())))
})
.boxed()
}

View File

@@ -1,7 +1,5 @@
use crate::{Oid, repository::RepoPath};
use anyhow::{Result, anyhow};
use collections::HashMap;
use gpui::SharedString;
use crate::repository::RepoPath;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::{str::FromStr, sync::Arc};
use util::{ResultExt, rel_path::RelPath};
@@ -192,11 +190,7 @@ impl FileStatus {
}
pub fn is_deleted(self) -> bool {
let FileStatus::Tracked(tracked) = self else {
return false;
};
tracked.index_status == StatusCode::Deleted && tracked.worktree_status != StatusCode::Added
|| tracked.worktree_status == StatusCode::Deleted
matches!(self, FileStatus::Tracked(tracked) if matches!((tracked.index_status, tracked.worktree_status), (StatusCode::Deleted, _) | (_, StatusCode::Deleted)))
}
pub fn is_untracked(self) -> bool {
@@ -492,128 +486,3 @@ impl Default for GitStatus {
}
}
}
/// Selects how two revisions are compared by `GitRepository::diff_tree`.
pub enum DiffTreeType {
/// Compare `head` against the merge base of `base` and `head`
/// (passes `--merge-base` to `git diff-tree`).
MergeBase {
base: SharedString,
head: SharedString,
},
/// Compare `head` directly against `base`.
Since {
base: SharedString,
head: SharedString,
},
}
impl DiffTreeType {
    /// The base revision of the comparison.
    pub fn base(&self) -> &SharedString {
        match self {
            DiffTreeType::MergeBase { base, .. } | DiffTreeType::Since { base, .. } => base,
        }
    }

    /// The head revision of the comparison.
    pub fn head(&self) -> &SharedString {
        match self {
            DiffTreeType::MergeBase { head, .. } | DiffTreeType::Since { head, .. } => head,
        }
    }
}
/// Result of a tree diff: each changed path mapped to how it changed.
#[derive(Debug, PartialEq)]
pub struct TreeDiff {
pub entries: HashMap<RepoPath, TreeDiffStatus>,
}
/// How a single path changed between two trees. `old` holds the blob OID of
/// the file's previous content where one existed.
#[derive(Debug, Clone, PartialEq)]
pub enum TreeDiffStatus {
Added,
Modified { old: Oid },
Deleted { old: Oid },
}
impl FromStr for TreeDiff {
    type Err = anyhow::Error;

    /// Parses `git diff-tree -z` output: NUL-separated pairs of a record
    /// (`:<old_mode> <new_mode> <old_sha> <new_sha> <status>`) and a path.
    fn from_str(s: &str) -> Result<Self> {
        let mut fields = s.split('\0');
        let mut parsed = HashMap::default();
        while let Some((status, path)) = fields.next().zip(fields.next()) {
            let path = RepoPath(RelPath::unix(path)?.into());
            // Skip the two mode fields; the next two are the old/new SHAs.
            // `split(' ')` (char pattern) avoids clippy::single_char_pattern.
            let mut fields = status.split(' ').skip(2);
            let old_sha = fields
                .next()
                .ok_or_else(|| anyhow!("expected to find old_sha"))?
                .parse()?;
            let _new_sha = fields
                .next()
                .ok_or_else(|| anyhow!("expected to find new_sha"))?;
            let status = fields
                .next()
                .and_then(|s| {
                    if s.len() == 1 {
                        s.as_bytes().first()
                    } else {
                        None
                    }
                })
                .ok_or_else(|| anyhow!("expected to find status"))?;
            let result = match StatusCode::from_byte(*status)? {
                StatusCode::Modified => TreeDiffStatus::Modified { old: old_sha },
                StatusCode::Added => TreeDiffStatus::Added,
                StatusCode::Deleted => TreeDiffStatus::Deleted { old: old_sha },
                // Other status codes are not represented in `TreeDiffStatus`.
                _status => continue,
            };
            parsed.insert(path, result);
        }
        Ok(Self { entries: parsed })
    }
}
#[cfg(test)]
mod tests {
use crate::{
repository::RepoPath,
status::{TreeDiff, TreeDiffStatus},
};
// Parses representative `git diff-tree -z` output covering an added, a
// deleted, and a modified entry, and checks the structured result.
#[test]
fn test_tree_diff_parsing() {
let input = ":000000 100644 0000000000000000000000000000000000000000 0062c311b8727c3a2e3cd7a41bc9904feacf8f98 A\x00.zed/settings.json\x00".to_owned() +
":100644 000000 bb3e9ed2e97a8c02545bae243264d342c069afb3 0000000000000000000000000000000000000000 D\x00README.md\x00" +
":100644 100644 42f097005a1f21eb2260fad02ec8c991282beee8 a437d85f63bb8c62bd78f83f40c506631fabf005 M\x00parallel.go\x00";
let output: TreeDiff = input.parse().unwrap();
assert_eq!(
output,
TreeDiff {
entries: [
(
RepoPath::new(".zed/settings.json").unwrap(),
TreeDiffStatus::Added,
),
(
RepoPath::new("README.md").unwrap(),
TreeDiffStatus::Deleted {
old: "bb3e9ed2e97a8c02545bae243264d342c069afb3".parse().unwrap()
}
),
(
RepoPath::new("parallel.go").unwrap(),
TreeDiffStatus::Modified {
old: "42f097005a1f21eb2260fad02ec8c991282beee8".parse().unwrap(),
}
),
]
.into_iter()
.collect()
}
)
}
}

View File

@@ -44,6 +44,7 @@ multi_buffer.workspace = true
notifications.workspace = true
panel.workspace = true
picker.workspace = true
postage.workspace = true
project.workspace = true
schemars.workspace = true
serde.workspace = true

View File

@@ -4,15 +4,16 @@ use crate::{
git_panel_settings::GitPanelSettings,
remote_button::{render_publish_button, render_push_button},
};
use anyhow::{Context as _, Result, anyhow};
use anyhow::Result;
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus};
use collections::{HashMap, HashSet};
use editor::{
Addon, Editor, EditorEvent, SelectionEffects,
Editor, EditorEvent, SelectionEffects,
actions::{GoToHunk, GoToPreviousHunk},
multibuffer_context_lines,
scroll::Autoscroll,
};
use futures::StreamExt;
use git::{
Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus},
@@ -26,23 +27,18 @@ use language::{Anchor, Buffer, Capability, OffsetRangeExt};
use multi_buffer::{MultiBuffer, PathKey};
use project::{
Project, ProjectPath,
git_store::{
Repository,
branch_diff::{self, BranchDiffEvent, DiffBase},
},
git_store::{GitStore, GitStoreEvent, Repository, RepositoryEvent},
};
use settings::{Settings, SettingsStore};
use std::any::{Any, TypeId};
use std::ops::Range;
use std::sync::Arc;
use theme::ActiveTheme;
use ui::{KeyBinding, Tooltip, prelude::*, vertical_divider};
use util::{ResultExt as _, rel_path::RelPath};
use util::ResultExt as _;
use workspace::{
CloseActiveItem, ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
item::{BreadcrumbText, Item, ItemEvent, ItemHandle, SaveOptions, TabContentParams},
notifications::NotifyTaskExt,
searchable::SearchableItemHandle,
};
@@ -52,24 +48,30 @@ actions!(
/// Shows the diff between the working directory and the index.
Diff,
/// Adds files to the git staging area.
Add,
/// Shows the diff between the working directory and your default
/// branch (typically main or master).
BranchDiff
Add
]
);
pub struct ProjectDiff {
project: Entity<Project>,
multibuffer: Entity<MultiBuffer>,
branch_diff: Entity<branch_diff::BranchDiff>,
editor: Entity<Editor>,
buffer_diff_subscriptions: HashMap<Arc<RelPath>, (Entity<BufferDiff>, Subscription)>,
git_store: Entity<GitStore>,
buffer_diff_subscriptions: HashMap<RepoPath, (Entity<BufferDiff>, Subscription)>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
update_needed: postage::watch::Sender<()>,
pending_scroll: Option<PathKey>,
_task: Task<Result<()>>,
_subscription: Subscription,
_git_store_subscription: Subscription,
}
#[derive(Debug)]
struct DiffBuffer {
path_key: PathKey,
buffer: Entity<Buffer>,
diff: Entity<BufferDiff>,
file_status: FileStatus,
}
const CONFLICT_SORT_PREFIX: u64 = 1;
@@ -79,7 +81,6 @@ const NEW_SORT_PREFIX: u64 = 3;
impl ProjectDiff {
pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context<Workspace>) {
workspace.register_action(Self::deploy);
workspace.register_action(Self::deploy_branch_diff);
workspace.register_action(|workspace, _: &Add, window, cx| {
Self::deploy(workspace, &Diff, window, cx);
});
@@ -95,40 +96,6 @@ impl ProjectDiff {
Self::deploy_at(workspace, None, window, cx)
}
fn deploy_branch_diff(
workspace: &mut Workspace,
_: &BranchDiff,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
telemetry::event!("Git Branch Diff Opened");
let project = workspace.project().clone();
let existing = workspace
.items_of_type::<Self>(cx)
.find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Merge { .. }));
if let Some(existing) = existing {
workspace.activate_item(&existing, true, true, window, cx);
return;
}
let workspace = cx.entity();
window
.spawn(cx, async move |cx| {
let this = cx
.update(|window, cx| {
Self::new_with_default_branch(project, workspace.clone(), window, cx)
})?
.await?;
workspace
.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(Box::new(this), None, true, window, cx);
})
.ok();
anyhow::Ok(())
})
.detach_and_notify_err(window, cx);
}
pub fn deploy_at(
workspace: &mut Workspace,
entry: Option<GitStatusEntry>,
@@ -143,10 +110,7 @@ impl ProjectDiff {
"Action"
}
);
let existing = workspace
.items_of_type::<Self>(cx)
.find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Head));
let project_diff = if let Some(existing) = existing {
let project_diff = if let Some(existing) = workspace.item_of_type::<Self>(cx) {
workspace.activate_item(&existing, true, true, window, cx);
existing
} else {
@@ -175,54 +139,11 @@ impl ProjectDiff {
})
}
fn new_with_default_branch(
project: Entity<Project>,
workspace: Entity<Workspace>,
window: &mut Window,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
let Some(repo) = project.read(cx).git_store().read(cx).active_repository() else {
return Task::ready(Err(anyhow!("No active repository")));
};
let main_branch = repo.update(cx, |repo, _| repo.default_branch());
window.spawn(cx, async move |cx| {
let main_branch = main_branch
.await??
.context("Could not determine default branch")?;
let branch_diff = cx.new_window_entity(|window, cx| {
branch_diff::BranchDiff::new(
DiffBase::Merge {
base_ref: main_branch,
},
project.clone(),
window,
cx,
)
})?;
cx.new_window_entity(|window, cx| {
Self::new_impl(branch_diff, project, workspace, window, cx)
})
})
}
fn new(
project: Entity<Project>,
workspace: Entity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let branch_diff =
cx.new(|cx| branch_diff::BranchDiff::new(DiffBase::Head, project.clone(), window, cx));
Self::new_impl(branch_diff, project, workspace, window, cx)
}
fn new_impl(
branch_diff: Entity<branch_diff::BranchDiff>,
project: Entity<Project>,
workspace: Entity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle();
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
@@ -232,25 +153,9 @@ impl ProjectDiff {
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
diff_display_editor.disable_diagnostics(cx);
diff_display_editor.set_expand_all_diff_hunks(cx);
match branch_diff.read(cx).diff_base() {
DiffBase::Head => {
diff_display_editor.register_addon(GitPanelAddon {
workspace: workspace.downgrade(),
});
}
DiffBase::Merge { .. } => {
diff_display_editor.register_addon(BranchDiffAddon {
branch_diff: branch_diff.clone(),
});
diff_display_editor.start_temporary_diff_override();
diff_display_editor.set_render_diff_hunk_controls(
Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()),
cx,
);
//
}
}
diff_display_editor.register_addon(GitPanelAddon {
workspace: workspace.downgrade(),
});
diff_display_editor
});
window.defer(cx, {
@@ -267,71 +172,71 @@ impl ProjectDiff {
cx.subscribe_in(&editor, window, Self::handle_editor_event)
.detach();
let branch_diff_subscription = cx.subscribe_in(
&branch_diff,
let git_store = project.read(cx).git_store().clone();
let git_store_subscription = cx.subscribe_in(
&git_store,
window,
move |this, _git_store, event, window, cx| match event {
BranchDiffEvent::FileListChanged => {
this._task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
move |this, _git_store, event, _window, _cx| match event {
GitStoreEvent::ActiveRepositoryChanged(_)
| GitStoreEvent::RepositoryUpdated(
_,
RepositoryEvent::StatusesChanged { full_scan: _ },
true,
)
| GitStoreEvent::ConflictsUpdated => {
*this.update_needed.borrow_mut() = ();
}
_ => {}
},
);
let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
let mut was_collapse_untracked_diff =
GitPanelSettings::get_global(cx).collapse_untracked_diff;
cx.observe_global_in::<SettingsStore>(window, move |this, window, cx| {
cx.observe_global::<SettingsStore>(move |this, cx| {
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
let is_collapse_untracked_diff =
GitPanelSettings::get_global(cx).collapse_untracked_diff;
if is_sort_by_path != was_sort_by_path
|| is_collapse_untracked_diff != was_collapse_untracked_diff
{
this._task = {
window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
}
*this.update_needed.borrow_mut() = ();
}
was_sort_by_path = is_sort_by_path;
was_collapse_untracked_diff = is_collapse_untracked_diff;
})
.detach();
let task = window.spawn(cx, {
let (mut send, recv) = postage::watch::channel::<()>();
let worker = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
async |cx| Self::handle_status_updates(this, recv, cx).await
});
// Kick off a refresh immediately
*send.borrow_mut() = ();
Self {
project,
git_store: git_store.clone(),
workspace: workspace.downgrade(),
branch_diff,
focus_handle,
editor,
multibuffer,
buffer_diff_subscriptions: Default::default(),
pending_scroll: None,
_task: task,
_subscription: branch_diff_subscription,
update_needed: send,
_task: worker,
_git_store_subscription: git_store_subscription,
}
}
pub fn diff_base<'a>(&'a self, cx: &'a App) -> &'a DiffBase {
self.branch_diff.read(cx).diff_base()
}
pub fn move_to_entry(
&mut self,
entry: GitStatusEntry,
window: &mut Window,
cx: &mut Context<Self>,
) {
let Some(git_repo) = self.branch_diff.read(cx).repo() else {
let Some(git_repo) = self.git_store.read(cx).active_repository() else {
return;
};
let repo = git_repo.read(cx);
@@ -461,28 +366,77 @@ impl ProjectDiff {
}
}
fn load_buffers(&mut self, cx: &mut Context<Self>) -> Vec<Task<Result<DiffBuffer>>> {
let Some(repo) = self.git_store.read(cx).active_repository() else {
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.clear(cx);
});
self.buffer_diff_subscriptions.clear();
return vec![];
};
let mut previous_paths = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
let mut result = vec![];
repo.update(cx, |repo, cx| {
for entry in repo.cached_status() {
if !entry.status.has_changes() {
continue;
}
let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path, cx)
else {
continue;
};
let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
previous_paths.remove(&path_key);
let load_buffer = self
.project
.update(cx, |project, cx| project.open_buffer(project_path, cx));
let project = self.project.clone();
result.push(cx.spawn(async move |_, cx| {
let buffer = load_buffer.await?;
let changes = project
.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})?
.await?;
Ok(DiffBuffer {
path_key,
buffer,
diff: changes,
file_status: entry.status,
})
}));
}
});
self.multibuffer.update(cx, |multibuffer, cx| {
for path in previous_paths {
self.buffer_diff_subscriptions
.remove(&path.path.clone().into());
multibuffer.remove_excerpts_for_path(path, cx);
}
});
result
}
fn register_buffer(
&mut self,
path_key: PathKey,
file_status: FileStatus,
buffer: Entity<Buffer>,
diff: Entity<BufferDiff>,
diff_buffer: DiffBuffer,
window: &mut Window,
cx: &mut Context<Self>,
) {
if self.branch_diff.read(cx).diff_base().is_merge_base() {
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.add_diff(diff.clone(), cx);
});
}
let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| {
this._task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
let path_key = diff_buffer.path_key.clone();
let buffer = diff_buffer.buffer.clone();
let diff = diff_buffer.diff.clone();
let subscription = cx.subscribe(&diff, move |this, _, _, _| {
*this.update_needed.borrow_mut() = ();
});
self.buffer_diff_subscriptions
.insert(path_key.path.clone(), (diff.clone(), subscription));
.insert(path_key.path.clone().into(), (diff.clone(), subscription));
let conflict_addon = self
.editor
@@ -526,8 +480,8 @@ impl ProjectDiff {
});
}
if is_excerpt_newly_added
&& (file_status.is_deleted()
|| (file_status.is_untracked()
&& (diff_buffer.file_status.is_deleted()
|| (diff_buffer.file_status.is_untracked()
&& GitPanelSettings::get_global(cx).collapse_untracked_diff))
{
editor.fold_buffer(snapshot.text.remote_id(), cx)
@@ -552,51 +506,26 @@ impl ProjectDiff {
}
}
pub async fn refresh(this: WeakEntity<Self>, cx: &mut AsyncWindowContext) -> Result<()> {
let mut path_keys = Vec::new();
let buffers_to_load = this.update(cx, |this, cx| {
let (repo, buffers_to_load) = this.branch_diff.update(cx, |branch_diff, cx| {
let load_buffers = branch_diff.load_buffers(cx);
(branch_diff.repo().cloned(), load_buffers)
});
let mut previous_paths = this.multibuffer.read(cx).paths().collect::<HashSet<_>>();
if let Some(repo) = repo {
let repo = repo.read(cx);
path_keys = Vec::with_capacity(buffers_to_load.len());
for entry in buffers_to_load.iter() {
let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx);
let path_key =
PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
previous_paths.remove(&path_key);
path_keys.push(path_key)
pub async fn handle_status_updates(
this: WeakEntity<Self>,
mut recv: postage::watch::Receiver<()>,
cx: &mut AsyncWindowContext,
) -> Result<()> {
while (recv.next().await).is_some() {
let buffers_to_load = this.update(cx, |this, cx| this.load_buffers(cx))?;
for buffer_to_load in buffers_to_load {
if let Some(buffer) = buffer_to_load.await.log_err() {
cx.update(|window, cx| {
this.update(cx, |this, cx| this.register_buffer(buffer, window, cx))
.ok();
})?;
}
}
this.multibuffer.update(cx, |multibuffer, cx| {
for path in previous_paths {
this.buffer_diff_subscriptions.remove(&path.path);
multibuffer.remove_excerpts_for_path(path, cx);
}
});
buffers_to_load
})?;
for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) {
if let Some((buffer, diff)) = entry.load.await.log_err() {
cx.update(|window, cx| {
this.update(cx, |this, cx| {
this.register_buffer(path_key, entry.file_status, buffer, diff, window, cx)
})
.ok();
})?;
}
this.update(cx, |this, cx| {
this.pending_scroll.take();
cx.notify();
})?;
}
this.update(cx, |this, cx| {
this.pending_scroll.take();
cx.notify();
})?;
Ok(())
}
@@ -665,8 +594,8 @@ impl Item for ProjectDiff {
Some("Project Diff".into())
}
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
Label::new(self.tab_content_text(0, cx))
fn tab_content(&self, params: TabContentParams, _window: &Window, _: &App) -> AnyElement {
Label::new("Uncommitted Changes")
.color(if params.selected {
Color::Default
} else {
@@ -675,11 +604,8 @@ impl Item for ProjectDiff {
.into_any_element()
}
fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString {
match self.branch_diff.read(cx).diff_base() {
DiffBase::Head => "Uncommitted Changes".into(),
DiffBase::Merge { base_ref } => format!("Changes since {}", base_ref).into(),
}
fn tab_content_text(&self, _detail: usize, _: &App) -> SharedString {
"Uncommitted Changes".into()
}
fn telemetry_event_text(&self) -> Option<&'static str> {
@@ -876,47 +802,30 @@ impl SerializableItem for ProjectDiff {
}
fn deserialize(
project: Entity<Project>,
_project: Entity<Project>,
workspace: WeakEntity<Workspace>,
workspace_id: workspace::WorkspaceId,
item_id: workspace::ItemId,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
window: &mut Window,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
window.spawn(cx, async move |cx| {
let diff_base = persistence::PROJECT_DIFF_DB.get_diff_base(item_id, workspace_id)?;
let diff = cx.update(|window, cx| {
let branch_diff = cx
.new(|cx| branch_diff::BranchDiff::new(diff_base, project.clone(), window, cx));
let workspace = workspace.upgrade().context("workspace gone")?;
anyhow::Ok(
cx.new(|cx| ProjectDiff::new_impl(branch_diff, project, workspace, window, cx)),
)
})??;
Ok(diff)
workspace.update_in(cx, |workspace, window, cx| {
let workspace_handle = cx.entity();
cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx))
})
})
}
fn serialize(
&mut self,
workspace: &mut Workspace,
item_id: workspace::ItemId,
_workspace: &mut Workspace,
_item_id: workspace::ItemId,
_closing: bool,
_window: &mut Window,
cx: &mut Context<Self>,
_cx: &mut Context<Self>,
) -> Option<Task<Result<()>>> {
let workspace_id = workspace.database_id()?;
let diff_base = self.diff_base(cx).clone();
Some(cx.background_spawn({
async move {
persistence::PROJECT_DIFF_DB
.save_diff_base(item_id, workspace_id, diff_base.clone())
.await
}
}))
None
}
fn should_serialize(&self, _: &Self::Event) -> bool {
@@ -924,80 +833,6 @@ impl SerializableItem for ProjectDiff {
}
}
mod persistence {
use anyhow::Context as _;
use db::{
sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
sqlez_macros::sql,
};
use project::git_store::branch_diff::DiffBase;
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
pub struct ProjectDiffDb(ThreadSafeConnection);
impl Domain for ProjectDiffDb {
const NAME: &str = stringify!(ProjectDiffDb);
const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE project_diffs(
workspace_id INTEGER,
item_id INTEGER UNIQUE,
diff_base TEXT,
PRIMARY KEY(workspace_id, item_id),
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
)];
}
db::static_connection!(PROJECT_DIFF_DB, ProjectDiffDb, [WorkspaceDb]);
impl ProjectDiffDb {
pub async fn save_diff_base(
&self,
item_id: ItemId,
workspace_id: WorkspaceId,
diff_base: DiffBase,
) -> anyhow::Result<()> {
self.write(move |connection| {
let sql_stmt = sql!(
INSERT OR REPLACE INTO project_diffs(item_id, workspace_id, diff_base) VALUES (?, ?, ?)
);
let diff_base_str = serde_json::to_string(&diff_base)?;
let mut query = connection.exec_bound::<(ItemId, WorkspaceId, String)>(sql_stmt)?;
query((item_id, workspace_id, diff_base_str)).context(format!(
"exec_bound failed to execute or parse for: {}",
sql_stmt
))
})
.await
}
pub fn get_diff_base(
&self,
item_id: ItemId,
workspace_id: WorkspaceId,
) -> anyhow::Result<DiffBase> {
let sql_stmt =
sql!(SELECT diff_base FROM project_diffs WHERE item_id = ?AND workspace_id = ?);
let diff_base_str = self.select_row_bound::<(ItemId, WorkspaceId), String>(sql_stmt)?(
(item_id, workspace_id),
)
.context(::std::format!(
"Error in get_diff_base, select_row_bound failed to execute or parse for: {}",
sql_stmt
))?;
let Some(diff_base_str) = diff_base_str else {
return Ok(DiffBase::Head);
};
serde_json::from_str(&diff_base_str).context("deserializing diff base")
}
}
}
pub struct ProjectDiffToolbar {
project_diff: Option<WeakEntity<ProjectDiff>>,
workspace: WeakEntity<Workspace>,
@@ -1062,7 +897,6 @@ impl ToolbarItemView for ProjectDiffToolbar {
) -> ToolbarItemLocation {
self.project_diff = active_pane_item
.and_then(|item| item.act_as::<ProjectDiff>(cx))
.filter(|item| item.read(cx).diff_base(cx) == &DiffBase::Head)
.map(|entity| entity.downgrade());
if self.project_diff.is_some() {
ToolbarItemLocation::PrimaryRight
@@ -1532,42 +1366,18 @@ fn merge_anchor_ranges<'a>(
})
}
struct BranchDiffAddon {
branch_diff: Entity<branch_diff::BranchDiff>,
}
impl Addon for BranchDiffAddon {
fn to_any(&self) -> &dyn std::any::Any {
self
}
fn override_status_for_buffer_id(
&self,
buffer_id: language::BufferId,
cx: &App,
) -> Option<FileStatus> {
self.branch_diff
.read(cx)
.status_for_buffer_id(buffer_id, cx)
}
}
#[cfg(test)]
mod tests {
use collections::HashMap;
use db::indoc;
use editor::test::editor_test_context::{EditorTestContext, assert_state_with_diff};
use git::status::{TrackedStatus, UnmergedStatus, UnmergedStatusCode};
use git::status::{UnmergedStatus, UnmergedStatusCode};
use gpui::TestAppContext;
use project::FakeFs;
use serde_json::json;
use settings::SettingsStore;
use std::path::Path;
use unindent::Unindent as _;
use util::{
path,
rel_path::{RelPath, rel_path},
};
use util::{path, rel_path::rel_path};
use super::*;
@@ -2205,99 +2015,6 @@ mod tests {
);
}
#[gpui::test]
async fn test_branch_diff(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/project"),
json!({
".git": {},
"a.txt": "C",
"b.txt": "new",
"c.txt": "in-merge-base-and-work-tree",
"d.txt": "created-in-head",
}),
)
.await;
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
let diff = cx
.update(|window, cx| {
ProjectDiff::new_with_default_branch(project.clone(), workspace, window, cx)
})
.await
.unwrap();
cx.run_until_parked();
fs.set_head_for_repo(
Path::new(path!("/project/.git")),
&[("a.txt", "B".into()), ("d.txt", "created-in-head".into())],
"sha",
);
// fs.set_index_for_repo(dot_git, index_state);
fs.set_merge_base_content_for_repo(
Path::new(path!("/project/.git")),
&[
("a.txt", "A".into()),
("c.txt", "in-merge-base-and-work-tree".into()),
],
);
cx.run_until_parked();
let editor = diff.read_with(cx, |diff, _| diff.editor.clone());
assert_state_with_diff(
&editor,
cx,
&"
- A
+ ˇC
+ new
+ created-in-head"
.unindent(),
);
let statuses: HashMap<Arc<RelPath>, Option<FileStatus>> =
editor.update(cx, |editor, cx| {
editor
.buffer()
.read(cx)
.all_buffers()
.iter()
.map(|buffer| {
(
buffer.read(cx).file().unwrap().path().clone(),
editor.status_for_buffer_id(buffer.read(cx).remote_id(), cx),
)
})
.collect()
});
assert_eq!(
statuses,
HashMap::from_iter([
(
rel_path("a.txt").into_arc(),
Some(FileStatus::Tracked(TrackedStatus {
index_status: git::status::StatusCode::Modified,
worktree_status: git::status::StatusCode::Modified
}))
),
(rel_path("b.txt").into_arc(), Some(FileStatus::Untracked)),
(
rel_path("d.txt").into_arc(),
Some(FileStatus::Tracked(TrackedStatus {
index_status: git::status::StatusCode::Added,
worktree_status: git::status::StatusCode::Added
}))
)
])
);
}
#[gpui::test]
async fn test_update_on_uncommit(cx: &mut TestAppContext) {
init_test(cx);

View File

@@ -17,7 +17,6 @@ pub fn make_file_finder_include_ignored_an_enum(value: &mut Value) -> Result<()>
Value::Bool(true) => Value::String("all".to_string()),
Value::Bool(false) => Value::String("indexed".to_string()),
Value::Null => Value::String("smart".to_string()),
Value::String(s) if s == "all" || s == "indexed" || s == "smart" => return Ok(()),
_ => anyhow::bail!("Expected include_ignored to be a boolean or null"),
};
Ok(())

View File

@@ -366,13 +366,7 @@ mod tests {
#[track_caller]
fn assert_migrate_settings(input: &str, output: Option<&str>) {
let migrated = migrate_settings(input).unwrap();
assert_migrated_correctly(migrated.clone(), output);
// expect that rerunning the migration does not result in another migration
if let Some(migrated) = migrated {
let rerun = migrate_settings(&migrated).unwrap();
assert_migrated_correctly(rerun, None);
}
assert_migrated_correctly(migrated, output);
}
#[track_caller]
@@ -382,13 +376,7 @@ mod tests {
output: Option<&str>,
) {
let migrated = run_migrations(input, migrations).unwrap();
assert_migrated_correctly(migrated.clone(), output);
// expect that rerunning the migration does not result in another migration
if let Some(migrated) = migrated {
let rerun = run_migrations(&migrated, migrations).unwrap();
assert_migrated_correctly(rerun, None);
}
assert_migrated_correctly(migrated, output);
}
#[test]

View File

@@ -72,6 +72,7 @@ serde_json.workspace = true
settings.workspace = true
sha2.workspace = true
shellexpand.workspace = true
shlex.workspace = true
smallvec.workspace = true
smol.workspace = true
snippet.workspace = true

View File

@@ -1,7 +1,6 @@
use std::{
any::Any,
borrow::Borrow,
collections::HashSet,
path::{Path, PathBuf},
str::FromStr as _,
sync::Arc,
@@ -127,198 +126,13 @@ enum AgentServerStoreState {
pub struct AgentServerStore {
state: AgentServerStoreState,
external_agents: HashMap<ExternalAgentServerName, Box<dyn ExternalAgentServer>>,
agent_icons: HashMap<ExternalAgentServerName, SharedString>,
}
pub struct AgentServersUpdated;
impl EventEmitter<AgentServersUpdated> for AgentServerStore {}
#[cfg(test)]
mod ext_agent_tests {
use super::*;
use std::fmt::Write as _;
// Helper to build a store in Collab mode so we can mutate internal maps without
// needing to spin up a full project environment.
fn collab_store() -> AgentServerStore {
AgentServerStore {
state: AgentServerStoreState::Collab,
external_agents: HashMap::default(),
agent_icons: HashMap::default(),
}
}
// A simple fake that implements ExternalAgentServer without needing async plumbing.
struct NoopExternalAgent;
impl ExternalAgentServer for NoopExternalAgent {
fn get_command(
&mut self,
_root_dir: Option<&str>,
_extra_env: HashMap<String, String>,
_status_tx: Option<watch::Sender<SharedString>>,
_new_version_available_tx: Option<watch::Sender<Option<String>>>,
_cx: &mut AsyncApp,
) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
Task::ready(Ok((
AgentServerCommand {
path: PathBuf::from("noop"),
args: Vec::new(),
env: None,
},
"".to_string(),
None,
)))
}
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
#[test]
fn external_agent_server_name_display() {
let name = ExternalAgentServerName(SharedString::from("Ext: Tool"));
let mut s = String::new();
write!(&mut s, "{name}").unwrap();
assert_eq!(s, "Ext: Tool");
}
#[test]
fn sync_extension_agents_removes_previous_extension_entries() {
let mut store = collab_store();
// Seed with a couple of agents that will be replaced by extensions
store.external_agents.insert(
ExternalAgentServerName(SharedString::from("foo-agent")),
Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
);
store.external_agents.insert(
ExternalAgentServerName(SharedString::from("bar-agent")),
Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
);
store.external_agents.insert(
ExternalAgentServerName(SharedString::from("custom")),
Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
);
// Simulate the removal phase: if we're syncing extensions that provide
// "foo-agent" and "bar-agent", those should be removed first
let extension_agent_names: HashSet<String> =
["foo-agent".to_string(), "bar-agent".to_string()]
.into_iter()
.collect();
let keys_to_remove: Vec<_> = store
.external_agents
.keys()
.filter(|name| extension_agent_names.contains(name.0.as_ref()))
.cloned()
.collect();
for key in keys_to_remove {
store.external_agents.remove(&key);
}
// Only the custom entry should remain.
let remaining: Vec<_> = store
.external_agents
.keys()
.map(|k| k.0.to_string())
.collect();
assert_eq!(remaining, vec!["custom".to_string()]);
}
}
impl AgentServerStore {
/// Synchronizes extension-provided agent servers with the store.
pub fn sync_extension_agents<'a, I>(
&mut self,
manifests: I,
extensions_dir: PathBuf,
cx: &mut Context<Self>,
) where
I: IntoIterator<Item = (&'a str, &'a extension::ExtensionManifest)>,
{
// Collect manifests first so we can iterate twice
let manifests: Vec<_> = manifests.into_iter().collect();
// Remove existing extension-provided agents by tracking which ones we're about to add
let extension_agent_names: HashSet<_> = manifests
.iter()
.flat_map(|(_, manifest)| manifest.agent_servers.keys().map(|k| k.to_string()))
.collect();
let keys_to_remove: Vec<_> = self
.external_agents
.keys()
.filter(|name| {
// Remove if it matches an extension agent name from any extension
extension_agent_names.contains(name.0.as_ref())
})
.cloned()
.collect();
for key in &keys_to_remove {
self.external_agents.remove(key);
self.agent_icons.remove(key);
}
// Insert agent servers from extension manifests
match &self.state {
AgentServerStoreState::Local {
project_environment,
fs,
http_client,
..
} => {
for (ext_id, manifest) in manifests {
for (agent_name, agent_entry) in &manifest.agent_servers {
let display = SharedString::from(agent_entry.name.clone());
// Store absolute icon path if provided, resolving symlinks for dev extensions
if let Some(icon) = &agent_entry.icon {
let icon_path = extensions_dir.join(ext_id).join(icon);
// Canonicalize to resolve symlinks (dev extensions are symlinked)
let absolute_icon_path = icon_path
.canonicalize()
.unwrap_or(icon_path)
.to_string_lossy()
.to_string();
self.agent_icons.insert(
ExternalAgentServerName(display.clone()),
SharedString::from(absolute_icon_path),
);
}
// Archive-based launcher (download from URL)
self.external_agents.insert(
ExternalAgentServerName(display),
Box::new(LocalExtensionArchiveAgent {
fs: fs.clone(),
http_client: http_client.clone(),
project_environment: project_environment.clone(),
extension_id: Arc::from(ext_id),
agent_id: agent_name.clone(),
targets: agent_entry.targets.clone(),
env: agent_entry.env.clone(),
}) as Box<dyn ExternalAgentServer>,
);
}
}
}
_ => {
// Only local projects support local extension agents
}
}
cx.emit(AgentServersUpdated);
}
pub fn agent_icon(&self, name: &ExternalAgentServerName) -> Option<SharedString> {
self.agent_icons.get(name).cloned()
}
pub fn init_remote(session: &AnyProtoClient) {
session.add_entity_message_handler(Self::handle_external_agents_updated);
session.add_entity_message_handler(Self::handle_loading_status_updated);
@@ -388,7 +202,7 @@ impl AgentServerStore {
.gemini
.as_ref()
.and_then(|settings| settings.ignore_system_version)
.unwrap_or(false),
.unwrap_or(true),
}),
);
self.external_agents.insert(
@@ -465,9 +279,7 @@ impl AgentServerStore {
_subscriptions: [subscription],
},
external_agents: Default::default(),
agent_icons: Default::default(),
};
if let Some(_events) = extension::ExtensionEvents::try_global(cx) {}
this.agent_servers_settings_changed(cx);
this
}
@@ -476,7 +288,7 @@ impl AgentServerStore {
// Set up the builtin agents here so they're immediately available in
// remote projects--we know that the HeadlessProject on the other end
// will have them.
let external_agents: [(ExternalAgentServerName, Box<dyn ExternalAgentServer>); 3] = [
let external_agents = [
(
CLAUDE_CODE_NAME.into(),
Box::new(RemoteExternalAgentServer {
@@ -507,15 +319,16 @@ impl AgentServerStore {
new_version_available_tx: None,
}) as Box<dyn ExternalAgentServer>,
),
];
]
.into_iter()
.collect();
Self {
state: AgentServerStoreState::Remote {
project_id,
upstream_client,
},
external_agents: external_agents.into_iter().collect(),
agent_icons: HashMap::default(),
external_agents,
}
}
@@ -523,7 +336,6 @@ impl AgentServerStore {
Self {
state: AgentServerStoreState::Collab,
external_agents: Default::default(),
agent_icons: Default::default(),
}
}
@@ -580,7 +392,7 @@ impl AgentServerStore {
envelope: TypedEnvelope<proto::GetAgentServerCommand>,
mut cx: AsyncApp,
) -> Result<proto::AgentServerCommand> {
let (command, root_dir, login_command) = this
let (command, root_dir, login) = this
.update(&mut cx, |this, cx| {
let AgentServerStoreState::Local {
downstream_client, ..
@@ -654,7 +466,7 @@ impl AgentServerStore {
.map(|env| env.into_iter().collect())
.unwrap_or_default(),
root_dir: root_dir,
login: login_command.map(|cmd| cmd.to_proto()),
login: login.map(|login| login.to_proto()),
})
}
@@ -999,7 +811,9 @@ impl ExternalAgentServer for RemoteExternalAgentServer {
env: Some(command.env),
},
root_dir,
None,
response
.login
.map(|login| task::SpawnInTerminal::from_proto(login)),
))
})
}
@@ -1145,7 +959,7 @@ impl ExternalAgentServer for LocalClaudeCode {
.unwrap_or_default();
env.insert("ANTHROPIC_API_KEY".into(), "".into());
let (mut command, login_command) = if let Some(mut custom_command) = custom_command {
let (mut command, login) = if let Some(mut custom_command) = custom_command {
env.extend(custom_command.env.unwrap_or_default());
custom_command.env = Some(env);
(custom_command, None)
@@ -1186,11 +1000,7 @@ impl ExternalAgentServer for LocalClaudeCode {
};
command.env.get_or_insert_default().extend(extra_env);
Ok((
command,
root_dir.to_string_lossy().into_owned(),
login_command,
))
Ok((command, root_dir.to_string_lossy().into_owned(), login))
})
}
@@ -1270,15 +1080,10 @@ impl ExternalAgentServer for LocalCodex {
.into_iter()
.find(|asset| asset.name == asset_name)
.with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
// Strip "sha256:" prefix from digest if present (GitHub API format)
let digest = asset
.digest
.as_deref()
.and_then(|d| d.strip_prefix("sha256:").or(Some(d)));
::http_client::github_download::download_server_binary(
&*http,
&asset.browser_download_url,
digest,
asset.digest.as_deref(),
&version_dir,
if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") {
AssetKind::Zip
@@ -1322,7 +1127,11 @@ impl ExternalAgentServer for LocalCodex {
pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp";
fn get_platform_info() -> Option<(&'static str, &'static str, &'static str)> {
/// Assemble Codex release URL for the current OS/arch and the given version number.
/// Returns None if the current target is unsupported.
/// Example output:
/// https://github.com/zed-industries/codex-acp/releases/download/v{version}/codex-acp-{version}-{arch}-{platform}.{ext}
fn asset_name(version: &str) -> Option<String> {
let arch = if cfg!(target_arch = "x86_64") {
"x86_64"
} else if cfg!(target_arch = "aarch64") {
@@ -1348,220 +1157,14 @@ fn get_platform_info() -> Option<(&'static str, &'static str, &'static str)> {
"tar.gz"
};
Some((arch, platform, ext))
}
fn asset_name(version: &str) -> Option<String> {
let (arch, platform, ext) = get_platform_info()?;
Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}"))
}
/// An external agent distributed as a downloadable archive declared by an
/// extension manifest. The archive is fetched, cached on disk, and extracted
/// lazily in `get_command`.
struct LocalExtensionArchiveAgent {
    fs: Arc<dyn Fs>,
    http_client: Arc<dyn HttpClient>,
    // Used to resolve the shell environment of the directory the agent is
    // launched from.
    project_environment: Entity<ProjectEnvironment>,
    // `extension_id`/`agent_id` together form the on-disk cache key for the
    // downloaded archive (see `get_command`).
    extension_id: Arc<str>,
    agent_id: Arc<str>,
    // Per-platform archive/command configuration, keyed by "<os>-<arch>"
    // (e.g. "darwin-aarch64").
    targets: HashMap<String, extension::TargetConfig>,
    // Base environment from the extension manifest; extended (and therefore
    // overridable) by caller-provided extra env at launch time.
    env: HashMap<String, String>,
}
/// An external agent launched from a user-configured command rather than a
/// downloaded archive.
struct LocalCustomAgent {
    // Used to resolve the shell environment for the launch directory.
    project_environment: Entity<ProjectEnvironment>,
    // The user-specified program, args, and env to run.
    command: AgentServerCommand,
}
impl ExternalAgentServer for LocalExtensionArchiveAgent {
    /// Resolves the command used to launch this agent, downloading and
    /// extracting the platform-appropriate archive on first use.
    ///
    /// Returns the command to spawn, the root directory it should run in,
    /// and `None` for the login terminal task (archive agents have none).
    fn get_command(
        &mut self,
        root_dir: Option<&str>,
        extra_env: HashMap<String, String>,
        _status_tx: Option<watch::Sender<SharedString>>,
        _new_version_available_tx: Option<watch::Sender<Option<String>>>,
        cx: &mut AsyncApp,
    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
        let fs = self.fs.clone();
        let http_client = self.http_client.clone();
        let project_environment = self.project_environment.downgrade();
        let extension_id = self.extension_id.clone();
        let agent_id = self.agent_id.clone();
        let targets = self.targets.clone();
        let base_env = self.env.clone();
        // Fall back to the home directory when no project root was given.
        let root_dir: Arc<Path> = root_dir
            .map(|root_dir| Path::new(root_dir))
            .unwrap_or(paths::home_dir())
            .into();
        cx.spawn(async move |cx| {
            // Get project environment
            let mut env = project_environment
                .update(cx, |project_environment, cx| {
                    project_environment.get_local_directory_environment(
                        &Shell::System,
                        root_dir.clone(),
                        cx,
                    )
                })?
                .await
                .unwrap_or_default();
            // Merge manifest env and extra env; later inserts win, so extra
            // env overrides the manifest, which overrides the shell env.
            env.extend(base_env);
            env.extend(extra_env);
            let cache_key = format!("{}/{}", extension_id, agent_id);
            let dir = paths::data_dir().join("external_agents").join(&cache_key);
            fs.create_dir(&dir).await?;
            // Determine platform key matching the manifest's target table.
            let os = if cfg!(target_os = "macos") {
                "darwin"
            } else if cfg!(target_os = "linux") {
                "linux"
            } else if cfg!(target_os = "windows") {
                "windows"
            } else {
                anyhow::bail!("unsupported OS");
            };
            let arch = if cfg!(target_arch = "aarch64") {
                "aarch64"
            } else if cfg!(target_arch = "x86_64") {
                "x86_64"
            } else {
                anyhow::bail!("unsupported architecture");
            };
            let platform_key = format!("{}-{}", os, arch);
            let target_config = targets.get(&platform_key).with_context(|| {
                format!(
                    "no target specified for platform '{}'. Available platforms: {}",
                    platform_key,
                    targets
                        .keys()
                        .map(|k| k.as_str())
                        .collect::<Vec<_>>()
                        .join(", ")
                )
            })?;
            let archive_url = &target_config.archive;
            // Use URL as version identifier for caching
            // Hash the URL to get a stable directory name
            use std::collections::hash_map::DefaultHasher;
            use std::hash::{Hash, Hasher};
            let mut hasher = DefaultHasher::new();
            archive_url.hash(&mut hasher);
            let url_hash = hasher.finish();
            let version_dir = dir.join(format!("v_{:x}", url_hash));
            // Only download when this URL has not been extracted before.
            if !fs.is_dir(&version_dir).await {
                // Determine SHA256 for verification
                let sha256 = if let Some(provided_sha) = &target_config.sha256 {
                    // Use provided SHA256
                    Some(provided_sha.clone())
                } else if archive_url.starts_with("https://github.com/") {
                    // Try to fetch SHA256 from GitHub API
                    // Parse URL to extract repo and tag/file info
                    // Format: https://github.com/owner/repo/releases/download/tag/file.zip
                    if let Some(caps) = archive_url.strip_prefix("https://github.com/") {
                        let parts: Vec<&str> = caps.split('/').collect();
                        if parts.len() >= 6 && parts[2] == "releases" && parts[3] == "download" {
                            let repo = format!("{}/{}", parts[0], parts[1]);
                            let tag = parts[4];
                            let filename = parts[5..].join("/");
                            // Try to get release info from GitHub
                            if let Ok(release) = ::http_client::github::get_release_by_tag_name(
                                &repo,
                                tag,
                                http_client.clone(),
                            )
                            .await
                            {
                                // Find matching asset
                                if let Some(asset) =
                                    release.assets.iter().find(|a| a.name == filename)
                                {
                                    // Strip "sha256:" prefix if present
                                    asset.digest.as_ref().and_then(|d| {
                                        d.strip_prefix("sha256:")
                                            .map(|s| s.to_string())
                                            .or_else(|| Some(d.clone()))
                                    })
                                } else {
                                    None
                                }
                            } else {
                                // Best-effort: download proceeds unverified if
                                // the release lookup fails.
                                None
                            }
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                } else {
                    None
                };
                // Determine archive type from URL
                let asset_kind = if archive_url.ends_with(".zip") {
                    AssetKind::Zip
                } else if archive_url.ends_with(".tar.gz") || archive_url.ends_with(".tgz") {
                    AssetKind::TarGz
                } else {
                    anyhow::bail!("unsupported archive type in URL: {}", archive_url);
                };
                // Download and extract
                ::http_client::github_download::download_server_binary(
                    &*http_client,
                    archive_url,
                    sha256.as_deref(),
                    &version_dir,
                    asset_kind,
                )
                .await?;
            }
            // Validate and resolve cmd path. Rejecting ".." and absolute
            // commands confines execution to the extracted archive.
            let cmd = &target_config.cmd;
            if cmd.contains("..") {
                anyhow::bail!("command path cannot contain '..': {}", cmd);
            }
            let cmd_path = if cmd.starts_with("./") || cmd.starts_with(".\\") {
                // Relative to extraction directory
                version_dir.join(&cmd[2..])
            } else {
                // On PATH
                anyhow::bail!("command must be relative (start with './'): {}", cmd);
            };
            anyhow::ensure!(
                fs.is_file(&cmd_path).await,
                "Missing command {} after extraction",
                cmd_path.to_string_lossy()
            );
            let command = AgentServerCommand {
                path: cmd_path,
                args: target_config.args.clone(),
                env: Some(env),
            };
            Ok((command, root_dir.to_string_lossy().into_owned(), None))
        })
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        self
    }
}
impl ExternalAgentServer for LocalCustomAgent {
fn get_command(
&mut self,
@@ -1600,6 +1203,42 @@ impl ExternalAgentServer for LocalCustomAgent {
}
}
#[cfg(test)]
mod tests {
    /// Guard against shipping Zed on a target with no corresponding
    /// codex-acp release: the build's test suite fails if `asset_name`
    /// cannot produce a name for this target, or produces one outside the
    /// known-good set. It also pins the exact asset-name format assembled
    /// for each supported target.
    #[test]
    fn assembles_codex_release_url_for_current_target() {
        let version_number = "0.1.0";
        // Every asset name we know to exist in codex-acp releases.
        let allowed = [
            "codex-acp-0.1.0-aarch64-apple-darwin.tar.gz",
            "codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz",
            "codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz",
            "codex-acp-0.1.0-x86_64-apple-darwin.tar.gz",
            "codex-acp-0.1.0-x86_64-pc-windows-msvc.zip",
            "codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz",
        ];
        match super::asset_name(version_number) {
            Some(name) => assert!(
                allowed.contains(&name.as_str()),
                "Assembled asset name {} not in allowed list",
                name
            ),
            None => panic!(
                "This target does not have a known codex-acp release! We should fix this by building a release of codex-acp for this target, as otherwise codex-acp will not be usable with this Zed build."
            ),
        }
    }
}
/// Well-known identifiers for the built-in external agents.
// `'static` is implied for `&str` consts, so spelling it out is redundant
// (clippy::redundant_static_lifetimes); the type is unchanged.
pub const GEMINI_NAME: &str = "gemini";
pub const CLAUDE_CODE_NAME: &str = "claude";
pub const CODEX_NAME: &str = "codex";
@@ -1692,200 +1331,3 @@ impl settings::Settings for AllAgentServersSettings {
}
}
}
#[cfg(test)]
mod extension_agent_tests {
    use super::*;
    use gpui::TestAppContext;
    use std::sync::Arc;

    // Extension-provided agents are display-named "<Extension>: <Agent>";
    // the ": " separator distinguishes them from user-configured agents.
    #[test]
    fn extension_agent_constructs_proper_display_names() {
        // Verify the display name format for extension-provided agents
        let name1 = ExternalAgentServerName(SharedString::from("Extension: Agent"));
        assert!(name1.0.contains(": "));
        let name2 = ExternalAgentServerName(SharedString::from("MyExt: MyAgent"));
        assert_eq!(name2.0, "MyExt: MyAgent");
        // Non-extension agents shouldn't have the separator
        let custom = ExternalAgentServerName(SharedString::from("custom"));
        assert!(!custom.0.contains(": "));
    }

    // Test double: an `ExternalAgentServer` that resolves immediately to a
    // do-nothing command, used to populate the store in tests below.
    struct NoopExternalAgent;

    impl ExternalAgentServer for NoopExternalAgent {
        fn get_command(
            &mut self,
            _root_dir: Option<&str>,
            _extra_env: HashMap<String, String>,
            _status_tx: Option<watch::Sender<SharedString>>,
            _new_version_available_tx: Option<watch::Sender<Option<String>>>,
            _cx: &mut AsyncApp,
        ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
            Task::ready(Ok((
                AgentServerCommand {
                    path: PathBuf::from("noop"),
                    args: Vec::new(),
                    env: None,
                },
                "".to_string(),
                None,
            )))
        }

        fn as_any_mut(&mut self) -> &mut dyn Any {
            self
        }
    }

    // Re-syncing extension agents must remove previously registered
    // extension agents (names containing ": ") while leaving custom agents
    // untouched. This exercises the removal phase in isolation.
    #[test]
    fn sync_removes_only_extension_provided_agents() {
        let mut store = AgentServerStore {
            state: AgentServerStoreState::Collab,
            external_agents: HashMap::default(),
            agent_icons: HashMap::default(),
        };
        // Seed with extension agents (contain ": ") and custom agents (don't contain ": ")
        store.external_agents.insert(
            ExternalAgentServerName(SharedString::from("Ext1: Agent1")),
            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
        );
        store.external_agents.insert(
            ExternalAgentServerName(SharedString::from("Ext2: Agent2")),
            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
        );
        store.external_agents.insert(
            ExternalAgentServerName(SharedString::from("custom-agent")),
            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
        );
        // Simulate removal phase
        let keys_to_remove: Vec<_> = store
            .external_agents
            .keys()
            .filter(|name| name.0.contains(": "))
            .cloned()
            .collect();
        for key in keys_to_remove {
            store.external_agents.remove(&key);
        }
        // Only custom-agent should remain
        assert_eq!(store.external_agents.len(), 1);
        assert!(
            store
                .external_agents
                .contains_key(&ExternalAgentServerName(SharedString::from("custom-agent")))
        );
    }

    // Smoke-test that a manifest entry for an archive-installed agent can be
    // constructed with all fields populated.
    #[test]
    fn archive_launcher_constructs_with_all_fields() {
        use extension::AgentServerManifestEntry;

        let mut env = HashMap::default();
        env.insert("GITHUB_TOKEN".into(), "secret".into());
        let mut targets = HashMap::default();
        targets.insert(
            "darwin-aarch64".to_string(),
            extension::TargetConfig {
                archive:
                    "https://github.com/owner/repo/releases/download/v1.0.0/agent-darwin-arm64.zip"
                        .into(),
                cmd: "./agent".into(),
                args: vec![],
                sha256: None,
            },
        );
        let _entry = AgentServerManifestEntry {
            name: "GitHub Agent".into(),
            targets,
            env,
            icon: None,
        };
        // Verify display name construction
        let expected_name = ExternalAgentServerName(SharedString::from("GitHub Agent"));
        assert_eq!(expected_name.0, "GitHub Agent");
    }

    // The (extension_id, agent_id) pair is what keys the on-disk archive
    // cache; verify the agent carries both through construction.
    #[gpui::test]
    async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.background_executor.clone());
        let http_client = http_client::FakeHttpClient::with_404_response();
        let project_environment = cx.new(|cx| crate::ProjectEnvironment::new(None, cx));
        let agent = LocalExtensionArchiveAgent {
            fs,
            http_client,
            project_environment,
            extension_id: Arc::from("my-extension"),
            agent_id: Arc::from("my-agent"),
            targets: {
                let mut map = HashMap::default();
                map.insert(
                    "darwin-aarch64".to_string(),
                    extension::TargetConfig {
                        archive: "https://example.com/my-agent-darwin-arm64.zip".into(),
                        cmd: "./my-agent".into(),
                        args: vec!["--serve".into()],
                        sha256: None,
                    },
                );
                map
            },
            env: {
                let mut map = HashMap::default();
                map.insert("PORT".into(), "8080".into());
                map
            },
        };
        // Verify agent is properly constructed
        assert_eq!(agent.extension_id.as_ref(), "my-extension");
        assert_eq!(agent.agent_id.as_ref(), "my-agent");
        assert_eq!(agent.env.get("PORT"), Some(&"8080".to_string()));
        assert!(agent.targets.contains_key("darwin-aarch64"));
    }

    // Verify the shape of a manifest entry describing an archive-based
    // release agent, including its per-platform target config.
    #[test]
    fn sync_extension_agents_registers_archive_launcher() {
        use extension::AgentServerManifestEntry;

        let expected_name = ExternalAgentServerName(SharedString::from("Release Agent"));
        assert_eq!(expected_name.0, "Release Agent");
        // Verify the manifest entry structure for archive-based installation
        let mut env = HashMap::default();
        env.insert("API_KEY".into(), "secret".into());
        let mut targets = HashMap::default();
        targets.insert(
            "linux-x86_64".to_string(),
            extension::TargetConfig {
                archive: "https://github.com/org/project/releases/download/v2.1.0/release-agent-linux-x64.tar.gz".into(),
                cmd: "./release-agent".into(),
                args: vec!["serve".into()],
                sha256: None,
            },
        );
        let manifest_entry = AgentServerManifestEntry {
            name: "Release Agent".into(),
            targets: targets.clone(),
            env,
            icon: None,
        };
        // Verify target config is present
        assert!(manifest_entry.targets.contains_key("linux-x86_64"));
        let target = manifest_entry.targets.get("linux-x86_64").unwrap();
        assert_eq!(target.cmd, "./release-agent");
    }
}

View File

@@ -909,14 +909,7 @@ impl BufferStore {
};
cx.spawn(async move |this, cx| {
task.await?;
this.update(cx, |this, cx| {
old_file.clone().and_then(|file| {
this.path_to_buffer_id.remove(&ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path().clone(),
})
});
this.update(cx, |_, cx| {
cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
})
})

View File

@@ -4,7 +4,7 @@ use language::Buffer;
use remote::RemoteClient;
use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID};
use std::{collections::VecDeque, path::Path, sync::Arc};
use task::{Shell, shell_to_proto};
use task::Shell;
use util::ResultExt;
use worktree::Worktree;
@@ -198,7 +198,7 @@ impl ProjectEnvironment {
.proto_client()
.request(proto::GetDirectoryEnvironment {
project_id: REMOTE_SERVER_PROJECT_ID,
shell: Some(shell_to_proto(shell.clone())),
shell: Some(shell.clone().to_proto()),
directory: abs_path.to_string_lossy().to_string(),
});
cx.spawn(async move |_, _| {

View File

@@ -1,4 +1,3 @@
pub mod branch_diff;
mod conflict_set;
pub mod git_traversal;
@@ -31,8 +30,7 @@ use git::{
},
stash::{GitStash, StashEntry},
status::{
DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
UnmergedStatus, UnmergedStatusCode,
FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
},
};
use gpui::{
@@ -57,7 +55,6 @@ use std::{
mem,
ops::Range,
path::{Path, PathBuf},
str::FromStr,
sync::{
Arc,
atomic::{self, AtomicU64},
@@ -435,8 +432,6 @@ impl GitStore {
client.add_entity_request_handler(Self::handle_askpass);
client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
client.add_entity_request_handler(Self::handle_git_diff);
client.add_entity_request_handler(Self::handle_tree_diff);
client.add_entity_request_handler(Self::handle_get_blob_content);
client.add_entity_request_handler(Self::handle_open_unstaged_diff);
client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
client.add_entity_message_handler(Self::handle_update_diff_bases);
@@ -624,52 +619,6 @@ impl GitStore {
cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
/// Opens a diff of `buffer` against the blob `oid`, or against an empty base
/// when `oid` is `None` (i.e. the file did not exist at the base commit).
/// The buffer's unstaged diff is attached as the secondary diff, and this
/// store subscribes to the resulting `BufferDiff`'s events.
pub fn open_diff_since(
    &mut self,
    oid: Option<git::Oid>,
    buffer: Entity<Buffer>,
    repo: Entity<Repository>,
    languages: Arc<LanguageRegistry>,
    cx: &mut Context<Self>,
) -> Task<Result<Entity<BufferDiff>>> {
    cx.spawn(async move |this, cx| {
        let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
        // Load the base text from the repository, if there is one.
        let content = match oid {
            None => None,
            Some(oid) => Some(
                repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
                    .await?,
            ),
        };
        let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
        buffer_diff
            .update(cx, |buffer_diff, cx| {
                buffer_diff.set_base_text(
                    content.map(Arc::new),
                    buffer_snapshot.language().cloned(),
                    Some(languages.clone()),
                    buffer_snapshot.text,
                    cx,
                )
            })?
            .await?;
        // The unstaged diff becomes the secondary diff so staged/unstaged
        // state renders alongside the base comparison.
        let unstaged_diff = this
            .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
            .await?;
        buffer_diff.update(cx, |buffer_diff, _| {
            buffer_diff.set_secondary_diff(unstaged_diff);
        })?;
        this.update(cx, |_, cx| {
            cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                .detach();
        })?;
        Ok(buffer_diff)
    })
}
pub fn open_uncommitted_diff(
&mut self,
buffer: Entity<Buffer>,
@@ -2219,75 +2168,6 @@ impl GitStore {
Ok(proto::GitDiffResponse { diff })
}
/// RPC handler for `GetTreeDiff`: computes a tree-level diff in the
/// requested repository and serializes each entry's status and (for
/// modified/deleted files) the old blob OID back to the client.
async fn handle_tree_diff(
    this: Entity<Self>,
    request: TypedEnvelope<proto::GetTreeDiff>,
    mut cx: AsyncApp,
) -> Result<proto::GetTreeDiffResponse> {
    let repository_id = RepositoryId(request.payload.repository_id);
    // `is_merge` selects a merge-base diff instead of a direct base..head diff.
    let diff_type = if request.payload.is_merge {
        DiffTreeType::MergeBase {
            base: request.payload.base.into(),
            head: request.payload.head.into(),
        }
    } else {
        DiffTreeType::Since {
            base: request.payload.base.into(),
            head: request.payload.head.into(),
        }
    };
    let diff = this
        .update(&mut cx, |this, cx| {
            let repository = this.repositories().get(&repository_id)?;
            Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
        })?
        .context("missing repository")?
        .await??;
    Ok(proto::GetTreeDiffResponse {
        entries: diff
            .entries
            .into_iter()
            .map(|(path, status)| proto::TreeDiffStatus {
                path: path.0.to_proto(),
                status: match status {
                    TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                    TreeDiffStatus::Modified { .. } => {
                        proto::tree_diff_status::Status::Modified.into()
                    }
                    TreeDiffStatus::Deleted { .. } => {
                        proto::tree_diff_status::Status::Deleted.into()
                    }
                },
                // Added files have no previous blob, so no OID is sent.
                oid: match status {
                    TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                        Some(old.to_string())
                    }
                    TreeDiffStatus::Added => None,
                },
            })
            .collect(),
    })
}
/// RPC handler for `GetBlobContent`: loads the content of a blob by OID from
/// the requested repository.
async fn handle_get_blob_content(
    this: Entity<Self>,
    request: TypedEnvelope<proto::GetBlobContent>,
    mut cx: AsyncApp,
) -> Result<proto::GetBlobContentResponse> {
    let oid = git::Oid::from_str(&request.payload.oid)?;
    let repository_id = RepositoryId(request.payload.repository_id);
    let content = this
        .update(&mut cx, |this, cx| {
            let repository = this.repositories().get(&repository_id)?;
            Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
        })?
        .context("missing repository")?
        .await?;
    Ok(proto::GetBlobContentResponse { content })
}
async fn handle_open_unstaged_diff(
this: Entity<Self>,
request: TypedEnvelope<proto::OpenUnstagedDiff>,
@@ -4423,62 +4303,6 @@ impl Repository {
})
}
/// Computes a tree-level diff of this repository. Runs against the local git
/// backend when available, otherwise forwards a `GetTreeDiff` request to the
/// remote host and deserializes the response.
pub fn diff_tree(
    &mut self,
    diff_type: DiffTreeType,
    _cx: &App,
) -> oneshot::Receiver<Result<TreeDiff>> {
    let repository_id = self.snapshot.id;
    self.send_job(None, move |repo, _cx| async move {
        match repo {
            RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
            RepositoryState::Remote { client, project_id } => {
                let response = client
                    .request(proto::GetTreeDiff {
                        project_id: project_id.0,
                        repository_id: repository_id.0,
                        is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                        base: diff_type.base().to_string(),
                        head: diff_type.head().to_string(),
                    })
                    .await?;
                // Entries with malformed paths or missing/invalid OIDs are
                // logged and skipped rather than failing the whole diff.
                let entries = response
                    .entries
                    .into_iter()
                    .filter_map(|entry| {
                        let status = match entry.status() {
                            proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                            proto::tree_diff_status::Status::Modified => {
                                TreeDiffStatus::Modified {
                                    old: git::Oid::from_str(
                                        &entry.oid.context("missing oid").log_err()?,
                                    )
                                    .log_err()?,
                                }
                            }
                            proto::tree_diff_status::Status::Deleted => {
                                TreeDiffStatus::Deleted {
                                    old: git::Oid::from_str(
                                        &entry.oid.context("missing oid").log_err()?,
                                    )
                                    .log_err()?,
                                }
                            }
                        };
                        Some((
                            RepoPath(RelPath::from_proto(&entry.path).log_err()?),
                            status,
                        ))
                    })
                    .collect();
                Ok(TreeDiff { entries })
            }
        }
    })
}
pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
let id = self.id;
self.send_job(None, move |repo, _cx| async move {
@@ -4951,25 +4775,6 @@ impl Repository {
cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
}
/// Loads the content of the blob `oid`, either from the local git backend or
/// via a `GetBlobContent` request to the remote host.
fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
    let repository_id = self.snapshot.id;
    let rx = self.send_job(None, move |state, _| async move {
        match state {
            RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
            RepositoryState::Remote { client, project_id } => {
                let response = client
                    .request(proto::GetBlobContent {
                        project_id: project_id.to_proto(),
                        repository_id: repository_id.0,
                        oid: oid.to_string(),
                    })
                    .await?;
                Ok(response.content)
            }
        }
    });
    cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
}
fn paths_changed(
&mut self,

View File

@@ -1,386 +0,0 @@
use anyhow::Result;
use buffer_diff::BufferDiff;
use collections::HashSet;
use futures::StreamExt;
use git::{
repository::RepoPath,
status::{DiffTreeType, FileStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus},
};
use gpui::{
App, AsyncWindowContext, Context, Entity, EventEmitter, SharedString, Subscription, Task,
WeakEntity, Window,
};
use language::Buffer;
use text::BufferId;
use util::ResultExt;
use crate::{
Project,
git_store::{GitStoreEvent, Repository, RepositoryEvent},
};
/// What the working tree is being diffed against.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum DiffBase {
    /// Diff against `HEAD`.
    Head,
    /// Diff against the merge base of `HEAD` and `base_ref`.
    Merge { base_ref: SharedString },
}
impl DiffBase {
    /// Reports whether this diff base compares against a merge base rather
    /// than `HEAD`.
    pub fn is_merge_base(&self) -> bool {
        match self {
            DiffBase::Merge { .. } => true,
            DiffBase::Head => false,
        }
    }
}
/// Tracks the set of files that differ between the working tree and a
/// [`DiffBase`], keeping it up to date as the active repository, its
/// statuses, or its head/base commits change.
pub struct BranchDiff {
    diff_base: DiffBase,
    // The repository currently being diffed; `None` when the project has no
    // active repository.
    repo: Option<Entity<Repository>>,
    project: Entity<Project>,
    // Last-seen SHAs, used to detect when the tree diff must be recomputed.
    base_commit: Option<SharedString>,
    head_commit: Option<SharedString>,
    // Cached merge-base tree diff; `None` for `DiffBase::Head` or before the
    // first load completes.
    tree_diff: Option<TreeDiff>,
    _subscription: Subscription,
    // Nudged to wake the background worker (`_task`) for a refresh.
    update_needed: postage::watch::Sender<()>,
    _task: Task<()>,
}
/// Events emitted by [`BranchDiff`].
pub enum BranchDiffEvent {
    // The set of changed files (or their statuses) may have changed.
    FileListChanged,
}

impl EventEmitter<BranchDiffEvent> for BranchDiff {}
impl BranchDiff {
    /// Creates a branch diff against `source`, subscribing to git-store
    /// events and spawning a background worker that reloads the tree diff
    /// when nudged via `update_needed`.
    pub fn new(
        source: DiffBase,
        project: Entity<Project>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Self {
        let git_store = project.read(cx).git_store().clone();
        // Wake the worker whenever the active repository, its statuses, or
        // its conflicts change.
        let git_store_subscription = cx.subscribe_in(
            &git_store,
            window,
            move |this, _git_store, event, _window, cx| match event {
                GitStoreEvent::ActiveRepositoryChanged(_)
                | GitStoreEvent::RepositoryUpdated(
                    _,
                    RepositoryEvent::StatusesChanged { full_scan: _ },
                    true,
                )
                | GitStoreEvent::ConflictsUpdated => {
                    cx.emit(BranchDiffEvent::FileListChanged);
                    *this.update_needed.borrow_mut() = ();
                }
                _ => {}
            },
        );
        let (send, recv) = postage::watch::channel::<()>();
        let worker = window.spawn(cx, {
            let this = cx.weak_entity();
            async |cx| Self::handle_status_updates(this, recv, cx).await
        });
        let repo = git_store.read(cx).active_repository();
        Self {
            diff_base: source,
            repo,
            project,
            tree_diff: None,
            base_commit: None,
            head_commit: None,
            _subscription: git_store_subscription,
            _task: worker,
            update_needed: send,
        }
    }

    /// The base this diff compares the working tree against.
    pub fn diff_base(&self) -> &DiffBase {
        &self.diff_base
    }

    /// Background worker loop: performs an initial tree-diff load, then on
    /// every nudge checks whether the active repository or the relevant
    /// commits changed and reloads the tree diff if so. Exits when the
    /// entity is dropped.
    pub async fn handle_status_updates(
        this: WeakEntity<Self>,
        mut recv: postage::watch::Receiver<()>,
        cx: &mut AsyncWindowContext,
    ) {
        Self::reload_tree_diff(this.clone(), cx).await.log_err();
        while recv.next().await.is_some() {
            let Ok(needs_update) = this.update(cx, |this, cx| {
                let mut needs_update = false;
                let active_repo = this
                    .project
                    .read(cx)
                    .git_store()
                    .read(cx)
                    .active_repository();
                if active_repo != this.repo {
                    needs_update = true;
                    this.repo = active_repo;
                } else if let Some(repo) = this.repo.as_ref() {
                    repo.update(cx, |repo, _| {
                        // Reload when the base ref's tip moved...
                        if let Some(branch) = &repo.branch
                            && let DiffBase::Merge { base_ref } = &this.diff_base
                            && let Some(commit) = branch.most_recent_commit.as_ref()
                            && &branch.ref_name == base_ref
                            && this.base_commit.as_ref() != Some(&commit.sha)
                        {
                            this.base_commit = Some(commit.sha.clone());
                            needs_update = true;
                        }
                        // ...or when HEAD itself moved.
                        if repo.head_commit.as_ref().map(|c| &c.sha) != this.head_commit.as_ref() {
                            this.head_commit = repo.head_commit.as_ref().map(|c| c.sha.clone());
                            needs_update = true;
                        }
                    })
                }
                needs_update
            }) else {
                return;
            };
            if needs_update {
                Self::reload_tree_diff(this.clone(), cx).await.log_err();
            }
        }
    }

    /// Returns the combined status for the buffer's file, merging its
    /// HEAD-relative status with the cached merge-base tree diff. `None`
    /// when the buffer is not in this diff's repository or is unchanged.
    pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
        let (repo, path) = self
            .project
            .read(cx)
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer_id, cx)?;
        if self.repo() == Some(&repo) {
            return self.merge_statuses(
                repo.read(cx)
                    .status_for_path(&path)
                    .map(|status| status.status),
                self.tree_diff
                    .as_ref()
                    .and_then(|diff| diff.entries.get(&path)),
            );
        }
        None
    }

    /// Combines a file's status relative to HEAD with its status relative to
    /// the merge base into a single effective status, or `None` when the
    /// file is effectively unchanged against the merge base.
    pub fn merge_statuses(
        &self,
        diff_from_head: Option<FileStatus>,
        diff_from_merge_base: Option<&TreeDiffStatus>,
    ) -> Option<FileStatus> {
        match (diff_from_head, diff_from_merge_base) {
            (None, None) => None,
            (Some(diff_from_head), None) => Some(diff_from_head),
            // Conflicts always surface as-is.
            (Some(diff_from_head @ FileStatus::Unmerged(_)), _) => Some(diff_from_head),
            // file does not exist in HEAD
            // but *does* exist in work-tree
            // and *does* exist in merge-base
            (
                Some(FileStatus::Untracked)
                | Some(FileStatus::Tracked(TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: _,
                })),
                Some(_),
            ) => Some(FileStatus::Tracked(TrackedStatus {
                index_status: StatusCode::Modified,
                worktree_status: StatusCode::Modified,
            })),
            // file exists in HEAD
            // but *does not* exist in work-tree
            (Some(diff_from_head), Some(diff_from_merge_base)) if diff_from_head.is_deleted() => {
                match diff_from_merge_base {
                    TreeDiffStatus::Added => None, // unchanged, didn't exist in merge base or worktree
                    _ => Some(diff_from_head),
                }
            }
            // file exists in HEAD
            // and *does* exist in work-tree
            (Some(FileStatus::Tracked(_)), Some(tree_status)) => {
                Some(FileStatus::Tracked(TrackedStatus {
                    index_status: match tree_status {
                        TreeDiffStatus::Added { .. } => StatusCode::Added,
                        _ => StatusCode::Modified,
                    },
                    worktree_status: match tree_status {
                        TreeDiffStatus::Added => StatusCode::Added,
                        _ => StatusCode::Modified,
                    },
                }))
            }
            (_, Some(diff_from_merge_base)) => {
                Some(diff_status_to_file_status(diff_from_merge_base))
            }
        }
    }

    /// Recomputes the merge-base tree diff and stores it, emitting
    /// `FileListChanged`. No-op for `DiffBase::Head` or when there is no
    /// active repository (in which case any cached diff is dropped).
    pub async fn reload_tree_diff(
        this: WeakEntity<Self>,
        cx: &mut AsyncWindowContext,
    ) -> Result<()> {
        let task = this.update(cx, |this, cx| {
            let DiffBase::Merge { base_ref } = this.diff_base.clone() else {
                return None;
            };
            let Some(repo) = this.repo.as_ref() else {
                this.tree_diff.take();
                return None;
            };
            repo.update(cx, |repo, cx| {
                Some(repo.diff_tree(
                    DiffTreeType::MergeBase {
                        base: base_ref,
                        head: "HEAD".into(),
                    },
                    cx,
                ))
            })
        })?;
        let Some(task) = task else { return Ok(()) };
        let diff = task.await??;
        this.update(cx, |this, cx| {
            this.tree_diff = Some(diff);
            cx.emit(BranchDiffEvent::FileListChanged);
            cx.notify();
        })
    }

    /// The repository this diff is tracking, if any.
    pub fn repo(&self) -> Option<&Entity<Repository>> {
        self.repo.as_ref()
    }

    /// Collects a `DiffBuffer` (with an in-flight load task) for every file
    /// that differs: first files with a HEAD-relative status, then files
    /// that only differ from the merge base.
    pub fn load_buffers(&mut self, cx: &mut Context<Self>) -> Vec<DiffBuffer> {
        let mut output = Vec::default();
        let Some(repo) = self.repo.clone() else {
            return output;
        };
        self.project.update(cx, |_project, cx| {
            let mut seen = HashSet::default();
            for item in repo.read(cx).cached_status() {
                seen.insert(item.repo_path.clone());
                let branch_diff = self
                    .tree_diff
                    .as_ref()
                    .and_then(|t| t.entries.get(&item.repo_path))
                    .cloned();
                // NOTE(review): `merge_statuses` can return `None` for a
                // (Some, Some) input (deleted-in-worktree + added-in-merge-
                // base), so this `unwrap` looks panickable — verify.
                let status = self
                    .merge_statuses(Some(item.status), branch_diff.as_ref())
                    .unwrap();
                if !status.has_changes() {
                    continue;
                }
                let Some(project_path) =
                    repo.read(cx).repo_path_to_project_path(&item.repo_path, cx)
                else {
                    continue;
                };
                let task = Self::load_buffer(branch_diff, project_path, repo.clone(), cx);
                output.push(DiffBuffer {
                    repo_path: item.repo_path.clone(),
                    load: task,
                    file_status: item.status,
                });
            }
            let Some(tree_diff) = self.tree_diff.as_ref() else {
                return;
            };
            // Files changed only relative to the merge base (no HEAD status).
            for (path, branch_diff) in tree_diff.entries.iter() {
                if seen.contains(&path) {
                    continue;
                }
                let Some(project_path) = repo.read(cx).repo_path_to_project_path(&path, cx) else {
                    continue;
                };
                let task =
                    Self::load_buffer(Some(branch_diff.clone()), project_path, repo.clone(), cx);
                let file_status = diff_status_to_file_status(branch_diff);
                output.push(DiffBuffer {
                    repo_path: path.clone(),
                    load: task,
                    file_status,
                });
            }
        });
        output
    }

    /// Opens the buffer at `project_path` and its diff: against the old blob
    /// from the tree diff when `branch_diff` is present, otherwise the
    /// uncommitted diff.
    fn load_buffer(
        branch_diff: Option<git::status::TreeDiffStatus>,
        project_path: crate::ProjectPath,
        repo: Entity<Repository>,
        cx: &Context<'_, Project>,
    ) -> Task<Result<(Entity<Buffer>, Entity<BufferDiff>)>> {
        let task = cx.spawn(async move |project, cx| {
            let buffer = project
                .update(cx, |project, cx| project.open_buffer(project_path, cx))?
                .await?;
            let languages = project.update(cx, |project, _cx| project.languages().clone())?;
            let changes = if let Some(entry) = branch_diff {
                // Added files have no old blob to diff against.
                let oid = match entry {
                    git::status::TreeDiffStatus::Added { .. } => None,
                    git::status::TreeDiffStatus::Modified { old, .. }
                    | git::status::TreeDiffStatus::Deleted { old } => Some(old),
                };
                project
                    .update(cx, |project, cx| {
                        project.git_store().update(cx, |git_store, cx| {
                            git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx)
                        })
                    })?
                    .await?
            } else {
                project
                    .update(cx, |project, cx| {
                        project.open_uncommitted_diff(buffer.clone(), cx)
                    })?
                    .await?
            };
            Ok((buffer, changes))
        });
        task
    }
}
/// Maps a merge-base tree-diff status onto a `FileStatus`, mirroring the
/// change in both the index and worktree columns.
// Fixed: the original bound the match result to a local and immediately
// returned it (clippy::let_and_return); the match is now the function's
// tail expression. Behavior is unchanged.
fn diff_status_to_file_status(branch_diff: &git::status::TreeDiffStatus) -> FileStatus {
    match branch_diff {
        git::status::TreeDiffStatus::Added { .. } => FileStatus::Tracked(TrackedStatus {
            index_status: StatusCode::Added,
            worktree_status: StatusCode::Added,
        }),
        git::status::TreeDiffStatus::Modified { .. } => FileStatus::Tracked(TrackedStatus {
            index_status: StatusCode::Modified,
            worktree_status: StatusCode::Modified,
        }),
        git::status::TreeDiffStatus::Deleted { .. } => FileStatus::Tracked(TrackedStatus {
            index_status: StatusCode::Deleted,
            worktree_status: StatusCode::Deleted,
        }),
    }
}
/// A single changed file in a [`BranchDiff`], with an in-flight task that
/// resolves to its buffer and diff.
#[derive(Debug)]
pub struct DiffBuffer {
    pub repo_path: RepoPath,
    // Effective status after merging HEAD-relative and merge-base statuses.
    pub file_status: FileStatus,
    pub load: Task<Result<(Entity<Buffer>, Entity<BufferDiff>)>>,
}

View File

@@ -657,7 +657,6 @@ impl LspCommand for GetLspRunnables {
);
task_template.args.extend(cargo.cargo_args);
if !cargo.executable_args.is_empty() {
let shell_kind = task_template.shell.shell_kind(cfg!(windows));
task_template.args.push("--".to_string());
task_template.args.extend(
cargo
@@ -683,7 +682,7 @@ impl LspCommand for GetLspRunnables {
// That bit is not auto-expanded when using single quotes.
// Escape extra cargo args unconditionally as those are unlikely to contain `~`.
.flat_map(|extra_arg| {
shell_kind.try_quote(&extra_arg).map(|s| s.to_string())
shlex::try_quote(&extra_arg).ok().map(|s| s.to_string())
}),
);
}

View File

@@ -40,7 +40,7 @@ use crate::{
git_store::GitStore,
lsp_store::{SymbolLocation, log_store::LogKind},
};
pub use agent_server_store::{AgentServerStore, AgentServersUpdated, ExternalAgentServerName};
pub use agent_server_store::{AgentServerStore, AgentServersUpdated};
pub use git_store::{
ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},

View File

@@ -4251,73 +4251,6 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) {
assert_eq!(opened_buffer, buffer);
}
// Regression test: saving a buffer under a new path must re-point the buffer
// at the new file while leaving the original file on disk untouched.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();
    // Edit in memory only, so the original file's contents stay intact.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });
    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };
            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });
    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();
    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
use worktree::WorktreeModelHandle as _;

View File

@@ -167,19 +167,18 @@ impl Project {
match remote_client {
Some(remote_client) => match activation_script.clone() {
activation_script if !activation_script.is_empty() => {
let separator = shell_kind.sequential_commands_separator();
let activation_script =
activation_script.join(&format!("{separator} "));
let activation_script = activation_script.join("; ");
let to_run = format_to_run();
let shell = remote_client
.read(cx)
.shell()
.unwrap_or_else(get_default_system_shell);
let arg = format!("{activation_script}{separator} {to_run}");
let args = shell_kind.args_for_shell(false, arg);
let args =
vec!["-c".to_owned(), format!("{activation_script}; {to_run}")];
create_remote_shell(
Some((&shell, &args)),
Some((
&remote_client
.read(cx)
.shell()
.unwrap_or_else(get_default_system_shell),
&args,
)),
env,
path,
remote_client,
@@ -548,7 +547,7 @@ fn create_remote_shell(
Shell::WithArguments {
program: command.program,
args: command.args,
title_override: Some(format!("{} — Terminal", host)),
title_override: Some(format!("{} — Terminal", host).into()),
},
command.env,
))

View File

@@ -472,37 +472,3 @@ message GetDefaultBranch {
message GetDefaultBranchResponse {
optional string branch = 1;
}
message GetTreeDiff {
uint64 project_id = 1;
uint64 repository_id = 2;
bool is_merge = 3;
string base = 4;
string head = 5;
}
message GetTreeDiffResponse {
repeated TreeDiffStatus entries = 1;
}
message TreeDiffStatus {
enum Status {
ADDED = 0;
MODIFIED = 1;
DELETED = 2;
}
Status status = 1;
string path = 2;
optional string oid = 3;
}
message GetBlobContent {
uint64 project_id = 1;
uint64 repository_id = 2;
string oid =3;
}
message GetBlobContentResponse {
string content = 1;
}

View File

@@ -421,13 +421,7 @@ message Envelope {
RemoteStarted remote_started = 381;
GetDirectoryEnvironment get_directory_environment = 382;
DirectoryEnvironment directory_environment = 383;
GetTreeDiff get_tree_diff = 384;
GetTreeDiffResponse get_tree_diff_response = 385;
GetBlobContent get_blob_content = 386;
GetBlobContentResponse get_blob_content_response = 387; // current max
DirectoryEnvironment directory_environment = 383; // current max
}
reserved 87 to 88;

View File

@@ -316,10 +316,6 @@ messages!(
(PullWorkspaceDiagnostics, Background),
(GetDefaultBranch, Background),
(GetDefaultBranchResponse, Background),
(GetTreeDiff, Background),
(GetTreeDiffResponse, Background),
(GetBlobContent, Background),
(GetBlobContentResponse, Background),
(GitClone, Background),
(GitCloneResponse, Background),
(ToggleLspLogs, Background),
@@ -501,8 +497,6 @@ request_messages!(
(GetDocumentDiagnostics, GetDocumentDiagnosticsResponse),
(PullWorkspaceDiagnostics, Ack),
(GetDefaultBranch, GetDefaultBranchResponse),
(GetBlobContent, GetBlobContentResponse),
(GetTreeDiff, GetTreeDiffResponse),
(GitClone, GitCloneResponse),
(ToggleLspLogs, Ack),
(GetDirectoryEnvironment, DirectoryEnvironment),
@@ -665,8 +659,6 @@ entity_messages!(
GetDocumentDiagnostics,
PullWorkspaceDiagnostics,
GetDefaultBranch,
GetTreeDiff,
GetBlobContent,
GitClone,
GetAgentServerCommand,
ExternalAgentsUpdated,

View File

@@ -34,6 +34,7 @@ rpc = { workspace = true, features = ["gpui"] }
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
shlex.workspace = true
smol.workspace = true
tempfile.workspace = true
thiserror.workspace = true

View File

@@ -203,6 +203,17 @@ impl AsMut<Child> for MasterProcess {
}
}
macro_rules! shell_script {
($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) => {{
format!(
$fmt,
$(
$name = shlex::try_quote($arg).unwrap()
),+
)
}};
}
#[async_trait(?Send)]
impl RemoteConnection for SshRemoteConnection {
async fn kill(&self) -> Result<()> {
@@ -727,23 +738,21 @@ impl SshRemoteConnection {
delegate.set_status(Some("Extracting remote development server"), cx);
let server_mode = 0o755;
let shell_kind = ShellKind::Posix;
let orig_tmp_path = tmp_path.display(self.path_style());
let server_mode = format!("{:o}", server_mode);
let server_mode = shell_kind
.try_quote(&server_mode)
.context("shell quoting")?;
let dst_path = dst_path.display(self.path_style());
let dst_path = shell_kind.try_quote(&dst_path).context("shell quoting")?;
let script = if let Some(tmp_path) = orig_tmp_path.strip_suffix(".gz") {
format!(
shell_script!(
"gunzip -f {orig_tmp_path} && chmod {server_mode} {tmp_path} && mv {tmp_path} {dst_path}",
server_mode = &format!("{:o}", server_mode),
dst_path = &dst_path.display(self.path_style()),
)
} else {
format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",)
shell_script!(
"chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",
server_mode = &format!("{:o}", server_mode),
dst_path = &dst_path.display(self.path_style())
)
};
let args = shell_kind.args_for_shell(false, script.to_string());
self.socket.run_command("sh", &args).await?;
self.socket.run_command("sh", &["-c", &script]).await?;
Ok(())
}
@@ -877,12 +886,8 @@ impl SshSocket {
// into a machine. You must use `cd` to get back to $HOME.
// You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
fn ssh_command(&self, program: &str, args: &[impl AsRef<str>]) -> process::Command {
let shell_kind = ShellKind::Posix;
let mut command = util::command::new_smol_command("ssh");
let mut to_run = shell_kind
.try_quote(program)
.expect("shell quoting")
.into_owned();
let mut to_run = shlex::try_quote(program).unwrap().into_owned();
for arg in args {
// We're trying to work with: sh, bash, zsh, fish, tcsh, ...?
debug_assert!(
@@ -890,10 +895,9 @@ impl SshSocket {
"multiline arguments do not work in all shells"
);
to_run.push(' ');
to_run.push_str(&shell_kind.try_quote(arg.as_ref()).expect("shell quoting"));
to_run.push_str(&shlex::try_quote(arg.as_ref()).unwrap());
}
let separator = shell_kind.sequential_commands_separator();
let to_run = format!("cd{separator} {to_run}");
let to_run = format!("cd; {to_run}");
self.ssh_options(&mut command, true)
.arg(self.connection_options.ssh_url())
.arg("-T")
@@ -902,7 +906,7 @@ impl SshSocket {
command
}
async fn run_command(&self, program: &str, args: &[impl AsRef<str>]) -> Result<String> {
async fn run_command(&self, program: &str, args: &[&str]) -> Result<String> {
let output = self.ssh_command(program, args).output().await?;
anyhow::ensure!(
output.status.success(),
@@ -1076,10 +1080,7 @@ impl SshConnectionOptions {
"-w",
];
let mut tokens = ShellKind::Posix
.split(input)
.context("invalid input")?
.into_iter();
let mut tokens = shlex::split(input).context("invalid input")?.into_iter();
'outer: while let Some(arg) = tokens.next() {
if ALLOWED_OPTS.contains(&(&arg as &str)) {
@@ -1242,7 +1243,6 @@ fn build_command(
) -> Result<CommandTemplate> {
use std::fmt::Write as _;
let shell_kind = ShellKind::new(ssh_shell, false);
let mut exec = String::new();
if let Some(working_dir) = working_dir {
let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string();
@@ -1252,38 +1252,29 @@ fn build_command(
const TILDE_PREFIX: &'static str = "~/";
if working_dir.starts_with(TILDE_PREFIX) {
let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
write!(exec, "cd \"$HOME/{working_dir}\" && ",)?;
write!(exec, "cd \"$HOME/{working_dir}\" && ",).unwrap();
} else {
write!(exec, "cd \"{working_dir}\" && ",)?;
write!(exec, "cd \"{working_dir}\" && ",).unwrap();
}
} else {
write!(exec, "cd && ")?;
write!(exec, "cd && ").unwrap();
};
write!(exec, "exec env ")?;
write!(exec, "exec env ").unwrap();
for (k, v) in input_env.iter() {
write!(
exec,
"{}={} ",
k,
shell_kind.try_quote(v).context("shell quoting")?
)?;
if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
write!(exec, "{}={} ", k, v).unwrap();
}
}
if let Some(input_program) = input_program {
write!(
exec,
"{}",
shell_kind
.try_quote(&input_program)
.context("shell quoting")?
)?;
write!(exec, "{}", shlex::try_quote(&input_program).unwrap()).unwrap();
for arg in input_args {
let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
write!(exec, " {}", &arg)?;
let arg = shlex::try_quote(&arg)?;
write!(exec, " {}", &arg).unwrap();
}
} else {
write!(exec, "{ssh_shell} -l")?;
write!(exec, "{ssh_shell} -l").unwrap();
};
let mut args = Vec::new();

View File

@@ -2,7 +2,7 @@ use crate::{
RemoteClientDelegate, RemotePlatform,
remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions},
};
use anyhow::{Context, Result, anyhow, bail};
use anyhow::{Result, anyhow, bail};
use async_trait::async_trait;
use collections::HashMap;
use futures::channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender};
@@ -441,7 +441,6 @@ impl RemoteConnection for WslRemoteConnection {
bail!("WSL shares the network interface with the host system");
}
let shell_kind = ShellKind::new(&self.shell, false);
let working_dir = working_dir
.map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string())
.unwrap_or("~".to_string());
@@ -449,26 +448,19 @@ impl RemoteConnection for WslRemoteConnection {
let mut exec = String::from("exec env ");
for (k, v) in env.iter() {
write!(
exec,
"{}={} ",
k,
shell_kind.try_quote(v).context("shell quoting")?
)?;
if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
write!(exec, "{}={} ", k, v).unwrap();
}
}
if let Some(program) = program {
write!(
exec,
"{}",
shell_kind.try_quote(&program).context("shell quoting")?
)?;
write!(exec, "{}", shlex::try_quote(&program)?).unwrap();
for arg in args {
let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
write!(exec, " {}", &arg)?;
let arg = shlex::try_quote(&arg)?;
write!(exec, " {}", &arg).unwrap();
}
} else {
write!(&mut exec, "{} -l", self.shell)?;
write!(&mut exec, "{} -l", self.shell).unwrap();
}
let wsl_args = if let Some(user) = &self.connection_options.user {

View File

@@ -32,7 +32,7 @@ use std::{
path::{Path, PathBuf},
sync::{Arc, atomic::AtomicUsize},
};
use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind};
use sysinfo::System;
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
use worktree::Worktree;
@@ -747,16 +747,9 @@ impl HeadlessProject {
_cx: AsyncApp,
) -> Result<proto::GetProcessesResponse> {
let mut processes = Vec::new();
let refresh_kind = RefreshKind::nothing().with_processes(
ProcessRefreshKind::nothing()
.without_tasks()
.with_cmd(UpdateKind::Always),
);
let system = System::new_all();
for process in System::new_with_specifics(refresh_kind)
.processes()
.values()
{
for (_pid, process) in system.processes() {
let name = process.name().to_string_lossy().into_owned();
let command = process
.cmd()
@@ -781,7 +774,7 @@ impl HeadlessProject {
envelope: TypedEnvelope<proto::GetDirectoryEnvironment>,
mut cx: AsyncApp,
) -> Result<proto::DirectoryEnvironment> {
let shell = task::shell_from_proto(envelope.payload.shell.context("missing shell")?)?;
let shell = task::Shell::from_proto(envelope.payload.shell.context("missing shell")?)?;
let directory = PathBuf::from(envelope.payload.directory);
let environment = this
.update(&mut cx, |this, cx| {

View File

@@ -42,7 +42,6 @@ pub enum ExtensionProvides {
Grammars,
LanguageServers,
ContextServers,
AgentServers,
SlashCommands,
IndexedDocsProviders,
Snippets,

View File

@@ -4500,7 +4500,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
title: "Program",
description: "The shell program to use.",
field: Box::new(SettingField {
json_path: Some("terminal.shell"),
json_path: Some("terminal.shell.program"),
pick: |settings_content| {
match settings_content.terminal.as_ref()?.project.shell.as_ref() {
Some(settings::Shell::Program(program)) => Some(program),

View File

@@ -6,10 +6,10 @@ use editor::{Editor, EditorEvent};
use feature_flags::FeatureFlag;
use fuzzy::StringMatchCandidate;
use gpui::{
Action, App, ClipboardItem, DEFAULT_ADDITIONAL_WINDOW_SIZE, Div, Entity, FocusHandle,
Focusable, Global, ListState, ReadGlobal as _, ScrollHandle, Stateful, Subscription, Task,
TitlebarOptions, UniformListScrollHandle, Window, WindowBounds, WindowHandle, WindowOptions,
actions, div, list, point, prelude::*, px, uniform_list,
Action, App, DEFAULT_ADDITIONAL_WINDOW_SIZE, Div, Entity, FocusHandle, Focusable, Global,
ListState, ReadGlobal as _, ScrollHandle, Stateful, Subscription, Task, TitlebarOptions,
UniformListScrollHandle, Window, WindowBounds, WindowHandle, WindowOptions, actions, div, list,
point, prelude::*, px, uniform_list,
};
use heck::ToTitleCase as _;
use project::{Project, WorktreeId};
@@ -18,13 +18,13 @@ use schemars::JsonSchema;
use serde::Deserialize;
use settings::{Settings, SettingsContent, SettingsStore};
use std::{
any::{type_name, Any, TypeId},
any::{Any, TypeId, type_name},
cell::RefCell,
collections::HashMap,
num::{NonZero, NonZeroU32},
ops::Range,
rc::Rc,
sync::{atomic::AtomicI32, Arc, LazyLock, RwLock},
sync::{Arc, LazyLock, RwLock},
};
use title_bar::platform_title_bar::PlatformTitleBar;
use ui::{
@@ -512,10 +512,43 @@ pub fn open_settings_editor(
return;
}
settings_window.search_bar.update(cx, |editor, cx| {
editor.set_text(format!("#{path}"), window, cx);
});
settings_window.update_matches(cx);
settings_window.current_file = SettingsUiFile::User;
settings_window.build_ui(window, cx);
let mut item_info = None;
'search: for (nav_entry_index, entry) in settings_window.navbar_entries.iter().enumerate() {
if entry.is_root {
continue;
}
let page_index = entry.page_index;
let header_index = entry
.item_index
.expect("non-root entries should have an item index");
for item_index in header_index + 1..settings_window.pages[page_index].items.len() {
let item = &settings_window.pages[page_index].items[item_index];
if let SettingsPageItem::SectionHeader(_) = item {
break;
}
if let SettingsPageItem::SettingItem(item) = item {
if item.field.json_path() == Some(path) {
if !item.files.contains(USER) {
log::error!("Found item {}, but it is not a user setting", path);
return;
}
item_info = Some((item_index, nav_entry_index));
break 'search;
}
}
}
}
let Some((item_index, navbar_entry_index)) = item_info else {
log::error!("Failed to find item for {}", path);
return;
};
settings_window.open_navbar_entry_page(navbar_entry_index);
window.focus(&settings_window.focus_handle_for_content_element(item_index, cx));
settings_window.scroll_to_content_item(item_index, window, cx);
}
let existing_window = cx
@@ -640,14 +673,13 @@ pub struct SettingsWindow {
struct SearchIndex {
bm25_engine: bm25::SearchEngine<usize>,
fuzzy_match_candidates: Vec<StringMatchCandidate>,
key_lut: Vec<SearchKeyLUTEntry>,
key_lut: Vec<SearchItemKey>,
}
struct SearchKeyLUTEntry {
struct SearchItemKey {
page_index: usize,
header_index: usize,
item_index: usize,
json_path: Option<&'static str>,
}
struct SubPage {
@@ -901,47 +933,17 @@ fn render_settings_item(
let (found_in_file, _) = setting_item.field.file_set_in(file.clone(), cx);
let file_set_in = SettingsUiFile::from_settings(found_in_file.clone());
let clipboard_has_link = cx
.read_from_clipboard()
.and_then(|entry| entry.text())
.map_or(false, |maybe_url| {
maybe_url.strip_prefix("zed://settings/") == setting_item.field.json_path()
});
let (link_icon, link_icon_color) = if clipboard_has_link {
(IconName::Check, Color::Success)
} else {
(IconName::Hash, Color::Muted)
};
h_flex()
.id(setting_item.title)
.relative()
.min_w_0()
.justify_between()
.child(
v_flex()
.w_1_2()
.group("setting-item")
.child(
h_flex()
.w_full()
.gap_1()
.ml_neg_8()
// .group_hover("setting-item", |s| s.gap_10())
.child(
IconButton::new("copy-link-btn", link_icon)
.icon_color(link_icon_color)
.icon_size(IconSize::Small)
.shape(IconButtonShape::Square)
.tooltip(Tooltip::text("Copy Link"))
.when_some(setting_item.field.json_path(), |this, path| {
this.on_click(cx.listener(move |_, _, _, cx| {
let link = format!("zed://settings/{}", path);
cx.write_to_clipboard(ClipboardItem::new_string(link));
cx.notify();
}))
})
)
.child(Label::new(SharedString::new_static(setting_item.title)))
.when_some(
setting_item
@@ -984,38 +986,6 @@ fn render_settings_item(
),
)
.child(control)
// .when(sub_page_stack().is_empty(), |this| {
// this.child(
// div()
// .visible_on_hover("setting-item")
// .absolute()
// .top_0()
// .left_neg_5(
// )
// .child({
// IconButton::new("copy-link-btn", link_icon)
// .icon_color(link_icon_color)
// .icon_size(IconSize::Small)
// .shape(IconButtonShape::Square)
// .tooltip(Tooltip::text("Copy Link"))
// .when_some(
// setting_item.field.json_path(),
// |this, path| {
// this.on_click(cx.listener(
// move |_, _, _, cx| {
// let link =
// format!("zed://settings/{}", path);
// cx.write_to_clipboard(
// ClipboardItem::new_string(link),
// );
// cx.notify();
// },
// ))
// },
// )
// }),
// )
// })
}
struct SettingItem {
@@ -1492,7 +1462,7 @@ impl SettingsWindow {
fn update_matches(&mut self, cx: &mut Context<SettingsWindow>) {
self.search_task.take();
let mut query = self.search_bar.read(cx).text(cx);
let query = self.search_bar.read(cx).text(cx);
if query.is_empty() || self.search_index.is_none() {
for page in &mut self.filter_table {
page.fill(true);
@@ -1504,14 +1474,6 @@ impl SettingsWindow {
return;
}
let is_json_link_query;
if query.starts_with("#") {
query.remove(0);
is_json_link_query = true;
} else {
is_json_link_query = false;
}
let search_index = self.search_index.as_ref().unwrap().clone();
fn update_matches_inner(
@@ -1525,11 +1487,10 @@ impl SettingsWindow {
}
for match_index in match_indices {
let SearchKeyLUTEntry {
let SearchItemKey {
page_index,
header_index,
item_index,
..
} = search_index.key_lut[match_index];
let page = &mut this.filter_table[page_index];
page[header_index] = true;
@@ -1543,29 +1504,6 @@ impl SettingsWindow {
}
self.search_task = Some(cx.spawn(async move |this, cx| {
if is_json_link_query {
let mut indices = vec![];
for (index, SearchKeyLUTEntry { json_path, .. }) in
search_index.key_lut.iter().enumerate()
{
let Some(json_path) = json_path else {
continue;
};
if let Some(post) = query.strip_prefix(json_path)
&& (post.is_empty() || post.starts_with('.'))
{
indices.push(index);
}
}
if !indices.is_empty() {
this.update(cx, |this, cx| {
update_matches_inner(this, search_index.as_ref(), indices.into_iter(), cx);
})
.ok();
return;
}
}
let bm25_task = cx.background_spawn({
let search_index = search_index.clone();
let max_results = search_index.key_lut.len();
@@ -1653,7 +1591,7 @@ impl SettingsWindow {
}
fn build_search_index(&mut self) {
let mut key_lut: Vec<SearchKeyLUTEntry> = vec![];
let mut key_lut: Vec<SearchItemKey> = vec![];
let mut documents = Vec::default();
let mut fuzzy_match_candidates = Vec::default();
@@ -1675,16 +1613,11 @@ impl SettingsWindow {
let mut header_str = "";
for (item_index, item) in page.items.iter().enumerate() {
let key_index = key_lut.len();
let mut json_path = None;
match item {
SettingsPageItem::DynamicItem(DynamicItem {
discriminant: item, ..
})
| SettingsPageItem::SettingItem(item) => {
json_path = item
.field
.json_path()
.map(|path| path.trim_end_matches('$'));
documents.push(bm25::Document {
id: key_index,
contents: [page.title, header_str, item.title, item.description]
@@ -1718,11 +1651,10 @@ impl SettingsWindow {
push_candidates(&mut fuzzy_match_candidates, key_index, page.title);
push_candidates(&mut fuzzy_match_candidates, key_index, header_str);
key_lut.push(SearchKeyLUTEntry {
key_lut.push(SearchItemKey {
page_index,
header_index,
item_index,
json_path,
});
}
}
@@ -2812,14 +2744,12 @@ impl SettingsWindow {
.track_focus(&self.content_focus_handle.focus_handle(cx))
.flex_1()
.pt_6()
// .px_8()
.px_8()
.bg(cx.theme().colors().editor_background)
.child(warning_banner)
.child(page_header)
.child(
div()
.px_8()
// .debug_bg_red()
.size_full()
.tab_group()
.tab_index(CONTENT_GROUP_TAB_INDEX)
@@ -3265,8 +3195,7 @@ fn render_toggle_button<B: Into<bool> + From<bool> + Copy>(
};
Switch::new("toggle_button", toggle_state)
.tab_index(0_isize)
.color(SwitchColor::Accent)
.color(ui::SwitchColor::Accent)
.on_click({
move |state, _window, cx| {
let state = *state == ui::ToggleState::Selected;
@@ -3276,6 +3205,8 @@ fn render_toggle_button<B: Into<bool> + From<bool> + Copy>(
.log_err(); // todo(settings_ui) don't log err
}
})
.tab_index(0_isize)
.color(SwitchColor::Accent)
.into_any_element()
}
@@ -3359,13 +3290,13 @@ where
})
}),
)
.tab_index(0)
.trigger_size(ButtonSize::Medium)
.style(DropdownStyle::Outlined)
.offset(gpui::Point {
x: px(0.0),
y: px(2.0),
})
.tab_index(0)
.into_any_element()
}

View File

@@ -1,5 +1,8 @@
use crate::shell::get_system_shell;
use crate::shell::{Shell, ShellKind};
use util::shell::get_system_shell;
use crate::Shell;
pub use util::shell::ShellKind;
/// ShellBuilder is used to turn a user-requested task into a
/// program that can be executed by the shell.

View File

@@ -3,6 +3,7 @@
mod adapter_schema;
mod debug_format;
mod serde_helpers;
mod shell_builder;
pub mod static_source;
mod task_template;
mod vscode_debug_format;
@@ -11,22 +12,23 @@ mod vscode_format;
use anyhow::Context as _;
use collections::{HashMap, HashSet, hash_map};
use gpui::SharedString;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::path::PathBuf;
use std::str::FromStr;
use util::get_system_shell;
pub use adapter_schema::{AdapterSchema, AdapterSchemas};
pub use debug_format::{
AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, DebugTaskFile, LaunchRequest,
Request, TcpArgumentsTemplate, ZedDebugConfig,
};
pub use shell_builder::{ShellBuilder, ShellKind};
pub use task_template::{
DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates,
substitute_variables_in_map, substitute_variables_in_str,
};
pub use util::shell::{Shell, ShellKind};
pub use util::shell_builder::ShellBuilder;
pub use vscode_debug_format::VsCodeDebugTaskFile;
pub use vscode_format::VsCodeTaskFile;
pub use zed_actions::RevealTarget;
@@ -316,32 +318,81 @@ pub struct TaskContext {
#[derive(Clone, Debug)]
pub struct RunnableTag(pub SharedString);
pub fn shell_from_proto(proto: proto::Shell) -> anyhow::Result<Shell> {
let shell_type = proto.shell_type.context("invalid shell type")?;
let shell = match shell_type {
proto::shell::ShellType::System(_) => Shell::System,
proto::shell::ShellType::Program(program) => Shell::Program(program),
proto::shell::ShellType::WithArguments(program) => Shell::WithArguments {
program: program.program,
args: program.args,
title_override: None,
},
};
Ok(shell)
/// Shell configuration to open the terminal with.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)]
#[serde(rename_all = "snake_case")]
pub enum Shell {
/// Use the system's default terminal configuration in /etc/passwd
#[default]
System,
/// Use a specific program with no arguments.
Program(String),
/// Use a specific program with arguments.
WithArguments {
/// The program to run.
program: String,
/// The arguments to pass to the program.
args: Vec<String>,
/// An optional string to override the title of the terminal tab
title_override: Option<SharedString>,
},
}
pub fn shell_to_proto(shell: Shell) -> proto::Shell {
let shell_type = match shell {
Shell::System => proto::shell::ShellType::System(proto::System {}),
Shell::Program(program) => proto::shell::ShellType::Program(program),
Shell::WithArguments {
program,
args,
title_override: _,
} => proto::shell::ShellType::WithArguments(proto::shell::WithArguments { program, args }),
};
proto::Shell {
shell_type: Some(shell_type),
impl Shell {
pub fn program(&self) -> String {
match self {
Shell::Program(program) => program.clone(),
Shell::WithArguments { program, .. } => program.clone(),
Shell::System => get_system_shell(),
}
}
pub fn program_and_args(&self) -> (String, &[String]) {
match self {
Shell::Program(program) => (program.clone(), &[]),
Shell::WithArguments { program, args, .. } => (program.clone(), args),
Shell::System => (get_system_shell(), &[]),
}
}
pub fn shell_kind(&self, is_windows: bool) -> ShellKind {
match self {
Shell::Program(program) => ShellKind::new(program, is_windows),
Shell::WithArguments { program, .. } => ShellKind::new(program, is_windows),
Shell::System => ShellKind::system(),
}
}
pub fn from_proto(proto: proto::Shell) -> anyhow::Result<Self> {
let shell_type = proto.shell_type.context("invalid shell type")?;
let shell = match shell_type {
proto::shell::ShellType::System(_) => Self::System,
proto::shell::ShellType::Program(program) => Self::Program(program),
proto::shell::ShellType::WithArguments(program) => Self::WithArguments {
program: program.program,
args: program.args,
title_override: None,
},
};
Ok(shell)
}
pub fn to_proto(self) -> proto::Shell {
let shell_type = match self {
Shell::System => proto::shell::ShellType::System(proto::System {}),
Shell::Program(program) => proto::shell::ShellType::Program(program),
Shell::WithArguments {
program,
args,
title_override: _,
} => proto::shell::ShellType::WithArguments(proto::shell::WithArguments {
program,
args,
}),
};
proto::Shell {
shell_type: Some(shell_type),
}
}
}

View File

@@ -67,7 +67,7 @@ use thiserror::Error;
use gpui::{
App, AppContext as _, Bounds, ClipboardItem, Context, EventEmitter, Hsla, Keystroke, Modifiers,
MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Point, Rgba,
ScrollWheelEvent, Size, Task, TouchPhase, Window, actions, black, px,
ScrollWheelEvent, SharedString, Size, Task, TouchPhase, Window, actions, black, px,
};
use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
@@ -277,7 +277,7 @@ pub struct TerminalError {
pub directory: Option<PathBuf>,
pub program: Option<String>,
pub args: Option<Vec<String>>,
pub title_override: Option<String>,
pub title_override: Option<SharedString>,
pub source: std::io::Error,
}
@@ -446,14 +446,14 @@ impl TerminalBuilder {
struct ShellParams {
program: String,
args: Option<Vec<String>>,
title_override: Option<String>,
title_override: Option<SharedString>,
}
impl ShellParams {
fn new(
program: String,
args: Option<Vec<String>>,
title_override: Option<String>,
title_override: Option<SharedString>,
) -> Self {
log::info!("Using {program} as shell");
Self {
@@ -514,8 +514,10 @@ impl TerminalBuilder {
working_directory: working_directory.clone(),
drain_on_exit: true,
env: env.clone().into_iter().collect(),
// We do not want to escape arguments if we are using CMD as our shell.
// If we do we end up with too many quotes/escaped quotes for CMD to handle.
#[cfg(windows)]
escape_args: shell_kind.tty_escape_args(),
escape_args: shell_kind != util::shell::ShellKind::Cmd,
}
};
@@ -821,7 +823,7 @@ pub struct Terminal {
pub last_content: TerminalContent,
pub selection_head: Option<AlacPoint>,
pub breadcrumb_text: String,
title_override: Option<String>,
title_override: Option<SharedString>,
scroll_px: Pixels,
next_link_id: usize,
selection_phase: SelectionPhase,

View File

@@ -66,7 +66,7 @@ fn settings_shell_to_task_shell(shell: settings::Shell) -> Shell {
} => Shell::WithArguments {
program,
args,
title_override: title_override.map(Into::into),
title_override,
},
}
}

View File

@@ -640,11 +640,6 @@ impl RenderOnce for ButtonLike {
.filter(|_| self.selected)
.unwrap_or(self.style);
let is_outlined = matches!(
self.style,
ButtonStyle::Outlined | ButtonStyle::OutlinedGhost
);
self.base
.h_flex()
.id(self.id.clone())
@@ -659,7 +654,13 @@ impl RenderOnce for ButtonLike {
.when_some(self.width, |this, width| {
this.w(width).justify_center().text_center()
})
.when(is_outlined, |this| this.border_1())
.when(
matches!(
self.style,
ButtonStyle::Outlined | ButtonStyle::OutlinedGhost
),
|this| this.border_1(),
)
.when_some(self.rounding, |this, rounding| {
this.when(rounding.top_left, |this| this.rounded_tl_sm())
.when(rounding.top_right, |this| this.rounded_tr_sm())
@@ -687,16 +688,13 @@ impl RenderOnce for ButtonLike {
let hovered_style = style.hovered(self.layer, cx);
let focus_color =
|refinement: StyleRefinement| refinement.bg(hovered_style.background);
this.cursor(self.cursor_style)
.hover(focus_color)
.map(|this| {
if is_outlined {
this.focus_visible(|s| {
s.border_color(cx.theme().colors().border_focused)
})
if matches!(self.style, ButtonStyle::Outlined) {
this.focus(|s| s.border_color(cx.theme().colors().border_focused))
} else {
this.focus_visible(focus_color)
this.focus(focus_color)
}
})
.active(|active| active.bg(style.active(cx).background))

View File

@@ -47,7 +47,6 @@ pub struct ContextMenuEntry {
toggle: Option<(IconPosition, bool)>,
label: SharedString,
icon: Option<IconName>,
custom_icon_path: Option<SharedString>,
icon_position: IconPosition,
icon_size: IconSize,
icon_color: Option<Color>,
@@ -67,7 +66,6 @@ impl ContextMenuEntry {
toggle: None,
label: label.into(),
icon: None,
custom_icon_path: None,
icon_position: IconPosition::Start,
icon_size: IconSize::Small,
icon_color: None,
@@ -92,12 +90,6 @@ impl ContextMenuEntry {
self
}
pub fn custom_icon_path(mut self, path: impl Into<SharedString>) -> Self {
self.custom_icon_path = Some(path.into());
self.icon = None; // Clear IconName if custom path is set
self
}
pub fn icon_position(mut self, position: IconPosition) -> Self {
self.icon_position = position;
self
@@ -395,7 +387,6 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -424,7 +415,6 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -453,7 +443,6 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -481,7 +470,6 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
custom_icon_path: None,
icon_position: position,
icon_size: IconSize::Small,
icon_color: None,
@@ -540,7 +528,6 @@ impl ContextMenu {
window.dispatch_action(action.boxed_clone(), cx);
}),
icon: None,
custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -571,7 +558,6 @@ impl ContextMenu {
window.dispatch_action(action.boxed_clone(), cx);
}),
icon: None,
custom_icon_path: None,
icon_size: IconSize::Small,
icon_position: IconPosition::End,
icon_color: None,
@@ -592,7 +578,6 @@ impl ContextMenu {
action: Some(action.boxed_clone()),
handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)),
icon: Some(IconName::ArrowUpRight),
custom_icon_path: None,
icon_size: IconSize::XSmall,
icon_position: IconPosition::End,
icon_color: None,
@@ -912,7 +897,6 @@ impl ContextMenu {
label,
handler,
icon,
custom_icon_path,
icon_position,
icon_size,
icon_color,
@@ -943,29 +927,7 @@ impl ContextMenu {
Color::Default
};
let label_element = if let Some(custom_path) = custom_icon_path {
h_flex()
.gap_1p5()
.when(
*icon_position == IconPosition::Start && toggle.is_none(),
|flex| {
flex.child(
Icon::from_path(custom_path.clone())
.size(*icon_size)
.color(icon_color),
)
},
)
.child(Label::new(label.clone()).color(label_color).truncate())
.when(*icon_position == IconPosition::End, |flex| {
flex.child(
Icon::from_path(custom_path.clone())
.size(*icon_size)
.color(icon_color),
)
})
.into_any_element()
} else if let Some(icon_name) = icon {
let label_element = if let Some(icon_name) = icon {
h_flex()
.gap_1p5()
.when(

View File

@@ -514,7 +514,7 @@ impl RenderOnce for Switch {
self.tab_index.filter(|_| !self.disabled),
|this, tab_index| {
this.tab_index(tab_index)
.focus_visible(|mut style| {
.focus(|mut style| {
style.border_color = Some(cx.theme().colors().border_focused);
style
})

View File

@@ -159,7 +159,7 @@ impl RenderOnce for TreeViewItem {
.rounded_sm()
.border_1()
.border_color(transparent_border)
.focus_visible(|s| s.border_color(focused_border))
.focus(|s| s.border_color(focused_border))
.when(self.selected, |this| {
this.border_color(selected_border).bg(selected_bg)
})

View File

@@ -338,7 +338,7 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
.border_color(border_color)
.bg(bg_color)
.hover(|s| s.bg(hover_bg_color))
.focus_visible(|s| s.border_color(focus_border_color).bg(hover_bg_color))
.focus(|s| s.border_color(focus_border_color).bg(hover_bg_color))
.child(Icon::new(icon).size(IconSize::Small))
};
@@ -369,6 +369,7 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
let new_value = value.saturating_sub(step);
let new_value = if new_value < min { min } else { new_value };
on_change(&new_value, window, cx);
window.focus_prev();
}
};

View File

@@ -15,7 +15,7 @@ use std::{
sync::LazyLock,
};
use crate::{rel_path::RelPath, shell::ShellKind};
use crate::rel_path::RelPath;
static HOME_DIR: OnceLock<PathBuf> = OnceLock::new();
@@ -84,7 +84,9 @@ pub trait PathExt {
fn multiple_extensions(&self) -> Option<String>;
/// Try to make a shell-safe representation of the path.
fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result<String>;
///
/// For Unix, the path is escaped to be safe for POSIX shells
fn try_shell_safe(&self) -> anyhow::Result<String>;
}
impl<T: AsRef<Path>> PathExt for T {
@@ -162,16 +164,24 @@ impl<T: AsRef<Path>> PathExt for T {
Some(parts.into_iter().join("."))
}
fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result<String> {
let path_str = self
.as_ref()
.to_str()
.with_context(|| "Path contains invalid UTF-8")?;
shell_kind
.try_quote(path_str)
.as_deref()
.map(ToOwned::to_owned)
.context("Failed to quote path")
/// Make a shell-safe representation of this path.
///
/// On Windows the path text is returned as-is (lossily converted to UTF-8),
/// since POSIX quoting does not apply there; on all other platforms the path
/// is quoted for POSIX shells via `shlex`.
fn try_shell_safe(&self) -> anyhow::Result<String> {
    #[cfg(target_os = "windows")]
    {
        // Windows shells do not use POSIX quoting; return the path text unchanged.
        Ok(self.as_ref().to_string_lossy().to_string())
    }
    #[cfg(not(target_os = "windows"))]
    {
        let path_str = self
            .as_ref()
            .to_str()
            .with_context(|| "Path contains invalid UTF-8")?;
        // As of writing, this can only fail if the path contains a null byte, which shouldn't be possible,
        // but shlex has annotated the error as #[non_exhaustive] so we can't make it a compile error if other
        // errors are introduced in the future :(
        Ok(shlex::try_quote(path_str)?.into_owned())
    }
}
}

View File

@@ -1,53 +1,6 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, fmt, path::Path, sync::LazyLock};
/// Shell configuration to open the terminal with.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)]
#[serde(rename_all = "snake_case")]
pub enum Shell {
    /// Use the system's default shell configuration (e.g. from /etc/passwd on Unix).
    #[default]
    System,
    /// Use a specific program with no arguments.
    Program(String),
    /// Use a specific program with arguments.
    WithArguments {
        /// The program to run.
        program: String,
        /// The arguments to pass to the program.
        args: Vec<String>,
        /// An optional string to override the title of the terminal tab.
        title_override: Option<String>,
    },
}
impl Shell {
    /// The executable to launch for this shell, resolving `System` to the
    /// platform's default shell program.
    pub fn program(&self) -> String {
        match self {
            Shell::System => get_system_shell(),
            Shell::Program(program) | Shell::WithArguments { program, .. } => program.clone(),
        }
    }

    /// The executable together with its configured arguments. `System` and
    /// `Program` shells carry no arguments, so they yield an empty slice.
    pub fn program_and_args(&self) -> (String, &[String]) {
        match self {
            Shell::System => (get_system_shell(), &[]),
            Shell::Program(program) => (program.clone(), &[]),
            Shell::WithArguments { program, args, .. } => (program.clone(), args),
        }
    }

    /// Classify this shell into a [`ShellKind`] based on its program name.
    pub fn shell_kind(&self, is_windows: bool) -> ShellKind {
        match self {
            Shell::System => ShellKind::system(),
            Shell::Program(program) | Shell::WithArguments { program, .. } => {
                ShellKind::new(program, is_windows)
            }
        }
    }
}
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ShellKind {
#[default]
@@ -232,20 +185,32 @@ impl ShellKind {
.unwrap_or_else(|| program.as_os_str())
.to_string_lossy();
match &*program {
"powershell" | "pwsh" => ShellKind::PowerShell,
"cmd" => ShellKind::Cmd,
"nu" => ShellKind::Nushell,
"fish" => ShellKind::Fish,
"csh" => ShellKind::Csh,
"tcsh" => ShellKind::Tcsh,
"rc" => ShellKind::Rc,
"xonsh" => ShellKind::Xonsh,
"sh" | "bash" | "zsh" => ShellKind::Posix,
_ if is_windows => ShellKind::PowerShell,
// Some other shell detected, the user might install and use a
// unix-like shell.
_ => ShellKind::Posix,
if program == "powershell" || program == "pwsh" {
ShellKind::PowerShell
} else if program == "cmd" {
ShellKind::Cmd
} else if program == "nu" {
ShellKind::Nushell
} else if program == "fish" {
ShellKind::Fish
} else if program == "csh" {
ShellKind::Csh
} else if program == "tcsh" {
ShellKind::Tcsh
} else if program == "rc" {
ShellKind::Rc
} else if program == "xonsh" {
ShellKind::Xonsh
} else if program == "sh" || program == "bash" {
ShellKind::Posix
} else {
if is_windows {
ShellKind::PowerShell
} else {
// Some other shell detected, the user might install and use a
// unix-like shell.
ShellKind::Posix
}
}
}
@@ -398,27 +363,14 @@ impl ShellKind {
match self {
ShellKind::PowerShell => Some('&'),
ShellKind::Nushell => Some('^'),
ShellKind::Posix
| ShellKind::Csh
| ShellKind::Tcsh
| ShellKind::Rc
| ShellKind::Fish
| ShellKind::Cmd
| ShellKind::Xonsh => None,
_ => None,
}
}
pub const fn sequential_commands_separator(&self) -> char {
match self {
ShellKind::Cmd => '&',
ShellKind::Posix
| ShellKind::Csh
| ShellKind::Tcsh
| ShellKind::Rc
| ShellKind::Fish
| ShellKind::PowerShell
| ShellKind::Nushell
| ShellKind::Xonsh => ';',
_ => ';',
}
}
@@ -426,103 +378,29 @@ impl ShellKind {
shlex::try_quote(arg).ok().map(|arg| match self {
// If we are running in PowerShell, we want to take extra care when escaping strings.
// In particular, we want to escape strings with a backtick (`) rather than a backslash (\).
ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"").replace("\\\\", "\\")),
ShellKind::Cmd => Cow::Owned(arg.replace("\\\\", "\\")),
ShellKind::Posix
| ShellKind::Csh
| ShellKind::Tcsh
| ShellKind::Rc
| ShellKind::Fish
| ShellKind::Nushell
| ShellKind::Xonsh => arg,
// TODO double escaping backslashes is not necessary in PowerShell and probably CMD
ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"")),
_ => arg,
})
}
pub fn split(&self, input: &str) -> Option<Vec<String>> {
shlex::split(input)
}
pub const fn activate_keyword(&self) -> &'static str {
match self {
ShellKind::Cmd => "",
ShellKind::Nushell => "overlay use",
ShellKind::PowerShell => ".",
ShellKind::Fish
| ShellKind::Csh
| ShellKind::Tcsh
| ShellKind::Posix
| ShellKind::Rc
| ShellKind::Xonsh => "source",
ShellKind::Fish => "source",
ShellKind::Csh => "source",
ShellKind::Tcsh => "source",
ShellKind::Posix | ShellKind::Rc => "source",
ShellKind::Xonsh => "source",
}
}
pub const fn clear_screen_command(&self) -> &'static str {
match self {
ShellKind::Cmd => "cls",
ShellKind::Posix
| ShellKind::Csh
| ShellKind::Tcsh
| ShellKind::Rc
| ShellKind::Fish
| ShellKind::PowerShell
| ShellKind::Nushell
| ShellKind::Xonsh => "clear",
}
}
#[cfg(windows)]
/// We do not want to escape arguments if we are using CMD as our shell.
/// If we do we end up with too many quotes/escaped quotes for CMD to handle.
pub const fn tty_escape_args(&self) -> bool {
match self {
ShellKind::Cmd => false,
ShellKind::Posix
| ShellKind::Csh
| ShellKind::Tcsh
| ShellKind::Rc
| ShellKind::Fish
| ShellKind::PowerShell
| ShellKind::Nushell
| ShellKind::Xonsh => true,
_ => "clear",
}
}
}
#[cfg(test)]
mod tests {
use super::*;
// Examples
// WSL
// wsl.exe --distribution NixOS --cd /home/user -- /usr/bin/zsh -c "echo hello"
// wsl.exe --distribution NixOS --cd /home/user -- /usr/bin/zsh -c "\"echo hello\"" | grep hello"
// wsl.exe --distribution NixOS --cd ~ env RUST_LOG=info,remote=debug .zed_wsl_server/zed-remote-server-dev-build proxy --identifier dev-workspace-53
// PowerShell from Nushell
// nu -c overlay use "C:\Users\kubko\dev\python\39007\tests\.venv\Scripts\activate.nu"; ^"C:\Program Files\PowerShell\7\pwsh.exe" -C "C:\Users\kubko\dev\python\39007\tests\.venv\Scripts\python.exe -m pytest \"test_foo.py::test_foo\""
// PowerShell from CMD
// cmd /C \" \"C:\\\\Users\\\\kubko\\\\dev\\\\python\\\\39007\\\\tests\\\\.venv\\\\Scripts\\\\activate.bat\"& \"C:\\\\Program Files\\\\PowerShell\\\\7\\\\pwsh.exe\" -C \"C:\\\\Users\\\\kubko\\\\dev\\\\python\\\\39007\\\\tests\\\\.venv\\\\Scripts\\\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"\"
#[test]
fn test_try_quote_powershell() {
    // PowerShell escapes embedded double quotes with a backtick (`) rather
    // than a backslash, so the quoted form must use `" inside the string.
    let input = "C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"";
    let expected = "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest `\"test_foo.py::test_foo`\"\"";
    let quoted = ShellKind::PowerShell.try_quote(input).unwrap();
    assert_eq!(quoted.into_owned(), expected.to_string());
}
#[test]
fn test_try_quote_cmd() {
    // Unlike PowerShell, CMD keeps backslash-escaped double quotes (\"),
    // so the quoted form preserves the backslash escaping from shlex.
    let shell_kind = ShellKind::Cmd;
    assert_eq!(
        shell_kind
            .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"")
            .unwrap()
            .into_owned(),
        "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string()
    );
}
}

View File

@@ -35,8 +35,8 @@ async fn capture_unix(
use std::os::unix::process::CommandExt;
use std::process::Stdio;
let zed_path = super::get_shell_safe_zed_path()?;
let shell_kind = ShellKind::new(shell_path, false);
let zed_path = super::get_shell_safe_zed_path(shell_kind)?;
let mut command_string = String::new();
let mut command = std::process::Command::new(shell_path);

View File

@@ -9,7 +9,6 @@ pub mod rel_path;
pub mod schemars;
pub mod serde;
pub mod shell;
pub mod shell_builder;
pub mod shell_env;
pub mod size;
#[cfg(any(test, feature = "test-support"))]
@@ -296,12 +295,12 @@ fn load_shell_from_passwd() -> Result<()> {
}
/// Returns a shell escaped path for the current zed executable
pub fn get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result<String> {
pub fn get_shell_safe_zed_path() -> anyhow::Result<String> {
let zed_path =
std::env::current_exe().context("Failed to determine current zed executable path.")?;
zed_path
.try_shell_safe(shell_kind)
.try_shell_safe()
.context("Failed to shell-escape Zed executable path.")
}

View File

@@ -165,6 +165,7 @@ zeta.workspace = true
zeta2.workspace = true
zlog.workspace = true
zlog_settings.workspace = true
gh-workflow = "0.8.0"
[target.'cfg(target_os = "windows")'.dependencies]
windows.workspace = true

View File

@@ -853,13 +853,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
// languages.$(language).tab_size
// [ languages $(language) tab_size]
workspace::with_active_or_new_workspace(cx, |_workspace, window, cx| {
match setting_path {
None => window.dispatch_action(Box::new(zed_actions::OpenSettings), cx),
Some(setting_path) => window.dispatch_action(
Box::new(zed_actions::OpenSettingsAt { path: setting_path }),
cx,
),
}
window.dispatch_action(
Box::new(zed_actions::OpenSettingsAt { path: setting_path }),
cx,
);
});
}
}

View File

@@ -18,6 +18,7 @@ use breadcrumbs::Breadcrumbs;
use client::zed_urls;
use collections::VecDeque;
use debugger_ui::debugger_panel::DebugPanel;
use editor::ProposedChangesEditorToolbar;
use editor::{Editor, MultiBuffer};
use extension_host::ExtensionStore;
use feature_flags::{FeatureFlagAppExt, PanicFeatureFlag};
@@ -870,24 +871,6 @@ fn register_actions(
}
}
})
.register_action({
let fs = app_state.fs.clone();
move |_, action: &zed_actions::ResetAllZoom, _window, cx| {
if action.persist {
update_settings_file(fs.clone(), cx, move |settings, _| {
settings.theme.ui_font_size = None;
settings.theme.buffer_font_size = None;
settings.theme.agent_ui_font_size = None;
settings.theme.agent_buffer_font_size = None;
});
} else {
theme::reset_ui_font_size(cx);
theme::reset_buffer_font_size(cx);
theme::reset_agent_ui_font_size(cx);
theme::reset_agent_buffer_font_size(cx);
}
}
})
.register_action(|_, _: &install_cli::RegisterZedScheme, window, cx| {
cx.spawn_in(window, async move |workspace, cx| {
install_cli::register_zed_scheme(cx).await?;
@@ -1052,6 +1035,8 @@ fn initialize_pane(
)
});
toolbar.add_item(buffer_search_bar.clone(), window, cx);
let proposed_change_bar = cx.new(|_| ProposedChangesEditorToolbar::new());
toolbar.add_item(proposed_change_bar, window, cx);
let quick_action_bar =
cx.new(|cx| QuickActionBar::new(buffer_search_bar, workspace, cx));
toolbar.add_item(quick_action_bar, window, cx);

View File

@@ -20,10 +20,6 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
"Reset Zoom",
zed_actions::ResetBufferFontSize { persist: false },
),
MenuItem::action(
"Reset All Zoom",
zed_actions::ResetAllZoom { persist: false },
),
MenuItem::separator(),
MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock),
MenuItem::action("Toggle Right Dock", workspace::ToggleRightDock),

View File

@@ -47,10 +47,7 @@ pub enum OpenRequestKind {
AgentPanel,
DockMenuAction { index: usize },
BuiltinJsonSchema { schema_path: String },
Setting {
// None just opens settings without navigating to a specific path
setting_path: Option<String> ,
},
Setting { setting_path: String },
}
impl OpenRequest {
@@ -97,14 +94,9 @@ impl OpenRequest {
this.kind = Some(OpenRequestKind::BuiltinJsonSchema {
schema_path: schema_path.to_string(),
});
} else if url == "zed://settings" || url == "zed://settings/" {
} else if let Some(setting_path) = url.strip_prefix("zed://settings/") {
this.kind = Some(OpenRequestKind::Setting {
setting_path: None
});
}
else if let Some(setting_path) = url.strip_prefix("zed://settings/") {
this.kind = Some(OpenRequestKind::Setting {
setting_path: Some(setting_path.to_string()),
setting_path: setting_path.to_string(),
});
} else if url.starts_with("ssh://") {
this.parse_ssh_file_path(&url, cx)?

View File

@@ -70,7 +70,6 @@ pub enum ExtensionCategoryFilter {
Grammars,
LanguageServers,
ContextServers,
AgentServers,
SlashCommands,
IndexedDocsProviders,
Snippets,
@@ -154,15 +153,6 @@ pub struct ResetUiFontSize {
pub persist: bool,
}
/// Resets all zoom levels (UI and buffer font sizes, including in the agent panel) to their default values.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
#[serde(deny_unknown_fields)]
pub struct ResetAllZoom {
#[serde(default)]
pub persist: bool,
}
pub mod dev {
use gpui::actions;
@@ -320,8 +310,6 @@ pub mod agent {
/// Add the current selection as context for threads in the agent panel.
#[action(deprecated_aliases = ["assistant::QuoteSelection", "agent::QuoteSelection"])]
AddSelectionToThread,
/// Resets the agent panel zoom levels (agent UI and buffer font sizes).
ResetAgentZoom,
]
);
}

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash
set -eu
set -euxo pipefail
if [[ $# -ne 1 ]]; then
echo "usage: $0 <MAX_SIZE_IN_GB>"
@@ -15,11 +15,11 @@ fi
max_size_gb=$1
current_size=$(du -s target | cut -f1)
current_size_gb=$(expr ${current_size} / 1024 / 1024)
current_size_gb=$(( current_size / 1024 / 1024 ))
echo "target directory size: ${current_size_gb}gb. max size: ${max_size_gb}gb"
if [[ ${current_size_gb} -gt ${max_size_gb} ]]; then
echo "clearing target directory"
rm -rf target
rm -rf target/*
fi

View File

@@ -16,3 +16,4 @@ clap = { workspace = true, features = ["derive"] }
toml.workspace = true
indoc.workspace = true
toml_edit.workspace = true
gh-workflow.workspace = true

View File

@@ -20,6 +20,7 @@ enum CliCommand {
PackageConformity(tasks::package_conformity::PackageConformityArgs),
/// Publishes GPUI and its dependencies to crates.io.
PublishGpui(tasks::publish_gpui::PublishGpuiArgs),
GenerateWorkflow(tasks::generate_workflow::GenerateWorkflowArgs),
}
fn main() -> Result<()> {
@@ -32,5 +33,6 @@ fn main() -> Result<()> {
tasks::package_conformity::run_package_conformity(args)
}
CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args),
CliCommand::GenerateWorkflow(args) => tasks::generate_workflow::run_generate_workflow(args),
}
}

View File

@@ -1,4 +1,5 @@
pub mod clippy;
pub mod generate_workflow;
pub mod licenses;
pub mod package_conformity;
pub mod publish_gpui;

View File

@@ -0,0 +1,41 @@
use anyhow::Result;
use clap::Parser;
use gh_workflow::*;
/// CLI arguments for the `generate-workflow` task (currently takes none).
#[derive(Parser)]
pub struct GenerateWorkflowArgs {}
/// Generate the "Run tests" GitHub Actions workflow and print its YAML to stdout.
///
/// # Errors
/// Returns an error if the workflow fails to serialize to YAML.
pub fn run_generate_workflow(_args: GenerateWorkflowArgs) -> Result<()> {
    // Build the "Run tests" workflow with a single `run_tests` job.
    let workflow = Workflow::default().name("Run tests").add_job(
        "run_tests",
        Job::default()
            // This step installs cargo-nextest (the step was previously
            // mislabeled "Install Rust", which does not match what it runs).
            .add_step(
                Step::new("Install cargo-nextest").run("cargo install cargo-nextest --locked"),
            )
            .add_step(
                Step::new("Install Node")
                    .uses(
                        "actions",
                        "setup-node",
                        // Pinned to a full commit SHA, per GitHub's guidance
                        // for third-party actions.
                        "49933ea5288caeca8642d1e84afbd3f7d6820020",
                    )
                    .add_with(("node-version", "18")),
            )
            .add_step(
                Step::new("Limit target directory size")
                    .run("script/clear-target-dir-if-larger-than ${{ env.MAX_SIZE }}")
                    // macOS runners get a larger budget (GB) than Linux/Windows.
                    .env(("MAX_SIZE", "${{ runner.os == 'macOS' && 300 || 100 }}")),
            )
            .add_step(Step::new("Run tests").run(
                "cargo nextest run --workspace --no-fail-fast --failure-output immediate-final",
            )),
    );

    // Serialize and print the workflow YAML. gh-workflow's serialization error
    // is surfaced via Debug formatting since it doesn't convert cleanly.
    let yaml = workflow
        .to_string()
        .map_err(|e| anyhow::anyhow!("{:?}", e))?;
    println!("{}", yaml);
    Ok(())
}