Compare commits

..

21 Commits

Author SHA1 Message Date
Conrad Irwin
b5cc703c7c de-de-bug 2025-11-17 22:18:31 -07:00
Conrad Irwin
46aaa03ace tidy up 2025-11-17 22:17:36 -07:00
Conrad Irwin
f766fa2e9f Merge branch 'main' into feature/context-server-transports 2025-11-17 21:10:22 -07:00
Conrad Irwin
977666f8bf Simplify header handling 2025-11-17 21:10:17 -07:00
Conrad Irwin
5ca2645da6 Merge branch 'main' into feature/context-server-transports 2025-11-10 22:44:32 -07:00
Artur Shirokov
411669f763 Merge branch 'main' into feature/context-server-transports 2025-10-06 20:34:10 +01:00
Artur Shirokov
7211a672b6 Removed sse placeholder. 2025-10-05 18:41:38 +01:00
Artur Shirokov
6a9a891985 Bearer token support is done, tested with github mcp server. 2025-10-05 18:38:38 +01:00
Artur Shirokov
6526936f96 configure MCP modal now uses the same json editor for both local and remote. 2025-10-05 10:40:10 +01:00
Artur Shirokov
55ddcba3cb Now using gpui executor instead of smol::spawn. 2025-09-30 18:58:18 +01:00
Artur Shirokov
4e56e1b8b7 First working version. 2025-09-27 13:27:46 +01:00
Artur Shirokov
4b895e71f7 Added more detailed logging to client.rs. 2025-09-27 13:27:46 +01:00
Artur Shirokov
06b18ac607 Added proper headers. 2025-09-27 13:27:46 +01:00
Artur Shirokov
3b2647f182 Right now it at least shows connected but isn't receiving the tools. 2025-09-27 13:27:46 +01:00
Artur Shirokov
f9d1cc8735 Improving the logging. 2025-09-27 13:27:46 +01:00
Artur Shirokov
1e9689b0b2 Added agent_ui support for remote server, ignore the fact that the actual button is off a bit, I'll fix that later. 2025-09-27 13:27:46 +01:00
Artur Shirokov
7523fb0442 Removed the custom modal, checking if the rest is working. 2025-09-27 13:27:46 +01:00
Artur Shirokov
551ef4b6f3 Solving the errors from the new code. 2025-09-27 13:27:46 +01:00
Artur Shirokov
c56ead48cb wip 2025-09-27 13:27:46 +01:00
Artur Shirokov
08c594966f feat(context_server): Add HTTP/SSE transports and settings
This commit introduces HTTP and SSE transport implementations for the
`context_server` crate, allowing Zed to communicate with remote context
providers.

The new transports are built on the existing `http_client` and `gpui`
executor, adhering to the project's architectural patterns.

To support configuration of these new transports, the settings system
has been updated:

- A `Remote` variant has been added to the `ContextServerSettings`
  enum, allowing users to configure remote servers in `settings.json`
  with a URL.
- The project logic has been updated to initialize remote context
  servers using the new `ContextServer::from_url` constructor.
- The agent configuration UI now includes an "Add Remote Server"
  option and a dedicated modal for adding and editing remote server
  configurations.

Unit tests have been added for the new transports and for the logic
that handles remote server configurations.
2025-09-27 13:27:37 +01:00
Artur Shirokov
59a9ec6fb3 feat(context_server): Add HTTP and SSE transport implementations
This commit adds HTTP and SSE transport implementations to the `context_server` crate, allowing it to communicate with remote context providers over these protocols.

The new transports are:
- `HttpTransport`: A simple request/response transport over HTTP.
- `SseTransport`: A transport for receiving Server-Sent Events (SSE).

Both transports are built on top of the existing `http_client` crate and use `gpui::spawn` to perform network operations on a background thread, as is the pattern in the rest of the Zed application. This avoids introducing a new Tokio runtime and adheres to the project's architecture.

A `build_transport` function has been added to construct the appropriate transport based on the URL scheme.

Unit tests using `FakeHttpClient` have been added for both transports to ensure their correctness.
2025-09-27 13:27:22 +01:00
70 changed files with 1291 additions and 2053 deletions

View File

@@ -16,7 +16,9 @@ rustflags = ["-D", "warnings"]
debug = "limited"
# Use Mold on Linux, because it's faster than GNU ld and LLD.
# We don't use Wild in CI as it's not production ready.
#
# We no longer set this in the default `config.toml` so that developers can opt in to Wild, which
# is faster than Mold, in their own ~/.cargo/config.toml.
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]

View File

@@ -8,14 +8,6 @@ perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests",
# Keep similar flags here to share some ccache
perf-compare = ["run", "--profile", "release-fast", "-p", "perf", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]", "--", "compare"]
# [target.x86_64-unknown-linux-gnu]
# linker = "clang"
# rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.aarch64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.'cfg(target_os = "windows")']
rustflags = [
"--cfg",

View File

@@ -26,4 +26,3 @@ jobs:
ascending: true
enable-statistics: true
stale-issue-label: "stale"
exempt-issue-labels: "never stale"

49
Cargo.lock generated
View File

@@ -2617,23 +2617,26 @@ dependencies = [
[[package]]
name = "calloop"
version = "0.14.3"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b99da2f8558ca23c71f4fd15dc57c906239752dd27ff3c00a1d56b685b7cbfec"
dependencies = [
"bitflags 2.9.4",
"log",
"polling",
"rustix 1.1.2",
"rustix 0.38.44",
"slab",
"tracing",
"thiserror 1.0.69",
]
[[package]]
name = "calloop-wayland-source"
version = "0.4.1"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "138efcf0940a02ebf0cc8d1eff41a1682a46b431630f4c52450d6265876021fa"
checksum = "95a66a987056935f7efce4ab5668920b5d0dac4a7c99991a67395f13702ddd20"
dependencies = [
"calloop",
"rustix 1.1.2",
"rustix 0.38.44",
"wayland-backend",
"wayland-client",
]
@@ -3687,6 +3690,7 @@ dependencies = [
"collections",
"futures 0.3.31",
"gpui",
"http_client",
"log",
"net",
"parking_lot",
@@ -5311,7 +5315,6 @@ dependencies = [
"serde_json",
"settings",
"supermaven",
"sweep_ai",
"telemetry",
"theme",
"ui",
@@ -10027,7 +10030,6 @@ name = "miniprofiler_ui"
version = "0.1.0"
dependencies = [
"gpui",
"log",
"serde_json",
"smol",
"util",
@@ -16589,33 +16591,6 @@ dependencies = [
"zeno",
]
[[package]]
name = "sweep_ai"
version = "0.1.0"
dependencies = [
"anyhow",
"arrayvec",
"brotli",
"client",
"collections",
"edit_prediction",
"feature_flags",
"futures 0.3.31",
"gpui",
"http_client",
"indoc",
"language",
"project",
"release_channel",
"reqwest_client",
"serde",
"serde_json",
"tree-sitter-rust",
"util",
"workspace",
"zlog",
]
[[package]]
name = "symphonia"
version = "0.5.5"
@@ -21243,6 +21218,7 @@ dependencies = [
"audio",
"auto_update",
"auto_update_ui",
"backtrace",
"bincode 1.3.3",
"breadcrumbs",
"call",
@@ -21307,6 +21283,7 @@ dependencies = [
"mimalloc",
"miniprofiler_ui",
"nc",
"nix 0.29.0",
"node_runtime",
"notifications",
"onboarding",
@@ -21342,13 +21319,13 @@ dependencies = [
"snippets_ui",
"supermaven",
"svg_preview",
"sweep_ai",
"sysinfo 0.37.2",
"system_specs",
"tab_switcher",
"task",
"tasks_ui",
"telemetry",
"telemetry_events",
"terminal_view",
"theme",
"theme_extension",

View File

@@ -165,7 +165,6 @@ members = [
"crates/sum_tree",
"crates/supermaven",
"crates/supermaven_api",
"crates/sweep_ai",
"crates/codestral",
"crates/svg_preview",
"crates/system_specs",
@@ -399,7 +398,6 @@ streaming_diff = { path = "crates/streaming_diff" }
sum_tree = { path = "crates/sum_tree" }
supermaven = { path = "crates/supermaven" }
supermaven_api = { path = "crates/supermaven_api" }
sweep_ai = { path = "crates/sweep_ai" }
codestral = { path = "crates/codestral" }
system_specs = { path = "crates/system_specs" }
tab_switcher = { path = "crates/tab_switcher" }
@@ -480,7 +478,6 @@ bitflags = "2.6.0"
blade-graphics = { version = "0.7.0" }
blade-macros = { version = "0.3.0" }
blade-util = { version = "0.3.0" }
brotli = "8.0.2"
bytes = "1.0"
cargo_metadata = "0.19"
cargo_toml = "0.21"
@@ -784,7 +781,6 @@ features = [
notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }
calloop = { path = "/home/davidsk/tmp/calloop" }
[profile.dev]
split-debuginfo = "unpacked"
@@ -861,7 +857,7 @@ ui_input = { codegen-units = 1 }
zed_actions = { codegen-units = 1 }
[profile.release]
debug = "full"
debug = "limited"
lto = "thin"
codegen-units = 1

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 9.3 KiB

View File

@@ -313,7 +313,7 @@
"use_key_equivalents": true,
"bindings": {
"cmd-n": "agent::NewTextThread",
"cmd-alt-n": "agent::NewExternalAgentThread"
"cmd-alt-t": "agent::NewThread"
}
},
{

View File

@@ -247,37 +247,58 @@ impl AgentConnection for AcpConnection {
let default_mode = self.default_mode.clone();
let cwd = cwd.to_path_buf();
let context_server_store = project.read(cx).context_server_store().read(cx);
let mcp_servers = if project.read(cx).is_local() {
context_server_store
.configured_server_ids()
.iter()
.filter_map(|id| {
let configuration = context_server_store.configuration_for_server(id)?;
let command = configuration.command();
Some(acp::McpServer::Stdio {
name: id.0.to_string(),
command: command.path.clone(),
args: command.args.clone(),
env: if let Some(env) = command.env.as_ref() {
env.iter()
.map(|(name, value)| acp::EnvVariable {
name: name.clone(),
value: value.clone(),
meta: None,
})
.collect()
} else {
vec![]
},
let mcp_servers =
if project.read(cx).is_local() {
context_server_store
.configured_server_ids()
.iter()
.filter_map(|id| {
let configuration = context_server_store.configuration_for_server(id)?;
match &*configuration {
project::context_server_store::ContextServerConfiguration::Custom {
command,
..
}
| project::context_server_store::ContextServerConfiguration::Extension {
command,
..
} => Some(acp::McpServer::Stdio {
name: id.0.to_string(),
command: command.path.clone(),
args: command.args.clone(),
env: if let Some(env) = command.env.as_ref() {
env.iter()
.map(|(name, value)| acp::EnvVariable {
name: name.clone(),
value: value.clone(),
meta: None,
})
.collect()
} else {
vec![]
},
}),
project::context_server_store::ContextServerConfiguration::Http {
url,
headers,
} => Some(acp::McpServer::Http {
name: id.0.to_string(),
url: url.to_string(),
headers: headers.iter().map(|(name, value)| acp::HttpHeader {
name: name.clone(),
value: value.clone(),
meta: None,
}).collect(),
}),
}
})
})
.collect()
} else {
// In SSH projects, the external agent is running on the remote
// machine, and currently we only run MCP servers on the local
// machine. So don't pass any MCP servers to the agent in that case.
Vec::new()
};
.collect()
} else {
// In SSH projects, the external agent is running on the remote
// machine, and currently we only run MCP servers on the local
// machine. So don't pass any MCP servers to the agent in that case.
Vec::new()
};
cx.spawn(async move |cx| {
let response = conn

View File

@@ -1,5 +1,5 @@
mod add_llm_provider_modal;
mod configure_context_server_modal;
pub mod configure_context_server_modal;
mod configure_context_server_tools_modal;
mod manage_profiles_modal;
mod tool_picker;
@@ -46,10 +46,12 @@ pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal;
pub(crate) use manage_profiles_modal::ManageProfilesModal;
use crate::{
AddContextServer,
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
use crate::agent_configuration::add_llm_provider_modal::{
AddLlmProviderModal, LlmCompatibleProvider,
};
use gpui::actions;
actions!(agent_ui, [AddRemoteContextServer]);
pub struct AgentConfiguration {
fs: Arc<dyn Fs>,
@@ -553,7 +555,14 @@ impl AgentConfiguration {
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
menu.entry("Add Custom Server", None, {
|window, cx| window.dispatch_action(AddContextServer.boxed_clone(), cx)
|window, cx| {
window.dispatch_action(crate::AddContextServer.boxed_clone(), cx)
}
})
.entry("Add Remote Server", None, {
|window, cx| {
window.dispatch_action(AddRemoteContextServer.boxed_clone(), cx)
}
})
.entry("Install from Extensions", None, {
|window, cx| {
@@ -651,7 +660,7 @@ impl AgentConfiguration {
let is_running = matches!(server_status, ContextServerStatus::Running);
let item_id = SharedString::from(context_server_id.0.clone());
// Servers without a configuration can only be provided by extensions.
let provided_by_extension = server_configuration.is_none_or(|config| {
let provided_by_extension = server_configuration.as_ref().is_none_or(|config| {
matches!(
config.as_ref(),
ContextServerConfiguration::Extension { .. }
@@ -707,7 +716,10 @@ impl AgentConfiguration {
"Server is stopped.",
),
};
let is_remote = server_configuration
.as_ref()
.map(|config| matches!(config.as_ref(), ContextServerConfiguration::Http { .. }))
.unwrap_or(false);
let context_server_configuration_menu = PopoverMenu::new("context-server-config-menu")
.trigger_with_tooltip(
IconButton::new("context-server-config-menu", IconName::Settings)
@@ -729,15 +741,27 @@ impl AgentConfiguration {
let context_server_id = context_server_id.clone();
let language_registry = language_registry.clone();
let workspace = workspace.clone();
let is_remote = is_remote;
move |window, cx| {
ConfigureContextServerModal::show_modal_for_existing_server(
context_server_id.clone(),
language_registry.clone(),
workspace.clone(),
window,
cx,
)
.detach_and_log_err(cx);
if is_remote {
crate::agent_configuration::configure_context_server_modal::ConfigureContextServerModal::show_modal_for_existing_server(
context_server_id.clone(),
language_registry.clone(),
workspace.clone(),
window,
cx,
)
.detach();
} else {
ConfigureContextServerModal::show_modal_for_existing_server(
context_server_id.clone(),
language_registry.clone(),
workspace.clone(),
window,
cx,
)
.detach();
}
}
}).when(tool_count > 0, |this| this.entry("View Tools", None, {
let context_server_id = context_server_id.clone();

View File

@@ -4,6 +4,7 @@ use std::{
};
use anyhow::{Context as _, Result};
use collections::HashMap;
use context_server::{ContextServerCommand, ContextServerId};
use editor::{Editor, EditorElement, EditorStyle};
use gpui::{
@@ -20,6 +21,7 @@ use project::{
project_settings::{ContextServerSettings, ProjectSettings},
worktree_store::WorktreeStore,
};
use serde::Deserialize;
use settings::{Settings as _, update_settings_file};
use theme::ThemeSettings;
use ui::{
@@ -33,10 +35,16 @@ use crate::AddContextServer;
enum ConfigurationTarget {
New,
NewRemote,
Existing {
id: ContextServerId,
command: ContextServerCommand,
},
ExistingHttp {
id: ContextServerId,
url: String,
headers: HashMap<String, String>,
},
Extension {
id: ContextServerId,
repository_url: Option<SharedString>,
@@ -47,9 +55,11 @@ enum ConfigurationTarget {
enum ConfigurationSource {
New {
editor: Entity<Editor>,
is_remote: bool,
},
Existing {
editor: Entity<Editor>,
is_remote: bool,
},
Extension {
id: ContextServerId,
@@ -97,6 +107,11 @@ impl ConfigurationSource {
match target {
ConfigurationTarget::New => ConfigurationSource::New {
editor: create_editor(context_server_input(None), jsonc_language, window, cx),
is_remote: false,
},
ConfigurationTarget::NewRemote => ConfigurationSource::New {
editor: create_editor(context_server_http_input(None), jsonc_language, window, cx),
is_remote: true,
},
ConfigurationTarget::Existing { id, command } => ConfigurationSource::Existing {
editor: create_editor(
@@ -105,6 +120,20 @@ impl ConfigurationSource {
window,
cx,
),
is_remote: false,
},
ConfigurationTarget::ExistingHttp {
id,
url,
headers: auth,
} => ConfigurationSource::Existing {
editor: create_editor(
context_server_http_input(Some((id, url, auth))),
jsonc_language,
window,
cx,
),
is_remote: true,
},
ConfigurationTarget::Extension {
id,
@@ -141,16 +170,30 @@ impl ConfigurationSource {
fn output(&self, cx: &mut App) -> Result<(ContextServerId, ContextServerSettings)> {
match self {
ConfigurationSource::New { editor } | ConfigurationSource::Existing { editor } => {
parse_input(&editor.read(cx).text(cx)).map(|(id, command)| {
(
id,
ContextServerSettings::Custom {
enabled: true,
command,
},
)
})
ConfigurationSource::New { editor, is_remote }
| ConfigurationSource::Existing { editor, is_remote } => {
if *is_remote {
parse_http_input(&editor.read(cx).text(cx)).map(|(id, url, auth)| {
(
id,
ContextServerSettings::Http {
enabled: true,
url,
headers: auth,
},
)
})
} else {
parse_input(&editor.read(cx).text(cx)).map(|(id, command)| {
(
id,
ContextServerSettings::Custom {
enabled: true,
command,
},
)
})
}
}
ConfigurationSource::Extension {
id,
@@ -212,6 +255,66 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand)
)
}
/// Builds the placeholder JSONC shown in the "add/edit remote MCP server"
/// editor.
///
/// When `existing` is provided, the server's id, URL, and headers are
/// rendered back into the template so the user can edit them; otherwise
/// example values are shown. The `headers` object is pre-populated with a
/// commented-out `Authorization` example when no headers exist yet.
fn context_server_http_input(
    existing: Option<(ContextServerId, String, HashMap<String, String>)>,
) -> String {
    // Shown (commented out) when there are no headers to render yet.
    const HEADER_PLACEHOLDER: &str = r#"// "Authorization": "Bearer <token>""#;

    let (name, url, headers) = match existing {
        Some((id, url, headers)) => {
            let header = if headers.is_empty() {
                HEADER_PLACEHOLDER.to_string()
            } else {
                // Render the headers as pretty JSON, strip the outer braces,
                // and re-indent the remaining lines to sit inside the
                // template's "headers" object.
                let json = serde_json::to_string_pretty(&headers).unwrap();
                let mut lines = json.split('\n').collect::<Vec<_>>();
                if lines.len() > 1 {
                    lines.remove(0);
                    lines.pop();
                }
                // Join with newlines so each header stays on its own line
                // (collecting into a String directly would collapse them).
                lines
                    .into_iter()
                    .map(|line| format!("      {}", line))
                    .collect::<Vec<_>>()
                    .join("\n")
            };
            (id.0.to_string(), url, header)
        }
        None => (
            "some-remote-server".to_string(),
            "https://example.com/mcp".to_string(),
            HEADER_PLACEHOLDER.to_string(),
        ),
    };
    // NOTE: the comma after the "url" entry is required — without it the
    // template is not valid JSON and fails to parse if saved unchanged.
    format!(
        r#"{{
  // The name of your remote MCP server
  "{name}": {{
    // The URL of the remote MCP server
    "url": "{url}",
    "headers": {{
      // Any headers to send along
      {headers}
    }}
  }}
}}"#
    )
}
/// Parses the remote-server editor text back into an id, URL, and headers.
///
/// The input is JSONC (comments and trailing commas tolerated via
/// `serde_json_lenient`) and must contain exactly one top-level entry, keyed
/// by the server's name.
fn parse_http_input(text: &str) -> Result<(ContextServerId, String, HashMap<String, String>)> {
    #[derive(Deserialize)]
    struct RemoteServerEntry {
        url: String,
        #[serde(default)]
        headers: HashMap<String, String>,
    }

    let parsed: HashMap<String, RemoteServerEntry> = serde_json_lenient::from_str(text)?;
    anyhow::ensure!(
        parsed.len() == 1,
        "Expected exactly one context server configuration"
    );
    let (name, entry) = parsed
        .into_iter()
        .next()
        .expect("length checked above");
    Ok((ContextServerId(name.into()), entry.url, entry.headers))
}
fn resolve_context_server_extension(
id: ContextServerId,
worktree_store: Entity<WorktreeStore>,
@@ -257,6 +360,20 @@ pub struct ConfigureContextServerModal {
}
impl ConfigureContextServerModal {
pub fn new_remote_server(
project: Entity<project::Project>,
workspace: WeakEntity<Workspace>,
window: &mut Window,
cx: &mut App,
) -> Task<Result<()>> {
let target = ConfigurationTarget::NewRemote;
let language_registry = project.read(cx).languages().clone();
window.spawn(cx, async move |mut cx| {
Self::show_modal(target, language_registry, workspace, &mut cx).await
})
}
pub fn register(
workspace: &mut Workspace,
language_registry: Arc<LanguageRegistry>,
@@ -312,6 +429,15 @@ impl ConfigureContextServerModal {
id: server_id,
command,
}),
ContextServerSettings::Http {
enabled: _,
url,
headers,
} => Some(ConfigurationTarget::ExistingHttp {
id: server_id,
url,
headers,
}),
ContextServerSettings::Extension { .. } => {
match workspace
.update(cx, |workspace, cx| {
@@ -353,8 +479,9 @@ impl ConfigureContextServerModal {
state: State::Idle,
original_server_id: match &target {
ConfigurationTarget::Existing { id, .. } => Some(id.clone()),
ConfigurationTarget::ExistingHttp { id, .. } => Some(id.clone()),
ConfigurationTarget::Extension { id, .. } => Some(id.clone()),
ConfigurationTarget::New => None,
ConfigurationTarget::New | ConfigurationTarget::NewRemote => None,
},
source: ConfigurationSource::from_target(
target,
@@ -481,7 +608,7 @@ impl ModalView for ConfigureContextServerModal {}
impl Focusable for ConfigureContextServerModal {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match &self.source {
ConfigurationSource::New { editor } => editor.focus_handle(cx),
ConfigurationSource::New { editor, .. } => editor.focus_handle(cx),
ConfigurationSource::Existing { editor, .. } => editor.focus_handle(cx),
ConfigurationSource::Extension { editor, .. } => editor
.as_ref()
@@ -527,9 +654,10 @@ impl ConfigureContextServerModal {
}
fn render_modal_content(&self, cx: &App) -> AnyElement {
// All variants now use single editor approach
let editor = match &self.source {
ConfigurationSource::New { editor } => editor,
ConfigurationSource::Existing { editor } => editor,
ConfigurationSource::New { editor, .. } => editor,
ConfigurationSource::Existing { editor, .. } => editor,
ConfigurationSource::Extension { editor, .. } => {
let Some(editor) = editor else {
return div().into_any_element();

View File

@@ -1892,9 +1892,6 @@ impl AgentPanel {
.anchor(Corner::TopRight)
.with_handle(self.new_thread_menu_handle.clone())
.menu({
let selected_agent = self.selected_agent.clone();
let is_agent_selected = move |agent_type: AgentType| selected_agent == agent_type;
let workspace = self.workspace.clone();
let is_via_collab = workspace
.update(cx, |workspace, cx| {
@@ -1932,9 +1929,7 @@ impl AgentPanel {
})
.item(
ContextMenuEntry::new("Zed Agent")
.when(is_agent_selected(AgentType::NativeAgent) | is_agent_selected(AgentType::TextThread) , |this| {
this.action(Box::new(NewExternalAgentThread { agent: None }))
})
.action(NewThread.boxed_clone())
.icon(IconName::ZedAgent)
.icon_color(Color::Muted)
.handler({
@@ -1960,9 +1955,9 @@ impl AgentPanel {
)
.item(
ContextMenuEntry::new("Text Thread")
.action(NewTextThread.boxed_clone())
.icon(IconName::TextThread)
.icon_color(Color::Muted)
.action(NewTextThread.boxed_clone())
.handler({
let workspace = workspace.clone();
move |window, cx| {
@@ -1988,9 +1983,6 @@ impl AgentPanel {
.header("External Agents")
.item(
ContextMenuEntry::new("Claude Code")
.when(is_agent_selected(AgentType::ClaudeCode), |this| {
this.action(Box::new(NewExternalAgentThread { agent: None }))
})
.icon(IconName::AiClaude)
.disabled(is_via_collab)
.icon_color(Color::Muted)
@@ -2017,9 +2009,6 @@ impl AgentPanel {
)
.item(
ContextMenuEntry::new("Codex CLI")
.when(is_agent_selected(AgentType::Codex), |this| {
this.action(Box::new(NewExternalAgentThread { agent: None }))
})
.icon(IconName::AiOpenAi)
.disabled(is_via_collab)
.icon_color(Color::Muted)
@@ -2046,9 +2035,6 @@ impl AgentPanel {
)
.item(
ContextMenuEntry::new("Gemini CLI")
.when(is_agent_selected(AgentType::Gemini), |this| {
this.action(Box::new(NewExternalAgentThread { agent: None }))
})
.icon(IconName::AiGemini)
.icon_color(Color::Muted)
.disabled(is_via_collab)
@@ -2074,8 +2060,8 @@ impl AgentPanel {
}),
)
.map(|mut menu| {
let agent_server_store = agent_server_store.read(cx);
let agent_names = agent_server_store
let agent_server_store_read = agent_server_store.read(cx);
let agent_names = agent_server_store_read
.external_agents()
.filter(|name| {
name.0 != GEMINI_NAME
@@ -2084,38 +2070,21 @@ impl AgentPanel {
})
.cloned()
.collect::<Vec<_>>();
let custom_settings = cx
.global::<SettingsStore>()
.get::<AllAgentServersSettings>(None)
.custom
.clone();
for agent_name in agent_names {
let icon_path = agent_server_store.agent_icon(&agent_name);
let mut entry = ContextMenuEntry::new(agent_name.clone());
let command = custom_settings
.get(&agent_name.0)
.map(|settings| settings.command.clone())
.unwrap_or(placeholder_command());
let icon_path = agent_server_store_read.agent_icon(&agent_name);
let mut entry =
ContextMenuEntry::new(format!("{}", agent_name));
if let Some(icon_path) = icon_path {
entry = entry.custom_icon_svg(icon_path);
} else {
entry = entry.icon(IconName::Terminal);
}
entry = entry
.when(
is_agent_selected(AgentType::Custom {
name: agent_name.0.clone(),
command: command.clone(),
}),
|this| {
this.action(Box::new(NewExternalAgentThread { agent: None }))
},
)
.icon_color(Color::Muted)
.disabled(is_via_collab)
.handler({
@@ -2155,7 +2124,6 @@ impl AgentPanel {
}
}
});
menu = menu.item(entry);
}
@@ -2188,7 +2156,7 @@ impl AgentPanel {
.id("selected_agent_icon")
.when_some(selected_agent_custom_icon, |this, icon_path| {
let label = selected_agent_label.clone();
this.px_1()
this.px(DynamicSpacing::Base02.rems(cx))
.child(Icon::from_external_svg(icon_path).color(Color::Muted))
.tooltip(move |_window, cx| {
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
@@ -2197,7 +2165,7 @@ impl AgentPanel {
.when(!has_custom_icon, |this| {
this.when_some(self.selected_agent.icon(), |this, icon| {
let label = selected_agent_label.clone();
this.px_1()
this.px(DynamicSpacing::Base02.rems(cx))
.child(Icon::new(icon).color(Color::Muted))
.tooltip(move |_window, cx| {
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)

View File

@@ -45,7 +45,9 @@ use serde::{Deserialize, Serialize};
use settings::{LanguageModelSelection, Settings as _, SettingsStore};
use std::any::TypeId;
use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal};
use crate::agent_configuration::{
AddRemoteContextServer, ConfigureContextServerModal, ManageProfilesModal,
};
pub use crate::agent_panel::{AgentPanel, ConcreteAssistantPanelDelegate};
pub use crate::inline_assistant::InlineAssistant;
pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
@@ -274,6 +276,19 @@ pub fn init(
ConfigureContextServerModal::register(workspace, language_registry.clone(), window, cx)
})
.detach();
cx.observe_new(move |workspace: &mut workspace::Workspace, _window, _| {
workspace.register_action({
move |workspace, _: &AddRemoteContextServer, window, cx| {
crate::agent_configuration::configure_context_server_modal::ConfigureContextServerModal::new_remote_server(
workspace.project().clone(),
workspace.weak_handle(),
window,
cx,
).detach();
}
});
})
.detach();
cx.observe_new(ManageProfilesModal::register).detach();
// Update command palette filter based on AI settings
@@ -346,9 +361,7 @@ fn update_command_palette_filter(cx: &mut App) {
filter.show_namespace("supermaven");
filter.show_action_types(edit_prediction_actions.iter());
}
EditPredictionProvider::Zed
| EditPredictionProvider::Codestral
| EditPredictionProvider::Experimental(_) => {
EditPredictionProvider::Zed | EditPredictionProvider::Codestral => {
filter.show_namespace("edit_prediction");
filter.hide_namespace("copilot");
filter.hide_namespace("supermaven");

View File

@@ -10,8 +10,8 @@ use paths::remote_servers_dir;
use release_channel::{AppCommitSha, ReleaseChannel};
use serde::{Deserialize, Serialize};
use settings::{RegisterSetting, Settings, SettingsStore};
use smol::fs::File;
use smol::{fs, io::AsyncReadExt};
use smol::{fs::File, process::Command};
use std::mem;
use std::{
env::{
@@ -23,7 +23,6 @@ use std::{
sync::Arc,
time::Duration,
};
use util::command::new_smol_command;
use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
@@ -122,7 +121,7 @@ impl Drop for MacOsUnmounter<'_> {
let mount_path = mem::take(&mut self.mount_path);
self.background_executor
.spawn(async move {
let unmount_output = new_smol_command("hdiutil")
let unmount_output = Command::new("hdiutil")
.args(["detach", "-force"])
.arg(&mount_path)
.output()
@@ -800,7 +799,7 @@ async fn install_release_linux(
.await
.context("failed to create directory into which to extract update")?;
let output = new_smol_command("tar")
let output = Command::new("tar")
.arg("-xzf")
.arg(&downloaded_tar_gz)
.arg("-C")
@@ -835,7 +834,7 @@ async fn install_release_linux(
to = PathBuf::from(prefix);
}
let output = new_smol_command("rsync")
let output = Command::new("rsync")
.args(["-av", "--delete"])
.arg(&from)
.arg(&to)
@@ -867,7 +866,7 @@ async fn install_release_macos(
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
mounted_app_path.push("/");
let output = new_smol_command("hdiutil")
let output = Command::new("hdiutil")
.args(["attach", "-nobrowse"])
.arg(&downloaded_dmg)
.arg("-mountroot")
@@ -887,7 +886,7 @@ async fn install_release_macos(
background_executor: cx.background_executor(),
};
let output = new_smol_command("rsync")
let output = Command::new("rsync")
.args(["-av", "--delete"])
.arg(&mounted_app_path)
.arg(&running_app_path)
@@ -918,7 +917,7 @@ async fn cleanup_windows() -> Result<()> {
}
async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option<PathBuf>> {
let output = new_smol_command(downloaded_installer)
let output = Command::new(downloaded_installer)
.arg("/verysilent")
.arg("/update=true")
.arg("!desktopicon")

View File

@@ -293,11 +293,10 @@ impl Telemetry {
}
pub fn metrics_enabled(self: &Arc<Self>) -> bool {
self.state.lock().settings.metrics
}
pub fn diagnostics_enabled(self: &Arc<Self>) -> bool {
self.state.lock().settings.diagnostics
let state = self.state.lock();
let enabled = state.settings.metrics;
drop(state);
enabled
}
pub fn set_authenticated_user_info(

View File

@@ -12,7 +12,7 @@ workspace = true
path = "src/context_server.rs"
[features]
test-support = []
test-support = ["gpui/test-support"]
[dependencies]
anyhow.workspace = true
@@ -20,6 +20,7 @@ async-trait.workspace = true
collections.workspace = true
futures.workspace = true
gpui.workspace = true
http_client = { workspace = true, features = ["test-support"] }
log.workspace = true
net.workspace = true
parking_lot.workspace = true
@@ -32,3 +33,6 @@ smol.workspace = true
tempfile.workspace = true
url = { workspace = true, features = ["serde"] }
util.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }

View File

@@ -6,15 +6,19 @@ pub mod test;
pub mod transport;
pub mod types;
use collections::HashMap;
use std::path::Path;
use std::sync::Arc;
use std::{fmt::Display, path::PathBuf};
use anyhow::Result;
use client::Client;
use gpui::AsyncApp;
use gpui::{App, AsyncApp};
use parking_lot::RwLock;
pub use settings::ContextServerCommand;
use url::Url;
use crate::transport::HttpTransport;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ContextServerId(pub Arc<str>);
@@ -52,6 +56,25 @@ impl ContextServer {
}
}
/// Creates a [`ContextServer`] that talks to a remote MCP endpoint over HTTP.
///
/// Only `http` and `https` URL schemes are accepted; any other scheme yields
/// an error. The transport uses the application's shared HTTP client and
/// includes `headers` (e.g. authentication) on every request.
pub fn http(
    id: ContextServerId,
    endpoint: &Url,
    headers: HashMap<String, String>,
    cx: &App,
) -> Result<Self> {
    match endpoint.scheme() {
        "http" | "https" => {
            log::info!("Using HTTP transport for {}", endpoint);
            let client = cx.http_client();
            let transport = HttpTransport::new(client, endpoint.to_string(), headers, cx);
            Ok(Self::new(id, Arc::new(transport) as _))
        }
        other => anyhow::bail!("unsupported MCP url scheme {}", other),
    }
}
pub fn new(id: ContextServerId, transport: Arc<dyn crate::transport::Transport>) -> Self {
Self {
id,

View File

@@ -1,11 +1,12 @@
pub mod http;
mod stdio_transport;
use std::pin::Pin;
use anyhow::Result;
use async_trait::async_trait;
use futures::Stream;
use std::pin::Pin;
pub use http::*;
pub use stdio_transport::*;
#[async_trait]

View File

@@ -0,0 +1,271 @@
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
use futures::{Stream, StreamExt};
use gpui::{App, BackgroundExecutor};
use http_client::{AsyncBody, HttpClient, Request, Response, http::Method};
use parking_lot::Mutex as SyncMutex;
use smol::channel;
use std::{pin::Pin, sync::Arc};
use crate::transport::Transport;
// Constants from MCP spec
const HEADER_SESSION_ID: &str = "Mcp-Session-Id";
const EVENT_STREAM_MIME_TYPE: &str = "text/event-stream";
const JSON_MIME_TYPE: &str = "application/json";
/// HTTP Transport with session management and SSE support
pub struct HttpTransport {
    // Client used for all outbound requests.
    http_client: Arc<dyn HttpClient>,
    // Full URL of the MCP endpoint; every message is POSTed here.
    endpoint: String,
    // Session id issued by the server via the `Mcp-Session-Id` response
    // header; echoed back on subsequent (non-initialize) requests once set.
    session_id: Arc<SyncMutex<Option<String>>>,
    // Executor for detached background work (e.g. session cleanup on drop).
    executor: BackgroundExecutor,
    // Channel carrying server->client messages out through `receive()`.
    response_tx: channel::Sender<String>,
    response_rx: channel::Receiver<String>,
    // Channel carrying transport-level error strings out through `receive_err()`.
    error_tx: channel::Sender<String>,
    error_rx: channel::Receiver<String>,
    // Track if we've sent the initialize response
    initialized: Arc<SyncMutex<bool>>,
    // Authentication headers to include in requests
    headers: HashMap<String, String>,
}
impl HttpTransport {
pub fn new(
http_client: Arc<dyn HttpClient>,
endpoint: String,
headers: HashMap<String, String>,
cx: &App,
) -> Self {
let (response_tx, response_rx) = channel::unbounded();
let (error_tx, error_rx) = channel::unbounded();
Self {
http_client,
executor: cx.background_executor().clone(),
endpoint,
session_id: Arc::new(SyncMutex::new(None)),
response_tx,
response_rx,
error_tx,
error_rx,
initialized: Arc::new(SyncMutex::new(false)),
headers,
}
}
/// Send a message and handle the response based on content type
async fn send_message(&self, message: String) -> Result<()> {
// Check if this is an initialize request
let is_initialize = message.contains("\"method\":\"initialize\"");
let is_notification =
!message.contains("\"id\":") || message.contains("notifications/initialized");
let mut request_builder = Request::builder()
.method(Method::POST)
.uri(&self.endpoint)
.header("Content-Type", JSON_MIME_TYPE)
.header(
"Accept",
format!("{}, {}", JSON_MIME_TYPE, EVENT_STREAM_MIME_TYPE),
);
for (key, value) in &self.headers {
request_builder = request_builder.header(key.as_str(), value.as_str());
}
// Add session ID if we have one (except for initialize)
if !is_initialize {
if let Some(ref session_id) = *self.session_id.lock() {
request_builder = request_builder.header(HEADER_SESSION_ID, session_id.as_str());
}
}
let request = request_builder.body(AsyncBody::from(message.into_bytes()))?;
let mut response = self.http_client.send(request).await?;
// Handle different response types based on status and content-type
match response.status() {
status if status.is_success() => {
// Check content type
let content_type = response
.headers()
.get("content-type")
.and_then(|v| v.to_str().ok());
// Extract session ID from response headers if present
if let Some(session_id) = response
.headers()
.get(HEADER_SESSION_ID)
.and_then(|v| v.to_str().ok())
{
*self.session_id.lock() = Some(session_id.to_string());
log::debug!("Session ID set: {}", session_id);
}
match content_type {
Some(ct) if ct.starts_with(JSON_MIME_TYPE) => {
// JSON response - read and forward immediately
let mut body = String::new();
futures::AsyncReadExt::read_to_string(response.body_mut(), &mut body)
.await?;
// Only send non-empty responses
if !body.is_empty() {
self.response_tx
.send(body)
.await
.map_err(|_| anyhow!("Failed to send JSON response"))?;
}
}
Some(ct) if ct.starts_with(EVENT_STREAM_MIME_TYPE) => {
// SSE stream - set up streaming
self.setup_sse_stream(response).await?;
// Mark as initialized after setting up the first SSE stream
if is_initialize {
*self.initialized.lock() = true;
}
}
_ => {
// For notifications, 202 Accepted with no content type is ok
if is_notification && status.as_u16() == 202 {
log::debug!("Notification accepted");
} else {
return Err(anyhow!("Unexpected content type: {:?}", content_type));
}
}
}
}
status if status.as_u16() == 202 => {
// Accepted - notification acknowledged, no response needed
log::debug!("Notification accepted");
}
_ => {
let mut error_body = String::new();
futures::AsyncReadExt::read_to_string(response.body_mut(), &mut error_body).await?;
self.error_tx
.send(format!("HTTP {}: {}", response.status(), error_body))
.await
.map_err(|_| anyhow!("Failed to send error"))?;
}
}
Ok(())
}
/// Set up SSE streaming from the response
async fn setup_sse_stream(&self, mut response: Response<AsyncBody>) -> Result<()> {
let response_tx = self.response_tx.clone();
let error_tx = self.error_tx.clone();
// Spawn a task to handle the SSE stream
smol::spawn(async move {
let reader = futures::io::BufReader::new(response.body_mut());
let mut lines = futures::AsyncBufReadExt::lines(reader);
let mut data_buffer = Vec::new();
let mut in_message = false;
while let Some(line_result) = lines.next().await {
match line_result {
Ok(line) => {
if line.is_empty() {
// Empty line signals end of event
if !data_buffer.is_empty() {
let message = data_buffer.join("\n");
// Filter out ping messages and empty data
if !message.trim().is_empty() && message != "ping" {
if let Err(e) = response_tx.send(message).await {
log::error!("Failed to send SSE message: {}", e);
break;
}
}
data_buffer.clear();
}
in_message = false;
} else if let Some(data) = line.strip_prefix("data: ") {
// Handle data lines
let data = data.trim();
if !data.is_empty() {
// Check if this is a ping message
if data == "ping" {
log::trace!("Received SSE ping");
continue;
}
data_buffer.push(data.to_string());
in_message = true;
}
} else if line.starts_with("event:")
|| line.starts_with("id:")
|| line.starts_with("retry:")
{
// Ignore other SSE fields
continue;
} else if in_message {
// Continuation of data
data_buffer.push(line);
}
}
Err(e) => {
let _ = error_tx.send(format!("SSE stream error: {}", e)).await;
break;
}
}
}
})
.detach();
Ok(())
}
}
#[async_trait]
impl Transport for HttpTransport {
    /// Deliver an outgoing message over HTTP.
    async fn send(&self, message: String) -> Result<()> {
        self.send_message(message).await
    }

    /// Stream of server-to-client messages (JSON bodies and SSE payloads).
    fn receive(&self) -> Pin<Box<dyn Stream<Item = String> + Send>> {
        let receiver = self.response_rx.clone();
        Box::pin(receiver)
    }

    /// Stream of transport-level error strings.
    fn receive_err(&self) -> Pin<Box<dyn Stream<Item = String> + Send>> {
        let receiver = self.error_rx.clone();
        Box::pin(receiver)
    }
}
impl Drop for HttpTransport {
    /// Best-effort session teardown: if the server issued a session id, fire
    /// a detached DELETE so it can reclaim the session. All failures are
    /// silently ignored — drop must never propagate errors.
    fn drop(&mut self) {
        let Some(session) = self.session_id.lock().clone() else {
            return;
        };
        let client = self.http_client.clone();
        let endpoint = self.endpoint.clone();
        let extra_headers = self.headers.clone();
        self.executor
            .spawn(async move {
                let mut builder = Request::builder()
                    .method(Method::DELETE)
                    .uri(&endpoint)
                    .header(HEADER_SESSION_ID, &session);
                // Include the same authentication headers as regular requests.
                for (name, value) in extra_headers {
                    builder = builder.header(name.as_str(), value.as_str());
                }
                if let Ok(request) = builder.body(AsyncBody::empty()) {
                    let _ = client.send(request).await;
                }
            })
            .detach();
    }
}

View File

@@ -491,7 +491,7 @@ impl ProjectDiagnosticsEditor {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let was_empty = self.multibuffer.read(cx).is_empty();
let buffer_snapshot = buffer.read(cx).snapshot();
let mut buffer_snapshot = buffer.read(cx).snapshot();
let buffer_id = buffer_snapshot.remote_id();
let max_severity = if self.include_warnings {
@@ -602,6 +602,7 @@ impl ProjectDiagnosticsEditor {
cx,
)
.await;
buffer_snapshot = cx.update(|_, cx| buffer.read(cx).snapshot())?;
let initial_range = buffer_snapshot.anchor_after(b.initial_range.start)
..buffer_snapshot.anchor_before(b.initial_range.end);
let excerpt_range = ExcerptRange {
@@ -1009,14 +1010,11 @@ async fn heuristic_syntactic_expand(
snapshot: BufferSnapshot,
cx: &mut AsyncApp,
) -> Option<RangeInclusive<BufferRow>> {
let start = snapshot.clip_point(input_range.start, Bias::Right);
let end = snapshot.clip_point(input_range.end, Bias::Left);
let input_row_count = input_range.end.row - input_range.start.row;
if input_row_count > max_row_count {
return None;
}
let input_range = start..end;
// If the outline node contains the diagnostic and is small enough, just use that.
let outline_range = snapshot.outline_range_containing(input_range.clone());
if let Some(outline_range) = outline_range.clone() {

View File

@@ -18,19 +18,18 @@ client.workspace = true
cloud_llm_client.workspace = true
codestral.workspace = true
copilot.workspace = true
edit_prediction.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
gpui.workspace = true
indoc.workspace = true
edit_prediction.workspace = true
language.workspace = true
paths.workspace = true
project.workspace = true
regex.workspace = true
settings.workspace = true
supermaven.workspace = true
sweep_ai.workspace = true
telemetry.workspace = true
ui.workspace = true
workspace.workspace = true

View File

@@ -18,15 +18,12 @@ use language::{
};
use project::DisableAiSettings;
use regex::Regex;
use settings::{
EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, Settings, SettingsStore, update_settings_file,
};
use settings::{Settings, SettingsStore, update_settings_file};
use std::{
sync::{Arc, LazyLock},
time::Duration,
};
use supermaven::{AccountStatus, Supermaven};
use sweep_ai::SweepFeatureFlag;
use ui::{
Clickable, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, IconButton,
IconButtonShape, Indicator, PopoverMenu, PopoverMenuHandle, ProgressBar, Tooltip, prelude::*,
@@ -81,7 +78,7 @@ impl Render for EditPredictionButton {
let all_language_settings = all_language_settings(None, cx);
match &all_language_settings.edit_predictions.provider {
match all_language_settings.edit_predictions.provider {
EditPredictionProvider::None => div().hidden(),
EditPredictionProvider::Copilot => {
@@ -300,15 +297,6 @@ impl Render for EditPredictionButton {
.with_handle(self.popover_menu_handle.clone()),
)
}
EditPredictionProvider::Experimental(provider_name) => {
if *provider_name == EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME
&& cx.has_flag::<SweepFeatureFlag>()
{
div().child(Icon::new(IconName::SweepAi))
} else {
div()
}
}
EditPredictionProvider::Zed => {
let enabled = self.editor_enabled.unwrap_or(true);
@@ -537,7 +525,7 @@ impl EditPredictionButton {
set_completion_provider(fs.clone(), cx, provider);
})
}
EditPredictionProvider::None | EditPredictionProvider::Experimental(_) => continue,
EditPredictionProvider::None => continue,
};
}
}

View File

@@ -3291,8 +3291,8 @@ impl Editor {
self.refresh_document_highlights(cx);
refresh_linked_ranges(self, window, cx);
// self.refresh_selected_text_highlights(false, window, cx);
// self.refresh_matching_bracket_highlights(window, cx);
self.refresh_selected_text_highlights(false, window, cx);
self.refresh_matching_bracket_highlights(window, cx);
self.update_visible_edit_prediction(window, cx);
self.edit_prediction_requires_modifier_in_indent_conflict = true;
self.inline_blame_popover.take();
@@ -21248,9 +21248,9 @@ impl Editor {
self.active_indent_guides_state.dirty = true;
self.refresh_active_diagnostics(cx);
self.refresh_code_actions(window, cx);
// self.refresh_selected_text_highlights(true, window, cx);
self.refresh_selected_text_highlights(true, window, cx);
self.refresh_single_line_folds(window, cx);
// self.refresh_matching_bracket_highlights(window, cx);
self.refresh_matching_bracket_highlights(window, cx);
if self.has_active_edit_prediction() {
self.update_visible_edit_prediction(window, cx);
}
@@ -21345,7 +21345,6 @@ impl Editor {
}
multi_buffer::Event::Reparsed(buffer_id) => {
self.tasks_update_task = Some(self.refresh_runnables(window, cx));
// self.refresh_selected_text_highlights(true, window, cx);
jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx);
cx.emit(EditorEvent::Reparsed(*buffer_id));
@@ -23908,10 +23907,6 @@ impl EditorSnapshot {
self.scroll_anchor.scroll_position(&self.display_snapshot)
}
pub fn scroll_near_end(&self) -> bool {
self.scroll_anchor.near_end(&self.display_snapshot)
}
fn gutter_dimensions(
&self,
font_id: FontId,

View File

@@ -9055,9 +9055,6 @@ impl Element for EditorElement {
)
});
if snapshot.scroll_near_end() {
dbg!("near end!");
}
let mut scroll_position = snapshot.scroll_position();
// The scroll position is a fractional point, the whole number of which represents
// the top of the window in terms of display rows.

View File

@@ -46,20 +46,12 @@ impl ScrollAnchor {
}
}
pub fn near_end(&self, snapshot: &DisplaySnapshot) -> bool {
let editor_length = snapshot.max_point().row().as_f64();
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64();
(scroll_top - editor_length).abs() < 300.0
}
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point<ScrollOffset> {
self.offset.apply_along(Axis::Vertical, |offset| {
if self.anchor == Anchor::min() {
0.
} else {
dbg!(snapshot.max_point().row().as_f64());
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64();
dbg!(scroll_top, offset);
(offset + scroll_top).max(0.)
}
})
@@ -251,11 +243,6 @@ impl ScrollManager {
}
}
};
let near_end = self.anchor.near_end(map);
// // TODO call load more here
// if near_end {
// cx.read();
// }
let scroll_top_row = DisplayRow(scroll_top as u32);
let scroll_top_buffer_point = map

View File

@@ -990,6 +990,9 @@ impl ExtensionImports for WasmState {
command: None,
settings: Some(settings),
})?),
project::project_settings::ContextServerSettings::Http { .. } => {
bail!("remote context server settings not supported in 0.6.0")
}
}
}
_ => {

View File

@@ -395,19 +395,19 @@ mod tests {
thread::spawn(move || stream.run(move |events| tx.send(events.to_vec()).is_ok()));
fs::write(path.join("new-file"), "").unwrap();
let events = rx.recv_timeout(timeout()).unwrap();
let events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("new-file"));
assert!(event.flags.contains(StreamFlags::ITEM_CREATED));
fs::remove_file(path.join("existing-file-5")).unwrap();
let mut events = rx.recv_timeout(timeout()).unwrap();
let mut events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let mut event = events.last().unwrap();
// we see this duplicate about 1/100 test runs.
if event.path == path.join("new-file")
&& event.flags.contains(StreamFlags::ITEM_CREATED)
{
events = rx.recv_timeout(timeout()).unwrap();
events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
event = events.last().unwrap();
}
assert_eq!(event.path, path.join("existing-file-5"));
@@ -440,13 +440,13 @@ mod tests {
});
fs::write(path.join("new-file"), "").unwrap();
let events = rx.recv_timeout(timeout()).unwrap();
let events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("new-file"));
assert!(event.flags.contains(StreamFlags::ITEM_CREATED));
fs::remove_file(path.join("existing-file-5")).unwrap();
let events = rx.recv_timeout(timeout()).unwrap();
let events = rx.recv_timeout(Duration::from_secs(2)).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("existing-file-5"));
assert!(event.flags.contains(StreamFlags::ITEM_REMOVED));
@@ -477,11 +477,11 @@ mod tests {
});
fs::write(path.join("new-file"), "").unwrap();
assert_eq!(rx.recv_timeout(timeout()).unwrap(), "running");
assert_eq!(rx.recv_timeout(Duration::from_secs(2)).unwrap(), "running");
// Dropping the handle causes `EventStream::run` to return.
drop(handle);
assert_eq!(rx.recv_timeout(timeout()).unwrap(), "stopped");
assert_eq!(rx.recv_timeout(Duration::from_secs(2)).unwrap(), "stopped");
}
#[test]
@@ -500,14 +500,11 @@ mod tests {
}
fn flush_historical_events() {
thread::sleep(timeout());
}
fn timeout() -> Duration {
if std::env::var("CI").is_ok() {
Duration::from_secs(4)
let duration = if std::env::var("CI").is_ok() {
Duration::from_secs(2)
} else {
Duration::from_millis(500)
}
};
thread::sleep(duration);
}
}

View File

@@ -60,27 +60,27 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity<MultiBuffer>, cx: &mu
buffer_added(editor, buffer, cx);
}
// cx.subscribe(&cx.entity(), |editor, _, event, cx| match event {
// EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx),
// EditorEvent::ExcerptsExpanded { ids } => {
// let multibuffer = editor.buffer().read(cx).snapshot(cx);
// for excerpt_id in ids {
// let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else {
// continue;
// };
// let addon = editor.addon::<ConflictAddon>().unwrap();
// let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else {
// return;
// };
// excerpt_for_buffer_updated(editor, conflict_set, cx);
// }
// }
// EditorEvent::ExcerptsRemoved {
// removed_buffer_ids, ..
// } => buffers_removed(editor, removed_buffer_ids, cx),
// _ => {}
// })
// .detach();
cx.subscribe(&cx.entity(), |editor, _, event, cx| match event {
EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx),
EditorEvent::ExcerptsExpanded { ids } => {
let multibuffer = editor.buffer().read(cx).snapshot(cx);
for excerpt_id in ids {
let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else {
continue;
};
let addon = editor.addon::<ConflictAddon>().unwrap();
let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else {
return;
};
excerpt_for_buffer_updated(editor, conflict_set, cx);
}
}
EditorEvent::ExcerptsRemoved {
removed_buffer_ids, ..
} => buffers_removed(editor, removed_buffer_ids, cx),
_ => {}
})
.detach();
}
fn excerpt_for_buffer_updated(

View File

@@ -30,11 +30,10 @@ use git::{
TrashUntrackedFiles, UnstageAll,
};
use gpui::{
Action, AppContext, AsyncApp, AsyncWindowContext, ClickEvent, Corner, DismissEvent, Entity,
EventEmitter, FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior,
ListSizingBehavior, MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy,
Subscription, Task, UniformListScrollHandle, WeakEntity, actions, anchored, deferred,
uniform_list,
Action, AsyncApp, AsyncWindowContext, ClickEvent, Corner, DismissEvent, Entity, EventEmitter,
FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior,
MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy, Subscription, Task,
UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list,
};
use itertools::Itertools;
use language::{Buffer, File};
@@ -312,9 +311,6 @@ pub struct GitPanel {
bulk_staging: Option<BulkStaging>,
stash_entries: GitStash,
_settings_subscription: Subscription,
/// On clicking an entry in a the git_panel this will
/// trigger loading it
open_diff_task: Option<Task<()>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -475,7 +471,6 @@ impl GitPanel {
bulk_staging: None,
stash_entries: Default::default(),
_settings_subscription,
open_diff_task: None,
};
this.schedule_update(window, cx);
@@ -755,23 +750,11 @@ impl GitPanel {
fn open_diff(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
maybe!({
let entry = self
.entries
.get(self.selected_entry?)?
.status_entry()?
.clone();
let entry = self.entries.get(self.selected_entry?)?.status_entry()?;
let workspace = self.workspace.upgrade()?;
let git_repo = self.active_repository.as_ref()?.clone();
let focus_handle = self.focus_handle.clone();
let git_repo = self.active_repository.as_ref()?;
// let panel = panel.upgrade().unwrap(); // TODO FIXME
// cx.read_entity(&panel, |panel, cx| {
// panel
// })
// .unwrap(); // TODO FIXME
let project_diff = if let Some(project_diff) =
workspace.read(cx).active_item_as::<ProjectDiff>(cx)
if let Some(project_diff) = workspace.read(cx).active_item_as::<ProjectDiff>(cx)
&& let Some(project_path) = project_diff.read(cx).active_path(cx)
&& Some(&entry.repo_path)
== git_repo
@@ -781,21 +764,16 @@ impl GitPanel {
{
project_diff.focus_handle(cx).focus(window);
project_diff.update(cx, |project_diff, cx| project_diff.autoscroll(cx));
project_diff
} else {
workspace.update(cx, |workspace, cx| {
ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx)
})
return None;
};
focus_handle.focus(window); // TODO: should we focus before the file is loaded or wait for that?
let project_diff = project_diff.downgrade();
// TODO use the fancy new thing
self.open_diff_task = Some(cx.spawn_in(window, async move |_, cx| {
ProjectDiff::refresh_one(project_diff, entry.repo_path, entry.status, cx)
.await
.unwrap(); // TODO FIXME
}));
self.workspace
.update(cx, |workspace, cx| {
ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx);
})
.ok();
self.focus_handle.focus(window);
Some(())
});
}
@@ -3882,7 +3860,6 @@ impl GitPanel {
})
}
// context menu
fn deploy_entry_context_menu(
&mut self,
position: Point<Pixels>,
@@ -4108,7 +4085,6 @@ impl GitPanel {
this.selected_entry = Some(ix);
cx.notify();
if event.modifiers().secondary() {
// the click handler
this.open_file(&Default::default(), window, cx)
} else {
this.open_diff(&Default::default(), window, cx);

View File

@@ -7,21 +7,19 @@ use crate::{
use anyhow::{Context as _, Result, anyhow};
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus};
use collections::{HashMap, HashSet};
use db::smol::stream::StreamExt;
use editor::{
Addon, Editor, EditorEvent, SelectionEffects,
actions::{GoToHunk, GoToPreviousHunk},
multibuffer_context_lines,
scroll::Autoscroll,
};
use futures::stream::FuturesUnordered;
use git::{
Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus},
status::FileStatus,
};
use gpui::{
Action, AnyElement, AnyView, App, AppContext, AsyncWindowContext, Entity, EventEmitter,
Action, AnyElement, AnyView, App, AppContext as _, AsyncWindowContext, Entity, EventEmitter,
FocusHandle, Focusable, Render, Subscription, Task, WeakEntity, actions,
};
use language::{Anchor, Buffer, Capability, OffsetRangeExt};
@@ -29,21 +27,17 @@ use multi_buffer::{MultiBuffer, PathKey};
use project::{
Project, ProjectPath,
git_store::{
self, Repository, StatusEntry,
Repository,
branch_diff::{self, BranchDiffEvent, DiffBase},
},
};
use settings::{Settings, SettingsStore};
use std::{
any::{Any, TypeId},
collections::VecDeque,
sync::Arc,
};
use std::{ops::Range, time::Instant};
use std::any::{Any, TypeId};
use std::ops::Range;
use std::sync::Arc;
use theme::ActiveTheme;
use ui::{KeyBinding, Tooltip, prelude::*, vertical_divider};
use util::{ResultExt, rel_path::RelPath};
use util::{ResultExt as _, rel_path::RelPath};
use workspace::{
CloseActiveItem, ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
@@ -52,8 +46,6 @@ use workspace::{
searchable::SearchableItemHandle,
};
mod diff_loader;
actions!(
git,
[
@@ -100,7 +92,7 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
Self::deploy_at(workspace, None, window, cx);
Self::deploy_at(workspace, None, window, cx)
}
fn deploy_branch_diff(
@@ -142,7 +134,7 @@ impl ProjectDiff {
entry: Option<GitStatusEntry>,
window: &mut Window,
cx: &mut Context<Workspace>,
) -> Entity<ProjectDiff> {
) {
telemetry::event!(
"Git Diff Opened",
source = if entry.is_some() {
@@ -174,8 +166,7 @@ impl ProjectDiff {
project_diff.update(cx, |project_diff, cx| {
project_diff.move_to_entry(entry, window, cx);
})
};
project_diff
}
}
pub fn autoscroll(&self, cx: &mut Context<Self>) {
@@ -276,23 +267,15 @@ impl ProjectDiff {
cx.subscribe_in(&editor, window, Self::handle_editor_event)
.detach();
let loader = diff_loader::start_loader(cx.entity(), window, cx);
let branch_diff_subscription = cx.subscribe_in(
&branch_diff,
window,
move |this, _git_store, event, window, cx| match event {
BranchDiffEvent::FileListChanged => {
// TODO this does not account for size of paths
// maybe a quick fs metadata could get us info on that?
// would make number of paths async but thats fine here
// let entries = this.first_n_entries(cx, 100);
loader.update_file_list();
// let
// this._task = window.spawn(cx, {
// let this = cx.weak_entity();
// async |cx| Self::refresh(this, entries, cx).await
// })
this._task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
}
},
);
@@ -307,32 +290,22 @@ impl ProjectDiff {
if is_sort_by_path != was_sort_by_path
|| is_collapse_untracked_diff != was_collapse_untracked_diff
{
// no idea why we need to do anything here
// probably should sort the multibuffer instead of reparsing
// everything though!!!
todo!("resort multibuffer entries");
todo!("assert the entries in the list did not change")
// this._task = {
// window.spawn(cx, {
// let this = cx.weak_entity();
// async |cx| Self::refresh(this, cx).await
// })
// }
this._task = {
window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
}
}
was_sort_by_path = is_sort_by_path;
was_collapse_untracked_diff = is_collapse_untracked_diff;
})
.detach();
// let task = window.spawn(cx, {
// let this = cx.weak_entity();
// async |cx| {
// let entries = this
// .read_with(cx, |project_diff, cx| project_diff.first_n_entries(cx, 100))
// .unwrap();
// Self::refresh(this, entries, cx).await
// }
// });
let task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
});
Self {
project,
@@ -498,11 +471,10 @@ impl ProjectDiff {
cx: &mut Context<Self>,
) {
let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| {
// TODO fix this
// this._task = window.spawn(cx, {
// let this = cx.weak_entity();
// async |cx| Self::refresh(this, cx).await
// })
this._task = window.spawn(cx, {
let this = cx.weak_entity();
async |cx| Self::refresh(this, cx).await
})
});
self.buffer_diff_subscriptions
.insert(path_key.path.clone(), (diff.clone(), subscription));
@@ -578,221 +550,51 @@ impl ProjectDiff {
}
}
pub fn all_entries(&self, cx: &App) -> Vec<StatusEntry> {
let Some(ref repo) = self.branch_diff.read(cx).repo else {
return Vec::new();
};
repo.read(cx).cached_status().collect()
}
pub fn entries(&self, cx: &App) -> Option<impl Iterator<Item = StatusEntry>> {
Some(
self.branch_diff
.read(cx)
.repo
.as_ref()?
.read(cx)
.cached_status(),
)
}
pub fn first_n_entries(&self, cx: &App, n: usize) -> VecDeque<StatusEntry> {
let Some(ref repo) = self.branch_diff.read(cx).repo else {
return VecDeque::new();
};
repo.read(cx).cached_status().take(n).collect()
}
pub async fn refresh_one(
this: WeakEntity<Self>,
repo_path: RepoPath,
status: FileStatus,
cx: &mut AsyncWindowContext,
) -> Result<()> {
use git_store::branch_diff::BranchDiff;
let Some(this) = this.upgrade() else {
return Ok(());
};
let multibuffer = cx.read_entity(&this, |this, _| this.multibuffer.clone())?;
let branch_diff = cx.read_entity(&this, |pd, _| pd.branch_diff.clone())?;
let Some(repo) = cx.read_entity(&branch_diff, |bd, _| bd.repo.clone())? else {
return Ok(());
};
let project = cx.read_entity(&branch_diff, |bd, _| bd.project.clone())?;
let mut previous_paths =
cx.read_entity(&multibuffer, |mb, _| mb.paths().collect::<HashSet<_>>())?;
let tree_diff_status = cx.read_entity(&branch_diff, |branch_diff, _| {
branch_diff
.tree_diff
.as_ref()
.and_then(|t| t.entries.get(&repo_path))
.cloned()
})?;
let Some(status) = cx.read_entity(&branch_diff, |bd, _| {
bd.merge_statuses(Some(status), tree_diff_status.as_ref())
})?
else {
return Ok(());
};
if !status.has_changes() {
return Ok(());
}
let Some(project_path) = cx.read_entity(&repo, |repo, cx| {
repo.repo_path_to_project_path(&repo_path, cx)
})?
else {
return Ok(());
};
let sort_prefix =
cx.read_entity(&repo, |repo, cx| sort_prefix(repo, &repo_path, status, cx))?;
let path_key = PathKey::with_sort_prefix(sort_prefix, repo_path.into_arc());
previous_paths.remove(&path_key);
let repo = repo.clone();
let Some((buffer, diff)) = BranchDiff::load_buffer(
tree_diff_status,
project_path,
repo,
project.downgrade(),
&mut cx.to_app(),
)
.await
.log_err() else {
return Ok(());
};
cx.update(|window, cx| {
this.update(cx, |this, cx| {
this.register_buffer(path_key, status, buffer, diff, window, cx)
pub async fn refresh(this: WeakEntity<Self>, cx: &mut AsyncWindowContext) -> Result<()> {
let mut path_keys = Vec::new();
let buffers_to_load = this.update(cx, |this, cx| {
let (repo, buffers_to_load) = this.branch_diff.update(cx, |branch_diff, cx| {
let load_buffers = branch_diff.load_buffers(cx);
(branch_diff.repo().cloned(), load_buffers)
});
})?;
let mut previous_paths = this.multibuffer.read(cx).paths().collect::<HashSet<_>>();
// TODO LL clear multibuff on open?
// // remove anything not part of the diff in the multibuffer
// this.update(cx, |this, cx| {
// multibuffer.update(cx, |multibuffer, cx| {
// for path in previous_paths {
// this.buffer_diff_subscriptions.remove(&path.path);
// multibuffer.remove_excerpts_for_path(path, cx);
// }
// });
// })?;
if let Some(repo) = repo {
let repo = repo.read(cx);
Ok(())
}
pub async fn refresh(
this: WeakEntity<Self>,
cached_status: Vec<StatusEntry>,
cx: &mut AsyncWindowContext,
) -> Result<()> {
dbg!("refreshing all");
use git_store::branch_diff::BranchDiff;
let Some(this) = this.upgrade() else {
return Ok(());
};
let multibuffer = cx.read_entity(&this, |this, _| this.multibuffer.clone())?;
let branch_diff = cx.read_entity(&this, |pd, _| pd.branch_diff.clone())?;
let Some(repo) = cx.read_entity(&branch_diff, |bd, _| bd.repo.clone())? else {
return Ok(());
};
let project = cx.read_entity(&branch_diff, |bd, _| bd.project.clone())?;
let mut previous_paths =
cx.read_entity(&multibuffer, |mb, _| mb.paths().collect::<HashSet<_>>())?;
// Idea: on click in git panel prioritize task for that file in some way ...
// could have a hashmap of futures here
// - needs to prioritize *some* background tasks over others
// -
let mut tasks = FuturesUnordered::new();
let mut seen = HashSet::default();
for entry in cached_status {
seen.insert(entry.repo_path.clone());
let tree_diff_status = cx.read_entity(&branch_diff, |branch_diff, _| {
branch_diff
.tree_diff
.as_ref()
.and_then(|t| t.entries.get(&entry.repo_path))
.cloned()
})?;
let Some(status) = cx.read_entity(&branch_diff, |bd, _| {
bd.merge_statuses(Some(entry.status), tree_diff_status.as_ref())
})?
else {
continue;
};
if !status.has_changes() {
continue;
path_keys = Vec::with_capacity(buffers_to_load.len());
for entry in buffers_to_load.iter() {
let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx);
let path_key =
PathKey::with_sort_prefix(sort_prefix, entry.repo_path.as_ref().clone());
previous_paths.remove(&path_key);
path_keys.push(path_key)
}
}
let Some(project_path) = cx.read_entity(&repo, |repo, cx| {
repo.repo_path_to_project_path(&entry.repo_path, cx)
})?
else {
continue;
};
let sort_prefix = cx.read_entity(&repo, |repo, cx| {
sort_prefix(repo, &entry.repo_path, entry.status, cx)
})?;
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.into_arc());
previous_paths.remove(&path_key);
let repo = repo.clone();
let project = project.downgrade();
let task = cx.spawn(async move |cx| {
let res = BranchDiff::load_buffer(
tree_diff_status,
project_path,
repo,
project,
&mut cx.to_app(),
)
.await;
(res, path_key, entry.status)
});
tasks.push(task)
}
// remove anything not part of the diff in the multibuffer
this.update(cx, |this, cx| {
multibuffer.update(cx, |multibuffer, cx| {
this.multibuffer.update(cx, |multibuffer, cx| {
for path in previous_paths {
this.buffer_diff_subscriptions.remove(&path.path);
multibuffer.remove_excerpts_for_path(path, cx);
}
});
buffers_to_load
})?;
// add the new buffers as they are parsed
let mut last_notify = Instant::now();
while let Some((res, path_key, file_status)) = tasks.next().await {
if let Some((buffer, diff)) = res.log_err() {
for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) {
if let Some((buffer, diff)) = entry.load.await.log_err() {
cx.update(|window, cx| {
this.update(cx, |this, cx| {
this.register_buffer(path_key, file_status, buffer, diff, window, cx)
});
this.register_buffer(path_key, entry.file_status, buffer, diff, window, cx)
})
.ok();
})?;
}
if last_notify.elapsed().as_millis() > 100 {
cx.update_entity(&this, |_, cx| cx.notify())?;
last_notify = Instant::now();
}
}
this.update(cx, |this, cx| {
this.pending_scroll.take();
cx.notify();
})?;
Ok(())
}

View File

@@ -1,249 +0,0 @@
//! Task which updates the project diff multibuffer without putting too much
//! pressure on the frontend executor. It prioritizes loading the area around the user
use collections::HashSet;
use db::smol::stream::StreamExt;
use futures::channel::mpsc;
use gpui::{AppContext, AsyncWindowContext, Entity, Task, WeakEntity};
use project::git_store::StatusEntry;
use ui::{App, Window};
use util::ResultExt;
use crate::{git_panel::GitStatusEntry, project_diff::ProjectDiff};
enum Update {
Position(usize),
NewFile(StatusEntry),
ListChanged,
// should not need to handle re-ordering (sorting) here.
// something to handle scroll? or should that live in the project diff?
}
struct LoaderHandle {
task: Task<Option<()>>,
sender: mpsc::UnboundedSender<Update>,
}
impl LoaderHandle {
pub fn update_file_list(&self) {
let _ = self
.sender
.unbounded_send(Update::ListChanged)
.log_err();
}
pub fn update_pos(&self, pos: usize) {
let _ = self
.sender
.unbounded_send(Update::Position((pos)))
.log_err();
}
}
/// Spawn the background loader for `project_diff` on the window's foreground
/// executor and return a handle for sending it updates.
pub fn start_loader(project_diff: Entity<ProjectDiff>, window: &Window, cx: &App) -> LoaderHandle {
    let (sender, receiver) = mpsc::unbounded();
    let task = window.spawn(cx, async move |cx| {
        let weak_diff = project_diff.downgrade();
        load(receiver, weak_diff, cx).await
    });
    LoaderHandle { task, sender }
}
/// Load state of a single entry in the project diff.
///
/// NOTE(review): state semantics inferred from variant names; only `Queued`
/// is inspected in this file (via `queued()`).
enum DiffEntry {
    /// The entry's load is in flight.
    Loading(GitStatusEntry),
    /// The entry has finished loading.
    Loaded(GitStatusEntry),
    /// The entry is waiting to be scheduled for loading.
    Queued(GitStatusEntry),
}
impl DiffEntry {
    /// Whether this entry is still waiting to be scheduled for loading.
    fn queued(&self) -> bool {
        match self {
            DiffEntry::Queued(_) => true,
            DiffEntry::Loading(_) | DiffEntry::Loaded(_) => false,
        }
    }
}
/// Background loop that drives incremental loading of the project diff.
///
/// Consumes `Update` messages until either the channel closes or the
/// `ProjectDiff` entity is dropped; both surface as `None` via `?`.
///
/// NOTE(review): work-in-progress — two match arms still `todo!()`, and
/// `existing` is used both as `DiffEntry`s (for `queued()`) and passed to
/// `diff_current_list`, which takes `&[GitStatusEntry]`; the element type
/// must be reconciled before this compiles.
async fn load(
    // `mut` is required: `StreamExt::next` takes `&mut self`.
    mut rx: mpsc::UnboundedReceiver<Update>,
    project_diff: WeakEntity<ProjectDiff>,
    cx: &mut AsyncWindowContext,
) -> Option<()> {
    // let initial_entries = cx.read_entity(&cx.entity(), |project_diff, cx| project_diff.first_n_entries(cx, 100));
    // let loading = to_load.drain(..100).map(|| refresh_one)
    let mut existing = Vec::new();
    loop {
        // Ends the task (`None`) once all senders are dropped.
        let update = rx.next().await?;
        match update {
            Update::Position(pos) => {
                if existing.get(pos).is_some_and(|diff| diff.queued()) {
                    todo!("append to future unordered, also load in the bit
around (maybe with a short sleep ahead so we get some sense
of 'priority'")
                }
                // drop whatever is loading so we get to the new bit earlier
            }
            Update::NewFile(_status_entry) => todo!(),
            Update::ListChanged => {
                // Bails out (`None`) if the ProjectDiff was dropped.
                let (_added, _removed) = project_diff
                    .upgrade()?
                    .read_with(cx, |diff, cx| diff_current_list(&existing, diff, cx))
                    .ok()?;
            }
        }
        // wait for Update OR Load done
        // -> Immediately spawn update
        // OR
        // -> spawn next group
    }
}
// could be new list
// could be new list
/// Compare `existing_entries` (what the loader currently tracks) against the
/// `ProjectDiff`'s current entry list.
///
/// Intended to return the entries that were added (with their indices) and
/// the indices of entries that were removed.
///
/// NOTE(review): work-in-progress — the comparison loop has no effect yet
/// and the added/removed vectors are never built; only the empty-list early
/// return is functional.
fn diff_current_list(
    existing_entries: &[GitStatusEntry],
    project_diff: &ProjectDiff,
    cx: &App,
) -> (Vec<(usize, GitStatusEntry)>, Vec<usize>) {
    // Nothing to diff against: report no additions or removals.
    let Some(new_entries) = project_diff.entries(cx) else {
        return (Vec::new(), Vec::new());
    };
    // `mut` is required: `Iterator::next` takes `&mut self`.
    let mut existing_entries = existing_entries.iter().enumerate();
    for entry in new_entries {
        let Some((_idx, existing)) = existing_entries.next() else {
            // New list is longer than the tracked one.
            todo!();
        };
        if existing == entry {
            // Entries match at this index — nothing to record.
        }
    }
    // let initial_entries = cx.read_entity(&cx.entity(), |project_diff, cx| project_diff.first_n_entries(cx, 100));
    // let loading = to_load.drain(..100).map(|| refresh_one)
    todo!("collect the added and removed entries from the walk above")
}
// // remove anything not part of the diff in the multibuffer
// fn remove_anything_not_being_loaded() {
// this.update(cx, |this, cx| {
// multibuffer.update(cx, |multibuffer, cx| {
// for path in previous_paths {
// this.buffer_diff_subscriptions.remove(&path.path);
// multibuffer.remove_excerpts_for_path(path, cx);
// }
// });
// })?;
// }
/// Reload the buffer/diff pair for every entry in `cached_status` and splice
/// the results into the project-diff multibuffer, evicting excerpts for
/// paths that are no longer part of the diff.
///
/// Buffers are loaded concurrently via a `FuturesUnordered` and registered
/// as each load completes; the view is re-notified at most every ~100ms
/// while results stream in, to avoid spamming the frontend.
///
/// Returns early (Ok) if the `ProjectDiff` or its repository has gone away.
pub async fn refresh_group(
    this: WeakEntity<ProjectDiff>,
    cached_status: Vec<StatusEntry>,
    cx: &mut AsyncWindowContext,
) -> anyhow::Result<()> {
    use project::git_store::branch_diff::BranchDiff;
    let Some(this) = this.upgrade() else {
        return Ok(());
    };
    let multibuffer = cx.read_entity(&this, |this, _| this.multibuffer.clone())?;
    let branch_diff = cx.read_entity(&this, |pd, _| pd.branch_diff.clone())?;
    let Some(repo) = cx.read_entity(&branch_diff, |bd, _| bd.repo.clone())? else {
        return Ok(());
    };
    let project = cx.read_entity(&branch_diff, |bd, _| bd.project.clone())?;
    // Paths currently shown in the multibuffer. Entries still present in the
    // diff are removed from this set below, so whatever remains afterwards
    // is stale and gets evicted.
    let mut previous_paths =
        cx.read_entity(&multibuffer, |mb, _| mb.paths().collect::<HashSet<_>>())?;
    // Idea: on click in git panel prioritize task for that file in some way ...
    // could have a hashmap of futures here
    // - needs to prioritize *some* background tasks over others
    // -
    let mut tasks = FuturesUnordered::new();
    for entry in cached_status {
        let tree_diff_status = cx.read_entity(&branch_diff, |branch_diff, _| {
            branch_diff
                .tree_diff
                .as_ref()
                .and_then(|t| t.entries.get(&entry.repo_path))
                .cloned()
        })?;
        // Skip entries whose merged status carries no changes.
        let Some(status) = cx.read_entity(&branch_diff, |bd, _| {
            bd.merge_statuses(Some(entry.status), tree_diff_status.as_ref())
        })?
        else {
            continue;
        };
        if !status.has_changes() {
            continue;
        }
        let Some(project_path) = cx.read_entity(&repo, |repo, cx| {
            repo.repo_path_to_project_path(&entry.repo_path, cx)
        })?
        else {
            continue;
        };
        let sort_prefix = cx.read_entity(&repo, |repo, cx| {
            sort_prefix(repo, &entry.repo_path, entry.status, cx)
        })?;
        let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.into_arc());
        // Still part of the diff — don't evict it below.
        previous_paths.remove(&path_key);
        let repo = repo.clone();
        let project = project.downgrade();
        let task = cx.spawn(async move |cx| {
            let res = BranchDiff::load_buffer(
                tree_diff_status,
                project_path,
                repo,
                project,
                &mut cx.to_app(),
            )
            .await;
            (res, path_key, entry.status)
        });
        tasks.push(task)
    }
    // remove anything not part of the diff in the multibuffer
    this.update(cx, |this, cx| {
        multibuffer.update(cx, |multibuffer, cx| {
            for path in previous_paths {
                this.buffer_diff_subscriptions.remove(&path.path);
                multibuffer.remove_excerpts_for_path(path, cx);
            }
        });
    })?;
    // add the new buffers as they are parsed
    let mut last_notify = Instant::now();
    while let Some((res, path_key, file_status)) = tasks.next().await {
        // Load failures are logged and the entry is simply skipped.
        if let Some((buffer, diff)) = res.log_err() {
            cx.update(|window, cx| {
                this.update(cx, |this, cx| {
                    this.register_buffer(path_key, file_status, buffer, diff, window, cx)
                });
            })?;
        }
        // Throttle notifications so streaming results don't flood the UI.
        if last_notify.elapsed().as_millis() > 100 {
            cx.update_entity(&this, |_, cx| cx.notify())?;
            last_notify = Instant::now();
        }
    }
    Ok(())
}
/// Placeholder for reacting to sort-order or collapse-state changes in the
/// diff list; not yet implemented and panics if called.
pub(crate) fn sort_or_collapse_changed() {
    todo!()
}

View File

@@ -187,13 +187,12 @@ font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "11052312
"source-fontconfig-dlopen",
], optional = true }
calloop = { version = "0.14.3" }
calloop = { version = "0.13.0" }
filedescriptor = { version = "0.8.2", optional = true }
open = { version = "5.2.0", optional = true }
# Wayland
calloop-wayland-source = { version = "0.4.1", optional = true }
calloop-wayland-source = { version = "0.3.0", optional = true }
wayland-backend = { version = "0.3.3", features = [
"client_system",
"dlopen",
@@ -266,6 +265,7 @@ naga.workspace = true
[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.build-dependencies]
naga.workspace = true
[[example]]
name = "hello_world"
path = "examples/hello_world.rs"

View File

@@ -310,11 +310,6 @@ impl AsyncWindowContext {
.update(self, |_, window, cx| read(cx.global(), window, cx))
}
/// Returns an `AsyncApp` by cloning the one used by Self
pub fn to_app(&self) -> AsyncApp {
self.app.clone()
}
/// A convenience method for [`App::update_global`](BorrowAppContext::update_global).
/// for updating the global state of the specified type.
pub fn update_global<G, R>(

View File

@@ -233,9 +233,6 @@ impl<'a, T: 'static> Context<'a, T> {
/// Spawn the future returned by the given function.
/// The function is provided a weak handle to the entity owned by this context and a context that can be held across await points.
/// The returned task must be held or detached.
///
/// # Example
/// `cx.spawn(async move |some_weak_entity, cx| ...)`
#[track_caller]
pub fn spawn<AsyncFn, R>(&self, f: AsyncFn) -> Task<R>
where

View File

@@ -1,6 +1,7 @@
use std::{
env,
path::{Path, PathBuf},
process::Command,
rc::Rc,
sync::Arc,
};
@@ -17,7 +18,6 @@ use anyhow::{Context as _, anyhow};
use calloop::{LoopSignal, channel::Channel};
use futures::channel::oneshot;
use util::ResultExt as _;
use util::command::{new_smol_command, new_std_command};
#[cfg(any(feature = "wayland", feature = "x11"))]
use xkbcommon::xkb::{self, Keycode, Keysym, State};
@@ -215,7 +215,7 @@ impl<P: LinuxClient + 'static> Platform for P {
clippy::disallowed_methods,
reason = "We are restarting ourselves, using std command thus is fine"
)]
let restart_process = new_std_command("/usr/bin/env")
let restart_process = Command::new("/usr/bin/env")
.arg("bash")
.arg("-c")
.arg(script)
@@ -422,7 +422,7 @@ impl<P: LinuxClient + 'static> Platform for P {
let path = path.to_owned();
self.background_executor()
.spawn(async move {
let _ = new_smol_command("xdg-open")
let _ = smol::process::Command::new("xdg-open")
.arg(path)
.spawn()
.context("invoking xdg-open")

View File

@@ -487,15 +487,12 @@ impl WaylandClient {
let (common, main_receiver) = LinuxCommon::new(event_loop.get_signal());
let handle = event_loop.handle(); // CHECK that wayland sources get higher prio
let handle = event_loop.handle();
handle
// these are all tasks spawned on the foreground executor.
// There is no concept of priority, they are all equal.
.insert_source(main_receiver, {
let handle = handle.clone();
move |event, _, _: &mut WaylandClientStatePtr| {
if let calloop::channel::Event::Msg(runnable) = event {
// will only be called when the event loop has finished processing all pending events from the sources
handle.insert_idle(|_| {
let start = Instant::now();
let mut timing = match runnable {
@@ -653,7 +650,6 @@ impl WaylandClient {
event_loop: Some(event_loop),
}));
// MAGIC HERE IT IS
WaylandSource::new(conn, event_queue)
.insert(handle)
.unwrap();
@@ -1578,7 +1574,6 @@ fn linux_button_to_gpui(button: u32) -> Option<MouseButton> {
})
}
// how is this being called inside calloop
impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
fn event(
this: &mut Self,
@@ -1669,7 +1664,7 @@ impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
modifiers: state.modifiers,
});
drop(state);
window.handle_input(input); // How does this get into the event loop?
window.handle_input(input);
}
}
wl_pointer::Event::Button {

View File

@@ -53,16 +53,14 @@ use std::{
ffi::{CStr, OsStr, c_void},
os::{raw::c_char, unix::ffi::OsStrExt},
path::{Path, PathBuf},
process::Command,
ptr,
rc::Rc,
slice, str,
sync::{Arc, OnceLock},
};
use strum::IntoEnumIterator;
use util::{
ResultExt,
command::{new_smol_command, new_std_command},
};
use util::ResultExt;
#[allow(non_upper_case_globals)]
const NSUTF8StringEncoding: NSUInteger = 4;
@@ -554,7 +552,7 @@ impl Platform for MacPlatform {
clippy::disallowed_methods,
reason = "We are restarting ourselves, using std command thus is fine"
)]
let restart_process = new_std_command("/bin/bash")
let restart_process = Command::new("/bin/bash")
.arg("-c")
.arg(script)
.arg(app_pid)
@@ -869,7 +867,7 @@ impl Platform for MacPlatform {
.lock()
.background_executor
.spawn(async move {
if let Some(mut child) = new_smol_command("open")
if let Some(mut child) = smol::process::Command::new("open")
.arg(path)
.spawn()
.context("invoking open command")

View File

@@ -217,7 +217,6 @@ pub enum IconName {
SupermavenError,
SupermavenInit,
SwatchBook,
SweepAi,
Tab,
Terminal,
TerminalAlt,

View File

@@ -67,15 +67,10 @@ impl RustLspAdapter {
#[cfg(target_os = "linux")]
async fn determine_libc_type() -> LibcType {
use futures::pin_mut;
use smol::process::Command;
async fn from_ldd_version() -> Option<LibcType> {
use util::command::new_smol_command;
let ldd_output = new_smol_command("ldd")
.arg("--version")
.output()
.await
.ok()?;
let ldd_output = Command::new("ldd").arg("--version").output().await.ok()?;
let ldd_version = String::from_utf8_lossy(&ldd_output.stdout);
if ldd_version.contains("GNU libc") || ldd_version.contains("GLIBC") {

View File

@@ -85,6 +85,7 @@
[
"as"
"async"
"await"
"const"
"default"
"dyn"
@@ -101,7 +102,6 @@
"ref"
"static"
"struct"
"for"
"trait"
"type"
"union"
@@ -114,10 +114,10 @@
] @keyword
[
"await"
"break"
"continue"
"else"
"for"
"if"
"in"
"loop"
@@ -127,9 +127,6 @@
"yield"
] @keyword.control
(for_expression
("for" @keyword.control))
[
(string_literal)
(raw_string_literal)

View File

@@ -18,7 +18,6 @@ workspace.workspace = true
util.workspace = true
serde_json.workspace = true
smol.workspace = true
log.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }

View File

@@ -5,7 +5,10 @@ use std::{
};
use gpui::{
App, AppContext, Context, Entity, Hsla, InteractiveElement, IntoElement, ParentElement, Render, ScrollHandle, SerializedThreadTaskTimings, StatefulInteractiveElement, Styled, Task, TaskTiming, ThreadTaskTimings, TitlebarOptions, WindowBounds, WindowHandle, WindowOptions, div, prelude::FluentBuilder, px, relative, size
App, AppContext, Context, Entity, Hsla, InteractiveElement, IntoElement, ParentElement, Render,
ScrollHandle, SerializedTaskTiming, StatefulInteractiveElement, Styled, Task, TaskTiming,
TitlebarOptions, WindowBounds, WindowHandle, WindowOptions, div, prelude::FluentBuilder, px,
relative, size,
};
use util::ResultExt;
use workspace::{
@@ -284,13 +287,8 @@ impl Render for ProfilerWindow {
let Some(data) = this.get_timings() else {
return;
};
let timings = ThreadTaskTimings {
thread_name: Some("main".to_string()),
thread_id: std::thread::current().id(),
timings: data.clone()
};
let timings = Vec::from([SerializedThreadTaskTimings::convert(this.startup_time, timings)]);
let timings =
SerializedTaskTiming::convert(this.startup_time, &data);
let active_path = workspace
.read_with(cx, |workspace, cx| {
@@ -307,17 +305,12 @@ impl Render for ProfilerWindow {
);
cx.background_spawn(async move {
let path = match path.await.log_err() {
Some(Ok(Some(path))) => path,
Some(e @ Err(_)) => {
e.log_err();
log::warn!("Saving miniprof in workingdir");
std::path::Path::new(
"performance_profile.miniprof",
)
.to_path_buf()
}
Some(Ok(None)) | None => return,
let path = path.await;
let path =
path.log_err().and_then(|p| p.log_err()).flatten();
let Some(path) = path else {
return;
};
let Some(timings) =

View File

@@ -43,7 +43,6 @@ text.workspace = true
theme.workspace = true
tree-sitter.workspace = true
util.workspace = true
zlog.workspace = true
[dev-dependencies]
buffer_diff = { workspace = true, features = ["test-support"] }

View File

@@ -76,8 +76,6 @@ impl MultiBuffer {
context_line_count: u32,
cx: &mut Context<Self>,
) -> (Vec<Range<Anchor>>, bool) {
let _timer =
zlog::time!("set_excerpts_for_path").warn_if_gt(std::time::Duration::from_millis(100));
let buffer_snapshot = buffer.read(cx).snapshot();
let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot);
@@ -290,6 +288,7 @@ impl MultiBuffer {
.get(&path)
.cloned()
.unwrap_or_default();
let mut new_iter = new.into_iter().peekable();
let mut existing_iter = existing.into_iter().peekable();
@@ -414,17 +413,14 @@ impl MultiBuffer {
// todo(lw): There is a logic bug somewhere that causes the to_remove vector to be not ordered correctly
to_remove.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id));
self.remove_excerpts(to_remove, cx);
if excerpt_ids.is_empty() {
self.excerpts_by_path.remove(&path);
} else {
for excerpt_id in &excerpt_ids {
self.paths_by_excerpt.insert(*excerpt_id, path.clone());
}
let snapshot = &*self.snapshot.get_mut();
let mut excerpt_ids: Vec<_> = excerpt_ids.iter().dedup().cloned().collect();
excerpt_ids.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id));
self.excerpts_by_path.insert(path, excerpt_ids);
self.excerpts_by_path
.insert(path, excerpt_ids.iter().dedup().cloned().collect());
}
(excerpt_ids, added_a_new_excerpt)

View File

@@ -99,13 +99,18 @@ pub enum ContextServerConfiguration {
command: ContextServerCommand,
settings: serde_json::Value,
},
Http {
url: url::Url,
headers: HashMap<String, String>,
},
}
impl ContextServerConfiguration {
pub fn command(&self) -> &ContextServerCommand {
pub fn command(&self) -> Option<&ContextServerCommand> {
match self {
ContextServerConfiguration::Custom { command } => command,
ContextServerConfiguration::Extension { command, .. } => command,
ContextServerConfiguration::Custom { command } => Some(command),
ContextServerConfiguration::Extension { command, .. } => Some(command),
ContextServerConfiguration::Http { .. } => None,
}
}
@@ -142,6 +147,14 @@ impl ContextServerConfiguration {
}
}
}
ContextServerSettings::Http {
enabled: _,
url,
headers: auth,
} => {
let url = url::Url::parse(&url).log_err()?;
Some(ContextServerConfiguration::Http { url, headers: auth })
}
}
}
}
@@ -390,7 +403,7 @@ impl ContextServerStore {
let configuration = state.configuration();
self.stop_server(&state.server().id(), cx)?;
let new_server = self.create_context_server(id.clone(), configuration.clone(), cx);
let new_server = self.create_context_server(id.clone(), configuration.clone(), cx)?;
self.run_server(new_server, configuration, cx);
}
Ok(())
@@ -479,33 +492,41 @@ impl ContextServerStore {
id: ContextServerId,
configuration: Arc<ContextServerConfiguration>,
cx: &mut Context<Self>,
) -> Arc<ContextServer> {
let project = self.project.upgrade();
let mut root_path = None;
if let Some(project) = project {
let project = project.read(cx);
if project.is_local() {
if let Some(path) = project.active_project_directory(cx) {
root_path = Some(path);
} else {
for worktree in self.worktree_store.read(cx).visible_worktrees(cx) {
if let Some(path) = worktree.read(cx).root_dir() {
root_path = Some(path);
break;
}
}
}
}
};
) -> Result<Arc<ContextServer>> {
if let Some(factory) = self.context_server_factory.as_ref() {
factory(id, configuration)
} else {
Arc::new(ContextServer::stdio(
id,
configuration.command().clone(),
root_path,
))
return Ok(factory(id, configuration));
}
match configuration.as_ref() {
ContextServerConfiguration::Http { url, headers } => Ok(Arc::new(ContextServer::http(
id.clone(),
url,
headers.clone(),
cx,
)?)),
_ => {
let root_path = self
.project
.read_with(cx, |project, cx| project.active_project_directory(cx))
.ok()
.flatten()
.or_else(|| {
self.worktree_store.read_with(cx, |store, cx| {
store.visible_worktrees(cx).fold(None, |acc, item| {
if acc.is_none() {
item.read(cx).root_dir()
} else {
acc
}
})
})
});
Ok(Arc::new(ContextServer::stdio(
id,
configuration.command().unwrap().clone(),
root_path,
)))
}
}
}
@@ -621,14 +642,16 @@ impl ContextServerStore {
let existing_config = state.as_ref().map(|state| state.configuration());
if existing_config.as_deref() != Some(&config) || is_stopped {
let config = Arc::new(config);
let server = this.create_context_server(id.clone(), config.clone(), cx);
let server = this.create_context_server(id.clone(), config.clone(), cx)?;
servers_to_start.push((server, config));
if this.servers.contains_key(&id) {
servers_to_stop.insert(id);
}
}
}
})?;
anyhow::Ok(())
})??;
this.update(cx, |this, cx| {
for id in servers_to_stop {
@@ -1228,6 +1251,62 @@ mod tests {
});
}
#[gpui::test]
async fn test_remote_context_server(cx: &mut TestAppContext) {
const SERVER_ID: &str = "remote-server";
let server_id = ContextServerId(SERVER_ID.into());
let server_url = "http://example.com/api";
let (_fs, project) = setup_context_server_test(
cx,
json!({ "code.rs": "" }),
vec![(
SERVER_ID.into(),
ContextServerSettings::Http {
enabled: true,
url: server_url.to_string(),
headers: Default::default(),
},
)],
)
.await;
let executor = cx.executor();
let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
let store = cx.new(|cx| {
ContextServerStore::test_maintain_server_loop(
Box::new(move |id, config| {
assert_eq!(id.0.as_ref(), SERVER_ID);
match config.as_ref() {
ContextServerConfiguration::Http { url, headers } => {
assert_eq!(url.as_str(), server_url);
assert!(headers.is_empty());
}
_ => panic!("Expected remote configuration"),
}
Arc::new(ContextServer::new(
id.clone(),
Arc::new(create_fake_transport(id.0.to_string(), executor.clone())),
))
}),
registry.clone(),
project.read(cx).worktree_store(),
project.downgrade(),
cx,
)
});
let _server_events = assert_server_events(
&store,
vec![
(server_id.clone(), ContextServerStatus::Starting),
(server_id.clone(), ContextServerStatus::Running),
],
cx,
);
cx.run_until_parked();
}
struct ServerEvents {
received_event_count: Rc<RefCell<usize>>,
expected_event_count: usize,

View File

@@ -3,10 +3,13 @@ use async_trait::async_trait;
use dap::{DapLocator, DebugRequest, adapters::DebugAdapterName};
use gpui::SharedString;
use serde_json::{Value, json};
use smol::{Timer, io::AsyncReadExt, process::Stdio};
use smol::{
Timer,
io::AsyncReadExt,
process::{Command, Stdio},
};
use std::time::Duration;
use task::{BuildTaskDefinition, DebugScenario, ShellBuilder, SpawnInTerminal, TaskTemplate};
use util::command::new_smol_command;
pub(crate) struct CargoLocator;
@@ -15,7 +18,7 @@ async fn find_best_executable(executables: &[String], test_name: &str) -> Option
return executables.first().cloned();
}
for executable in executables {
let Some(mut child) = new_smol_command(&executable)
let Some(mut child) = Command::new(&executable)
.arg("--list")
.stdout(Stdio::piped())
.spawn()

View File

@@ -6,7 +6,7 @@ use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID};
use std::{collections::VecDeque, path::Path, sync::Arc};
use task::{Shell, shell_to_proto};
use terminal::terminal_settings::TerminalSettings;
use util::{ResultExt, command::new_smol_command, rel_path::RelPath};
use util::{ResultExt, rel_path::RelPath};
use worktree::Worktree;
use collections::HashMap;
@@ -389,7 +389,7 @@ async fn load_direnv_environment(
};
let args = &["export", "json"];
let direnv_output = new_smol_command(&direnv_path)
let direnv_output = smol::process::Command::new(&direnv_path)
.args(args)
.envs(env)
.env("TERM", "dumb")

View File

@@ -34,11 +34,11 @@ impl DiffBase {
pub struct BranchDiff {
diff_base: DiffBase,
pub repo: Option<Entity<Repository>>,
pub project: Entity<Project>,
repo: Option<Entity<Repository>>,
project: Entity<Project>,
base_commit: Option<SharedString>,
head_commit: Option<SharedString>,
pub tree_diff: Option<TreeDiff>,
tree_diff: Option<TreeDiff>,
_subscription: Subscription,
update_needed: postage::watch::Sender<()>,
_task: Task<()>,
@@ -283,11 +283,7 @@ impl BranchDiff {
else {
continue;
};
let repo = repo.clone();
let task = cx.spawn(async move |project, cx| {
Self::load_buffer(branch_diff, project_path, repo.clone(), project, cx).await
});
let task = Self::load_buffer(branch_diff, project_path, repo.clone(), cx);
output.push(DiffBuffer {
repo_path: item.repo_path.clone(),
@@ -307,11 +303,8 @@ impl BranchDiff {
let Some(project_path) = repo.read(cx).repo_path_to_project_path(&path, cx) else {
continue;
};
let repo = repo.clone();
let branch_diff2 = Some(branch_diff.clone());
let task = cx.spawn(async move |project, cx| {
Self::load_buffer(branch_diff2, project_path, repo, project, cx).await
});
let task =
Self::load_buffer(Some(branch_diff.clone()), project_path, repo.clone(), cx);
let file_status = diff_status_to_file_status(branch_diff);
@@ -325,40 +318,42 @@ impl BranchDiff {
output
}
pub async fn load_buffer(
fn load_buffer(
branch_diff: Option<git::status::TreeDiffStatus>,
project_path: crate::ProjectPath,
repo: Entity<Repository>,
project: WeakEntity<Project>,
cx: &mut gpui::AsyncApp, // making this generic over AppContext hangs the compiler
) -> Result<(Entity<Buffer>, Entity<BufferDiff>)> {
let buffer = project
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await?;
cx: &Context<'_, Project>,
) -> Task<Result<(Entity<Buffer>, Entity<BufferDiff>)>> {
let task = cx.spawn(async move |project, cx| {
let buffer = project
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await?;
let languages = project.update(cx, |project, _cx| project.languages().clone())?;
let languages = project.update(cx, |project, _cx| project.languages().clone())?;
let changes = if let Some(entry) = branch_diff {
let oid = match entry {
git::status::TreeDiffStatus::Added { .. } => None,
git::status::TreeDiffStatus::Modified { old, .. }
| git::status::TreeDiffStatus::Deleted { old } => Some(old),
let changes = if let Some(entry) = branch_diff {
let oid = match entry {
git::status::TreeDiffStatus::Added { .. } => None,
git::status::TreeDiffStatus::Modified { old, .. }
| git::status::TreeDiffStatus::Deleted { old } => Some(old),
};
project
.update(cx, |project, cx| {
project.git_store().update(cx, |git_store, cx| {
git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx)
})
})?
.await?
} else {
project
.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})?
.await?
};
project
.update(cx, |project, cx| {
project.git_store().update(cx, |git_store, cx| {
git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx)
})
})?
.await?
} else {
project
.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})?
.await?
};
Ok((buffer, changes))
Ok((buffer, changes))
});
task
}
}

View File

@@ -6732,7 +6732,6 @@ impl LspStore {
let buffer_snapshot = buffer.read(cx).snapshot();
let next_hint_id = self.next_hint_id.clone();
let lsp_data = self.latest_lsp_data(&buffer, cx);
let query_version = lsp_data.buffer_version.clone();
let mut lsp_refresh_requested = false;
let for_server = if let InvalidationStrategy::RefreshRequested {
server_id,
@@ -6835,7 +6834,6 @@ impl LspStore {
for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() {
let next_hint_id = next_hint_id.clone();
let buffer = buffer.clone();
let query_version = query_version.clone();
let new_inlay_hints = cx
.spawn(async move |lsp_store, cx| {
let new_fetch_task = lsp_store.update(cx, |lsp_store, cx| {
@@ -6846,7 +6844,9 @@ impl LspStore {
.and_then(|new_hints_by_server| {
lsp_store.update(cx, |lsp_store, cx| {
let lsp_data = lsp_store.latest_lsp_data(&buffer, cx);
let update_cache = lsp_data.buffer_version == query_version;
let update_cache = !lsp_data
.buffer_version
.changed_since(&buffer.read(cx).version());
if new_hints_by_server.is_empty() {
if update_cache {
lsp_data.inlay_hints.invalidate_for_chunk(chunk);

View File

@@ -135,6 +135,16 @@ pub enum ContextServerSettings {
/// are supported.
settings: serde_json::Value,
},
Http {
/// Whether the context server is enabled.
#[serde(default = "default_true")]
enabled: bool,
/// The URL of the remote context server.
url: String,
/// Optional authentication configuration for the remote server.
#[serde(skip_serializing_if = "HashMap::is_empty", default)]
headers: HashMap<String, String>,
},
}
impl From<settings::ContextServerSettingsContent> for ContextServerSettings {
@@ -146,6 +156,15 @@ impl From<settings::ContextServerSettingsContent> for ContextServerSettings {
settings::ContextServerSettingsContent::Extension { enabled, settings } => {
ContextServerSettings::Extension { enabled, settings }
}
settings::ContextServerSettingsContent::Http {
enabled,
url,
headers,
} => ContextServerSettings::Http {
enabled,
url,
headers,
},
}
}
}
@@ -158,6 +177,15 @@ impl Into<settings::ContextServerSettingsContent> for ContextServerSettings {
ContextServerSettings::Extension { enabled, settings } => {
settings::ContextServerSettingsContent::Extension { enabled, settings }
}
ContextServerSettings::Http {
enabled,
url,
headers,
} => settings::ContextServerSettingsContent::Http {
enabled,
url,
headers,
},
}
}
}
@@ -174,6 +202,7 @@ impl ContextServerSettings {
match self {
ContextServerSettings::Custom { enabled, .. } => *enabled,
ContextServerSettings::Extension { enabled, .. } => *enabled,
ContextServerSettings::Http { enabled, .. } => *enabled,
}
}
@@ -181,6 +210,7 @@ impl ContextServerSettings {
match self {
ContextServerSettings::Custom { enabled: e, .. } => *e = enabled,
ContextServerSettings::Extension { enabled: e, .. } => *e = enabled,
ContextServerSettings::Http { enabled: e, .. } => *e = enabled,
}
}
}

View File

@@ -16,7 +16,7 @@ use task::{Shell, ShellBuilder, ShellKind, SpawnInTerminal};
use terminal::{
TaskState, TaskStatus, Terminal, TerminalBuilder, terminal_settings::TerminalSettings,
};
use util::{command::new_std_command, get_default_system_shell, maybe, rel_path::RelPath};
use util::{get_default_system_shell, maybe, rel_path::RelPath};
use crate::{Project, ProjectPath};
@@ -505,13 +505,13 @@ impl Project {
None,
None,
)?;
let mut command = new_std_command(command_template.program);
let mut command = std::process::Command::new(command_template.program);
command.args(command_template.args);
command.envs(command_template.env);
Ok(command)
}
None => {
let mut command = new_std_command(command);
let mut command = std::process::Command::new(command);
command.args(args);
command.envs(env);
if let Some(path) = path {

View File

@@ -124,7 +124,6 @@ async fn build_remote_server_from_source(
use smol::process::{Command, Stdio};
use std::env::VarError;
use std::path::Path;
use util::command::new_smol_command;
// By default, we make building remote server from source opt-out and we do not force artifact compression
// for quicker builds.
@@ -190,7 +189,7 @@ async fn build_remote_server_from_source(
delegate.set_status(Some("Building remote server binary from source"), cx);
log::info!("building remote server binary from source");
run_cmd(
new_smol_command("cargo")
Command::new("cargo")
.current_dir(concat!(env!("CARGO_MANIFEST_DIR"), "/../.."))
.args([
"build",
@@ -220,18 +219,12 @@ async fn build_remote_server_from_source(
.context("rustup not found on $PATH, install rustup (see https://rustup.rs/)")?;
delegate.set_status(Some("Adding rustup target for cross-compilation"), cx);
log::info!("adding rustup target");
run_cmd(
new_smol_command(rustup)
.args(["target", "add"])
.arg(&triple),
)
.await?;
run_cmd(Command::new(rustup).args(["target", "add"]).arg(&triple)).await?;
if which("cargo-zigbuild", cx).await?.is_none() {
delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx);
log::info!("installing cargo-zigbuild");
run_cmd(new_smol_command("cargo").args(["install", "--locked", "cargo-zigbuild"]))
.await?;
run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?;
}
delegate.set_status(
@@ -242,7 +235,7 @@ async fn build_remote_server_from_source(
);
log::info!("building remote binary from source for {triple} with Zig");
run_cmd(
new_smol_command("cargo")
Command::new("cargo")
.args([
"zigbuild",
"--package",
@@ -269,13 +262,12 @@ async fn build_remote_server_from_source(
#[cfg(not(target_os = "windows"))]
{
run_cmd(new_smol_command("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
}
#[cfg(target_os = "windows")]
{
// On Windows, we use 7z to compress the binary
let seven_zip = which("7z.exe",cx)
.await?
.context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?;
@@ -283,7 +275,7 @@ async fn build_remote_server_from_source(
if smol::fs::metadata(&gz_path).await.is_ok() {
smol::fs::remove_file(&gz_path).await?;
}
run_cmd(new_smol_command(seven_zip).args([
run_cmd(Command::new(seven_zip).args([
"a",
"-tgzip",
&gz_path,

View File

@@ -16,7 +16,6 @@ use language::LanguageRegistry;
use node_runtime::{NodeBinaryOptions, NodeRuntime};
use paths::logs_dir;
use project::project_settings::ProjectSettings;
use util::command::new_smol_command;
use proto::CrashReport;
use release_channel::{AppVersion, RELEASE_CHANNEL, ReleaseChannel};
@@ -657,7 +656,7 @@ pub(crate) fn execute_proxy(
async fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<(), ExecuteProxyError> {
log::info!("killing existing server with PID {}", pid);
new_smol_command("kill")
smol::process::Command::new("kill")
.arg(pid.to_string())
.output()
.await
@@ -708,7 +707,7 @@ async fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
}
let binary_name = std::env::current_exe().map_err(SpawnServerError::CurrentExe)?;
let mut server_process = new_smol_command(binary_name);
let mut server_process = smol::process::Command::new(binary_name);
server_process
.arg("run")
.arg("--log-file")
@@ -773,7 +772,7 @@ async fn check_pid_file(path: &Path) -> Result<Option<u32>, CheckPidError> {
};
log::debug!("Checking if process with PID {} exists...", pid);
match new_smol_command("kill")
match smol::process::Command::new("kill")
.arg("-0")
.arg(pid.to_string())
.output()

View File

@@ -80,22 +80,20 @@ impl ReqwestClient {
}
}
pub fn runtime() -> &'static tokio::runtime::Runtime {
RUNTIME.get_or_init(|| {
tokio::runtime::Builder::new_multi_thread()
// Since we now have two executors, let's try to keep our footprint small
.worker_threads(1)
.enable_all()
.build()
.expect("Failed to initialize HTTP client")
})
}
impl From<reqwest::Client> for ReqwestClient {
fn from(client: reqwest::Client) -> Self {
let handle = tokio::runtime::Handle::try_current().unwrap_or_else(|_| {
log::debug!("no tokio runtime found, creating one for Reqwest...");
runtime().handle().clone()
let runtime = RUNTIME.get_or_init(|| {
tokio::runtime::Builder::new_multi_thread()
// Since we now have two executors, let's try to keep our footprint small
.worker_threads(1)
.enable_all()
.build()
.expect("Failed to initialize HTTP client")
});
runtime.handle().clone()
});
Self {
client,

View File

@@ -13,32 +13,6 @@ use std::{
time::Duration,
};
// https://docs.rs/tokio/latest/src/tokio/task/yield_now.rs.html#39-64
// https://docs.rs/tokio/latest/src/tokio/task/yield_now.rs.html#39-64
/// Yields execution back to the executor exactly once.
///
/// The first poll returns `Poll::Pending` after scheduling an immediate
/// re-wake; the second poll returns `Poll::Ready(())`.
pub async fn yield_now() {
    /// One-shot future backing `yield_now`.
    struct YieldNow {
        /// Whether we have already returned `Pending` once.
        yielded: bool,
    }

    impl Future for YieldNow {
        type Output = ();

        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
            if self.yielded {
                return Poll::Ready(());
            }
            self.yielded = true;
            // BUGFIX: a future that returns `Pending` must arrange to be
            // woken, otherwise a spec-compliant executor will never poll it
            // again and the task hangs forever. Wake ourselves immediately so
            // the executor schedules a re-poll (same approach as tokio's
            // `yield_now`, which defers the waker).
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }

    YieldNow { yielded: false }.await;
}
#[derive(Clone)]
pub struct ForegroundExecutor {
session_id: SessionId,

View File

@@ -3,7 +3,7 @@ use std::num::NonZeroU32;
use collections::{HashMap, HashSet};
use gpui::{Modifiers, SharedString};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::Error as _};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use settings_macros::MergeFrom;
use std::sync::Arc;
@@ -68,7 +68,9 @@ pub struct FeaturesContent {
}
/// The provider that supplies edit predictions.
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, JsonSchema, MergeFrom)]
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom,
)]
#[serde(rename_all = "snake_case")]
pub enum EditPredictionProvider {
None,
@@ -77,47 +79,6 @@ pub enum EditPredictionProvider {
Supermaven,
Zed,
Codestral,
Experimental(&'static str),
}
pub const EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME: &str = "sweep";
impl<'de> Deserialize<'de> for EditPredictionProvider {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Content {
None,
Copilot,
Supermaven,
Zed,
Codestral,
Experimental(String),
}
Ok(match Content::deserialize(deserializer)? {
Content::None => EditPredictionProvider::None,
Content::Copilot => EditPredictionProvider::Copilot,
Content::Supermaven => EditPredictionProvider::Supermaven,
Content::Zed => EditPredictionProvider::Zed,
Content::Codestral => EditPredictionProvider::Codestral,
Content::Experimental(name) => {
if name == EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME {
EditPredictionProvider::Experimental(
EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME,
)
} else {
return Err(D::Error::custom(format!(
"Unknown experimental edit prediction provider: {}",
name
)));
}
}
})
}
}
impl EditPredictionProvider {
@@ -127,8 +88,7 @@ impl EditPredictionProvider {
EditPredictionProvider::None
| EditPredictionProvider::Copilot
| EditPredictionProvider::Supermaven
| EditPredictionProvider::Codestral
| EditPredictionProvider::Experimental(_) => false,
| EditPredictionProvider::Codestral => false,
}
}
}

View File

@@ -196,7 +196,7 @@ pub struct SessionSettingsContent {
}
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema, MergeFrom, Debug)]
#[serde(tag = "source", rename_all = "snake_case")]
#[serde(untagged, rename_all = "snake_case")]
pub enum ContextServerSettingsContent {
Custom {
/// Whether the context server is enabled.
@@ -206,6 +206,16 @@ pub enum ContextServerSettingsContent {
#[serde(flatten)]
command: ContextServerCommand,
},
Http {
/// Whether the context server is enabled.
#[serde(default = "default_true")]
enabled: bool,
/// The URL of the remote context server.
url: String,
/// Optional headers to send.
#[serde(skip_serializing_if = "HashMap::is_empty", default)]
headers: HashMap<String, String>,
},
Extension {
/// Whether the context server is enabled.
#[serde(default = "default_true")]
@@ -217,19 +227,24 @@ pub enum ContextServerSettingsContent {
settings: serde_json::Value,
},
}
impl ContextServerSettingsContent {
pub fn set_enabled(&mut self, enabled: bool) {
match self {
ContextServerSettingsContent::Custom {
enabled: custom_enabled,
command: _,
..
} => {
*custom_enabled = enabled;
}
ContextServerSettingsContent::Extension {
enabled: ext_enabled,
settings: _,
..
} => *ext_enabled = enabled,
ContextServerSettingsContent::Http {
enabled: remote_enabled,
..
} => *remote_enabled = enabled,
}
}
}

View File

@@ -1,43 +0,0 @@
[package]
name = "sweep_ai"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
exclude = ["fixtures"]
[lints]
workspace = true
[lib]
path = "src/sweep_ai.rs"
doctest = false
[dependencies]
anyhow.workspace = true
arrayvec.workspace = true
brotli.workspace = true
client.workspace = true
collections.workspace = true
edit_prediction.workspace = true
feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
language.workspace = true
project.workspace = true
release_channel.workspace = true
serde.workspace = true
serde_json.workspace = true
util.workspace = true
workspace.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
reqwest_client = { workspace = true, features = ["test-support"] }
tree-sitter-rust.workspace = true
workspace = { workspace = true, features = ["test-support"] }
zlog.workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-GPL

View File

@@ -1,90 +0,0 @@
use std::{path::Path, sync::Arc};
use serde::{Deserialize, Serialize};
/// Request payload sent to the Sweep next-edit autocomplete endpoint.
#[derive(Debug, Clone, Serialize)]
pub struct AutocompleteRequest {
    // Free-form client identification string (Zed version, commit SHA, OS).
    pub debug_info: Arc<str>,
    pub repo_name: String,
    pub branch: Option<String>,
    pub file_path: Arc<Path>,
    pub file_contents: String,
    // Recent buffer edits rendered as unified diffs, newest last.
    pub recent_changes: String,
    // Byte offset of the cursor within `file_contents`.
    pub cursor_position: usize,
    pub original_file_contents: String,
    // Snippets of other recently visited open files, sent as context.
    pub file_chunks: Vec<FileChunk>,
    pub retrieval_chunks: Vec<RetrievalChunk>,
    pub recent_user_actions: Vec<UserAction>,
    pub multiple_suggestions: bool,
    pub privacy_mode_enabled: bool,
    pub changes_above_cursor: bool,
}
/// A line range of another open file sent as additional context.
#[derive(Debug, Clone, Serialize)]
pub struct FileChunk {
    pub file_path: String,
    pub start_line: usize,
    pub end_line: usize,
    pub content: String,
    // File mtime in whole seconds, when known.
    pub timestamp: Option<u64>,
}
/// A retrieved code chunk for the request. Currently always sent empty by
/// this client (see `request_completion`, which passes `vec![]`).
#[derive(Debug, Clone, Serialize)]
pub struct RetrievalChunk {
    pub file_path: String,
    pub start_line: usize,
    pub end_line: usize,
    pub content: String,
    pub timestamp: u64,
}
/// A single recorded editing action. Currently always sent empty by this
/// client (see `request_completion`, which passes `vec![]`).
#[derive(Debug, Clone, Serialize)]
pub struct UserAction {
    pub action_type: ActionType,
    pub line_number: usize,
    pub offset: usize,
    pub file_path: String,
    pub timestamp: u64,
}
/// Kind of user editing action, serialized in SCREAMING_SNAKE_CASE to match
/// the wire protocol.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum ActionType {
    CursorMovement,
    InsertChar,
    DeleteChar,
    InsertSelection,
    DeleteSelection,
}
/// Response payload returned by the Sweep autocomplete endpoint.
#[derive(Debug, Clone, Deserialize)]
pub struct AutocompleteResponse {
    pub autocomplete_id: String,
    // Byte range of the original file that `completion` replaces
    // (used as `start_index..end_index` against the buffer snapshot).
    pub start_index: usize,
    pub end_index: usize,
    pub completion: String,
    #[allow(dead_code)]
    pub confidence: f64,
    #[allow(dead_code)]
    pub logprobs: Option<serde_json::Value>,
    #[allow(dead_code)]
    pub finish_reason: Option<String>,
    #[allow(dead_code)]
    pub elapsed_time_ms: u64,
    #[allow(dead_code)]
    #[serde(default, rename = "completions")]
    pub additional_completions: Vec<AdditionalCompletion>,
}
/// An alternative completion included alongside the primary one; currently
/// deserialized but unused (all fields `dead_code`).
#[allow(dead_code)]
#[derive(Debug, Clone, Deserialize)]
pub struct AdditionalCompletion {
    pub start_index: usize,
    pub end_index: usize,
    pub completion: String,
    pub confidence: f64,
    pub autocomplete_id: String,
    pub logprobs: Option<serde_json::Value>,
    pub finish_reason: Option<String>,
}

View File

@@ -1,778 +0,0 @@
mod api;
use anyhow::{Context as _, Result};
use arrayvec::ArrayVec;
use client::telemetry;
use collections::HashMap;
use feature_flags::FeatureFlag;
use futures::AsyncReadExt as _;
use gpui::{App, AppContext, Context, Entity, EntityId, Global, Task, WeakEntity};
use http_client::{AsyncBody, Method};
use language::{Anchor, Buffer, BufferSnapshot, EditPreview, ToOffset as _, ToPoint, text_diff};
use project::Project;
use release_channel::{AppCommitSha, AppVersion};
use std::collections::{VecDeque, hash_map};
use std::fmt::{self, Display};
use std::mem;
use std::{
cmp,
fmt::Write,
ops::Range,
path::Path,
sync::Arc,
time::{Duration, Instant},
};
use util::ResultExt;
use util::rel_path::RelPath;
use workspace::Workspace;
use crate::api::{AutocompleteRequest, AutocompleteResponse, FileChunk};
// Consecutive edits to the same buffer within this window are coalesced into
// a single change event (see `SweepAi::push_event`).
const BUFFER_CHANGE_GROUPING_INTERVAL: Duration = Duration::from_secs(1);
// Maximum number of change events retained per project.
const MAX_EVENT_COUNT: usize = 16;
// Sweep next-edit autocomplete endpoint.
const SWEEP_API_URL: &str = "https://autocomplete.sweep.dev/backend/next_edit_autocomplete";
/// Feature flag gating the Sweep AI integration; off even for staff.
pub struct SweepFeatureFlag;

impl FeatureFlag for SweepFeatureFlag {
    const NAME: &str = "sweep-ai";

    fn enabled_for_staff() -> bool {
        false
    }
}
/// Newtype registering the singleton `SweepAi` entity as a gpui global.
#[derive(Clone)]
struct SweepAiGlobal(Entity<SweepAi>);

impl Global for SweepAiGlobal {}
/// A materialized edit prediction: anchored edits plus the buffer snapshot
/// they were computed against and a preview for rendering.
#[derive(Clone)]
pub struct EditPrediction {
    id: EditPredictionId,
    path: Arc<Path>,
    edits: Arc<[(Range<Anchor>, Arc<str>)]>,
    // Snapshot the edits were computed against; used to re-map them later.
    snapshot: BufferSnapshot,
    edit_preview: EditPreview,
}

impl EditPrediction {
    /// Re-maps the stored edits onto `new_snapshot`; `None` means the buffer
    /// has changed in a way that invalidates this prediction.
    fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
        edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
    }
}

impl fmt::Debug for EditPrediction {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("EditPrediction")
            .field("path", &self.path)
            .field("edits", &self.edits)
            .finish_non_exhaustive()
    }
}
/// Opaque identifier assigned by the autocomplete backend to one prediction.
#[derive(Clone, Default, Debug, PartialEq, Eq, Hash)]
pub struct EditPredictionId(String);

impl Display for EditPredictionId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Forward directly to the underlying string; no extra formatting.
        f.write_str(&self.0)
    }
}
/// Singleton coordinating Sweep AI edit predictions across all projects.
pub struct SweepAi {
    // Per-project state, keyed by the project's gpui entity id.
    projects: HashMap<EntityId, SweepAiProject>,
    // Client identification string included in every request.
    debug_info: Arc<str>,
    // Read from SWEEP_AI_TOKEN at startup; `None` disables predictions.
    api_token: Option<String>,
}
/// Per-project tracking state: recent change events and subscribed buffers.
struct SweepAiProject {
    events: VecDeque<Event>,
    registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
}
impl SweepAi {
    /// Returns the process-wide `SweepAi` entity, if one has been registered.
    pub fn global(cx: &mut App) -> Option<Entity<Self>> {
        cx.try_global::<SweepAiGlobal>()
            .map(|global| global.0.clone())
    }

    /// Returns the global `SweepAi`, creating and registering it on first use.
    pub fn register(cx: &mut App) -> Entity<Self> {
        Self::global(cx).unwrap_or_else(|| {
            let entity = cx.new(|cx| Self::new(cx));
            cx.set_global(SweepAiGlobal(entity.clone()));
            entity
        })
    }

    /// Drops all recorded buffer-change events for every tracked project.
    pub fn clear_history(&mut self) {
        for sweep_ai_project in self.projects.values_mut() {
            sweep_ai_project.events.clear();
        }
    }

    fn new(cx: &mut Context<Self>) -> Self {
        Self {
            // Auth comes from the environment; without it predictions are disabled.
            api_token: std::env::var("SWEEP_AI_TOKEN").ok(),
            projects: HashMap::default(),
            // NOTE(review): "Zed v{version}" appears twice in this format
            // string — looks like unintended duplication; confirm intent.
            debug_info: format!(
                "Zed v{version} ({sha}) - OS: {os} - Zed v{version}",
                version = AppVersion::global(cx),
                sha = AppCommitSha::try_global(cx).map_or("unknown".to_string(), |sha| sha.full()),
                os = telemetry::os_name(),
            )
            .into(),
        }
    }

    /// Looks up per-project state, creating it (and wiring cleanup on
    /// project release) the first time a project is seen.
    fn get_or_init_sweep_ai_project(
        &mut self,
        project: &Entity<Project>,
        cx: &mut Context<Self>,
    ) -> &mut SweepAiProject {
        let project_id = project.entity_id();
        match self.projects.entry(project_id) {
            hash_map::Entry::Occupied(entry) => entry.into_mut(),
            hash_map::Entry::Vacant(entry) => {
                // Drop our state when the project entity is released.
                cx.observe_release(project, move |this, _, _cx| {
                    this.projects.remove(&project_id);
                })
                .detach();
                entry.insert(SweepAiProject {
                    events: VecDeque::with_capacity(MAX_EVENT_COUNT),
                    registered_buffers: HashMap::default(),
                })
            }
        }
    }

    /// Appends a buffer-change event, coalescing consecutive edits to the
    /// same buffer that arrive within `BUFFER_CHANGE_GROUPING_INTERVAL`.
    fn push_event(sweep_ai_project: &mut SweepAiProject, event: Event) {
        let events = &mut sweep_ai_project.events;

        if let Some(Event::BufferChange {
            new_snapshot: last_new_snapshot,
            timestamp: last_timestamp,
            ..
        }) = events.back_mut()
        {
            // Coalesce edits for the same buffer when they happen one after the other.
            let Event::BufferChange {
                old_snapshot,
                new_snapshot,
                timestamp,
            } = &event;
            if timestamp.duration_since(*last_timestamp) <= BUFFER_CHANGE_GROUPING_INTERVAL
                && old_snapshot.remote_id() == last_new_snapshot.remote_id()
                && old_snapshot.version == last_new_snapshot.version
            {
                *last_new_snapshot = new_snapshot.clone();
                *last_timestamp = *timestamp;
                return;
            }
        }

        if events.len() >= MAX_EVENT_COUNT {
            // These are halved instead of popping to improve prompt caching.
            events.drain(..MAX_EVENT_COUNT / 2);
        }
        events.push_back(event);
    }

    /// Starts tracking `buffer` within `project` so its edits feed the
    /// recent-changes history.
    pub fn register_buffer(
        &mut self,
        buffer: &Entity<Buffer>,
        project: &Entity<Project>,
        cx: &mut Context<Self>,
    ) {
        let sweep_ai_project = self.get_or_init_sweep_ai_project(project, cx);
        Self::register_buffer_impl(sweep_ai_project, buffer, project, cx);
    }

    /// Returns the buffer's tracking entry, subscribing to edit and release
    /// events the first time the buffer is seen.
    fn register_buffer_impl<'a>(
        sweep_ai_project: &'a mut SweepAiProject,
        buffer: &Entity<Buffer>,
        project: &Entity<Project>,
        cx: &mut Context<Self>,
    ) -> &'a mut RegisteredBuffer {
        let buffer_id = buffer.entity_id();
        match sweep_ai_project.registered_buffers.entry(buffer_id) {
            hash_map::Entry::Occupied(entry) => entry.into_mut(),
            hash_map::Entry::Vacant(entry) => {
                let snapshot = buffer.read(cx).snapshot();
                let project_entity_id = project.entity_id();
                entry.insert(RegisteredBuffer {
                    snapshot,
                    _subscriptions: [
                        // Record an event each time the buffer is edited.
                        cx.subscribe(buffer, {
                            let project = project.downgrade();
                            move |this, buffer, event, cx| {
                                if let language::BufferEvent::Edited = event
                                    && let Some(project) = project.upgrade()
                                {
                                    this.report_changes_for_buffer(&buffer, &project, cx);
                                }
                            }
                        }),
                        // Stop tracking when the buffer entity is released.
                        cx.observe_release(buffer, move |this, _buffer, _cx| {
                            let Some(sweep_ai_project) = this.projects.get_mut(&project_entity_id)
                            else {
                                return;
                            };
                            sweep_ai_project.registered_buffers.remove(&buffer_id);
                        }),
                    ],
                })
            }
        }
    }

    /// Requests an edit prediction from the Sweep API for `position` in
    /// `active_buffer`. Resolves to `Ok(None)` when no token is configured,
    /// the workspace is gone, or the response yields no applicable edits.
    pub fn request_completion(
        &mut self,
        workspace: &WeakEntity<Workspace>,
        project: &Entity<Project>,
        active_buffer: &Entity<Buffer>,
        position: language::Anchor,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<EditPrediction>>> {
        let snapshot = active_buffer.read(cx).snapshot();
        let debug_info = self.debug_info.clone();
        let Some(api_token) = self.api_token.clone() else {
            return Task::ready(Ok(None));
        };

        let full_path: Arc<Path> = snapshot
            .file()
            .map(|file| file.full_path(cx))
            .unwrap_or_else(|| "untitled".into())
            .into();
        let project_file = project::File::from_dyn(snapshot.file());
        let repo_name = project_file
            .map(|file| file.worktree.read(cx).root_name_str())
            .unwrap_or("untitled")
            .into();
        let offset = position.to_offset(&snapshot);

        let project_state = self.get_or_init_sweep_ai_project(project, cx);
        let events = project_state.events.clone();
        let http_client = cx.http_client();

        // Gather up to three other recently visited open buffers as context.
        let Some(recent_buffers) = workspace
            .read_with(cx, |workspace, cx| {
                workspace
                    .recent_navigation_history_iter(cx)
                    .filter_map(|(project_path, _)| {
                        let buffer = project.read(cx).get_open_buffer(&project_path, cx)?;
                        if active_buffer == &buffer {
                            None
                        } else {
                            Some(buffer.read(cx).snapshot())
                        }
                    })
                    .take(3)
                    .collect::<Vec<_>>()
            })
            .log_err()
        else {
            return Task::ready(Ok(None));
        };

        // Build the request, send it, and diff the returned completion against
        // the current text — all off the main thread.
        let result = cx.background_spawn({
            let full_path = full_path.clone();
            async move {
                let text = snapshot.text();
                let mut recent_changes = String::new();
                for event in events {
                    writeln!(&mut recent_changes, "{event}")?;
                }
                // Send the first ~30 lines of each recent buffer as context.
                let file_chunks = recent_buffers
                    .into_iter()
                    .map(|snapshot| {
                        let end_point = language::Point::new(30, 0).min(snapshot.max_point());
                        FileChunk {
                            content: snapshot
                                .text_for_range(language::Point::zero()..end_point)
                                .collect(),
                            file_path: snapshot
                                .file()
                                .map(|f| f.path().as_unix_str())
                                .unwrap_or("untitled")
                                .to_string(),
                            start_line: 0,
                            end_line: end_point.row as usize,
                            timestamp: snapshot.file().and_then(|file| {
                                Some(
                                    file.disk_state()
                                        .mtime()?
                                        .to_seconds_and_nanos_for_persistence()?
                                        .0,
                                )
                            }),
                        }
                    })
                    .collect();

                let request_body = AutocompleteRequest {
                    debug_info,
                    repo_name,
                    file_path: full_path.clone(),
                    file_contents: text.clone(),
                    original_file_contents: text,
                    cursor_position: offset,
                    recent_changes: recent_changes.clone(),
                    changes_above_cursor: true,
                    multiple_suggestions: false,
                    branch: None,
                    file_chunks,
                    retrieval_chunks: vec![],
                    recent_user_actions: vec![],
                    // TODO
                    privacy_mode_enabled: false,
                };

                // Body is brotli-compressed JSON (see Content-Encoding below).
                let mut buf: Vec<u8> = Vec::new();
                let writer = brotli::CompressorWriter::new(&mut buf, 4096, 11, 22);
                serde_json::to_writer(writer, &request_body)?;
                let body: AsyncBody = buf.into();

                let request = http_client::Request::builder()
                    .uri(SWEEP_API_URL)
                    .header("Content-Type", "application/json")
                    .header("Authorization", format!("Bearer {}", api_token))
                    .header("Connection", "keep-alive")
                    .header("Content-Encoding", "br")
                    .method(Method::POST)
                    .body(body)?;

                let mut response = http_client.send(request).await?;
                let mut body: Vec<u8> = Vec::new();
                response.body_mut().read_to_end(&mut body).await?;
                if !response.status().is_success() {
                    anyhow::bail!(
                        "Request failed with status: {:?}\nBody: {}",
                        response.status(),
                        String::from_utf8_lossy(&body),
                    );
                };

                let response: AutocompleteResponse = serde_json::from_slice(&body)?;
                // Convert the returned replacement span into anchored edits by
                // diffing it against the current text of that span.
                let old_text = snapshot
                    .text_for_range(response.start_index..response.end_index)
                    .collect::<String>();
                let edits = text_diff(&old_text, &response.completion)
                    .into_iter()
                    .map(|(range, text)| {
                        (
                            snapshot.anchor_after(response.start_index + range.start)
                                ..snapshot.anchor_before(response.start_index + range.end),
                            text,
                        )
                    })
                    .collect::<Vec<_>>();

                anyhow::Ok((response.autocomplete_id, edits, snapshot))
            }
        });

        let buffer = active_buffer.clone();
        cx.spawn(async move |_, cx| {
            let (id, edits, old_snapshot) = result.await?;
            if edits.is_empty() {
                return anyhow::Ok(None);
            }

            // The buffer may have changed while the request was in flight;
            // re-map the edits onto its current snapshot.
            let Some((edits, new_snapshot, preview_task)) =
                buffer.read_with(cx, |buffer, cx| {
                    let new_snapshot = buffer.snapshot();
                    let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
                        edit_prediction::interpolate_edits(&old_snapshot, &new_snapshot, &edits)?
                            .into();
                    let preview_task = buffer.preview_edits(edits.clone(), cx);
                    Some((edits, new_snapshot, preview_task))
                })?
            else {
                return anyhow::Ok(None);
            };

            let prediction = EditPrediction {
                id: EditPredictionId(id),
                path: full_path,
                edits,
                snapshot: new_snapshot,
                edit_preview: preview_task.await,
            };

            anyhow::Ok(Some(prediction))
        })
    }

    /// Records a change event for `buffer` if its snapshot advanced since the
    /// last report; returns the current snapshot either way.
    fn report_changes_for_buffer(
        &mut self,
        buffer: &Entity<Buffer>,
        project: &Entity<Project>,
        cx: &mut Context<Self>,
    ) -> BufferSnapshot {
        let sweep_ai_project = self.get_or_init_sweep_ai_project(project, cx);
        let registered_buffer = Self::register_buffer_impl(sweep_ai_project, buffer, project, cx);

        let new_snapshot = buffer.read(cx).snapshot();
        if new_snapshot.version != registered_buffer.snapshot.version {
            let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone());
            Self::push_event(
                sweep_ai_project,
                Event::BufferChange {
                    old_snapshot,
                    new_snapshot: new_snapshot.clone(),
                    timestamp: Instant::now(),
                },
            );
        }

        new_snapshot
    }
}
/// Tracking entry for one buffer: the last snapshot we reported plus the
/// edit/release subscriptions kept alive for as long as the entry exists.
struct RegisteredBuffer {
    snapshot: BufferSnapshot,
    _subscriptions: [gpui::Subscription; 2],
}
/// A recorded editing event used to build the request's recent-changes text.
#[derive(Clone)]
pub enum Event {
    BufferChange {
        old_snapshot: BufferSnapshot,
        new_snapshot: BufferSnapshot,
        timestamp: Instant,
    },
}
impl Display for Event {
    /// Renders a buffer change as `File: <path>:\n<unified diff>` — the
    /// format consumed by the request's `recent_changes` field. Emits
    /// nothing when the diff is empty.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Event::BufferChange {
                old_snapshot,
                new_snapshot,
                ..
            } => {
                let old_path = old_snapshot
                    .file()
                    .map(|f| f.path().as_ref())
                    .unwrap_or(RelPath::unix("untitled").unwrap());
                let new_path = new_snapshot
                    .file()
                    .map(|f| f.path().as_ref())
                    .unwrap_or(RelPath::unix("untitled").unwrap());
                if old_path != new_path {
                    // TODO confirm how to do this for sweep
                    // writeln!(f, "User renamed {:?} to {:?}\n", old_path, new_path)?;
                }

                let diff = language::unified_diff(&old_snapshot.text(), &new_snapshot.text());
                if !diff.is_empty() {
                    write!(
                        f,
                        "File: {}:\n{}\n",
                        new_path.display(util::paths::PathStyle::Posix),
                        diff
                    )?
                }
                fmt::Result::Ok(())
            }
        }
    }
}
/// The prediction currently offered to the user, tagged with the buffer it
/// was generated for.
#[derive(Debug, Clone)]
struct CurrentEditPrediction {
    buffer_id: EntityId,
    completion: EditPrediction,
}

impl CurrentEditPrediction {
    /// Whether `self` (a newly arrived prediction) should replace
    /// `old_completion` given the buffer's current `snapshot`.
    ///
    /// Keeps the old prediction only when both interpolate to a single edit
    /// over the same range and the new text merely extends the old — i.e.
    /// the new result is a strict refinement the user is already seeing.
    fn should_replace_completion(&self, old_completion: &Self, snapshot: &BufferSnapshot) -> bool {
        if self.buffer_id != old_completion.buffer_id {
            return true;
        }

        // Old prediction no longer applies — always replace it.
        let Some(old_edits) = old_completion.completion.interpolate(snapshot) else {
            return true;
        };
        // New prediction doesn't apply — keep what we have.
        let Some(new_edits) = self.completion.interpolate(snapshot) else {
            return false;
        };

        if old_edits.len() == 1 && new_edits.len() == 1 {
            let (old_range, old_text) = &old_edits[0];
            let (new_range, new_text) = &new_edits[0];
            new_range == old_range && new_text.starts_with(old_text.as_ref())
        } else {
            true
        }
    }
}
/// An in-flight completion request; dropping it cancels the task.
struct PendingCompletion {
    id: usize,
    _task: Task<()>,
}
/// `EditPredictionProvider` implementation backed by the Sweep AI service.
pub struct SweepAiEditPredictionProvider {
    workspace: WeakEntity<Workspace>,
    sweep_ai: Entity<SweepAi>,
    // At most two in-flight requests; see `refresh` for the replacement policy.
    pending_completions: ArrayVec<PendingCompletion, 2>,
    next_pending_completion_id: usize,
    current_completion: Option<CurrentEditPrediction>,
    // Used to throttle requests (see THROTTLE_TIMEOUT).
    last_request_timestamp: Instant,
    project: Entity<Project>,
}
impl SweepAiEditPredictionProvider {
    /// Minimum delay between consecutive completion requests.
    pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300);

    pub fn new(
        sweep_ai: Entity<SweepAi>,
        workspace: WeakEntity<Workspace>,
        project: Entity<Project>,
    ) -> Self {
        Self {
            sweep_ai,
            pending_completions: ArrayVec::new(),
            next_pending_completion_id: 0,
            current_completion: None,
            last_request_timestamp: Instant::now(),
            project,
            workspace,
        }
    }
}
impl edit_prediction::EditPredictionProvider for SweepAiEditPredictionProvider {
    // NOTE(review): these identifiers say "zed-predict" / "Zed's Edit
    // Predictions" even though this is the Sweep provider — looks copied
    // from another provider; confirm intended.
    fn name() -> &'static str {
        "zed-predict"
    }

    fn display_name() -> &'static str {
        "Zed's Edit Predictions"
    }

    fn show_completions_in_menu() -> bool {
        true
    }

    fn show_tab_accept_marker() -> bool {
        true
    }

    /// Predictions are available only when a SWEEP_AI_TOKEN was present at startup.
    fn is_enabled(
        &self,
        _buffer: &Entity<Buffer>,
        _cursor_position: language::Anchor,
        cx: &App,
    ) -> bool {
        self.sweep_ai.read(cx).api_token.is_some()
    }

    fn is_refreshing(&self) -> bool {
        !self.pending_completions.is_empty()
    }

    /// Requests a fresh prediction, unless the current one still applies to
    /// the buffer as-is. Requests are throttled by `THROTTLE_TIMEOUT`.
    fn refresh(
        &mut self,
        buffer: Entity<Buffer>,
        position: language::Anchor,
        _debounce: bool,
        cx: &mut Context<Self>,
    ) {
        // Keep the current completion if it still interpolates cleanly.
        if let Some(current_completion) = self.current_completion.as_ref() {
            let snapshot = buffer.read(cx).snapshot();
            if current_completion
                .completion
                .interpolate(&snapshot)
                .is_some()
            {
                return;
            }
        }

        let pending_completion_id = self.next_pending_completion_id;
        self.next_pending_completion_id += 1;
        let last_request_timestamp = self.last_request_timestamp;
        let project = self.project.clone();
        let workspace = self.workspace.clone();

        let task = cx.spawn(async move |this, cx| {
            // Wait out the remainder of the throttle window, if any.
            if let Some(timeout) = (last_request_timestamp + Self::THROTTLE_TIMEOUT)
                .checked_duration_since(Instant::now())
            {
                cx.background_executor().timer(timeout).await;
            }

            let completion_request = this.update(cx, |this, cx| {
                this.last_request_timestamp = Instant::now();
                this.sweep_ai.update(cx, |sweep_ai, cx| {
                    sweep_ai.request_completion(&workspace, &project, &buffer, position, cx)
                })
            });

            let completion = match completion_request {
                Ok(completion_request) => {
                    let completion_request = completion_request.await;
                    completion_request.map(|c| {
                        c.map(|completion| CurrentEditPrediction {
                            buffer_id: buffer.entity_id(),
                            completion,
                        })
                    })
                }
                Err(error) => Err(error),
            };

            // On failure or empty result, just clear our pending slot.
            let Some(new_completion) = completion
                .context("edit prediction failed")
                .log_err()
                .flatten()
            else {
                this.update(cx, |this, cx| {
                    // NOTE(review): indexes [0] — assumes at least one pending
                    // completion is present here; confirm it can't be empty.
                    if this.pending_completions[0].id == pending_completion_id {
                        this.pending_completions.remove(0);
                    } else {
                        this.pending_completions.clear();
                    }
                    cx.notify();
                })
                .ok();
                return;
            };

            this.update(cx, |this, cx| {
                if this.pending_completions[0].id == pending_completion_id {
                    this.pending_completions.remove(0);
                } else {
                    this.pending_completions.clear();
                }

                if let Some(old_completion) = this.current_completion.as_ref() {
                    let snapshot = buffer.read(cx).snapshot();
                    if new_completion.should_replace_completion(old_completion, &snapshot) {
                        this.current_completion = Some(new_completion);
                    }
                } else {
                    this.current_completion = Some(new_completion);
                }

                cx.notify();
            })
            .ok();
        });

        // We always maintain at most two pending completions. When we already
        // have two, we replace the newest one.
        if self.pending_completions.len() <= 1 {
            self.pending_completions.push(PendingCompletion {
                id: pending_completion_id,
                _task: task,
            });
        } else if self.pending_completions.len() == 2 {
            self.pending_completions.pop();
            self.pending_completions.push(PendingCompletion {
                id: pending_completion_id,
                _task: task,
            });
        }
    }

    fn cycle(
        &mut self,
        _buffer: Entity<Buffer>,
        _cursor_position: language::Anchor,
        _direction: edit_prediction::Direction,
        _cx: &mut Context<Self>,
    ) {
        // Right now we don't support cycling.
    }

    fn accept(&mut self, _cx: &mut Context<Self>) {
        self.pending_completions.clear();
    }

    fn discard(&mut self, _cx: &mut Context<Self>) {
        self.pending_completions.clear();
        self.current_completion.take();
    }

    /// Returns the contiguous run of the current prediction's edits nearest
    /// the cursor, expanding over neighbors within one row of each other.
    fn suggest(
        &mut self,
        buffer: &Entity<Buffer>,
        cursor_position: language::Anchor,
        cx: &mut Context<Self>,
    ) -> Option<edit_prediction::EditPrediction> {
        let CurrentEditPrediction {
            buffer_id,
            completion,
            ..
        } = self.current_completion.as_mut()?;

        // Invalidate previous completion if it was generated for a different buffer.
        if *buffer_id != buffer.entity_id() {
            self.current_completion.take();
            return None;
        }

        let buffer = buffer.read(cx);
        let Some(edits) = completion.interpolate(&buffer.snapshot()) else {
            self.current_completion.take();
            return None;
        };

        // Find the edit whose start or end row is closest to the cursor row.
        let cursor_row = cursor_position.to_point(buffer).row;
        let (closest_edit_ix, (closest_edit_range, _)) =
            edits.iter().enumerate().min_by_key(|(_, (range, _))| {
                let distance_from_start = cursor_row.abs_diff(range.start.to_point(buffer).row);
                let distance_from_end = cursor_row.abs_diff(range.end.to_point(buffer).row);
                cmp::min(distance_from_start, distance_from_end)
            })?;

        // Grow the window backwards over edits within one row of the closest.
        let mut edit_start_ix = closest_edit_ix;
        for (range, _) in edits[..edit_start_ix].iter().rev() {
            let distance_from_closest_edit =
                closest_edit_range.start.to_point(buffer).row - range.end.to_point(buffer).row;
            if distance_from_closest_edit <= 1 {
                edit_start_ix -= 1;
            } else {
                break;
            }
        }

        // Grow the window forwards the same way.
        let mut edit_end_ix = closest_edit_ix + 1;
        for (range, _) in &edits[edit_end_ix..] {
            let distance_from_closest_edit =
                range.start.to_point(buffer).row - closest_edit_range.end.to_point(buffer).row;
            if distance_from_closest_edit <= 1 {
                edit_end_ix += 1;
            } else {
                break;
            }
        }

        Some(edit_prediction::EditPrediction::Local {
            id: Some(completion.id.to_string().into()),
            edits: edits[edit_start_ix..edit_end_ix].to_vec(),
            edit_preview: Some(completion.edit_preview.clone()),
        })
    }
}

View File

@@ -124,3 +124,66 @@ pub struct AssistantEventData {
pub error_message: Option<String>,
pub language_name: Option<String>,
}
/// A single captured stack frame included in a hang report.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BacktraceFrame {
    // Instruction pointer at the time of capture.
    pub ip: usize,
    // Address of the symbol associated with `ip`.
    pub symbol_addr: usize,
    pub base: Option<usize>,
    // Symbol names resolved for this frame.
    pub symbols: Vec<String>,
}
/// Diagnostic report describing a detected hang, with the stack that was
/// captured and basic environment information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HangReport {
    pub backtrace: Vec<BacktraceFrame>,
    pub app_version: Option<SemanticVersion>,
    pub os_name: String,
    pub os_version: Option<String>,
    pub architecture: String,
    /// Identifier unique to each Zed installation (differs for stable, preview, dev)
    pub installation_id: Option<String>,
}
/// Source location (file and line) where a panic originated.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LocationData {
    pub file: String,
    pub line: u32,
}
/// A captured panic, serialized for crash reporting.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Panic {
    /// The name of the thread that panicked
    pub thread: String,
    /// The panic message
    pub payload: String,
    /// The location of the panic (file, line number)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub location_data: Option<LocationData>,
    pub backtrace: Vec<String>,
    /// Zed version number
    pub app_version: String,
    /// The Git commit SHA that Zed was built at.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub app_commit_sha: Option<String>,
    /// Zed release channel (stable, preview, dev)
    pub release_channel: String,
    pub target: Option<String>,
    pub os_name: String,
    pub os_version: Option<String>,
    pub architecture: String,
    /// The time the panic occurred (UNIX millisecond timestamp)
    pub panicked_on: i64,
    /// Identifier unique to each system Zed is installed on
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system_id: Option<String>,
    /// Identifier unique to each Zed installation (differs for stable, preview, dev)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub installation_id: Option<String>,
    /// Identifier unique to each Zed session (differs for each time you open Zed)
    pub session_id: String,
}
/// Wire envelope for uploading a single panic report.
#[derive(Serialize, Deserialize)]
pub struct PanicRequest {
    pub panic: Panic,
}

View File

@@ -1556,8 +1556,7 @@ pub(crate) fn default_working_directory(workspace: &Workspace, cx: &App) -> Opti
.read(cx)
.active_project_directory(cx)
.as_deref()
.map(Path::to_path_buf)
.or_else(|| first_project_directory(workspace, cx)),
.map(Path::to_path_buf),
WorkingDirectory::FirstProjectDirectory => first_project_directory(workspace, cx),
WorkingDirectory::AlwaysHome => None,
WorkingDirectory::Always { directory } => {
@@ -1571,13 +1570,10 @@ pub(crate) fn default_working_directory(workspace: &Workspace, cx: &App) -> Opti
///Gets the first project's home directory, or the home directory
fn first_project_directory(workspace: &Workspace, cx: &App) -> Option<PathBuf> {
let worktree = workspace.worktrees(cx).next()?.read(cx);
let worktree_path = worktree.abs_path();
if worktree.root_entry()?.is_dir() {
Some(worktree_path.to_path_buf())
} else {
// If worktree is a file, return its parent directory
worktree_path.parent().map(|p| p.to_path_buf())
if !worktree.root_entry()?.is_dir() {
return None;
}
Some(worktree.abs_path().to_path_buf())
}
#[cfg(test)]
@@ -1610,7 +1606,7 @@ mod tests {
});
}
// No active entry, but a worktree, worktree is a file -> parent directory
// No active entry, but a worktree, worktree is a file -> home_dir()
#[gpui::test]
async fn no_active_entry_worktree_is_file(cx: &mut TestAppContext) {
let (project, workspace) = init_test(cx).await;
@@ -1625,9 +1621,9 @@ mod tests {
assert!(workspace.worktrees(cx).next().is_some());
let res = default_working_directory(workspace, cx);
assert_eq!(res, Some(Path::new("/").to_path_buf()));
assert_eq!(res, None);
let res = first_project_directory(workspace, cx);
assert_eq!(res, Some(Path::new("/").to_path_buf()));
assert_eq!(res, None);
});
}

View File

@@ -34,13 +34,11 @@ async fn capture_unix(
) -> Result<collections::HashMap<String, String>> {
use std::os::unix::process::CommandExt;
use crate::command::new_std_command;
let shell_kind = ShellKind::new(shell_path, false);
let zed_path = super::get_shell_safe_zed_path(shell_kind)?;
let mut command_string = String::new();
let mut command = new_std_command(shell_path);
let mut command = std::process::Command::new(shell_path);
command.args(args);
// In some shells, file descriptors greater than 2 cannot be used in interactive mode,
// so file descriptor 0 (stdin) is used instead. This impacts zsh, old bash; perhaps others.

View File

@@ -211,7 +211,7 @@ fn find_target(
let mut pre_char = String::new();
// Backward scan to find the start of the number, but stop at start_offset
for ch in snapshot.reversed_chars_at(offset + if offset < snapshot.len() { 1 } else { 0 }) {
for ch in snapshot.reversed_chars_at(offset + 1) {
// Search boundaries
if offset == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) {
break;

View File

@@ -29,6 +29,7 @@ assets.workspace = true
audio.workspace = true
auto_update.workspace = true
auto_update_ui.workspace = true
backtrace = "0.3"
bincode.workspace = true
breadcrumbs.workspace = true
call.workspace = true
@@ -99,6 +100,7 @@ migrator.workspace = true
miniprofiler_ui.workspace = true
mimalloc = { version = "0.1", optional = true }
nc.workspace = true
nix = { workspace = true, features = ["pthread", "signal"] }
node_runtime.workspace = true
notifications.workspace = true
onboarding.workspace = true
@@ -133,12 +135,12 @@ snippet_provider.workspace = true
snippets_ui.workspace = true
supermaven.workspace = true
svg_preview.workspace = true
sweep_ai.workspace = true
sysinfo.workspace = true
tab_switcher.workspace = true
task.workspace = true
tasks_ui.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
terminal_view.workspace = true
theme.workspace = true
theme_extension.workspace = true

View File

@@ -549,7 +549,11 @@ pub fn main() {
auto_update::init(client.clone(), cx);
dap_adapters::init(cx);
auto_update_ui::init(cx);
reliability::init(client.clone(), cx);
reliability::init(
client.http_client(),
system_id.as_ref().map(|id| id.to_string()),
cx,
);
extension_host::init(
extension_host_proxy.clone(),
app_state.fs.clone(),

View File

@@ -1,42 +1,46 @@
use anyhow::{Context as _, Result};
use client::{Client, telemetry::MINIDUMP_ENDPOINT};
use client::{TelemetrySettings, telemetry::MINIDUMP_ENDPOINT};
use futures::AsyncReadExt;
use gpui::{App, AppContext as _, SerializedThreadTaskTimings};
use http_client::{self, HttpClient};
use http_client::{self, HttpClient, HttpClientWithUrl};
use log::info;
use project::Project;
use proto::{CrashReport, GetCrashFilesResponse};
use reqwest::multipart::{Form, Part};
use settings::Settings;
use smol::stream::StreamExt;
use std::{ffi::OsStr, fs, sync::Arc, thread::ThreadId, time::Duration};
use util::ResultExt;
use crate::STARTUP_TIME;
pub fn init(client: Arc<Client>, cx: &mut App) {
pub fn init(http_client: Arc<HttpClientWithUrl>, installation_id: Option<String>, cx: &mut App) {
monitor_hangs(cx);
if client.telemetry().diagnostics_enabled() {
let client = client.clone();
#[cfg(target_os = "macos")]
monitor_main_thread_hangs(http_client.clone(), installation_id.clone(), cx);
if client::TelemetrySettings::get_global(cx).diagnostics {
let client = http_client.clone();
let id = installation_id.clone();
cx.background_spawn(async move {
upload_previous_minidumps(client).await.warn_on_err();
upload_previous_minidumps(client, id).await.warn_on_err();
})
.detach()
}
cx.observe_new(move |project: &mut Project, _, cx| {
let client = client.clone();
let http_client = http_client.clone();
let installation_id = installation_id.clone();
let Some(remote_client) = project.remote_client() else {
return;
};
remote_client.update(cx, |remote_client, cx| {
if !client.telemetry().diagnostics_enabled() {
remote_client.update(cx, |client, cx| {
if !TelemetrySettings::get_global(cx).diagnostics {
return;
}
let request = remote_client
.proto_client()
.request(proto::GetCrashFiles {});
let request = client.proto_client().request(proto::GetCrashFiles {});
cx.background_spawn(async move {
let GetCrashFilesResponse { crashes } = request.await?;
@@ -49,9 +53,15 @@ pub fn init(client: Arc<Client>, cx: &mut App) {
} in crashes
{
if let Some(metadata) = serde_json::from_str(&metadata).log_err() {
upload_minidump(client.clone(), endpoint, minidump_contents, &metadata)
.await
.log_err();
upload_minidump(
http_client.clone(),
endpoint,
minidump_contents,
&metadata,
installation_id.clone(),
)
.await
.log_err();
}
}
@@ -63,6 +73,210 @@ pub fn init(client: Arc<Client>, cx: &mut App) {
.detach();
}
#[cfg(target_os = "macos")]
/// Watches the main thread for hangs and uploads a `HangReport` (with a
/// captured backtrace) to the Zed telemetry endpoint when one is suspected.
///
/// Mechanism visible in this function:
/// 1. A background task sends a heartbeat `()` into a bounded (capacity 3)
///    channel once per second; a foreground task drains it on the main thread.
/// 2. If the main thread stops draining, `try_send` reports "full" and the
///    background task raises SIGUSR2 on the main thread via `pthread_kill`.
/// 3. The SIGUSR2 handler captures a raw backtrace into the `BACKTRACE`
///    static and signals `backtrace_rx`; a second background task resolves
///    symbols and POSTs the report to `/telemetry/hangs`.
///
/// `installation_id` is attached to the report for correlating hangs per
/// install; `http_client` is used to build and send the telemetry request.
pub fn monitor_main_thread_hangs(
    http_client: Arc<HttpClientWithUrl>,
    installation_id: Option<String>,
    cx: &App,
) {
    // This is too noisy to ship to stable for now.
    if !matches!(
        ReleaseChannel::global(cx),
        ReleaseChannel::Dev | ReleaseChannel::Nightly | ReleaseChannel::Preview
    ) {
        return;
    }

    use nix::sys::signal::{
        SaFlags, SigAction, SigHandler, SigSet,
        Signal::{self, SIGUSR2},
        sigaction,
    };
    use parking_lot::Mutex;

    use http_client::Method;
    use release_channel::ReleaseChannel;
    use std::{
        ffi::c_int,
        sync::{OnceLock, mpsc},
        time::Duration,
    };
    use telemetry_events::{BacktraceFrame, HangReport};

    use nix::sys::pthread;

    let foreground_executor = cx.foreground_executor();
    let background_executor = cx.background_executor();
    // Snapshot of telemetry settings; read later on the background task to
    // bail out of uploading if diagnostics were disabled at init time.
    let telemetry_settings = *client::TelemetrySettings::get_global(cx);

    // Initialize SIGUSR2 handler to send a backtrace to a channel.
    let (backtrace_tx, backtrace_rx) = mpsc::channel();
    static BACKTRACE: Mutex<Vec<backtrace::Frame>> = Mutex::new(Vec::new());
    static BACKTRACE_SENDER: OnceLock<mpsc::Sender<()>> = OnceLock::new();
    BACKTRACE_SENDER.get_or_init(|| backtrace_tx);
    // Pre-reserve so the signal handler never allocates (allocation is not
    // async-signal-safe); the handler stops pushing at capacity.
    BACKTRACE.lock().reserve(100);

    // (Re-)installs the one-shot SIGUSR2 handler. Called once up front and
    // again after each captured backtrace has been drained, because
    // SA_RESETHAND removes the handler after the first delivery.
    fn handle_backtrace_signal() {
        unsafe {
            extern "C" fn handle_sigusr2(_i: c_int) {
                unsafe {
                    // ASYNC SIGNAL SAFETY: This lock is only accessed one other time,
                    // which can only be triggered by this signal handler. In addition,
                    // this signal handler is immediately removed by SA_RESETHAND, and this
                    // signal handler cannot be re-entrant due to the SIGUSR2 mask defined
                    // below
                    let mut bt = BACKTRACE.lock();
                    bt.clear();
                    backtrace::trace_unsynchronized(|frame| {
                        // Stop at the pre-reserved capacity to avoid
                        // allocating inside the signal handler.
                        if bt.len() < bt.capacity() {
                            bt.push(frame.clone());
                            true
                        } else {
                            false
                        }
                    });
                }
                BACKTRACE_SENDER.get().unwrap().send(()).ok();
            }

            let mut mask = SigSet::empty();
            mask.add(SIGUSR2);
            sigaction(
                Signal::SIGUSR2,
                &SigAction::new(
                    SigHandler::Handler(handle_sigusr2),
                    // SA_RESETHAND: handler is one-shot (see safety comment above).
                    SaFlags::SA_RESTART | SaFlags::SA_RESETHAND,
                    mask,
                ),
            )
            .log_err();
        }
    }

    handle_backtrace_signal();

    let main_thread = pthread::pthread_self();
    // Bounded heartbeat channel: capacity 3 means the main thread must fall
    // ~3 seconds behind before the channel fills and a hang is suspected.
    let (mut tx, mut rx) = futures::channel::mpsc::channel(3);

    // Main-thread side of the heartbeat: just drain the channel forever.
    foreground_executor
        .spawn(async move { while (rx.next().await).is_some() {} })
        .detach();

    // Background side: one heartbeat per second. If the channel is full the
    // main thread is not draining — signal it so the handler captures a
    // backtrace, then stop (first hang only).
    background_executor
        .spawn({
            let background_executor = background_executor.clone();
            async move {
                loop {
                    background_executor.timer(Duration::from_secs(1)).await;
                    match tx.try_send(()) {
                        Ok(_) => continue,
                        Err(e) => {
                            if e.into_send_error().is_full() {
                                pthread::pthread_kill(main_thread, SIGUSR2).log_err();
                            }
                            // Only detect the first hang
                            break;
                        }
                    }
                }
            }
        })
        .detach();

    let app_version = release_channel::AppVersion::global(cx);
    let os_name = client::telemetry::os_name();

    // Reporter task: waits for the signal handler to announce a captured
    // backtrace, resolves symbols (safe here, outside the handler), and
    // uploads the report.
    background_executor
        .clone()
        .spawn(async move {
            let os_version = client::telemetry::os_version();
            // NOTE(review): `backtrace_rx.recv()` only errs once all senders
            // drop, but the sender lives in a static and is never dropped, so
            // the inner `while` never exits; the outer `loop` looks redundant
            // (and would busy-spin if the `while` ever did exit) — confirm
            // whether the nesting is intentional.
            loop {
                while backtrace_rx.recv().is_ok() {
                    if !telemetry_settings.diagnostics {
                        return;
                    }
                    // ASYNC SIGNAL SAFETY: This lock is only accessed _after_
                    // the backtrace transmitter has fired, which itself is only done
                    // by the signal handler. And due to SA_RESETHAND the signal handler
                    // will not run again until `handle_backtrace_signal` is called.
                    let raw_backtrace = BACKTRACE.lock().drain(..).collect::<Vec<_>>();
                    // Symbol resolution is deferred to here because it is not
                    // async-signal-safe to do inside the handler.
                    let backtrace: Vec<_> = raw_backtrace
                        .into_iter()
                        .map(|frame| {
                            let mut btf = BacktraceFrame {
                                ip: frame.ip() as usize,
                                symbol_addr: frame.symbol_address() as usize,
                                base: frame.module_base_address().map(|addr| addr as usize),
                                symbols: vec![],
                            };
                            backtrace::resolve_frame(&frame, |symbol| {
                                if let Some(name) = symbol.name() {
                                    btf.symbols.push(name.to_string());
                                }
                            });
                            btf
                        })
                        .collect();
                    // IMPORTANT: Don't move this to before `BACKTRACE.lock()`
                    handle_backtrace_signal();

                    log::error!(
                        "Suspected hang on main thread:\n{}",
                        backtrace
                            .iter()
                            .flat_map(|bt| bt.symbols.first().as_ref().map(|s| s.as_str()))
                            .collect::<Vec<_>>()
                            .join("\n")
                    );

                    let report = HangReport {
                        backtrace,
                        app_version: Some(app_version),
                        os_name: os_name.clone(),
                        os_version: Some(os_version.clone()),
                        architecture: std::env::consts::ARCH.into(),
                        installation_id: installation_id.clone(),
                    };

                    // Any failure below just skips this report and waits for
                    // the next captured backtrace.
                    let Some(json_bytes) = serde_json::to_vec(&report).log_err() else {
                        continue;
                    };
                    let Some(checksum) = client::telemetry::calculate_json_checksum(&json_bytes)
                    else {
                        continue;
                    };

                    let Ok(url) = http_client.build_zed_api_url("/telemetry/hangs", &[]) else {
                        continue;
                    };
                    let Ok(request) = http_client::Request::builder()
                        .method(Method::POST)
                        .uri(url.as_ref())
                        .header("x-zed-checksum", checksum)
                        .body(json_bytes.into())
                    else {
                        continue;
                    };

                    if let Some(response) = http_client.send(request).await.log_err()
                        && response.status() != 200
                    {
                        log::error!("Failed to send hang report: HTTP {:?}", response.status());
                    }
                }
            }
        })
        .detach()
}
fn monitor_hangs(cx: &App) {
let main_thread_id = std::thread::current().id();
@@ -151,7 +365,10 @@ fn save_hang_trace(
);
}
pub async fn upload_previous_minidumps(client: Arc<Client>) -> anyhow::Result<()> {
pub async fn upload_previous_minidumps(
http: Arc<HttpClientWithUrl>,
installation_id: Option<String>,
) -> anyhow::Result<()> {
let Some(minidump_endpoint) = MINIDUMP_ENDPOINT.as_ref() else {
log::warn!("Minidump endpoint not set");
return Ok(());
@@ -168,12 +385,13 @@ pub async fn upload_previous_minidumps(client: Arc<Client>) -> anyhow::Result<()
json_path.set_extension("json");
if let Ok(metadata) = serde_json::from_slice(&smol::fs::read(&json_path).await?)
&& upload_minidump(
client.clone(),
http.clone(),
minidump_endpoint,
smol::fs::read(&child_path)
.await
.context("Failed to read minidump")?,
&metadata,
installation_id.clone(),
)
.await
.log_err()
@@ -187,10 +405,11 @@ pub async fn upload_previous_minidumps(client: Arc<Client>) -> anyhow::Result<()
}
async fn upload_minidump(
client: Arc<Client>,
http: Arc<HttpClientWithUrl>,
endpoint: &str,
minidump: Vec<u8>,
metadata: &crashes::CrashInfo,
installation_id: Option<String>,
) -> Result<()> {
let mut form = Form::new()
.part(
@@ -217,19 +436,8 @@ async fn upload_minidump(
if let Some(minidump_error) = metadata.minidump_error.clone() {
form = form.text("minidump_error", minidump_error);
}
if let Some(id) = client.telemetry().metrics_id() {
form = form.text("sentry[user][id]", id.to_string());
form = form.text(
"sentry[user][is_staff]",
if client.telemetry().is_staff().unwrap_or_default() {
"true"
} else {
"false"
},
);
} else if let Some(id) = client.telemetry().installation_id() {
form = form.text("sentry[user][id]", format!("installation-{}", id))
if let Some(id) = installation_id.clone() {
form = form.text("sentry[user][id]", id)
}
::telemetry::event!(
@@ -297,10 +505,7 @@ async fn upload_minidump(
// TODO: feature-flag-context, and more of device-context like screen resolution, available ram, device model, etc
let mut response_text = String::new();
let mut response = client
.http_client()
.send_multipart_form(endpoint, form)
.await?;
let mut response = http.send_multipart_form(endpoint, form).await?;
response
.body_mut()
.read_to_string(&mut response_text)

View File

@@ -7,10 +7,9 @@ use feature_flags::FeatureFlagAppExt;
use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity};
use language::language_settings::{EditPredictionProvider, all_language_settings};
use language_models::MistralLanguageModelProvider;
use settings::{EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, SettingsStore};
use settings::SettingsStore;
use std::{cell::RefCell, rc::Rc, sync::Arc};
use supermaven::{Supermaven, SupermavenCompletionProvider};
use sweep_ai::{SweepAiEditPredictionProvider, SweepFeatureFlag};
use ui::Window;
use zeta::ZetaEditPredictionProvider;
use zeta2::Zeta2FeatureFlag;
@@ -203,38 +202,6 @@ fn assign_edit_prediction_provider(
let provider = cx.new(|_| CodestralCompletionProvider::new(http_client));
editor.set_edit_prediction_provider(Some(provider), window, cx);
}
EditPredictionProvider::Experimental(name) => {
if name == EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME
&& cx.has_flag::<SweepFeatureFlag>()
{
if let Some(project) = editor.project()
&& let Some(workspace) = editor.workspace()
{
let sweep_ai = sweep_ai::SweepAi::register(cx);
if let Some(buffer) = &singleton_buffer
&& buffer.read(cx).file().is_some()
{
sweep_ai.update(cx, |sweep_ai, cx| {
sweep_ai.register_buffer(buffer, project, cx);
});
}
let provider = cx.new(|_| {
sweep_ai::SweepAiEditPredictionProvider::new(
sweep_ai,
workspace.downgrade(),
project.clone(),
)
});
editor.set_edit_prediction_provider(Some(provider), window, cx);
}
} else {
editor.set_edit_prediction_provider::<SweepAiEditPredictionProvider>(
None, window, cx,
);
}
}
EditPredictionProvider::Zed => {
if user_store.read(cx).current_user().is_some() {
let mut worktree = None;

View File

@@ -183,7 +183,7 @@ macro_rules! time {
$crate::Timer::new($logger, $name)
};
($name:expr) => {
$crate::time!($crate::default_logger!() => $name)
time!($crate::default_logger!() => $name)
};
}

View File

@@ -168,8 +168,6 @@ To ensure you're using your billing method of choice, [open a new Codex thread](
If you are already logged in and want to change your authentication method, type `/logout` in the thread and authenticate again.
If you want to use a third-party provider with Codex, you can configure that with your [Codex config.toml](https://github.com/openai/codex/blob/main/docs/config.md#model-selection) or pass extra [args/env variables](https://github.com/openai/codex/blob/main/docs/config.md#model-selection) to your Codex agent servers settings.
#### Installation
The first time you create a Codex thread, Zed will install [codex-acp](https://github.com/zed-industries/codex-acp). This installation is used only by Zed and is kept up to date automatically as you use the agent.