Compare commits

..

2 Commits

Author: Richard Feldman
SHA1: e19533e8c4
Message: make it way more complicated but still broken
Co-authored-by: Agus Zubiaga <hi@aguz.me>
Date: 2025-03-27 15:49:35 -04:00

Author: Richard Feldman
SHA1: 3148583f79
Message: wip
Co-authored-by: Agus Zubiaga <hi@aguz.me>
Date: 2025-03-27 15:29:15 -04:00
120 changed files with 2540 additions and 2961 deletions

Cargo.lock (generated, 17 lines changed)
View File

@@ -413,6 +413,7 @@ dependencies = [
"menu",
"multi_buffer",
"parking_lot",
"paths",
"pretty_assertions",
"project",
"prompt_library",
@@ -422,6 +423,7 @@ dependencies = [
"rope",
"schemars",
"search",
"semantic_index",
"serde",
"serde_json_lenient",
"settings",
@@ -457,7 +459,6 @@ dependencies = [
"collections",
"command_palette_hooks",
"context_server",
"convert_case 0.8.0",
"db",
"editor",
"feature_flags",
@@ -579,7 +580,6 @@ dependencies = [
"client",
"collections",
"context_server",
"dap",
"env_logger 0.11.7",
"fs",
"futures 0.3.31",
@@ -669,10 +669,14 @@ dependencies = [
"http_client",
"indexed_docs",
"language",
"language_model",
"log",
"pretty_assertions",
"project",
"prompt_store",
"rope",
"schemars",
"semantic_index",
"serde",
"serde_json",
"settings",
@@ -720,7 +724,6 @@ dependencies = [
"itertools 0.14.0",
"language",
"language_model",
"open",
"project",
"rand 0.8.5",
"release_channel",
@@ -3871,7 +3874,6 @@ dependencies = [
"node_runtime",
"parking_lot",
"paths",
"regex",
"schemars",
"serde",
"serde_json",
@@ -3905,6 +3907,7 @@ dependencies = [
"regex",
"serde",
"serde_json",
"sysinfo",
"task",
"util",
]
@@ -4638,7 +4641,6 @@ dependencies = [
"client",
"clock",
"collections",
"dap",
"env_logger 0.11.7",
"feature_flags",
"fs",
@@ -11346,7 +11348,6 @@ dependencies = [
"clap",
"client",
"clock",
"dap",
"env_logger 0.11.7",
"extension",
"extension_host",
@@ -13234,7 +13235,6 @@ dependencies = [
"theme",
"title_bar",
"ui",
"workspace",
]
[[package]]
@@ -16907,7 +16907,6 @@ dependencies = [
"clock",
"collections",
"component",
"dap",
"db",
"derive_more",
"env_logger 0.11.7",
@@ -17284,8 +17283,6 @@ dependencies = [
"command_palette_hooks",
"component_preview",
"copilot",
"dap",
"dap_adapters",
"db",
"debugger_tools",
"debugger_ui",

View File

@@ -470,7 +470,6 @@ mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
nanoid = "0.4"
nbformat = { version = "0.10.0" }
nix = "0.29"
open = "5.0.0"
num-format = "0.4.4"
ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }

View File

@@ -0,0 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.27772 1.38585L4.39187 4.07909C4.34653 4.21692 4.26946 4.34219 4.16685 4.44479C4.06425 4.5474 3.93898 4.62447 3.80115 4.66981L1.10791 5.55566L3.80115 6.44151C3.93898 6.48685 4.06425 6.56392 4.16685 6.66653C4.26946 6.76913 4.34653 6.8944 4.39187 7.03223L5.27772 9.72547L6.16357 7.03223C6.20891 6.8944 6.28598 6.76913 6.38859 6.66653C6.49119 6.56392 6.61646 6.48685 6.7543 6.44151L9.44753 5.55566L6.7543 4.66981C6.61646 4.62447 6.49119 4.5474 6.38859 4.44479C6.28598 4.34219 6.20891 4.21692 6.16357 4.07909L5.27772 1.38585Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M8.35938 12.3555C8.35938 12.0664 8.52734 11.8086 9.00781 11.3594L10.2031 10.2344C10.6094 9.85156 10.7891 9.60156 10.7891 9.34375C10.7891 9.05469 10.5781 8.85938 10.2734 8.85938C10.0391 8.85938 9.87109 8.95312 9.66406 9.21094C9.42578 9.50781 9.25391 9.60938 8.99219 9.60938C8.61719 9.60938 8.35156 9.35938 8.35156 9.01172C8.35156 8.25 9.26953 7.57812 10.3594 7.57812C11.4961 7.57812 12.3438 8.26172 12.3438 9.17969C12.3438 9.75391 12.0391 10.3008 11.418 10.8516L10.4961 11.6719V11.7344H11.8047C12.2578 11.7344 12.5391 11.9766 12.5391 12.3711C12.5391 12.7656 12.2656 13 11.8047 13H9.08203C8.65234 13 8.35938 12.7383 8.35938 12.3555Z" fill="black"/>
<path d="M11.0834 1.38585V3.71918M9.91675 2.55248H12.2501" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

After

Size: 1.5 KiB

View File

@@ -643,7 +643,6 @@
"name": "Code Writer",
"tools": {
"bash": true,
"batch-tool": true,
"copy-path": true,
"create-file": true,
"delete-path": true,
@@ -661,7 +660,6 @@
}
}
},
// Shows a notification when the agent needs confirmation before running an edit tool call or when that's concluded.
"notify_when_agent_waiting": true
},
// The settings for slash commands.

View File

@@ -585,13 +585,6 @@ pub enum Thinking {
Enabled { budget_tokens: Option<u32> },
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrContents {
String(String),
Content(Vec<RequestContent>),
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
pub model: String,
@@ -604,7 +597,7 @@ pub struct Request {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_choice: Option<ToolChoice>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub system: Option<StringOrContents>,
pub system: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub metadata: Option<Metadata>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
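Note on the change above: the removed StringOrContents wrapper let the Anthropic `system` field deserialize from either a bare string or a list of content blocks; narrowing it to Option<String> keeps only the string form. A minimal serde sketch of the removed pattern, using Vec<String> as a stand-in for Vec<RequestContent>:

```rust
use serde::{Deserialize, Serialize};

// Stand-in for the removed wrapper: with #[serde(untagged)], serde tries each
// variant in order, so `system` accepts either a bare string or an array.
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum StringOrContents {
    String(String),
    Contents(Vec<String>), // placeholder for Vec<RequestContent>
}

#[derive(Debug, Serialize, Deserialize)]
struct Request {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    system: Option<StringOrContents>,
}

fn main() -> serde_json::Result<()> {
    // Both forms deserialize with the wrapper; only the first one does once
    // `system` becomes Option<String>.
    let plain: Request = serde_json::from_str(r#"{ "system": "be terse" }"#)?;
    let blocks: Request = serde_json::from_str(r#"{ "system": ["be terse", "cite sources"] }"#)?;
    println!("{plain:?}\n{blocks:?}");
    Ok(())
}
```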

View File

@@ -48,6 +48,7 @@ lsp.workspace = true
menu.workspace = true
multi_buffer.workspace = true
parking_lot.workspace = true
paths.workspace = true
project.workspace = true
prompt_library.workspace = true
prompt_store.workspace = true
@@ -55,6 +56,7 @@ proto.workspace = true
rope.workspace = true
schemars.workspace = true
search.workspace = true
semantic_index.workspace = true
serde.workspace = true
settings.workspace = true
smol.workspace = true

View File

@@ -10,15 +10,17 @@ use std::sync::Arc;
use assistant_settings::AssistantSettings;
use assistant_slash_command::SlashCommandRegistry;
use assistant_slash_commands::{ProjectSlashCommandFeatureFlag, SearchSlashCommandFeatureFlag};
use client::Client;
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use gpui::{actions, App, Global, ReadGlobal, UpdateGlobal};
use gpui::{actions, App, Global, UpdateGlobal};
use language_model::{
LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
};
use prompt_store::PromptBuilder;
use semantic_index::{CloudEmbeddingProvider, SemanticDb};
use serde::Deserialize;
use settings::{Settings, SettingsStore};
@@ -84,10 +86,6 @@ impl Assistant {
filter.show_namespace(Self::NAMESPACE);
});
}
pub fn enabled(cx: &App) -> bool {
Self::global(cx).enabled
}
}
pub fn init(
@@ -100,6 +98,33 @@ pub fn init(
AssistantSettings::register(cx);
SlashCommandSettings::register(cx);
cx.spawn({
let client = client.clone();
async move |cx| {
let is_search_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
.await;
let is_project_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<ProjectSlashCommandFeatureFlag>())?
.await;
if !is_search_slash_command_enabled && !is_project_slash_command_enabled {
return Ok(());
}
let embedding_provider = CloudEmbeddingProvider::new(client.clone());
let semantic_index = SemanticDb::new(
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
Arc::new(embedding_provider),
cx,
)
.await?;
cx.update(|cx| cx.set_global(semantic_index))
}
})
.detach();
assistant_context_editor::init(client.clone(), cx);
prompt_library::init(cx);
init_language_model_settings(cx);
@@ -108,7 +133,7 @@ pub fn init(
assistant_panel::init(cx);
context_server::init(cx);
register_slash_commands(cx);
register_slash_commands(Some(prompt_builder.clone()), cx);
inline_assistant::init(
fs.clone(),
prompt_builder.clone(),
@@ -184,7 +209,7 @@ fn update_active_language_model_from_settings(cx: &mut App) {
});
}
fn register_slash_commands(cx: &mut App) {
fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut App) {
let slash_command_registry = SlashCommandRegistry::global(cx);
slash_command_registry.register_command(assistant_slash_commands::FileSlashCommand, true);
@@ -202,6 +227,33 @@ fn register_slash_commands(cx: &mut App) {
.register_command(assistant_slash_commands::DiagnosticsSlashCommand, true);
slash_command_registry.register_command(assistant_slash_commands::FetchSlashCommand, true);
if let Some(prompt_builder) = prompt_builder {
cx.observe_flag::<assistant_slash_commands::ProjectSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
slash_command_registry.register_command(
assistant_slash_commands::ProjectSlashCommand::new(prompt_builder.clone()),
true,
);
}
}
})
.detach();
}
cx.observe_flag::<assistant_slash_commands::AutoSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
// [#auto-staff-ship] TODO remove this when /auto is no longer staff-shipped
slash_command_registry
.register_command(assistant_slash_commands::AutoCommand, true);
}
}
})
.detach();
cx.observe_flag::<assistant_slash_commands::StreamingExampleSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
@@ -218,6 +270,17 @@ fn register_slash_commands(cx: &mut App) {
update_slash_commands_from_settings(cx);
cx.observe_global::<SettingsStore>(update_slash_commands_from_settings)
.detach();
cx.observe_flag::<assistant_slash_commands::SearchSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
slash_command_registry
.register_command(assistant_slash_commands::SearchSlashCommand, true);
}
}
})
.detach();
}
fn update_slash_commands_from_settings(cx: &mut App) {

View File

@@ -1,5 +1,4 @@
use crate::assistant_configuration::{ConfigurationView, ConfigurationViewEvent};
use crate::Assistant;
use crate::{
terminal_inline_assistant::TerminalInlineAssistant, DeployHistory, InlineAssistant, NewChat,
};
@@ -59,7 +58,8 @@ pub fn init(cx: &mut App) {
cx.observe_new(
|terminal_panel: &mut TerminalPanel, _, cx: &mut Context<TerminalPanel>| {
terminal_panel.set_assistant_enabled(Assistant::enabled(cx), cx);
let settings = AssistantSettings::get_global(cx);
terminal_panel.set_assistant_enabled(settings.enabled, cx);
},
)
.detach();
@@ -342,12 +342,12 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
if workspace
.panel::<Self>(cx)
.is_some_and(|panel| panel.read(cx).enabled(cx))
{
workspace.toggle_panel_focus::<Self>(window, cx);
let settings = AssistantSettings::get_global(cx);
if !settings.enabled {
return;
}
workspace.toggle_panel_focus::<Self>(window, cx);
}
fn watch_client_status(
@@ -595,10 +595,12 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
let Some(assistant_panel) = workspace
.panel::<AssistantPanel>(cx)
.filter(|panel| panel.read(cx).enabled(cx))
else {
let settings = AssistantSettings::get_global(cx);
if !settings.enabled {
return;
}
let Some(assistant_panel) = workspace.panel::<AssistantPanel>(cx) else {
return;
};
@@ -1296,8 +1298,12 @@ impl Panel for AssistantPanel {
}
fn icon(&self, _: &Window, cx: &App) -> Option<IconName> {
(self.enabled(cx) && AssistantSettings::get_global(cx).button)
.then_some(IconName::ZedAssistant)
let settings = AssistantSettings::get_global(cx);
if !settings.enabled || !settings.button {
return None;
}
Some(IconName::ZedAssistant)
}
fn icon_tooltip(&self, _: &Window, _: &App) -> Option<&'static str> {
@@ -1311,10 +1317,6 @@ impl Panel for AssistantPanel {
fn activation_priority(&self) -> u32 {
4
}
fn enabled(&self, cx: &App) -> bool {
Assistant::enabled(cx)
}
}
impl EventEmitter<PanelEvent> for AssistantPanel {}

View File

@@ -1,6 +1,5 @@
use crate::{
Assistant, AssistantPanel, AssistantPanelEvent, CycleNextInlineAssist,
CyclePreviousInlineAssist,
AssistantPanel, AssistantPanelEvent, CycleNextInlineAssist, CyclePreviousInlineAssist,
};
use anyhow::{anyhow, Context as _, Result};
use assistant_context_editor::{humanize_token_count, RequestType};
@@ -3556,7 +3555,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
_: &mut Window,
cx: &mut App,
) -> Task<Result<Vec<CodeAction>>> {
if !Assistant::enabled(cx) {
if !AssistantSettings::get_global(cx).enabled {
return Task::ready(Ok(Vec::new()));
}

View File

@@ -31,7 +31,6 @@ clock.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
context_server.workspace = true
convert_case.workspace = true
db.workspace = true
editor.workspace = true
feature_flags.workspace = true

View File

@@ -12,9 +12,9 @@ use editor::{Editor, MultiBuffer};
use gpui::{
linear_color_stop, linear_gradient, list, percentage, pulsating_between, AbsoluteLength,
Animation, AnimationExt, AnyElement, App, ClickEvent, DefiniteLength, EdgesRefinement, Empty,
Entity, Focusable, Hsla, Length, ListAlignment, ListState, MouseButton, ScrollHandle, Stateful,
StyleRefinement, Subscription, Task, TextStyleRefinement, Transformation, UnderlineStyle,
WeakEntity, WindowHandle,
Entity, Focusable, Hsla, Length, ListAlignment, ListOffset, ListState, MouseButton,
ScrollHandle, Stateful, StyleRefinement, Subscription, Task, TextStyleRefinement,
Transformation, UnderlineStyle, WeakEntity, WindowHandle,
};
use language::{Buffer, LanguageRegistry};
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
@@ -47,7 +47,6 @@ pub struct ActiveThread {
last_error: Option<ThreadError>,
pop_ups: Vec<WindowHandle<ToolReadyPopUp>>,
_subscriptions: Vec<Subscription>,
pop_up_subscriptions: HashMap<WindowHandle<ToolReadyPopUp>, Vec<Subscription>>,
}
struct RenderedMessage {
@@ -254,7 +253,6 @@ impl ActiveThread {
last_error: None,
pop_ups: Vec::new(),
_subscriptions: subscriptions,
pop_up_subscriptions: HashMap::default(),
};
for message in thread.read(cx).messages().cloned().collect::<Vec<_>>() {
@@ -317,6 +315,10 @@ impl ActiveThread {
let rendered_message =
RenderedMessage::from_segments(segments, self.language_registry.clone(), window, cx);
self.rendered_messages_by_id.insert(*id, rendered_message);
self.list_state.scroll_to(ListOffset {
item_ix: old_len,
offset_in_item: Pixels(0.0),
});
}
fn edited_message(
@@ -379,7 +381,7 @@ impl ActiveThread {
ThreadEvent::DoneStreaming => {
if !self.thread().read(cx).is_generating() {
self.show_notification(
"The assistant response has concluded.",
"Your changes have been applied.",
IconName::Check,
Color::Success,
window,
@@ -546,64 +548,42 @@ impl ActiveThread {
.log_err()
{
if let Some(pop_up) = screen_window.entity(cx).log_err() {
self.pop_up_subscriptions
.entry(screen_window)
.or_insert_with(Vec::new)
.push(cx.subscribe_in(&pop_up, window, {
|this, _, event, window, cx| match event {
ToolReadyPopupEvent::Accepted => {
let handle = window.window_handle();
cx.activate(true); // Switch back to the Zed application
cx.subscribe_in(&pop_up, window, {
|this, _, event, window, cx| match event {
ToolReadyPopupEvent::Accepted => {
let handle = window.window_handle();
cx.activate(true); // Switch back to the Zed application
let workspace_handle = this.workspace.clone();
let workspace_handle = this.workspace.clone();
// If there are multiple Zed windows, activate the correct one.
cx.defer(move |cx| {
handle
.update(cx, |_view, window, _cx| {
window.activate_window();
// If there are multiple Zed windows, activate the correct one.
cx.defer(move |cx| {
handle
.update(cx, |_view, window, _cx| {
window.activate_window();
if let Some(workspace) =
workspace_handle.upgrade()
{
workspace.update(_cx, |workspace, cx| {
workspace
.focus_panel::<AssistantPanel>(
window, cx,
);
});
}
})
.log_err();
});
if let Some(workspace) = workspace_handle.upgrade()
{
workspace.update(_cx, |workspace, cx| {
workspace.focus_panel::<AssistantPanel>(
window, cx,
);
});
}
})
.log_err();
});
this.dismiss_notifications(cx);
}
ToolReadyPopupEvent::Dismissed => {
this.dismiss_notifications(cx);
}
this.dismiss_notifications(cx);
}
}));
ToolReadyPopupEvent::Dismissed => {
this.dismiss_notifications(cx);
}
}
})
.detach();
self.pop_ups.push(screen_window);
// If the user manually refocuses the original window, dismiss the popup.
self.pop_up_subscriptions
.entry(screen_window)
.or_insert_with(Vec::new)
.push({
let pop_up_weak = pop_up.downgrade();
cx.observe_window_activation(window, move |_, window, cx| {
if window.is_window_active() {
if let Some(pop_up) = pop_up_weak.upgrade() {
pop_up.update(cx, |_, cx| {
cx.emit(ToolReadyPopupEvent::Dismissed);
});
}
}
})
});
}
}
}
@@ -1770,8 +1750,6 @@ impl ActiveThread {
window.remove_window();
})
.ok();
self.pop_up_subscriptions.remove(&window);
}
}

View File

@@ -1,89 +1,35 @@
mod profile_modal_header;
use std::sync::Arc;
use assistant_settings::{
AgentProfile, AgentProfileContent, AssistantSettings, AssistantSettingsContent,
ContextServerPresetContent, VersionedAssistantSettingsContent,
};
use assistant_settings::AssistantSettings;
use assistant_tool::ToolWorkingSet;
use convert_case::{Case, Casing as _};
use editor::Editor;
use fs::Fs;
use gpui::{
prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription,
WeakEntity,
};
use settings::{update_settings_file, Settings as _};
use ui::{prelude::*, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry};
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use settings::Settings as _;
use ui::{prelude::*, ListItem, ListItemSpacing, Navigable, NavigableEntry};
use workspace::{ModalView, Workspace};
use crate::assistant_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
use crate::assistant_configuration::profile_picker::{ProfilePicker, ProfilePickerDelegate};
use crate::assistant_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
use crate::{AssistantPanel, ManageProfiles, ThreadStore};
use crate::{AssistantPanel, ManageProfiles};
enum Mode {
ChooseProfile {
profile_picker: Entity<ProfilePicker>,
_subscription: Subscription,
},
NewProfile(NewProfileMode),
ChooseProfile(Entity<ProfilePicker>),
ViewProfile(ViewProfileMode),
ConfigureTools {
profile_id: Arc<str>,
tool_picker: Entity<ToolPicker>,
_subscription: Subscription,
},
}
impl Mode {
pub fn choose_profile(window: &mut Window, cx: &mut Context<ManageProfilesModal>) -> Self {
let this = cx.entity();
let profile_picker = cx.new(|cx| {
let delegate = ProfilePickerDelegate::new(
move |profile_id, window, cx| {
this.update(cx, |this, cx| {
this.view_profile(profile_id.clone(), window, cx);
})
},
cx,
);
ProfilePicker::new(delegate, window, cx)
});
let dismiss_subscription = cx.subscribe_in(
&profile_picker,
window,
|_this, _profile_picker, _: &DismissEvent, _window, cx| {
cx.emit(DismissEvent);
},
);
Self::ChooseProfile {
profile_picker,
_subscription: dismiss_subscription,
}
}
}
#[derive(Clone)]
pub struct ViewProfileMode {
profile_id: Arc<str>,
fork_profile: NavigableEntry,
configure_tools: NavigableEntry,
}
#[derive(Clone)]
pub struct NewProfileMode {
name_editor: Entity<Editor>,
base_profile_id: Option<Arc<str>>,
}
pub struct ManageProfilesModal {
fs: Arc<dyn Fs>,
tools: Arc<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
focus_handle: FocusHandle,
mode: Mode,
}
@@ -97,12 +43,9 @@ impl ManageProfilesModal {
workspace.register_action(|workspace, _: &ManageProfiles, window, cx| {
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
let fs = workspace.app_state().fs.clone();
let thread_store = panel.read(cx).thread_store();
let tools = thread_store.read(cx).tools();
let thread_store = thread_store.downgrade();
workspace.toggle_modal(window, cx, |window, cx| {
Self::new(fs, tools, thread_store, window, cx)
})
let thread_store = panel.read(cx).thread_store().read(cx);
let tools = thread_store.tools();
workspace.toggle_modal(window, cx, |window, cx| Self::new(fs, tools, window, cx))
}
});
}
@@ -110,44 +53,30 @@ impl ManageProfilesModal {
pub fn new(
fs: Arc<dyn Fs>,
tools: Arc<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle();
let handle = cx.entity();
Self {
fs,
tools,
thread_store,
focus_handle,
mode: Mode::choose_profile(window, cx),
mode: Mode::ChooseProfile(cx.new(|cx| {
let delegate = ProfilePickerDelegate::new(
move |profile_id, window, cx| {
handle.update(cx, |this, cx| {
this.view_profile(profile_id.clone(), window, cx);
})
},
cx,
);
ProfilePicker::new(delegate, window, cx)
})),
}
}
fn choose_profile(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.mode = Mode::choose_profile(window, cx);
self.focus_handle(cx).focus(window);
}
fn new_profile(
&mut self,
base_profile_id: Option<Arc<str>>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let name_editor = cx.new(|cx| Editor::single_line(window, cx));
name_editor.update(cx, |editor, cx| {
editor.set_placeholder_text("Profile name", cx);
});
self.mode = Mode::NewProfile(NewProfileMode {
name_editor,
base_profile_id,
});
self.focus_handle(cx).focus(window);
}
pub fn view_profile(
&mut self,
profile_id: Arc<str>,
@@ -156,7 +85,6 @@ impl ManageProfilesModal {
) {
self.mode = Mode::ViewProfile(ViewProfileMode {
profile_id,
fork_profile: NavigableEntry::focusable(cx),
configure_tools: NavigableEntry::focusable(cx),
});
self.focus_handle(cx).focus(window);
@@ -177,7 +105,6 @@ impl ManageProfilesModal {
let delegate = ToolPickerDelegate::new(
self.fs.clone(),
self.tools.clone(),
self.thread_store.clone(),
profile_id.clone(),
profile,
cx,
@@ -192,97 +119,15 @@ impl ManageProfilesModal {
});
self.mode = Mode::ConfigureTools {
profile_id,
tool_picker,
_subscription: dismiss_subscription,
};
self.focus_handle(cx).focus(window);
}
fn confirm(&mut self, window: &mut Window, cx: &mut Context<Self>) {
match &self.mode {
Mode::ChooseProfile { .. } => {}
Mode::NewProfile(mode) => {
let settings = AssistantSettings::get_global(cx);
fn confirm(&mut self, _window: &mut Window, _cx: &mut Context<Self>) {}
let base_profile = mode
.base_profile_id
.as_ref()
.and_then(|profile_id| settings.profiles.get(profile_id).cloned());
let name = mode.name_editor.read(cx).text(cx);
let profile_id: Arc<str> = name.to_case(Case::Kebab).into();
let profile = AgentProfile {
name: name.into(),
tools: base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default(),
context_servers: base_profile
.map(|profile| profile.context_servers)
.unwrap_or_default(),
};
self.create_profile(profile_id.clone(), profile, cx);
self.view_profile(profile_id, window, cx);
}
Mode::ViewProfile(_) => {}
Mode::ConfigureTools { .. } => {}
}
}
fn cancel(&mut self, window: &mut Window, cx: &mut Context<Self>) {
match &self.mode {
Mode::ChooseProfile { .. } => {}
Mode::NewProfile(mode) => {
if let Some(profile_id) = mode.base_profile_id.clone() {
self.view_profile(profile_id, window, cx);
} else {
self.choose_profile(window, cx);
}
}
Mode::ViewProfile(_) => self.choose_profile(window, cx),
Mode::ConfigureTools { .. } => {}
}
}
fn create_profile(&self, profile_id: Arc<str>, profile: AgentProfile, cx: &mut Context<Self>) {
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, {
move |settings, _cx| match settings {
AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
settings,
)) => {
let profiles = settings.profiles.get_or_insert_default();
if profiles.contains_key(&profile_id) {
log::error!("profile with ID '{profile_id}' already exists");
return;
}
profiles.insert(
profile_id,
AgentProfileContent {
name: profile.name.into(),
tools: profile.tools,
context_servers: profile
.context_servers
.into_iter()
.map(|(server_id, preset)| {
(
server_id,
ContextServerPresetContent {
tools: preset.tools,
},
)
})
.collect(),
},
);
}
_ => {}
}
});
}
fn cancel(&mut self, _window: &mut Window, _cx: &mut Context<Self>) {}
}
impl ModalView for ManageProfilesModal {}
@@ -290,10 +135,9 @@ impl ModalView for ManageProfilesModal {}
impl Focusable for ManageProfilesModal {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match &self.mode {
Mode::ChooseProfile { profile_picker, .. } => profile_picker.focus_handle(cx),
Mode::NewProfile(mode) => mode.name_editor.focus_handle(cx),
Mode::ViewProfile(_) => self.focus_handle.clone(),
Mode::ChooseProfile(profile_picker) => profile_picker.focus_handle(cx),
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
Mode::ViewProfile(_) => self.focus_handle.clone(),
}
}
}
@@ -301,122 +145,55 @@ impl Focusable for ManageProfilesModal {
impl EventEmitter<DismissEvent> for ManageProfilesModal {}
impl ManageProfilesModal {
fn render_new_profile(
&mut self,
mode: NewProfileMode,
_window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
v_flex()
.id("new-profile")
.track_focus(&self.focus_handle(cx))
.child(h_flex().p_2().child(mode.name_editor.clone()))
}
fn render_view_profile(
&mut self,
mode: ViewProfileMode,
window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
let settings = AssistantSettings::get_global(cx);
let profile_name = settings
.profiles
.get(&mode.profile_id)
.map(|profile| profile.name.clone())
.unwrap_or_else(|| "Unknown".into());
Navigable::new(
div()
.track_focus(&self.focus_handle(cx))
.size_full()
.child(ProfileModalHeader::new(
profile_name,
IconName::ZedAssistant,
))
.child(
v_flex()
.pb_1()
.child(ListSeparator)
.child(
div()
.id("fork-profile")
.track_focus(&mode.fork_profile.focus_handle)
.on_action({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _: &menu::Confirm, window, cx| {
this.new_profile(Some(profile_id.clone()), window, cx);
})
v_flex().child(
div()
.id("configure-tools")
.track_focus(&mode.configure_tools.focus_handle)
.on_action({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _: &menu::Confirm, window, cx| {
this.configure_tools(profile_id.clone(), window, cx);
})
.child(
ListItem::new("fork-profile")
.toggle_state(
mode.fork_profile
.focus_handle
.contains_focused(window, cx),
)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.start_slot(Icon::new(IconName::GitBranch))
.child(Label::new("Fork Profile"))
.on_click({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _, window, cx| {
this.new_profile(
Some(profile_id.clone()),
window,
cx,
);
})
}),
),
)
.child(
div()
.id("configure-tools")
.track_focus(&mode.configure_tools.focus_handle)
.on_action({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _: &menu::Confirm, window, cx| {
this.configure_tools(profile_id.clone(), window, cx);
})
})
.child(
ListItem::new("configure-tools")
.toggle_state(
mode.configure_tools
.focus_handle
.contains_focused(window, cx),
)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.start_slot(Icon::new(IconName::Cog))
.child(Label::new("Configure Tools"))
.on_click({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _, window, cx| {
this.configure_tools(
profile_id.clone(),
window,
cx,
);
})
}),
),
),
})
.child(
ListItem::new("configure-tools")
.toggle_state(
mode.configure_tools
.focus_handle
.contains_focused(window, cx),
)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.start_slot(Icon::new(IconName::Cog))
.child(Label::new("Configure Tools"))
.on_click({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _, window, cx| {
this.configure_tools(profile_id.clone(), window, cx);
})
}),
),
),
)
.into_any_element(),
)
.entry(mode.fork_profile)
.entry(mode.configure_tools)
}
}
impl Render for ManageProfilesModal {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let settings = AssistantSettings::get_global(cx);
div()
.elevation_3(cx)
.w(rems(34.))
@@ -428,37 +205,11 @@ impl Render for ManageProfilesModal {
}))
.on_mouse_down_out(cx.listener(|_this, _, _, cx| cx.emit(DismissEvent)))
.child(match &self.mode {
Mode::ChooseProfile { profile_picker, .. } => div()
.child(ProfileModalHeader::new("Profiles", IconName::ZedAssistant))
.child(ListSeparator)
.child(profile_picker.clone())
.into_any_element(),
Mode::NewProfile(mode) => self
.render_new_profile(mode.clone(), window, cx)
.into_any_element(),
Mode::ChooseProfile(profile_picker) => profile_picker.clone().into_any_element(),
Mode::ViewProfile(mode) => self
.render_view_profile(mode.clone(), window, cx)
.into_any_element(),
Mode::ConfigureTools {
profile_id,
tool_picker,
..
} => {
let profile_name = settings
.profiles
.get(profile_id)
.map(|profile| profile.name.clone())
.unwrap_or_else(|| "Unknown".into());
div()
.child(ProfileModalHeader::new(
format!("{profile_name}: Configure Tools"),
IconName::Cog,
))
.child(ListSeparator)
.child(tool_picker.clone())
.into_any_element()
}
Mode::ConfigureTools { tool_picker, .. } => tool_picker.clone().into_any_element(),
})
}
}

View File

@@ -1,38 +0,0 @@
use ui::prelude::*;
#[derive(IntoElement)]
pub struct ProfileModalHeader {
label: SharedString,
icon: IconName,
}
impl ProfileModalHeader {
pub fn new(label: impl Into<SharedString>, icon: IconName) -> Self {
Self {
label: label.into(),
icon,
}
}
}
impl RenderOnce for ProfileModalHeader {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
h_flex()
.w_full()
.px(DynamicSpacing::Base12.rems(cx))
.pt(DynamicSpacing::Base08.rems(cx))
.pb(DynamicSpacing::Base04.rems(cx))
.rounded_t_sm()
.gap_1p5()
.child(Icon::new(self.icon).size(IconSize::XSmall))
.child(
h_flex().gap_1().overflow_x_hidden().child(
div()
.max_w_96()
.overflow_x_hidden()
.text_ellipsis()
.child(Headline::new(self.label).size(HeadlineSize::XSmall)),
),
)
}
}

View File

@@ -21,7 +21,7 @@ impl ProfilePicker {
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx).modal(false));
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
Self { picker }
}
}

View File

@@ -9,19 +9,17 @@ use fs::Fs;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window};
use picker::{Picker, PickerDelegate};
use settings::{update_settings_file, Settings as _};
use settings::update_settings_file;
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
use util::ResultExt as _;
use crate::ThreadStore;
pub struct ToolPicker {
picker: Entity<Picker<ToolPickerDelegate>>,
}
impl ToolPicker {
pub fn new(delegate: ToolPickerDelegate, window: &mut Window, cx: &mut Context<Self>) -> Self {
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx).modal(false));
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
Self { picker }
}
}
@@ -48,7 +46,6 @@ pub struct ToolEntry {
pub struct ToolPickerDelegate {
tool_picker: WeakEntity<ToolPicker>,
thread_store: WeakEntity<ThreadStore>,
fs: Arc<dyn Fs>,
tools: Vec<ToolEntry>,
profile_id: Arc<str>,
@@ -61,7 +58,6 @@ impl ToolPickerDelegate {
pub fn new(
fs: Arc<dyn Fs>,
tool_set: Arc<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
profile_id: Arc<str>,
profile: AgentProfile,
cx: &mut Context<ToolPicker>,
@@ -77,7 +73,6 @@ impl ToolPickerDelegate {
Self {
tool_picker: cx.entity().downgrade(),
thread_store,
fs,
tools: tool_entries,
profile_id,
@@ -188,15 +183,6 @@ impl PickerDelegate for ToolPickerDelegate {
}
};
let active_profile_id = &AssistantSettings::get_global(cx).default_profile;
if active_profile_id == &self.profile_id {
self.thread_store
.update(cx, |this, _cx| {
this.load_profile(&self.profile);
})
.log_err();
}
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, {
let profile_id = self.profile_id.clone();
let default_profile = self.profile.clone();

View File

@@ -225,12 +225,12 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
if workspace
.panel::<Self>(cx)
.is_some_and(|panel| panel.read(cx).enabled(cx))
{
workspace.toggle_panel_focus::<Self>(window, cx);
let settings = AssistantSettings::get_global(cx);
if !settings.enabled {
return;
}
workspace.toggle_panel_focus::<Self>(window, cx);
}
pub(crate) fn local_timezone(&self) -> UtcOffset {
@@ -637,8 +637,12 @@ impl Panel for AssistantPanel {
}
fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
(self.enabled(cx) && AssistantSettings::get_global(cx).button)
.then_some(IconName::ZedAssistant)
let settings = AssistantSettings::get_global(cx);
if !settings.enabled || !settings.button {
return None;
}
Some(IconName::ZedAssistant)
}
fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {
@@ -652,10 +656,6 @@ impl Panel for AssistantPanel {
fn activation_priority(&self) -> u32 {
3
}
fn enabled(&self, cx: &App) -> bool {
AssistantSettings::get_global(cx).enabled
}
}
impl AssistantPanel {

View File

@@ -259,6 +259,7 @@ impl ContextPicker {
&path_prefix,
false,
context_store.clone(),
None,
cx,
)
.into_any()
@@ -400,6 +401,7 @@ impl ContextPicker {
RecentEntry::Thread(ThreadContextEntry {
id: thread.id,
summary: thread.summary,
highlight_positions: None,
})
}),
)
@@ -517,6 +519,7 @@ fn recent_context_picker_entries(
RecentEntry::Thread(ThreadContextEntry {
id: thread.id,
summary: thread.summary,
highlight_positions: None,
})
}),
);

View File

@@ -1,6 +1,6 @@
use std::cell::RefCell;
use std::ops::Range;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
@@ -70,18 +70,18 @@ impl ContextPickerCompletionProvider {
.filter_map(|entry| match entry {
super::RecentEntry::File {
project_path,
path_prefix,
} => Some(Self::completion_for_path(
path_prefix: _,
} => Self::completion_for_path(
project_path.clone(),
path_prefix,
true,
false,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
workspace.clone(),
cx,
)),
),
super::RecentEntry::Thread(thread_context_entry) => {
let thread_store = thread_store
.as_ref()
@@ -120,24 +120,56 @@ impl ContextPickerCompletionProvider {
completions
}
fn build_code_label_for_full_path(
file_name: &str,
directory: Option<&str>,
fn full_path_for_entry(
worktree_id: WorktreeId,
path: &Path,
workspace: Entity<Workspace>,
cx: &App,
) -> CodeLabel {
) -> Option<PathBuf> {
let worktree = workspace
.read(cx)
.project()
.read(cx)
.worktree_for_id(worktree_id, cx)?
.read(cx);
let mut full_path = PathBuf::from(worktree.root_name());
full_path.push(path);
Some(full_path)
}
fn build_code_label_for_full_path(
worktree_id: WorktreeId,
path: &Path,
workspace: Entity<Workspace>,
cx: &App,
) -> Option<CodeLabel> {
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabel::default();
let worktree = workspace
.read(cx)
.project()
.read(cx)
.worktree_for_id(worktree_id, cx)?;
let entry = worktree.read(cx).entry_for_path(&path)?;
let file_name = path.file_name()?.to_string_lossy();
label.push_str(&file_name, None);
label.push_str(" ", None);
if entry.is_dir() {
label.push_str("/ ", None);
} else {
label.push_str(" ", None);
};
if let Some(directory) = directory {
label.push_str(&directory, comment_id);
let mut path_hint = PathBuf::from(worktree.read(cx).root_name());
if let Some(path_to_entry) = path.parent() {
path_hint.push(path_to_entry);
}
label.push_str(&path_hint.to_string_lossy(), comment_id);
label.filter_range = 0..label.text().len();
label
Some(label)
}
fn completion_for_thread(
@@ -242,36 +274,32 @@ impl ContextPickerCompletionProvider {
fn completion_for_path(
project_path: ProjectPath,
path_prefix: &str,
is_recent: bool,
is_directory: bool,
excerpt_id: ExcerptId,
source_range: Range<Anchor>,
editor: Entity<Editor>,
context_store: Entity<ContextStore>,
workspace: Entity<Workspace>,
cx: &App,
) -> Completion {
let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
&project_path.path,
path_prefix,
);
) -> Option<Completion> {
let label = Self::build_code_label_for_full_path(
&file_name,
directory.as_ref().map(|s| s.as_ref()),
project_path.worktree_id,
&project_path.path,
workspace.clone(),
cx,
);
let full_path = if let Some(directory) = directory {
format!("{}{}", directory, file_name)
} else {
file_name.to_string()
};
)?;
let full_path = Self::full_path_for_entry(
project_path.worktree_id,
&project_path.path,
workspace.clone(),
cx,
)?;
let crease_icon_path = if is_directory {
FileIcons::get_folder_icon(false, cx).unwrap_or_else(|| IconName::Folder.path().into())
} else {
FileIcons::get_icon(Path::new(&full_path), cx)
.unwrap_or_else(|| IconName::File.path().into())
FileIcons::get_icon(&full_path, cx).unwrap_or_else(|| IconName::File.path().into())
};
let completion_icon_path = if is_recent {
IconName::HistoryRerun.path().into()
@@ -279,9 +307,15 @@ impl ContextPickerCompletionProvider {
crease_icon_path.clone()
};
let new_text = format!("@file {}", full_path);
let crease_name = project_path
.path
.file_name()
.map(|file_name| file_name.to_string_lossy().to_string())
.unwrap_or_else(|| "untitled".to_string());
let new_text = format!("@file {}", full_path.to_string_lossy());
let new_text_len = new_text.len();
Completion {
Some(Completion {
old_range: source_range.clone(),
new_text,
label,
@@ -290,7 +324,7 @@ impl ContextPickerCompletionProvider {
icon_path: Some(completion_icon_path),
confirm: Some(confirm_completion_callback(
crease_icon_path,
file_name,
crease_name.into(),
excerpt_id,
source_range.start,
new_text_len,
@@ -306,7 +340,7 @@ impl ContextPickerCompletionProvider {
})
},
)),
}
})
}
}
@@ -363,34 +397,33 @@ impl CompletionProvider for ContextPickerCompletionProvider {
.update(|cx| {
super::file_context_picker::search_paths(
query,
Arc::<AtomicBool>::default(),
Arc::new(AtomicBool::default()),
&workspace,
cx,
)
})?
.await;
if let Some(editor) = editor.upgrade() {
completions.reserve(path_matches.len());
cx.update(|cx| {
completions.extend(path_matches.iter().map(|mat| {
Self::completion_for_path(
ProjectPath {
worktree_id: WorktreeId::from_usize(mat.worktree_id),
path: mat.path.clone(),
},
&mat.path_prefix,
false,
mat.is_dir,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
cx,
)
}));
})?;
}
completions.reserve(path_matches.len());
cx.update(|cx| {
completions.extend(path_matches.iter().filter_map(|mat| {
let editor = editor.upgrade()?;
Self::completion_for_path(
ProjectPath {
worktree_id: WorktreeId::from_usize(mat.worktree_id),
path: mat.path.clone(),
},
false,
mat.is_dir,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
workspace.clone(),
cx,
)
}));
})?;
}
Some(ContextPickerMode::Fetch) => {
if let Some(editor) = editor.upgrade() {
@@ -738,6 +771,7 @@ mod tests {
.unwrap();
}
//TODO: Construct the editor without an actual buffer that points to a file
let item = workspace
.update_in(&mut cx, |workspace, window, cx| {
workspace.open_path(
@@ -787,10 +821,10 @@ mod tests {
assert_eq!(
current_completion_labels(editor),
&[
"seven.txt dir/b/",
"six.txt dir/b/",
"five.txt dir/b/",
"four.txt dir/a/",
format!("seven.txt {}", separator!("dir/b")).as_str(),
format!("six.txt {}", separator!("dir/b")).as_str(),
format!("five.txt {}", separator!("dir/b")).as_str(),
format!("four.txt {}", separator!("dir/a")).as_str(),
"Files & Directories",
"Fetch"
]
@@ -819,7 +853,10 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(editor.text(cx), "Lorem @file one");
assert!(editor.has_visible_completions_menu());
assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]);
assert_eq!(
current_completion_labels(editor),
vec![format!("one.txt {}", separator!("dir/a")).as_str(),]
);
});
editor.update_in(&mut cx, |editor, window, cx| {
@@ -828,7 +865,10 @@ mod tests {
});
editor.update(&mut cx, |editor, cx| {
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt",);
assert_eq!(
editor.text(cx),
format!("Lorem @file {}", separator!("dir/a/one.txt"))
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
crease_ranges(editor, cx),
@@ -839,7 +879,10 @@ mod tests {
cx.simulate_input(" ");
editor.update(&mut cx, |editor, cx| {
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt ",);
assert_eq!(
editor.text(cx),
format!("Lorem @file {} ", separator!("dir/a/one.txt"))
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
crease_ranges(editor, cx),
@@ -850,7 +893,10 @@ mod tests {
cx.simulate_input("Ipsum ");
editor.update(&mut cx, |editor, cx| {
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt Ipsum ",);
assert_eq!(
editor.text(cx),
format!("Lorem @file {} Ipsum ", separator!("dir/a/one.txt"))
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
crease_ranges(editor, cx),
@@ -861,7 +907,10 @@ mod tests {
cx.simulate_input("@file ");
editor.update(&mut cx, |editor, cx| {
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt Ipsum @file ",);
assert_eq!(
editor.text(cx),
format!("Lorem @file {} Ipsum @file ", separator!("dir/a/one.txt"))
);
assert!(editor.has_visible_completions_menu());
assert_eq!(
crease_ranges(editor, cx),
@@ -878,7 +927,11 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"Lorem @file dir/a/one.txt Ipsum @file dir/b/seven.txt"
format!(
"Lorem @file {} Ipsum @file {}",
separator!("dir/a/one.txt"),
separator!("dir/b/seven.txt")
)
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
@@ -895,7 +948,11 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"Lorem @file dir/a/one.txt Ipsum @file dir/b/seven.txt\n@"
format!(
"Lorem @file {} Ipsum @file {}\n@",
separator!("dir/a/one.txt"),
separator!("dir/b/seven.txt")
)
);
assert!(editor.has_visible_completions_menu());
assert_eq!(
@@ -916,7 +973,12 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"Lorem @file dir/a/one.txt Ipsum @file dir/b/seven.txt\n@file dir/b/six.txt"
format!(
"Lorem @file {} Ipsum @file {}\n@file {}",
separator!("dir/a/one.txt"),
separator!("dir/b/seven.txt"),
separator!("dir/b/six.txt"),
)
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(

View File

@@ -9,7 +9,7 @@ use gpui::{
};
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
use ui::{prelude::*, ListItem, Tooltip};
use ui::{prelude::*, HighlightedLabel, ListItem, Tooltip};
use util::ResultExt as _;
use workspace::{notifications::NotifyResultExt, Workspace};
@@ -193,6 +193,7 @@ impl PickerDelegate for FileContextPickerDelegate {
&path_match.path_prefix,
path_match.is_dir,
self.context_store.clone(),
Some(&path_match.positions),
cx,
)),
)
@@ -273,17 +274,18 @@ pub(crate) fn search_paths(
}
}
pub fn extract_file_name_and_directory(
pub fn render_file_context_entry(
id: ElementId,
path: &Path,
path_prefix: &str,
) -> (SharedString, Option<SharedString>) {
if path == Path::new("") {
path_prefix: &Arc<str>,
is_directory: bool,
context_store: WeakEntity<ContextStore>,
highlight_positions: Option<&[usize]>,
cx: &App,
) -> Stateful<Div> {
let (file_name, directory) = if path == Path::new("") {
(
SharedString::from(
path_prefix
.trim_end_matches(std::path::MAIN_SEPARATOR)
.to_string(),
),
SharedString::from(path_prefix.trim_end_matches('/').to_string()),
None,
)
} else {
@@ -294,9 +296,7 @@ pub fn extract_file_name_and_directory(
.to_string()
.into();
let mut directory = path_prefix
.trim_end_matches(std::path::MAIN_SEPARATOR)
.to_string();
let mut directory = path_prefix.to_string();
if !directory.ends_with('/') {
directory.push('/');
}
@@ -305,19 +305,8 @@ pub fn extract_file_name_and_directory(
directory.push('/');
}
(file_name, Some(directory.into()))
}
}
pub fn render_file_context_entry(
id: ElementId,
path: &Path,
path_prefix: &Arc<str>,
is_directory: bool,
context_store: WeakEntity<ContextStore>,
cx: &App,
) -> Stateful<Div> {
let (file_name, directory) = extract_file_name_and_directory(path, path_prefix);
(file_name, Some(directory))
};
let added = context_store.upgrade().and_then(|context_store| {
if is_directory {
@@ -338,6 +327,11 @@ pub fn render_file_context_entry(
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
let label = match highlight_positions {
Some(positions) => HighlightedLabel::new(file_name, positions.to_vec()).into_any_element(),
None => Label::new(file_name).into_any_element(),
};
h_flex()
.id(id)
.gap_1p5()
@@ -346,7 +340,7 @@ pub fn render_file_context_entry(
.child(
h_flex()
.gap_1()
.child(Label::new(file_name))
.child(label)
.children(directory.map(|directory| {
Label::new(directory)
.size(LabelSize::Small)

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
use fuzzy::StringMatchCandidate;
use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity};
use picker::{Picker, PickerDelegate};
use ui::{prelude::*, ListItem};
use ui::{prelude::*, HighlightedLabel, ListItem};
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::{self, ContextStore};
@@ -51,6 +51,7 @@ impl Render for ThreadContextPicker {
pub struct ThreadContextEntry {
pub id: ThreadId,
pub summary: SharedString,
pub highlight_positions: Option<Vec<usize>>,
}
pub struct ThreadContextPickerDelegate {
@@ -173,8 +174,18 @@ impl PickerDelegate for ThreadContextPickerDelegate {
) -> Option<Self::ListItem> {
let thread = &self.matches[ix];
let highlights = thread
.highlight_positions
.as_ref()
.map(|vec| vec.as_slice());
Some(ListItem::new(ix).inset(true).toggle_state(selected).child(
render_thread_context_entry(thread, self.context_store.clone(), cx),
render_thread_context_entry_with_highlights(
thread,
self.context_store.clone(),
highlights.as_deref(),
cx,
),
))
}
}
@@ -182,12 +193,31 @@ impl PickerDelegate for ThreadContextPickerDelegate {
pub fn render_thread_context_entry(
thread: &ThreadContextEntry,
context_store: WeakEntity<ContextStore>,
cx: &mut App,
cx: &App,
) -> Div {
render_thread_context_entry_with_highlights(thread, context_store, None, cx)
}
pub fn render_thread_context_entry_with_highlights(
thread: &ThreadContextEntry,
context_store: WeakEntity<ContextStore>,
highlight_positions: Option<&[usize]>,
cx: &App,
) -> Div {
let added = context_store.upgrade().map_or(false, |ctx_store| {
ctx_store.read(cx).includes_thread(&thread.id).is_some()
});
// Choose between regular label or highlighted label based on position data
let summary_element = match highlight_positions {
Some(positions) => HighlightedLabel::new(thread.summary.clone(), positions.to_vec())
.truncate()
.into_any_element(),
None => Label::new(thread.summary.clone())
.truncate()
.into_any_element(),
};
h_flex()
.gap_1p5()
.w_full()
@@ -201,7 +231,7 @@ pub fn render_thread_context_entry(
.size(IconSize::XSmall)
.color(Color::Muted),
)
.child(Label::new(thread.summary.clone()).truncate()),
.child(summary_element),
)
.when(added, |el| {
el.child(
@@ -222,40 +252,60 @@ pub(crate) fn search_threads(
thread_store: Entity<ThreadStore>,
cx: &mut App,
) -> Task<Vec<ThreadContextEntry>> {
let threads = thread_store.update(cx, |this, _cx| {
this.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
})
.collect::<Vec<_>>()
});
// Get threads from the thread store
let threads = thread_store
.read(cx)
.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
highlight_positions: None, // Initialize with no highlights
})
.collect::<Vec<_>>();
// Return early for empty queries or if there are no threads
if threads.is_empty() || query.is_empty() {
return Task::ready(threads);
}
// Create candidates list for fuzzy matching
let candidates: Vec<_> = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect();
let executor = cx.background_executor().clone();
cx.background_spawn(async move {
if query.is_empty() {
threads
} else {
let candidates = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;
let threads_clone = threads.clone();
matches
.into_iter()
.map(|mat| threads[mat.candidate_id].clone())
.collect()
}
// Use background executor for the matching
cx.background_executor().spawn(async move {
// Perform fuzzy matching in background
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;
// Create result entries with highlight positions included
let result = matches
.into_iter()
.filter_map(|mat| {
let thread = threads_clone.get(mat.candidate_id)?;
// Create a new entry with the highlight positions
Some(ThreadContextEntry {
id: thread.id.clone(),
summary: thread.summary.clone(),
highlight_positions: Some(mat.positions),
})
})
.collect::<Vec<ThreadContextEntry>>();
result
})
}
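Note on the rewritten search_threads above: fuzzy matches now carry their character positions so the picker can render a HighlightedLabel instead of a plain Label. A rough, self-contained sketch of that pattern, using the public fuzzy-matcher crate as a stand-in for Zed's internal `fuzzy` crate (types and names here are illustrative):

```rust
use fuzzy_matcher::skim::SkimMatcherV2;
use fuzzy_matcher::FuzzyMatcher;

#[derive(Debug, Clone)]
struct ThreadEntry {
    summary: String,
    highlight_positions: Option<Vec<usize>>,
}

// Match every thread summary against the query and keep the matched character
// positions, so the list item can highlight them.
fn search_threads(query: &str, summaries: &[&str]) -> Vec<ThreadEntry> {
    let matcher = SkimMatcherV2::default();
    summaries
        .iter()
        .copied()
        .filter_map(|summary| {
            let (_score, positions) = matcher.fuzzy_indices(summary, query)?;
            Some(ThreadEntry {
                summary: summary.to_string(),
                highlight_positions: Some(positions),
            })
        })
        .collect()
}

fn main() {
    for entry in search_threads("rust", &["Debugging Rust lifetimes", "Grocery list"]) {
        println!("{entry:?}");
    }
}
```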

View File

@@ -857,13 +857,6 @@ impl Thread {
request.messages.push(request_message);
}
// Set a cache breakpoint at the second-to-last message.
// https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching
let breakpoint_index = request.messages.len() - 2;
for (index, message) in request.messages.iter_mut().enumerate() {
message.cache = index == breakpoint_index;
}
if !referenced_context_ids.is_empty() {
let mut context_message = LanguageModelRequestMessage {
role: Role::User,
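Note on the removed block above: it marked the second-to-last request message as an Anthropic prompt-cache breakpoint. A standalone sketch with simplified types; the original `len() - 2` would underflow with fewer than two messages, so the sketch guards that case:

```rust
#[derive(Debug)]
struct RequestMessage {
    text: String,
    cache: bool, // cache-breakpoint flag, as on LanguageModelRequestMessage
}

// Mark only the second-to-last message so everything up to it can be reused
// from Anthropic's prompt cache on the next request.
fn set_cache_breakpoint(messages: &mut [RequestMessage]) {
    let Some(breakpoint_index) = messages.len().checked_sub(2) else {
        return; // fewer than two messages: nothing to cache against
    };
    for (index, message) in messages.iter_mut().enumerate() {
        message.cache = index == breakpoint_index;
    }
}

fn main() {
    let mut messages: Vec<RequestMessage> = ["system", "user", "assistant", "user"]
        .iter()
        .map(|text| RequestMessage { text: text.to_string(), cache: false })
        .collect();
    set_cache_breakpoint(&mut messages);
    assert!(messages[2].cache && !messages[3].cache);
    println!("{messages:#?}");
}
```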

View File

@@ -24,7 +24,7 @@ impl ToolReadyPopUp {
pub fn window_options(screen: Rc<dyn PlatformDisplay>, cx: &App) -> WindowOptions {
let size = Size {
width: px(450.),
width: px(440.),
height: px(72.),
};
@@ -93,18 +93,8 @@ impl Render for ToolReadyPopUp {
)
.child(
v_flex()
.child(
div()
.text_size(px(16.))
.text_color(cx.theme().colors().text)
.child("Agent Panel"),
)
.child(
div()
.text_size(px(14.))
.text_color(cx.theme().colors().text_muted)
.child(self.caption.clone()),
),
.child(Headline::new("Agent Panel").size(HeadlineSize::XSmall))
.child(Label::new(self.caption.clone()).color(Color::Muted)),
),
)
.child(

View File

@@ -21,7 +21,6 @@ clap.workspace = true
client.workspace = true
collections.workspace = true
context_server.workspace = true
dap.workspace = true
env_logger.workspace = true
fs.workspace = true
futures.workspace = true

View File

@@ -3,7 +3,6 @@ use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
use assistant_tool::ToolWorkingSet;
use client::{Client, UserStore};
use collections::HashMap;
use dap::DapRegistry;
use futures::StreamExt;
use gpui::{prelude::*, App, AsyncApp, Entity, SemanticVersion, Subscription, Task};
use language::LanguageRegistry;
@@ -51,7 +50,6 @@ impl HeadlessAssistant {
app_state.node_runtime.clone(),
app_state.user_store.clone(),
app_state.languages.clone(),
Arc::new(DapRegistry::default()),
app_state.fs.clone(),
env,
cx,

View File

@@ -29,9 +29,13 @@ html_to_markdown.workspace = true
http_client.workspace = true
indexed_docs.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
project.workspace = true
prompt_store.workspace = true
rope.workspace = true
schemars.workspace = true
semantic_index.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true

View File

@@ -1,3 +1,4 @@
mod auto_command;
mod cargo_workspace_command;
mod context_server_command;
mod default_command;
@@ -7,7 +8,9 @@ mod docs_command;
mod fetch_command;
mod file_command;
mod now_command;
mod project_command;
mod prompt_command;
mod search_command;
mod selection_command;
mod streaming_example_command;
mod symbols_command;
@@ -18,6 +21,7 @@ use gpui::App;
use language::{CodeLabel, HighlightId};
use ui::ActiveTheme as _;
pub use crate::auto_command::*;
pub use crate::cargo_workspace_command::*;
pub use crate::context_server_command::*;
pub use crate::default_command::*;
@@ -27,7 +31,9 @@ pub use crate::docs_command::*;
pub use crate::fetch_command::*;
pub use crate::file_command::*;
pub use crate::now_command::*;
pub use crate::project_command::*;
pub use crate::prompt_command::*;
pub use crate::search_command::*;
pub use crate::selection_command::*;
pub use crate::streaming_example_command::*;
pub use crate::symbols_command::*;

View File

@@ -0,0 +1,371 @@
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use feature_flags::FeatureFlag;
use futures::StreamExt;
use gpui::{App, AsyncApp, Task, WeakEntity, Window};
use language::{CodeLabel, LspAdapterDelegate};
use language_model::{
LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, Role,
};
use semantic_index::{FileSummary, SemanticDb};
use smol::channel;
use std::sync::{atomic::AtomicBool, Arc};
use ui::{prelude::*, BorrowAppContext};
use util::ResultExt;
use workspace::Workspace;
use crate::create_label_for_command;
pub struct AutoSlashCommandFeatureFlag;
impl FeatureFlag for AutoSlashCommandFeatureFlag {
const NAME: &'static str = "auto-slash-command";
}
pub struct AutoCommand;
impl SlashCommand for AutoCommand {
fn name(&self) -> String {
"auto".into()
}
fn description(&self) -> String {
"Automatically infer what context to add".into()
}
fn icon(&self) -> IconName {
IconName::Wand
}
fn menu_text(&self) -> String {
self.description()
}
fn label(&self, cx: &App) -> CodeLabel {
create_label_for_command("auto", &["--prompt"], cx)
}
fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
workspace: Option<WeakEntity<Workspace>>,
_window: &mut Window,
cx: &mut App,
) -> Task<Result<Vec<ArgumentCompletion>>> {
// There's no autocomplete for a prompt, since it's arbitrary text.
// However, we can use this opportunity to kick off a drain of the backlog.
// That way, it can hopefully be done resummarizing by the time we've actually
// typed out our prompt. This re-runs on every keystroke during autocomplete,
// but in the future, we could instead do it only once, when /auto is first entered.
let Some(workspace) = workspace.and_then(|ws| ws.upgrade()) else {
log::warn!("workspace was dropped or unavailable during /auto autocomplete");
return Task::ready(Ok(Vec::new()));
};
let project = workspace.read(cx).project().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow!("No project indexer, cannot use /auto")));
};
let cx: &mut App = cx;
cx.spawn(async move |cx| {
let task = project_index.read_with(cx, |project_index, cx| {
project_index.flush_summary_backlogs(cx)
})?;
cx.background_spawn(task).await;
anyhow::Ok(Vec::new())
})
}
fn requires_argument(&self) -> bool {
true
}
fn run(
self: Arc<Self>,
arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
_context_buffer: language::BufferSnapshot,
workspace: WeakEntity<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
window: &mut Window,
cx: &mut App,
) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
if arguments.is_empty() {
return Task::ready(Err(anyhow!("missing prompt")));
};
let argument = arguments.join(" ");
let original_prompt = argument.to_string();
let project = workspace.read(cx).project().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow!("no project indexer")));
};
let task = window.spawn(cx, async move |cx| {
let summaries = project_index
.read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
.await?;
commands_for_summaries(&summaries, &original_prompt, &cx).await
});
// As a convenience, append /auto's argument to the end of the prompt
// so you don't have to write it again.
let original_prompt = argument.to_string();
cx.background_spawn(async move {
let commands = task.await?;
let mut prompt = String::new();
log::info!(
"Translating this response into slash-commands: {:?}",
commands
);
for command in commands {
prompt.push('/');
prompt.push_str(&command.name);
prompt.push(' ');
prompt.push_str(&command.arg);
prompt.push('\n');
}
prompt.push('\n');
prompt.push_str(&original_prompt);
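// Illustration with assumed values: for commands [("search", "tokenizer"),
// ("file", "src/lib.rs")] and an original prompt of "how are tokens counted?",
// the resulting text is:
//
//   /search tokenizer
//   /file src/lib.rs
//
//   how are tokens counted?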
Ok(SlashCommandOutput {
text: prompt,
sections: Vec::new(),
run_commands_in_text: true,
}
.to_event_stream())
})
}
}
const PROMPT_INSTRUCTIONS_BEFORE_SUMMARY: &str = include_str!("prompt_before_summary.txt");
const PROMPT_INSTRUCTIONS_AFTER_SUMMARY: &str = include_str!("prompt_after_summary.txt");
fn summaries_prompt(summaries: &[FileSummary], original_prompt: &str) -> String {
let json_summaries = serde_json::to_string(summaries).unwrap();
format!("{PROMPT_INSTRUCTIONS_BEFORE_SUMMARY}\n{json_summaries}\n{PROMPT_INSTRUCTIONS_AFTER_SUMMARY}\n{original_prompt}")
}
/// The slash commands that the model is told about, and which we look for in the inference response.
const SUPPORTED_SLASH_COMMANDS: &[&str] = &["search", "file"];
#[derive(Debug, Clone)]
struct CommandToRun {
name: String,
arg: String,
}
/// Given the pre-indexed file summaries for this project, as well as the original prompt
/// string passed to `/auto`, get a list of slash commands to run, along with their arguments.
///
/// The model's response does not include the slashes (to reduce the chance that it makes a mistake),
/// so taking one of these returned Strings and turning it into a real slash-command-with-argument
/// involves prepending a slash to it.
///
/// This function will validate that each of the returned lines begins with one of SUPPORTED_SLASH_COMMANDS.
/// Any other lines it encounters will be discarded, with a warning logged.
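/// For example (illustrative): a returned line of `search token refresh logic` becomes the
/// slash command `/search token refresh logic` once the leading slash is prepended.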
async fn commands_for_summaries(
summaries: &[FileSummary],
original_prompt: &str,
cx: &AsyncApp,
) -> Result<Vec<CommandToRun>> {
if summaries.is_empty() {
log::warn!("Inferring no context because there were no summaries available.");
return Ok(Vec::new());
}
// Use the globally configured model to translate the summaries into slash-commands,
// because Qwen2-7B-Instruct has not done a good job at that task.
let Some(model) = cx.update(|cx| LanguageModelRegistry::read_global(cx).active_model())? else {
log::warn!("Can't infer context because there's no active model.");
return Ok(Vec::new());
};
// Only go up to 90% of the actual max token count, to reduce chances of
// exceeding the token count due to inaccuracies in the token counting heuristic.
let max_token_count = (model.max_token_count() * 9) / 10;
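    // Illustrative example: a model with a max of 8,192 tokens yields a budget of (8192 * 9) / 10 = 7,372 tokens.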
    // Rather than recursing (which would require this async function to use a pinned box),
// we use an explicit stack of arguments and answers for when we need to "recurse."
let mut stack = vec![summaries];
let mut final_response = Vec::new();
let mut prompts = Vec::new();
// TODO We only need to create multiple Requests because we currently
// don't have the ability to tell if a CompletionProvider::complete response
// was a "too many tokens in this request" error. If we had that, then
// we could try the request once, instead of having to make separate requests
// to check the token count and then afterwards to run the actual prompt.
let make_request = |prompt: String| LanguageModelRequest {
messages: vec![LanguageModelRequestMessage {
role: Role::User,
content: vec![prompt.into()],
// Nothing in here will benefit from caching
cache: false,
}],
tools: Vec::new(),
stop: Vec::new(),
temperature: None,
};
while let Some(current_summaries) = stack.pop() {
// The split can result in one slice being empty and the other having one element.
// Whenever that happens, skip the empty one.
if current_summaries.is_empty() {
continue;
}
log::info!(
"Inferring prompt context using {} file summaries",
current_summaries.len()
);
let prompt = summaries_prompt(&current_summaries, original_prompt);
let start = std::time::Instant::now();
// Per OpenAI, 1 token ~= 4 chars in English (we go with 4.5 to overestimate a bit, because failed API requests cost a lot of perf)
// Verifying this against an actual model.count_tokens() confirms that it's usually within ~5% of the correct answer, whereas
// getting the correct answer from tiktoken takes hundreds of milliseconds (compared to this arithmetic being ~free).
// source: https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
let token_estimate = prompt.len() * 2 / 9;
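        // Illustrative example: a 9,000-character prompt estimates to 9000 * 2 / 9 = 2,000 tokens.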
let duration = start.elapsed();
log::info!(
"Time taken to count tokens for prompt of length {:?}B: {:?}",
prompt.len(),
duration
);
if token_estimate < max_token_count {
prompts.push(prompt);
} else if current_summaries.len() == 1 {
log::warn!("Inferring context for a single file's summary failed because the prompt's token length exceeded the model's token limit.");
} else {
log::info!(
"Context inference using file summaries resulted in a prompt containing {token_estimate} tokens, which exceeded the model's max of {max_token_count}. Retrying as two separate prompts, each including half the number of summaries.",
);
let (left, right) = current_summaries.split_at(current_summaries.len() / 2);
stack.push(right);
stack.push(left);
}
}
let all_start = std::time::Instant::now();
let (tx, rx) = channel::bounded(1024);
let completion_streams = prompts
.into_iter()
.map(|prompt| {
let request = make_request(prompt.clone());
let model = model.clone();
let tx = tx.clone();
let stream = model.stream_completion(request, &cx);
(stream, tx)
})
.collect::<Vec<_>>();
cx.background_spawn(async move {
let futures = completion_streams
.into_iter()
.enumerate()
.map(|(ix, (stream, tx))| async move {
let start = std::time::Instant::now();
let events = stream.await?;
log::info!("Time taken for awaiting /await chunk stream #{ix}: {:?}", start.elapsed());
let completion: String = events
.filter_map(|event| async {
if let Ok(LanguageModelCompletionEvent::Text(text)) = event {
Some(text)
} else {
None
}
})
.collect()
.await;
log::info!("Time taken for all /auto chunks to come back for #{ix}: {:?}", start.elapsed());
for line in completion.split('\n') {
if let Some(first_space) = line.find(' ') {
let command = &line[..first_space].trim();
let arg = &line[first_space..].trim();
tx.send(CommandToRun {
name: command.to_string(),
arg: arg.to_string(),
})
.await?;
} else if !line.trim().is_empty() {
// All slash-commands currently supported in context inference need a space for the argument.
log::warn!(
"Context inference returned a non-blank line that contained no spaces (meaning no argument for the slash command): {:?}",
line
);
}
}
anyhow::Ok(())
})
.collect::<Vec<_>>();
let _ = futures::future::try_join_all(futures).await.log_err();
let duration = all_start.elapsed();
eprintln!("All futures completed in {:?}", duration);
})
.await;
drop(tx); // Close the channel so that rx.collect() won't hang. This is safe because all futures have completed.
let results = rx.collect::<Vec<_>>().await;
eprintln!(
"Finished collecting from the channel with {} results",
results.len()
);
for command in results {
// Don't return empty or duplicate commands
if !command.name.is_empty()
&& !final_response
.iter()
.any(|cmd: &CommandToRun| cmd.name == command.name && cmd.arg == command.arg)
{
if SUPPORTED_SLASH_COMMANDS
.iter()
.any(|supported| &command.name == supported)
{
final_response.push(command);
} else {
log::warn!(
"Context inference returned an unrecognized slash command: {:?}",
command
);
}
}
}
// Sort the commands by name (reversed just so that /search appears before /file)
final_response.sort_by(|cmd1, cmd2| cmd1.name.cmp(&cmd2.name).reverse());
Ok(final_response)
}

View File

@@ -0,0 +1,197 @@
use std::{
fmt::Write as _,
ops::DerefMut,
sync::{atomic::AtomicBool, Arc},
};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use feature_flags::FeatureFlag;
use gpui::{App, Task, WeakEntity};
use language::{Anchor, CodeLabel, LspAdapterDelegate};
use language_model::{LanguageModelRegistry, LanguageModelTool};
use prompt_store::PromptBuilder;
use schemars::JsonSchema;
use semantic_index::SemanticDb;
use serde::Deserialize;
use ui::prelude::*;
use workspace::Workspace;
use super::{create_label_for_command, search_command::add_search_result_section};
pub struct ProjectSlashCommandFeatureFlag;
impl FeatureFlag for ProjectSlashCommandFeatureFlag {
const NAME: &'static str = "project-slash-command";
}
pub struct ProjectSlashCommand {
prompt_builder: Arc<PromptBuilder>,
}
impl ProjectSlashCommand {
pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
Self { prompt_builder }
}
}
impl SlashCommand for ProjectSlashCommand {
fn name(&self) -> String {
"project".into()
}
fn label(&self, cx: &App) -> CodeLabel {
create_label_for_command("project", &[], cx)
}
fn description(&self) -> String {
"Generate a semantic search based on context".into()
}
fn icon(&self) -> IconName {
IconName::Folder
}
fn menu_text(&self) -> String {
self.description()
}
fn requires_argument(&self) -> bool {
false
}
fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakEntity<Workspace>>,
_window: &mut Window,
_cx: &mut App,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Ok(Vec::new()))
}
fn run(
self: Arc<Self>,
_arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
context_buffer: language::BufferSnapshot,
workspace: WeakEntity<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
window: &mut Window,
cx: &mut App,
) -> Task<SlashCommandResult> {
let model_registry = LanguageModelRegistry::read_global(cx);
let current_model = model_registry.active_model();
let prompt_builder = self.prompt_builder.clone();
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
let project = workspace.read(cx).project().clone();
let fs = project.read(cx).fs().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, async move |cx| {
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
let prompt =
prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;
let search_queries = current_model
.use_tool::<SearchQueries>(
language_model::LanguageModelRequest {
messages: vec![language_model::LanguageModelRequestMessage {
role: language_model::Role::User,
content: vec![language_model::MessageContent::Text(prompt)],
cache: false,
}],
tools: vec![],
stop: vec![],
temperature: None,
},
cx.deref_mut(),
)
.await?
.search_queries;
let results = project_index
.read_with(cx, |project_index, cx| {
project_index.search(search_queries.clone(), 25, cx)
})?
.await?;
let results = SemanticDb::load_results(results, &fs, &cx).await?;
cx.background_spawn(async move {
let mut output = "Project context:\n".to_string();
let mut sections = Vec::new();
for (ix, query) in search_queries.into_iter().enumerate() {
let start_ix = output.len();
writeln!(&mut output, "Results for {query}:").unwrap();
let mut has_results = false;
for result in &results {
if result.query_index == ix {
add_search_result_section(result, &mut output, &mut sections);
has_results = true;
}
}
if has_results {
sections.push(SlashCommandOutputSection {
range: start_ix..output.len(),
icon: IconName::MagnifyingGlass,
label: query.into(),
metadata: None,
});
output.push('\n');
} else {
output.truncate(start_ix);
}
}
sections.push(SlashCommandOutputSection {
range: 0..output.len(),
icon: IconName::Book,
label: "Project context".into(),
metadata: None,
});
Ok(SlashCommandOutput {
text: output,
sections,
run_commands_in_text: true,
}
.to_event_stream())
})
.await
})
}
}
#[derive(JsonSchema, Deserialize)]
struct SearchQueries {
/// An array of semantic search queries.
///
/// These queries will be used to search the user's codebase.
/// The function can only accept 4 queries, otherwise it will error.
    /// As such, it's important that you limit the length of the search_queries array to 4 queries or fewer.
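    /// Example (illustrative): ["database connection pooling", "token refresh logic"]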
search_queries: Vec<String>,
}
impl LanguageModelTool for SearchQueries {
fn name() -> String {
"search_queries".to_string()
}
fn description() -> String {
"Generate semantic search queries based on context".to_string()
}
}

View File

@@ -0,0 +1,24 @@
Actions have a cost, so only include actions that you think
will be helpful to you in doing a great job answering the
prompt in the future.
You must respond ONLY with a list of actions you would like to
perform. Each action should be on its own line, and followed by a space and then its parameter.
Actions can be performed more than once with different parameters.
Here is an example valid response:
```
file path/to/my/file.txt
file path/to/another/file.txt
search something to search for
search something else to search for
```
Once again, do not forget: you must respond ONLY in the format of
one action per line, and the action name should be followed by
its parameter. Your response must not include anything other
than a list of actions, with one action per line, in this format.
It is extremely important that you do not deviate from this format even slightly!
This is the end of my instructions for how to respond. The rest is the prompt:

View File

@@ -0,0 +1,31 @@
I'm going to give you a prompt. I don't want you to respond
to the prompt itself. I want you to figure out which of the following
actions on my project, if any, would help you answer the prompt.
Here are the actions:
## file
This action's parameter is a file path to one of the files
in the project. If you ask for this action, I will tell you
the full contents of the file, so you can learn all the
details of the file.
## search
This action's parameter is a string to do a semantic search for
across the files in the project. (You will have a JSON summary
of all the files in the project.) It will tell you which files this string
(or similar strings; it is a semantic search) appears in,
as well as some context of the lines surrounding each result.
It's very important that you only use this action when you think
that searching across the specific files in this project for the query
in question will be useful. For example, don't use this command to search
for queries you might put into a general Web search engine, because those
will be too general to give useful results in this project-specific search.
---
That was the end of the list of actions.
Here is a JSON summary of each of the files in my project:

View File

@@ -0,0 +1,181 @@
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use feature_flags::FeatureFlag;
use gpui::{App, Task, WeakEntity};
use language::{CodeLabel, LspAdapterDelegate};
use semantic_index::{LoadedSearchResult, SemanticDb};
use std::{
fmt::Write,
sync::{atomic::AtomicBool, Arc},
};
use ui::{prelude::*, IconName};
use workspace::Workspace;
use crate::create_label_for_command;
use crate::file_command::{build_entry_output_section, codeblock_fence_for_path};
pub struct SearchSlashCommandFeatureFlag;
impl FeatureFlag for SearchSlashCommandFeatureFlag {
const NAME: &'static str = "search-slash-command";
fn enabled_for_staff() -> bool {
false
}
}
pub struct SearchSlashCommand;
impl SlashCommand for SearchSlashCommand {
fn name(&self) -> String {
"search".into()
}
fn label(&self, cx: &App) -> CodeLabel {
create_label_for_command("search", &["--n"], cx)
}
fn description(&self) -> String {
"Search your project semantically".into()
}
fn icon(&self) -> IconName {
IconName::SearchCode
}
fn menu_text(&self) -> String {
self.description()
}
fn requires_argument(&self) -> bool {
true
}
fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakEntity<Workspace>>,
_window: &mut Window,
_cx: &mut App,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Ok(Vec::new()))
}
fn run(
self: Arc<Self>,
arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
_context_buffer: language::BufferSnapshot,
workspace: WeakEntity<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
window: &mut Window,
cx: &mut App,
) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
if arguments.is_empty() {
return Task::ready(Err(anyhow::anyhow!("missing search query")));
};
let mut limit = None;
let mut query = String::new();
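        // Illustrative example: the arguments ["--10", "database", "pooling"] set the limit to 10
        // and the query to "database pooling".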
for part in arguments {
if let Some(parameter) = part.strip_prefix("--") {
if let Ok(count) = parameter.parse::<usize>() {
limit = Some(count);
continue;
}
}
query.push_str(part);
query.push(' ');
}
query.pop();
if query.is_empty() {
return Task::ready(Err(anyhow::anyhow!("missing search query")));
}
let project = workspace.read(cx).project().clone();
let fs = project.read(cx).fs().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, async move |cx| {
let results = project_index
.read_with(cx, |project_index, cx| {
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
})?
.await?;
let loaded_results = SemanticDb::load_results(results, &fs, &cx).await?;
let output = cx
.background_spawn(async move {
let mut text = format!("Search results for {query}:\n");
let mut sections = Vec::new();
for loaded_result in &loaded_results {
add_search_result_section(loaded_result, &mut text, &mut sections);
}
let query = SharedString::from(query);
sections.push(SlashCommandOutputSection {
range: 0..text.len(),
icon: IconName::MagnifyingGlass,
label: query,
metadata: None,
});
SlashCommandOutput {
text,
sections,
run_commands_in_text: false,
}
.to_event_stream()
})
.await;
Ok(output)
})
}
}
pub fn add_search_result_section(
loaded_result: &LoadedSearchResult,
text: &mut String,
sections: &mut Vec<SlashCommandOutputSection<usize>>,
) {
let LoadedSearchResult {
path,
full_path,
excerpt_content,
row_range,
..
} = loaded_result;
let section_start_ix = text.len();
text.push_str(&codeblock_fence_for_path(
Some(&path),
Some(row_range.clone()),
));
text.push_str(&excerpt_content);
if !text.ends_with('\n') {
text.push('\n');
}
writeln!(text, "```\n").unwrap();
let section_end_ix = text.len() - 1;
sections.push(build_entry_output_section(
section_start_ix..section_end_ix,
Some(&full_path),
false,
Some(row_range.start() + 1..row_range.end() + 1),
));
}

View File

@@ -35,7 +35,6 @@ ui.workspace = true
util.workspace = true
workspace.workspace = true
worktree.workspace = true
open = { workspace = true }
[dev-dependencies]
collections = { workspace = true, features = ["test-support"] }

View File

@@ -1,5 +1,4 @@
mod bash_tool;
mod batch_tool;
mod copy_path_tool;
mod create_directory_tool;
mod create_file_tool;
@@ -11,7 +10,6 @@ mod find_replace_file_tool;
mod list_directory_tool;
mod move_path_tool;
mod now_tool;
mod open_tool;
mod path_search_tool;
mod read_file_tool;
mod regex_search_tool;
@@ -27,7 +25,6 @@ use http_client::HttpClientWithUrl;
use move_path_tool::MovePathTool;
use crate::bash_tool::BashTool;
use crate::batch_tool::BatchTool;
use crate::create_directory_tool::CreateDirectoryTool;
use crate::create_file_tool::CreateFileTool;
use crate::delete_path_tool::DeletePathTool;
@@ -37,7 +34,6 @@ use crate::fetch_tool::FetchTool;
use crate::find_replace_file_tool::FindReplaceFileTool;
use crate::list_directory_tool::ListDirectoryTool;
use crate::now_tool::NowTool;
use crate::open_tool::OpenTool;
use crate::path_search_tool::PathSearchTool;
use crate::read_file_tool::ReadFileTool;
use crate::regex_search_tool::RegexSearchTool;
@@ -49,7 +45,6 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
let registry = ToolRegistry::global(cx);
registry.register_tool(BashTool);
registry.register_tool(BatchTool);
registry.register_tool(CreateDirectoryTool);
registry.register_tool(CreateFileTool);
registry.register_tool(CopyPathTool);
@@ -60,7 +55,6 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
registry.register_tool(EditFilesTool);
registry.register_tool(ListDirectoryTool);
registry.register_tool(NowTool);
registry.register_tool(OpenTool);
registry.register_tool(PathSearchTool);
registry.register_tool(ReadFileTool);
registry.register_tool(RegexSearchTool);

View File

@@ -5,7 +5,6 @@ use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::path::Path;
use std::sync::Arc;
use ui::IconName;
use util::command::new_smol_command;
@@ -19,41 +18,6 @@ pub struct BashToolInput {
cd: String,
}
/// Custom deserialization function for BashToolInput that handles missing "cd" field
/// Returns a BashToolInput with the project's first worktree root as default "cd" if missing
fn from_value_with_default_cd(value: serde_json::Value, project: Option<&Project>, cx: Option<&App>) -> Result<BashToolInput> {
// Try standard deserialization first
if let Ok(input) = serde_json::from_value::<BashToolInput>(value.clone()) {
return Ok(input);
}
// If that fails, check if it's because "cd" is missing
let mut obj = match value {
serde_json::Value::Object(obj) => obj,
_ => return Err(anyhow!("Expected object for BashToolInput")),
};
if !obj.contains_key("cd") {
// Find first worktree root to use as default if project context is available
if let (Some(project), Some(cx)) = (project, cx) {
if let Some(worktree) = project.worktrees(cx).next() {
let root_name = worktree.read(cx).root_name().to_string();
obj.insert("cd".to_string(), serde_json::Value::String(root_name));
} else {
// No worktrees available, use "." as fallback
obj.insert("cd".to_string(), serde_json::Value::String(".".to_string()));
}
} else {
// No project context, use "." as fallback
obj.insert("cd".to_string(), serde_json::Value::String(".".to_string()));
}
}
// Try to deserialize with the modified object
serde_json::from_value::<BashToolInput>(serde_json::Value::Object(obj))
.map_err(|e| anyhow!("Failed to deserialize BashToolInput: {}", e))
}
pub struct BashTool;
impl Tool for BashTool {
@@ -79,7 +43,7 @@ impl Tool for BashTool {
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match from_value_with_default_cd(input.clone(), None, None) {
match serde_json::from_value::<BashToolInput>(input.clone()) {
Ok(input) => {
if input.command.contains('\n') {
MarkdownString::code_block("bash", &input.command).0
@@ -99,49 +63,15 @@ impl Tool for BashTool {
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input: BashToolInput = match from_value_with_default_cd(input, Some(&project.read(cx)), Some(cx)) {
let input: BashToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
let project = project.read(cx);
let input_path = Path::new(&input.cd);
let working_dir = if input.cd == "." {
// Accept "." as meaning "the one worktree" if we only have one worktree.
let mut worktrees = project.worktrees(cx);
let only_worktree = match worktrees.next() {
Some(worktree) => worktree,
None => return Task::ready(Err(anyhow!("No worktrees found in the project"))),
};
if worktrees.next().is_some() {
return Task::ready(Err(anyhow!("'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.")));
}
only_worktree.read(cx).abs_path()
} else if input_path.is_absolute() {
// Absolute paths are allowed, but only if they're in one of the project's worktrees.
if !project
.worktrees(cx)
.any(|worktree| input_path.starts_with(&worktree.read(cx).abs_path()))
{
return Task::ready(Err(anyhow!(
"The absolute path must be within one of the project's worktrees"
)));
}
input_path.into()
} else {
let Some(worktree) = project.worktree_for_root_name(&input.cd, cx) else {
return Task::ready(Err(anyhow!(
"`cd` directory {} not found in the project",
&input.cd
)));
};
worktree.read(cx).abs_path()
let Some(worktree) = project.read(cx).worktree_for_root_name(&input.cd, cx) else {
return Task::ready(Err(anyhow!("Working directory not found in the project")));
};
let working_directory = worktree.read(cx).abs_path();
cx.spawn(async move |_| {
// Add 2>&1 to merge stderr into stdout for proper interleaving.
@@ -150,7 +80,7 @@ impl Tool for BashTool {
let output = new_smol_command("bash")
.arg("-c")
.arg(&command)
.current_dir(working_dir)
.current_dir(working_directory)
.output()
.await
.context("Failed to execute bash command")?;

View File

@@ -1,301 +0,0 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool, ToolWorkingSet};
use futures::future::join_all;
use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use ui::IconName;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ToolInvocation {
/// The name of the tool to invoke
pub name: String,
/// The input to the tool in JSON format
pub input: serde_json::Value,
}
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct BatchToolInput {
/// The tool invocations to run as a batch. These tools will be run either sequentially
/// or concurrently depending on the `run_tools_concurrently` flag.
///
/// <example>
/// Basic file operations (concurrent)
///
/// ```json
/// {
/// "invocations": [
/// {
/// "name": "read-file",
/// "input": {
/// "path": "src/main.rs"
/// }
/// },
/// {
/// "name": "list-directory",
/// "input": {
/// "path": "src/lib"
/// }
/// },
/// {
/// "name": "regex-search",
/// "input": {
/// "regex": "fn run\\("
/// }
/// }
/// ],
/// "run_tools_concurrently": true
/// }
/// ```
/// </example>
///
/// <example>
/// Multiple find-replace operations on the same file (sequential)
///
/// ```json
/// {
/// "invocations": [
/// {
/// "name": "find-replace-file",
/// "input": {
/// "path": "src/config.rs",
/// "display_description": "Update default timeout value",
/// "find": "pub const DEFAULT_TIMEOUT: u64 = 30;\n\npub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.example.com\";",
/// "replace": "pub const DEFAULT_TIMEOUT: u64 = 60;\n\npub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.example.com\";"
/// }
/// },
/// {
/// "name": "find-replace-file",
/// "input": {
/// "path": "src/config.rs",
/// "display_description": "Update API endpoint URL",
/// "find": "pub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.example.com\";\n\npub const API_VERSION: &str = \"v1\";",
/// "replace": "pub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.newdomain.com\";\n\npub const API_VERSION: &str = \"v1\";"
/// }
/// }
/// ],
/// "run_tools_concurrently": false
/// }
/// ```
/// </example>
///
/// <example>
/// Searching and analyzing code (concurrent)
///
/// ```json
/// {
/// "invocations": [
/// {
/// "name": "regex-search",
/// "input": {
/// "regex": "impl Database"
/// }
/// },
/// {
/// "name": "path-search",
/// "input": {
/// "glob": "**/*test*.rs"
/// }
/// }
/// ],
/// "run_tools_concurrently": true
/// }
/// ```
/// </example>
///
/// <example>
/// Multi-file refactoring (concurrent)
///
/// ```json
/// {
/// "invocations": [
/// {
/// "name": "find-replace-file",
/// "input": {
/// "path": "src/models/user.rs",
/// "display_description": "Add email field to User struct",
/// "find": "pub struct User {\n pub id: u64,\n pub username: String,\n pub created_at: DateTime<Utc>,\n}",
/// "replace": "pub struct User {\n pub id: u64,\n pub username: String,\n pub email: String,\n pub created_at: DateTime<Utc>,\n}"
/// }
/// },
/// {
/// "name": "find-replace-file",
/// "input": {
/// "path": "src/db/queries.rs",
/// "display_description": "Update user insertion query",
/// "find": "pub async fn insert_user(conn: &mut Connection, user: &User) -> Result<(), DbError> {\n conn.execute(\n \"INSERT INTO users (id, username, created_at) VALUES ($1, $2, $3)\",\n &[&user.id, &user.username, &user.created_at],\n ).await?;\n \n Ok(())\n}",
/// "replace": "pub async fn insert_user(conn: &mut Connection, user: &User) -> Result<(), DbError> {\n conn.execute(\n \"INSERT INTO users (id, username, email, created_at) VALUES ($1, $2, $3, $4)\",\n &[&user.id, &user.username, &user.email, &user.created_at],\n ).await?;\n \n Ok(())\n}"
/// }
/// }
/// ],
/// "run_tools_concurrently": true
/// }
/// ```
/// </example>
pub invocations: Vec<ToolInvocation>,
/// Whether to run the tools in this batch concurrently. If this is false (the default), the tools will run sequentially.
#[serde(default)]
pub run_tools_concurrently: bool,
}
pub struct BatchTool;
impl Tool for BatchTool {
fn name(&self) -> String {
"batch-tool".into()
}
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String {
include_str!("./batch_tool/description.md").into()
}
fn icon(&self) -> IconName {
IconName::Cog
}
fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(BatchToolInput);
serde_json::to_value(&schema).unwrap()
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<BatchToolInput>(input.clone()) {
Ok(input) => {
let count = input.invocations.len();
let mode = if input.run_tools_concurrently {
"concurrently"
} else {
"sequentially"
};
let first_tool_name = input
.invocations
.first()
.map(|inv| inv.name.clone())
.unwrap_or_default();
let all_same = input
.invocations
.iter()
.all(|invocation| invocation.name == first_tool_name);
if all_same {
format!(
"Run `{}` {} times {}",
first_tool_name,
input.invocations.len(),
mode
)
} else {
format!("Run {} tools {}", count, mode)
}
}
Err(_) => "Batch tools".to_string(),
}
}
fn run(
self: Arc<Self>,
input: serde_json::Value,
messages: &[LanguageModelRequestMessage],
project: Entity<Project>,
action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input = match serde_json::from_value::<BatchToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
if input.invocations.is_empty() {
return Task::ready(Err(anyhow!("No tool invocations provided")));
}
let run_tools_concurrently = input.run_tools_concurrently;
let foreground_task = {
let working_set = ToolWorkingSet::default();
let invocations = input.invocations;
let messages = messages.to_vec();
cx.spawn(async move |cx| {
let mut tasks = Vec::new();
let mut tool_names = Vec::new();
for invocation in invocations {
let tool_name = invocation.name.clone();
tool_names.push(tool_name.clone());
let tool = cx
.update(|cx| working_set.tool(&tool_name, cx))
.map_err(|err| {
anyhow!("Failed to look up tool '{}': {}", tool_name, err)
})?;
let Some(tool) = tool else {
return Err(anyhow!("Tool '{}' not found", tool_name));
};
let project = project.clone();
let action_log = action_log.clone();
let messages = messages.clone();
let task = cx
.update(|cx| tool.run(invocation.input, &messages, project, action_log, cx))
.map_err(|err| anyhow!("Failed to start tool '{}': {}", tool_name, err))?;
tasks.push(task);
}
Ok((tasks, tool_names))
})
};
cx.background_spawn(async move {
let (tasks, tool_names) = foreground_task.await?;
let mut results = Vec::with_capacity(tasks.len());
if run_tools_concurrently {
results.extend(join_all(tasks).await)
} else {
for task in tasks {
results.push(task.await);
}
};
let mut formatted_results = String::new();
let mut error_occurred = false;
for (i, result) in results.into_iter().enumerate() {
let tool_name = &tool_names[i];
match result {
Ok(output) => {
formatted_results
.push_str(&format!("Tool '{}' result:\n{}\n\n", tool_name, output));
}
Err(err) => {
error_occurred = true;
formatted_results
.push_str(&format!("Tool '{}' error: {}\n\n", tool_name, err));
}
}
}
if error_occurred {
formatted_results
.push_str("Note: Some tool invocations failed. See individual results above.");
}
Ok(formatted_results.trim().to_string())
})
}
}

View File

@@ -1,9 +0,0 @@
Invoke multiple other tool calls either sequentially or concurrently.
This tool is useful when you need to perform several operations at once, improving efficiency by reducing the number of back-and-forth interactions needed to complete complex tasks.
If the tool calls are set to be run sequentially, then each tool call within the batch is executed in the order provided. If it's set to run concurrently, then they may run in a different order. Regardless, all tool calls will have the same permissions and context as if they were called individually.
This tool should never be used to run a total of one tool. Instead, just run that one tool directly. You can run batches within batches if desired, which is a way you can mix concurrent and sequential tool call execution.
When it's possible to run tools in a batch, you should run as many as possible in the batch, up to a maximum of 32. For example, don't run multiple consecutive batches of 10 when you could instead run one batch of 30.

View File

@@ -1,7 +1,5 @@
Edit files in the current project by specifying instructions in natural language.
IMPORTANT NOTE: If there is a find-replace tool, use that instead of this tool! This tool is only to be used as a fallback in case that tool is unavailable. Always prefer that tool if it is available.
When using this tool, you should suggest one coherent edit that can be made to the codebase.
When the set of edits you want to make is large or complex, feel free to invoke this tool multiple times, each time focusing on a specific change you want to make.

View File

@@ -1,68 +0,0 @@
use anyhow::{anyhow, Context as _, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct OpenToolInput {
/// The path or URL to open with the default application.
path_or_url: String,
}
pub struct OpenTool;
impl Tool for OpenTool {
fn name(&self) -> String {
"open".to_string()
}
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String {
include_str!("./open_tool/description.md").to_string()
}
fn icon(&self) -> IconName {
IconName::ExternalLink
}
fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(OpenToolInput);
serde_json::to_value(&schema).unwrap()
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<OpenToolInput>(input.clone()) {
Ok(input) => format!("Open `{}`", MarkdownString::escape(&input.path_or_url)),
Err(_) => "Open file or URL".to_string(),
}
}
fn run(
self: Arc<Self>,
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input: OpenToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
cx.background_spawn(async move {
open::that(&input.path_or_url).context("Failed to open URL or file path")?;
Ok(format!("Successfully opened {}", input.path_or_url))
})
}
}

View File

@@ -1,6 +0,0 @@
This tool opens a file or URL with the default application associated with it on the user's operating system:
- On macOS, it's equivalent to the `open` command
- On Windows, it's equivalent to `start`
- On Linux, it uses something like `xdg-open`, `gio open`, `gnome-open`, `kde-open`, `wslview` as appropriate
For example, it can open a web browser with a URL, open a PDF file with the default PDF viewer, etc.

View File

@@ -37,8 +37,6 @@ pub enum Model {
CohereCommandRV1,
CohereCommandRPlusV1,
CohereCommandLightTextV14_4k,
// DeepSeek
DeepSeekR1,
// Meta models
MetaLlama38BInstructV1,
MetaLlama370BInstructV1,
@@ -93,7 +91,6 @@ impl Model {
Model::AmazonNovaLite => "us.amazon.nova-lite-v1:0",
Model::AmazonNovaMicro => "us.amazon.nova-micro-v1:0",
Model::AmazonNovaPro => "us.amazon.nova-pro-v1:0",
Model::DeepSeekR1 => "us.deepseek.r1-v1:0",
Model::AI21J2GrandeInstruct => "ai21.j2-grande-instruct",
Model::AI21J2JumboInstruct => "ai21.j2-jumbo-instruct",
Model::AI21J2Mid => "ai21.j2-mid",
@@ -136,7 +133,6 @@ impl Model {
Self::AmazonNovaLite => "Amazon Nova Lite",
Self::AmazonNovaMicro => "Amazon Nova Micro",
Self::AmazonNovaPro => "Amazon Nova Pro",
Self::DeepSeekR1 => "DeepSeek R1",
Self::AI21J2GrandeInstruct => "AI21 Jurassic2 Grande Instruct",
Self::AI21J2JumboInstruct => "AI21 Jurassic2 Jumbo Instruct",
Self::AI21J2Mid => "AI21 Jurassic2 Mid",

View File

@@ -573,7 +573,7 @@ async fn test_following_tab_order(
client_a
.fs()
.insert_tree(
path!("/a"),
"/a",
json!({
"1.txt": "one",
"2.txt": "two",
@@ -581,7 +581,7 @@ async fn test_following_tab_order(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await

View File

@@ -1,7 +1,6 @@
use crate::tests::TestServer;
use call::ActiveCall;
use collections::{HashMap, HashSet};
use dap::DapRegistry;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::StreamExt as _;
@@ -86,7 +85,6 @@ async fn test_sharing_an_ssh_remote_project(
http_client: remote_http_client,
node_runtime: node,
languages,
debug_adapters: Arc::new(DapRegistry::fake()),
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
},
cx,
@@ -254,7 +252,6 @@ async fn test_ssh_collaboration_git_branches(
http_client: remote_http_client,
node_runtime: node,
languages,
debug_adapters: Arc::new(DapRegistry::fake()),
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
},
cx,
@@ -454,7 +451,6 @@ async fn test_ssh_collaboration_formatting_with_prettier(
http_client: remote_http_client,
node_runtime: NodeRuntime::unavailable(),
languages,
debug_adapters: Arc::new(DapRegistry::fake()),
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
},
cx,

View File

@@ -14,7 +14,6 @@ use client::{
use clock::FakeSystemClock;
use collab_ui::channel_view::ChannelView;
use collections::{HashMap, HashSet};
use dap::DapRegistry;
use fs::FakeFs;
use futures::{channel::oneshot, StreamExt as _};
use git::GitHostingProviderRegistry;
@@ -278,14 +277,12 @@ impl TestServer {
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
let debug_adapters = Arc::new(DapRegistry::default());
let session = cx.new(|cx| AppSession::new(Session::test(), cx));
let app_state = Arc::new(workspace::AppState {
client: client.clone(),
user_store: user_store.clone(),
workspace_store,
languages: language_registry,
debug_adapters,
fs: fs.clone(),
build_window_options: |_, _| Default::default(),
node_runtime: NodeRuntime::unavailable(),
@@ -798,7 +795,6 @@ impl TestClient {
self.app_state.node_runtime.clone(),
self.app_state.user_store.clone(),
self.app_state.languages.clone(),
self.app_state.debug_adapters.clone(),
self.app_state.fs.clone(),
None,
cx,

View File

@@ -1156,7 +1156,20 @@ impl Panel for ChatPanel {
}
fn icon(&self, _window: &Window, cx: &App) -> Option<ui::IconName> {
self.enabled(cx).then(|| ui::IconName::MessageBubbles)
let show_icon = match ChatPanelSettings::get_global(cx).button {
ChatPanelButton::Never => false,
ChatPanelButton::Always => true,
ChatPanelButton::WhenInCall => {
let is_in_call = ActiveCall::global(cx)
.read(cx)
.room()
.map_or(false, |room| room.read(cx).contains_guests());
self.active || is_in_call
}
};
show_icon.then(|| ui::IconName::MessageBubbles)
}
fn icon_tooltip(&self, _: &Window, _: &App) -> Option<&'static str> {
@@ -1177,21 +1190,6 @@ impl Panel for ChatPanel {
fn activation_priority(&self) -> u32 {
7
}
fn enabled(&self, cx: &App) -> bool {
match ChatPanelSettings::get_global(cx).button {
ChatPanelButton::Never => false,
ChatPanelButton::Always => true,
ChatPanelButton::WhenInCall => {
let is_in_call = ActiveCall::global(cx)
.read(cx)
.room()
.map_or(false, |room| room.read(cx).contains_guests());
self.active || is_in_call
}
}
}
}
impl EventEmitter<PanelEvent> for ChatPanel {}

View File

@@ -247,15 +247,12 @@ impl ContextServerManager {
let mut desired_servers = HashMap::default();
let (registry, project) = this.update(cx, |this, cx| {
let location = this
.project
.read(cx)
.visible_worktrees(cx)
.next()
.map(|worktree| settings::SettingsLocation {
let location = this.project.read(cx).worktrees(cx).next().map(|worktree| {
settings::SettingsLocation {
worktree_id: worktree.read(cx).id(),
path: Path::new(""),
});
}
});
let settings = ContextServerSettings::get(location, cx);
desired_servers = settings.context_servers.clone();

View File

@@ -8,10 +8,6 @@ license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/dap.rs"
doctest = false
[features]
test-support = [
"gpui/test-support",
@@ -39,7 +35,6 @@ log.workspace = true
node_runtime.workspace = true
parking_lot.workspace = true
paths.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true

View File

@@ -13,16 +13,15 @@ use serde_json::Value;
use settings::WorktreeId;
use smol::{self, fs::File, lock::Mutex};
use std::{
borrow::Borrow,
collections::{HashMap, HashSet},
ffi::{OsStr, OsString},
fmt::Debug,
net::Ipv4Addr,
ops::Deref,
path::PathBuf,
sync::{Arc, LazyLock},
path::{Path, PathBuf},
sync::Arc,
};
use task::{DebugAdapterConfig, DebugTaskDefinition};
use task::DebugAdapterConfig;
use util::ResultExt;
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -47,7 +46,7 @@ pub trait DapDelegate {
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct DebugAdapterName(pub SharedString);
pub struct DebugAdapterName(pub Arc<str>);
impl Deref for DebugAdapterName {
type Target = str;
@@ -63,9 +62,9 @@ impl AsRef<str> for DebugAdapterName {
}
}
impl Borrow<str> for DebugAdapterName {
fn borrow(&self) -> &str {
&self.0
impl AsRef<Path> for DebugAdapterName {
fn as_ref(&self) -> &Path {
Path::new(&*self.0)
}
}
@@ -77,7 +76,7 @@ impl std::fmt::Display for DebugAdapterName {
impl From<DebugAdapterName> for SharedString {
fn from(name: DebugAdapterName) -> Self {
name.0
SharedString::from(name.0)
}
}
@@ -124,7 +123,7 @@ pub async fn download_adapter_from_github(
file_type: DownloadedFileType,
delegate: &dyn DapDelegate,
) -> Result<PathBuf> {
let adapter_path = paths::debug_adapters_dir().join(&adapter_name.as_ref());
let adapter_path = paths::debug_adapters_dir().join(&adapter_name);
let version_path = adapter_path.join(format!("{}_{}", adapter_name, github_version.tag_name));
let fs = delegate.fs();
@@ -289,21 +288,15 @@ pub trait DebugAdapter: 'static + Send + Sync {
) -> Result<DebugAdapterBinary>;
/// Should return base configuration to make the debug adapter work
fn request_args(&self, config: &DebugTaskDefinition) -> Value;
fn attach_processes_filter(&self) -> regex::Regex {
EMPTY_REGEX.clone()
}
fn request_args(&self, config: &DebugAdapterConfig) -> Value;
}
static EMPTY_REGEX: LazyLock<regex::Regex> =
LazyLock::new(|| regex::Regex::new("").expect("Regex compilation to succeed"));
#[cfg(any(test, feature = "test-support"))]
pub struct FakeAdapter {}
#[cfg(any(test, feature = "test-support"))]
impl FakeAdapter {
pub const ADAPTER_NAME: &'static str = "fake-adapter";
const ADAPTER_NAME: &'static str = "fake-adapter";
pub fn new() -> Self {
Self {}
@@ -358,13 +351,13 @@ impl DebugAdapter for FakeAdapter {
unimplemented!("get installed binary");
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
use serde_json::json;
use task::DebugRequestType;
json!({
"request": match config.request {
DebugRequestType::Launch(_) => "launch",
DebugRequestType::Launch => "launch",
DebugRequestType::Attach(_) => "attach",
},
"process_id": if let DebugRequestType::Attach(attach_config) = &config.request {
@@ -374,10 +367,4 @@ impl DebugAdapter for FakeAdapter {
},
})
}
fn attach_processes_filter(&self) -> regex::Regex {
static REGEX: LazyLock<regex::Regex> =
LazyLock::new(|| regex::Regex::new("^fake-binary").unwrap());
REGEX.clone()
}
}

View File

@@ -71,6 +71,7 @@ impl DebugAdapterClient {
let client_id = this.id;
// start handling events/reverse requests
cx.background_spawn(Self::handle_receive_messages(
client_id,
server_rx,
@@ -118,6 +119,7 @@ impl DebugAdapterClient {
Ok(message) => message,
Err(e) => break Err(e.into()),
};
match message {
Message::Event(ev) => {
log::debug!("Client {} received event `{}`", client_id.0, &ev);
@@ -162,6 +164,7 @@ impl DebugAdapterClient {
command: R::COMMAND.to_string(),
arguments: Some(serialized_arguments),
};
self.transport_delegate
.add_pending_request(sequence_id, callback_tx)
.await;
@@ -431,7 +434,7 @@ mod tests {
let client = DebugAdapterClient::start(
crate::client::SessionId(1),
DebugAdapterName("test-adapter".into()),
DebugAdapterName(Arc::from("test-adapter")),
DebugAdapterBinary {
command: "command".into(),
arguments: Default::default(),

View File

@@ -1,17 +0,0 @@
pub mod adapters;
pub mod client;
pub mod debugger_settings;
pub mod proto_conversions;
mod registry;
pub mod transport;
pub use dap_types::*;
pub use registry::DapRegistry;
pub use task::{DebugAdapterConfig, DebugRequestType};
pub type ScopeId = u64;
pub type VariableReference = u64;
pub type StackFrameId = u64;
#[cfg(any(test, feature = "test-support"))]
pub use adapters::FakeAdapter;

38
crates/dap/src/lib.rs Normal file
View File

@@ -0,0 +1,38 @@
pub mod adapters;
pub mod client;
pub mod debugger_settings;
pub mod proto_conversions;
pub mod transport;
pub use dap_types::*;
pub use task::{DebugAdapterConfig, DebugAdapterKind, DebugRequestType};
pub type ScopeId = u64;
pub type VariableReference = u64;
pub type StackFrameId = u64;
#[cfg(any(test, feature = "test-support"))]
pub use adapters::FakeAdapter;
#[cfg(any(test, feature = "test-support"))]
pub fn test_config(
request: DebugRequestType,
fail: Option<bool>,
caps: Option<Capabilities>,
) -> DebugAdapterConfig {
DebugAdapterConfig {
label: "test config".into(),
kind: DebugAdapterKind::Fake((
fail.unwrap_or_default(),
caps.unwrap_or(Capabilities {
supports_step_back: Some(false),
..Default::default()
}),
)),
request,
program: None,
supports_attach: false,
cwd: None,
initialize_args: None,
}
}

View File

@@ -1,39 +0,0 @@
use parking_lot::RwLock;
use crate::adapters::{DebugAdapter, DebugAdapterName};
use std::{collections::BTreeMap, sync::Arc};
#[derive(Default)]
struct DapRegistryState {
adapters: BTreeMap<DebugAdapterName, Arc<dyn DebugAdapter>>,
}
#[derive(Default)]
/// Stores available debug adapters.
pub struct DapRegistry(Arc<RwLock<DapRegistryState>>);
impl DapRegistry {
pub fn add_adapter(&self, adapter: Arc<dyn DebugAdapter>) {
let name = adapter.name();
let _previous_value = self.0.write().adapters.insert(name, adapter);
debug_assert!(
_previous_value.is_none(),
"Attempted to insert a new debug adapter when one is already registered"
);
}
pub fn adapter(&self, name: &str) -> Option<Arc<dyn DebugAdapter>> {
self.0.read().adapters.get(name).cloned()
}
pub fn enumerate_adapters(&self) -> Vec<DebugAdapterName> {
self.0.read().adapters.keys().cloned().collect()
}
#[cfg(any(test, feature = "test-support"))]
pub fn fake() -> Self {
use crate::FakeAdapter;
let register = Self::default();
register.add_adapter(Arc::new(FakeAdapter::new()));
register
}
}

View File

@@ -261,6 +261,8 @@ impl TransportDelegate {
}
}
}
smol::future::yield_now().await;
};
log::debug!("Handle adapter log dropped");
@@ -317,6 +319,8 @@ impl TransportDelegate {
}
Err(error) => break Err(error.into()),
}
smol::future::yield_now().await;
};
log::debug!("Handle adapter input dropped");
@@ -356,6 +360,8 @@ impl TransportDelegate {
}
Err(e) => break Err(e),
}
smol::future::yield_now().await;
};
drop(client_tx);
@@ -387,6 +393,8 @@ impl TransportDelegate {
}
Err(error) => break Err(error.into()),
}
smol::future::yield_now().await;
};
log::debug!("Handle adapter error dropped");

View File

@@ -30,6 +30,7 @@ paths.workspace = true
regex.workspace = true
serde.workspace = true
serde_json.workspace = true
sysinfo.workspace = true
task.workspace = true
util.workspace = true

View File

@@ -0,0 +1,84 @@
use dap::transport::TcpTransport;
use gpui::AsyncApp;
use serde_json::Value;
use std::{collections::HashMap, ffi::OsString, path::PathBuf};
use sysinfo::{Pid, Process};
use task::DebugAdapterConfig;
use crate::*;
pub(crate) struct CustomDebugAdapter {
custom_args: CustomArgs,
}
impl CustomDebugAdapter {
const ADAPTER_NAME: &'static str = "custom_dap";
pub(crate) async fn new(custom_args: CustomArgs) -> Result<Self> {
Ok(CustomDebugAdapter { custom_args })
}
pub fn attach_processes(processes: &HashMap<Pid, Process>) -> Vec<(&Pid, &Process)> {
processes.iter().collect::<Vec<_>>()
}
}
#[async_trait(?Send)]
impl DebugAdapter for CustomDebugAdapter {
fn name(&self) -> DebugAdapterName {
DebugAdapterName(Self::ADAPTER_NAME.into())
}
async fn get_binary(
&self,
_: &dyn DapDelegate,
config: &DebugAdapterConfig,
_: Option<PathBuf>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
let connection = if let DebugConnectionType::TCP(connection) = &self.custom_args.connection
{
Some(adapters::TcpArguments {
host: connection.host(),
port: TcpTransport::port(&connection).await?,
timeout: connection.timeout,
})
} else {
None
};
let ret = DebugAdapterBinary {
command: self.custom_args.command.clone(),
arguments: self
.custom_args
.args
.clone()
.map(|args| args.iter().map(OsString::from).collect()),
cwd: config.cwd.clone(),
envs: self.custom_args.envs.clone(),
connection,
};
Ok(ret)
}
async fn fetch_latest_adapter_version(&self, _: &dyn DapDelegate) -> Result<AdapterVersion> {
bail!("Custom debug adapters don't have latest versions")
}
async fn install_binary(&self, _: AdapterVersion, _: &dyn DapDelegate) -> Result<()> {
bail!("Custom debug adapters cannot be installed")
}
async fn get_installed_binary(
&self,
_: &dyn DapDelegate,
_: &DebugAdapterConfig,
_: Option<PathBuf>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
bail!("Custom debug adapters cannot be installed")
}
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
json!({"program": config.program})
}
}

View File

@@ -1,3 +1,5 @@
mod custom;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod gdb;
mod go;
mod javascript;
@@ -5,17 +7,16 @@ mod lldb;
mod php;
mod python;
use std::{net::Ipv4Addr, sync::Arc};
use std::{collections::HashMap, sync::Arc};
use anyhow::{anyhow, Result};
use anyhow::{anyhow, bail, Result};
use async_trait::async_trait;
use dap::{
adapters::{
self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
GithubRepo,
},
DapRegistry,
use custom::CustomDebugAdapter;
use dap::adapters::{
self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
GithubRepo,
};
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
use gdb::GdbDebugAdapter;
use go::GoDebugAdapter;
use javascript::JsDebugAdapter;
@@ -23,28 +24,44 @@ use lldb::LldbDebugAdapter;
use php::PhpDebugAdapter;
use python::PythonDebugAdapter;
use serde_json::{json, Value};
use task::{DebugAdapterConfig, TCPHost};
use sysinfo::{Pid, Process};
use task::{CustomArgs, DebugAdapterConfig, DebugAdapterKind, DebugConnectionType, TCPHost};
pub fn init(registry: Arc<DapRegistry>) {
registry.add_adapter(Arc::from(PythonDebugAdapter));
registry.add_adapter(Arc::from(PhpDebugAdapter));
registry.add_adapter(Arc::from(JsDebugAdapter::default()));
registry.add_adapter(Arc::from(LldbDebugAdapter));
registry.add_adapter(Arc::from(GoDebugAdapter));
registry.add_adapter(Arc::from(GdbDebugAdapter));
pub async fn build_adapter(kind: &DebugAdapterKind) -> Result<Arc<dyn DebugAdapter>> {
match kind {
DebugAdapterKind::Custom(start_args) => {
Ok(Arc::new(CustomDebugAdapter::new(start_args.clone()).await?))
}
DebugAdapterKind::Python(host) => Ok(Arc::new(PythonDebugAdapter::new(host).await?)),
DebugAdapterKind::Php(host) => Ok(Arc::new(PhpDebugAdapter::new(host.clone()).await?)),
DebugAdapterKind::Javascript(host) => {
Ok(Arc::new(JsDebugAdapter::new(host.clone()).await?))
}
DebugAdapterKind::Lldb => Ok(Arc::new(LldbDebugAdapter::new())),
DebugAdapterKind::Go(host) => Ok(Arc::new(GoDebugAdapter::new(host).await?)),
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
DebugAdapterKind::Gdb => Ok(Arc::new(GdbDebugAdapter::new())),
#[cfg(any(test, feature = "test-support"))]
DebugAdapterKind::Fake(_) => Ok(Arc::new(dap::adapters::FakeAdapter::new())),
#[cfg(not(any(test, feature = "test-support")))]
#[allow(unreachable_patterns)]
_ => unreachable!("Fake variant only exists with test-support feature"),
}
}
pub(crate) async fn configure_tcp_connection(
tcp_connection: TCPHost,
) -> Result<(Ipv4Addr, u16, Option<u64>)> {
let host = tcp_connection.host();
let timeout = tcp_connection.timeout;
let port = if let Some(port) = tcp_connection.port {
port
} else {
dap::transport::TcpTransport::port(&tcp_connection).await?
};
Ok((host, port, timeout))
pub fn attach_processes<'a>(
kind: &DebugAdapterKind,
processes: &'a HashMap<Pid, Process>,
) -> Vec<(&'a Pid, &'a Process)> {
match kind {
#[cfg(any(test, feature = "test-support"))]
DebugAdapterKind::Fake(_) => processes
.iter()
.filter(|(pid, _)| pid.as_u32() == std::process::id())
.collect::<Vec<_>>(),
DebugAdapterKind::Custom(_) => CustomDebugAdapter::attach_processes(processes),
DebugAdapterKind::Javascript(_) => JsDebugAdapter::attach_processes(processes),
DebugAdapterKind::Lldb => LldbDebugAdapter::attach_processes(processes),
_ => processes.iter().collect::<Vec<_>>(),
}
}

View File

@@ -1,17 +1,20 @@
use std::ffi::OsStr;
use anyhow::{bail, Result};
use anyhow::Result;
use async_trait::async_trait;
use gpui::AsyncApp;
use task::{DebugAdapterConfig, DebugTaskDefinition};
use task::DebugAdapterConfig;
use crate::*;
#[derive(Default)]
pub(crate) struct GdbDebugAdapter;
pub(crate) struct GdbDebugAdapter {}
impl GdbDebugAdapter {
const ADAPTER_NAME: &'static str = "GDB";
const ADAPTER_NAME: &'static str = "gdb";
pub(crate) fn new() -> Self {
GdbDebugAdapter {}
}
}
#[async_trait(?Send)]
@@ -23,7 +26,7 @@ impl DebugAdapter for GdbDebugAdapter {
async fn get_binary(
&self,
delegate: &dyn DapDelegate,
_: &DebugAdapterConfig,
config: &DebugAdapterConfig,
user_installed_path: Option<std::path::PathBuf>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
@@ -31,6 +34,7 @@ impl DebugAdapter for GdbDebugAdapter {
.filter(|p| p.exists())
.and_then(|p| p.to_str().map(|s| s.to_string()));
        /* GDB implements DAP natively, so we just need to locate the gdb binary on the user's path. */
let gdb_path = delegate
.which(OsStr::new("gdb"))
.and_then(|p| p.to_str().map(|s| s.to_string()))
@@ -46,7 +50,7 @@ impl DebugAdapter for GdbDebugAdapter {
command: gdb_path,
arguments: Some(vec!["-i=dap".into()]),
envs: None,
cwd: None,
cwd: config.cwd.clone(),
connection: None,
})
}
@@ -73,14 +77,7 @@ impl DebugAdapter for GdbDebugAdapter {
unimplemented!("GDB cannot be installed by Zed (yet)")
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
match &config.request {
dap::DebugRequestType::Attach(attach_config) => {
json!({"pid": attach_config.process_id})
}
dap::DebugRequestType::Launch(launch_config) => {
json!({"program": launch_config.program, "cwd": launch_config.cwd})
}
}
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
json!({"program": config.program, "cwd": config.cwd})
}
}

View File

@@ -1,15 +1,25 @@
use anyhow::bail;
use dap::transport::TcpTransport;
use gpui::AsyncApp;
use std::{ffi::OsStr, path::PathBuf};
use task::DebugTaskDefinition;
use std::{ffi::OsStr, net::Ipv4Addr, path::PathBuf};
use crate::*;
#[derive(Default, Debug)]
pub(crate) struct GoDebugAdapter;
pub(crate) struct GoDebugAdapter {
port: u16,
host: Ipv4Addr,
timeout: Option<u64>,
}
impl GoDebugAdapter {
const ADAPTER_NAME: &'static str = "Delve";
const ADAPTER_NAME: &'static str = "delve";
pub(crate) async fn new(host: &TCPHost) -> Result<Self> {
Ok(GoDebugAdapter {
port: TcpTransport::port(host).await?,
host: host.host(),
timeout: host.timeout,
})
}
}
#[async_trait(?Send)]
@@ -63,39 +73,28 @@ impl DebugAdapter for GoDebugAdapter {
.and_then(|p| p.to_str().map(|p| p.to_string()))
.ok_or(anyhow!("Dlv not found in path"))?;
let Some(tcp_connection) = config.tcp_connection.clone() else {
bail!("Go Debug Adapter expects tcp connection arguments to be provided");
};
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
Ok(DebugAdapterBinary {
command: delve_path,
arguments: Some(vec![
"dap".into(),
"--listen".into(),
format!("{}:{}", host, port).into(),
format!("{}:{}", self.host, self.port).into(),
]),
cwd: None,
cwd: config.cwd.clone(),
envs: None,
connection: Some(adapters::TcpArguments {
host,
port,
timeout,
host: self.host,
port: self.port,
timeout: self.timeout,
}),
})
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
match &config.request {
dap::DebugRequestType::Attach(attach_config) => {
json!({
"processId": attach_config.process_id
})
}
dap::DebugRequestType::Launch(launch_config) => json!({
"program": launch_config.program,
"cwd": launch_config.cwd,
}),
}
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
json!({
"program": config.program,
"cwd": config.cwd,
"subProcess": true,
})
}
}

View File

@@ -1,28 +1,39 @@
use adapters::latest_github_release;
use dap::transport::TcpTransport;
use gpui::AsyncApp;
use regex::Regex;
use std::path::PathBuf;
use task::{DebugRequestType, DebugTaskDefinition};
use std::{collections::HashMap, net::Ipv4Addr, path::PathBuf};
use sysinfo::{Pid, Process};
use task::DebugRequestType;
use crate::*;
#[derive(Debug)]
pub(crate) struct JsDebugAdapter {
attach_processes: Regex,
port: u16,
host: Ipv4Addr,
timeout: Option<u64>,
}
impl Default for JsDebugAdapter {
fn default() -> Self {
Self {
attach_processes: Regex::new(r"(?i)^(?:node|bun|iojs)(?:$|\b)")
.expect("Regex compilation to succeed"),
}
}
}
impl JsDebugAdapter {
const ADAPTER_NAME: &'static str = "JavaScript";
const ADAPTER_NPM_NAME: &'static str = "vscode-js-debug";
const ADAPTER_NAME: &'static str = "vscode-js-debug";
const ADAPTER_PATH: &'static str = "js-debug/src/dapDebugServer.js";
pub(crate) async fn new(host: TCPHost) -> Result<Self> {
Ok(JsDebugAdapter {
host: host.host(),
timeout: host.timeout,
port: TcpTransport::port(&host).await?,
})
}
pub fn attach_processes(processes: &HashMap<Pid, Process>) -> Vec<(&Pid, &Process)> {
let regex = Regex::new(r"(?i)^(?:node|bun|iojs)(?:$|\b)").unwrap();
processes
.iter()
.filter(|(_, process)| regex.is_match(&process.name().to_string_lossy()))
.collect::<Vec<_>>()
}
}
#[async_trait(?Send)]
@@ -36,7 +47,7 @@ impl DebugAdapter for JsDebugAdapter {
delegate: &dyn DapDelegate,
) -> Result<AdapterVersion> {
let release = latest_github_release(
&format!("{}/{}", "microsoft", Self::ADAPTER_NPM_NAME),
&format!("{}/{}", "microsoft", Self::ADAPTER_NAME),
true,
false,
delegate.http_client(),
@@ -67,7 +78,7 @@ impl DebugAdapter for JsDebugAdapter {
let adapter_path = if let Some(user_installed_path) = user_installed_path {
user_installed_path
} else {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
let adapter_path = paths::debug_adapters_dir().join(self.name());
let file_name_prefix = format!("{}_", self.name());
@@ -78,13 +89,6 @@ impl DebugAdapter for JsDebugAdapter {
.ok_or_else(|| anyhow!("Couldn't find JavaScript dap directory"))?
};
let Some(tcp_connection) = config.tcp_connection.clone() else {
anyhow::bail!(
"Javascript Debug Adapter expects tcp connection arguments to be provided"
);
};
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
Ok(DebugAdapterBinary {
command: delegate
.node_runtime()
@@ -94,15 +98,15 @@ impl DebugAdapter for JsDebugAdapter {
.into_owned(),
arguments: Some(vec![
adapter_path.join(Self::ADAPTER_PATH).into(),
port.to_string().into(),
host.to_string().into(),
self.port.to_string().into(),
self.host.to_string().into(),
]),
cwd: None,
cwd: config.cwd.clone(),
envs: None,
connection: Some(adapters::TcpArguments {
host,
port,
timeout,
host: self.host,
port: self.port,
timeout: self.timeout,
}),
})
}
@@ -123,35 +127,22 @@ impl DebugAdapter for JsDebugAdapter {
return Ok(());
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
let mut args = json!({
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
let pid = if let DebugRequestType::Attach(attach_config) = &config.request {
attach_config.process_id
} else {
None
};
json!({
"program": config.program,
"type": "pwa-node",
"request": match config.request {
DebugRequestType::Launch(_) => "launch",
DebugRequestType::Launch => "launch",
DebugRequestType::Attach(_) => "attach",
},
});
let map = args.as_object_mut().unwrap();
match &config.request {
DebugRequestType::Attach(attach) => {
map.insert("processId".into(), attach.process_id.into());
}
DebugRequestType::Launch(launch) => {
map.insert("program".into(), launch.program.clone().into());
map.insert(
"cwd".into(),
launch
.cwd
.as_ref()
.map(|s| s.to_string_lossy().into_owned())
.into(),
);
}
}
args
}
fn attach_processes_filter(&self) -> Regex {
self.attach_processes.clone()
"processId": pid,
"cwd": config.cwd,
})
}
}
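A minimal sketch, not part of the diff, of using the new `attach_processes` helper; the surrounding function is hypothetical, but the sysinfo calls mirror the ones above:
use sysinfo::System;
fn node_like_pids() -> Vec<u32> {
    // Snapshot every running process, then keep only node/bun/iojs candidates.
    let system = System::new_all();
    JsDebugAdapter::attach_processes(system.processes())
        .into_iter()
        .map(|(pid, _process)| pid.as_u32())
        .collect()
}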

View File

@@ -1,17 +1,25 @@
use std::{ffi::OsStr, path::PathBuf};
use std::{collections::HashMap, ffi::OsStr, path::PathBuf};
use anyhow::Result;
use async_trait::async_trait;
use gpui::AsyncApp;
use task::{DebugAdapterConfig, DebugRequestType, DebugTaskDefinition};
use sysinfo::{Pid, Process};
use task::{DebugAdapterConfig, DebugRequestType};
use crate::*;
#[derive(Default)]
pub(crate) struct LldbDebugAdapter;
pub(crate) struct LldbDebugAdapter {}
impl LldbDebugAdapter {
const ADAPTER_NAME: &'static str = "LLDB";
const ADAPTER_NAME: &'static str = "lldb";
pub(crate) fn new() -> Self {
LldbDebugAdapter {}
}
pub fn attach_processes(processes: &HashMap<Pid, Process>) -> Vec<(&Pid, &Process)> {
processes.iter().collect::<Vec<_>>()
}
}
#[async_trait(?Send)]
@@ -23,7 +31,7 @@ impl DebugAdapter for LldbDebugAdapter {
async fn get_binary(
&self,
delegate: &dyn DapDelegate,
_: &DebugAdapterConfig,
config: &DebugAdapterConfig,
user_installed_path: Option<PathBuf>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
@@ -40,7 +48,7 @@ impl DebugAdapter for LldbDebugAdapter {
command: lldb_dap_path,
arguments: None,
envs: None,
cwd: None,
cwd: config.cwd.clone(),
connection: None,
})
}
@@ -67,30 +75,21 @@ impl DebugAdapter for LldbDebugAdapter {
unimplemented!("LLDB debug adapter cannot be installed by Zed (yet)")
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
let mut args = json!({
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
let pid = if let DebugRequestType::Attach(attach_config) = &config.request {
attach_config.process_id
} else {
None
};
json!({
"program": config.program,
"request": match config.request {
DebugRequestType::Launch(_) => "launch",
DebugRequestType::Launch => "launch",
DebugRequestType::Attach(_) => "attach",
},
});
let map = args.as_object_mut().unwrap();
match &config.request {
DebugRequestType::Attach(attach) => {
map.insert("pid".into(), attach.process_id.into());
}
DebugRequestType::Launch(launch) => {
map.insert("program".into(), launch.program.clone().into());
map.insert(
"cwd".into(),
launch
.cwd
.as_ref()
.map(|s| s.to_string_lossy().into_owned())
.into(),
);
}
}
args
"pid": pid,
"cwd": config.cwd,
})
}
}
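A minimal sketch, not part of the diff, of the pid extraction used in the new `request_args` above; `AttachConfig` is assumed to carry an optional `process_id`, as in the tests later in this change:
// Attach requests carry a pid; launch requests serialize `pid` as null.
let request = DebugRequestType::Attach(AttachConfig { process_id: Some(1234) });
let pid = if let DebugRequestType::Attach(attach) = &request {
    attach.process_id
} else {
    None
};
assert_eq!(pid, Some(1234));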

View File

@@ -1,19 +1,27 @@
use adapters::latest_github_release;
use anyhow::bail;
use dap::adapters::TcpArguments;
use dap::{adapters::TcpArguments, transport::TcpTransport};
use gpui::AsyncApp;
use std::path::PathBuf;
use task::DebugTaskDefinition;
use std::{net::Ipv4Addr, path::PathBuf};
use crate::*;
#[derive(Default)]
pub(crate) struct PhpDebugAdapter;
pub(crate) struct PhpDebugAdapter {
port: u16,
host: Ipv4Addr,
timeout: Option<u64>,
}
impl PhpDebugAdapter {
const ADAPTER_NAME: &'static str = "PHP";
const ADAPTER_PACKAGE_NAME: &'static str = "vscode-php-debug";
const ADAPTER_NAME: &'static str = "vscode-php-debug";
const ADAPTER_PATH: &'static str = "extension/out/phpDebug.js";
pub(crate) async fn new(host: TCPHost) -> Result<Self> {
Ok(PhpDebugAdapter {
port: TcpTransport::port(&host).await?,
host: host.host(),
timeout: host.timeout,
})
}
}
#[async_trait(?Send)]
@@ -27,7 +35,7 @@ impl DebugAdapter for PhpDebugAdapter {
delegate: &dyn DapDelegate,
) -> Result<AdapterVersion> {
let release = latest_github_release(
&format!("{}/{}", "xdebug", Self::ADAPTER_PACKAGE_NAME),
&format!("{}/{}", "xdebug", Self::ADAPTER_NAME),
true,
false,
delegate.http_client(),
@@ -58,7 +66,7 @@ impl DebugAdapter for PhpDebugAdapter {
let adapter_path = if let Some(user_installed_path) = user_installed_path {
user_installed_path
} else {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
let adapter_path = paths::debug_adapters_dir().join(self.name());
let file_name_prefix = format!("{}_", self.name());
@@ -69,11 +77,6 @@ impl DebugAdapter for PhpDebugAdapter {
.ok_or_else(|| anyhow!("Couldn't find PHP dap directory"))?
};
let Some(tcp_connection) = config.tcp_connection.clone() else {
bail!("PHP Debug Adapter expects tcp connection arguments to be provided");
};
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
Ok(DebugAdapterBinary {
command: delegate
.node_runtime()
@@ -83,14 +86,14 @@ impl DebugAdapter for PhpDebugAdapter {
.into_owned(),
arguments: Some(vec![
adapter_path.join(Self::ADAPTER_PATH).into(),
format!("--server={}", port).into(),
format!("--server={}", self.port).into(),
]),
connection: Some(TcpArguments {
port,
host,
timeout,
port: self.port,
host: self.host,
timeout: self.timeout,
}),
cwd: None,
cwd: config.cwd.clone(),
envs: None,
})
}
@@ -111,18 +114,10 @@ impl DebugAdapter for PhpDebugAdapter {
Ok(())
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
match &config.request {
dap::DebugRequestType::Attach(_) => {
// php adapter does not support attaching
json!({})
}
dap::DebugRequestType::Launch(launch_config) => {
json!({
"program": launch_config.program,
"cwd": launch_config.cwd,
})
}
}
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
json!({
"program": config.program,
"cwd": config.cwd,
})
}
}

View File

@@ -1,18 +1,26 @@
use crate::*;
use anyhow::bail;
use dap::DebugRequestType;
use dap::transport::TcpTransport;
use gpui::AsyncApp;
use std::{ffi::OsStr, path::PathBuf};
use task::DebugTaskDefinition;
use std::{ffi::OsStr, net::Ipv4Addr, path::PathBuf};
#[derive(Default)]
pub(crate) struct PythonDebugAdapter;
pub(crate) struct PythonDebugAdapter {
port: u16,
host: Ipv4Addr,
timeout: Option<u64>,
}
impl PythonDebugAdapter {
const ADAPTER_NAME: &'static str = "Debugpy";
const ADAPTER_PACKAGE_NAME: &'static str = "debugpy";
const ADAPTER_NAME: &'static str = "debugpy";
const ADAPTER_PATH: &'static str = "src/debugpy/adapter";
const LANGUAGE_NAME: &'static str = "Python";
pub(crate) async fn new(host: &TCPHost) -> Result<Self> {
Ok(PythonDebugAdapter {
port: TcpTransport::port(host).await?,
host: host.host(),
timeout: host.timeout,
})
}
}
#[async_trait(?Send)]
@@ -26,7 +34,7 @@ impl DebugAdapter for PythonDebugAdapter {
delegate: &dyn DapDelegate,
) -> Result<AdapterVersion> {
let github_repo = GithubRepo {
repo_name: Self::ADAPTER_PACKAGE_NAME.into(),
repo_name: Self::ADAPTER_NAME.into(),
repo_owner: "microsoft".into(),
};
@@ -70,16 +78,12 @@ impl DebugAdapter for PythonDebugAdapter {
cx: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
const BINARY_NAMES: [&str; 3] = ["python3", "python", "py"];
let Some(tcp_connection) = config.tcp_connection.clone() else {
bail!("Python Debug Adapter expects tcp connection arguments to be provided");
};
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
let debugpy_dir = if let Some(user_installed_path) = user_installed_path {
user_installed_path
} else {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
let file_name_prefix = format!("{}_", Self::ADAPTER_PACKAGE_NAME);
let adapter_path = paths::debug_adapters_dir().join(self.name());
let file_name_prefix = format!("{}_", self.name());
util::fs::find_file_name_in_dir(adapter_path.as_path(), |file_name| {
file_name.starts_with(&file_name_prefix)
@@ -114,36 +118,25 @@ impl DebugAdapter for PythonDebugAdapter {
command: python_path.ok_or(anyhow!("failed to find binary path for python"))?,
arguments: Some(vec![
debugpy_dir.join(Self::ADAPTER_PATH).into(),
format!("--port={}", port).into(),
format!("--host={}", host).into(),
format!("--port={}", self.port).into(),
format!("--host={}", self.host).into(),
]),
connection: Some(adapters::TcpArguments {
host,
port,
timeout,
host: self.host,
port: self.port,
timeout: self.timeout,
}),
cwd: None,
cwd: config.cwd.clone(),
envs: None,
})
}
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
match &config.request {
DebugRequestType::Launch(launch_config) => {
json!({
"program": launch_config.program,
"subProcess": true,
"cwd": launch_config.cwd,
"redirectOutput": true,
})
}
dap::DebugRequestType::Attach(attach_config) => {
json!({
"subProcess": true,
"redirectOutput": true,
"processId": attach_config.process_id
})
}
}
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
json!({
"program": config.program,
"subProcess": true,
"cwd": config.cwd,
"redirectOutput": true,
})
}
}

View File

@@ -8,10 +8,6 @@ license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/debugger_ui.rs"
doctest = false
[features]
test-support = [
"dap/test-support",

View File

@@ -3,8 +3,8 @@ use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::Subscription;
use gpui::{DismissEvent, Entity, EventEmitter, Focusable, Render};
use picker::{Picker, PickerDelegate};
use project::debugger::attach_processes;
use std::cell::LazyCell;
use std::sync::Arc;
use sysinfo::System;
use ui::{prelude::*, Context, Tooltip};
@@ -13,10 +13,10 @@ use util::debug_panic;
use workspace::ModalView;
#[derive(Debug, Clone)]
pub(super) struct Candidate {
pub(super) pid: u32,
pub(super) name: SharedString,
pub(super) command: Vec<String>,
struct Candidate {
pid: u32,
name: String,
command: Vec<String>,
}
pub(crate) struct AttachModalDelegate {
@@ -24,20 +24,16 @@ pub(crate) struct AttachModalDelegate {
matches: Vec<StringMatch>,
placeholder_text: Arc<str>,
project: Entity<project::Project>,
debug_config: task::DebugTaskDefinition,
candidates: Arc<[Candidate]>,
debug_config: task::DebugAdapterConfig,
candidates: Option<Vec<Candidate>>,
}
impl AttachModalDelegate {
fn new(
project: Entity<project::Project>,
debug_config: task::DebugTaskDefinition,
candidates: Arc<[Candidate]>,
) -> Self {
pub fn new(project: Entity<project::Project>, debug_config: task::DebugAdapterConfig) -> Self {
Self {
project,
debug_config,
candidates,
candidates: None,
selected_index: 0,
matches: Vec::default(),
placeholder_text: Arc::from("Select the process you want to attach the debugger to"),
@@ -53,56 +49,12 @@ pub struct AttachModal {
impl AttachModal {
pub fn new(
project: Entity<project::Project>,
debug_config: task::DebugTaskDefinition,
debug_config: task::DebugAdapterConfig,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let mut processes: Vec<_> = System::new_all()
.processes()
.values()
.map(|process| {
let name = process.name().to_string_lossy().into_owned();
Candidate {
name: name.into(),
pid: process.pid().as_u32(),
command: process
.cmd()
.iter()
.map(|s| s.to_string_lossy().to_string())
.collect::<Vec<_>>(),
}
})
.collect();
processes.sort_by_key(|k| k.name.clone());
Self::with_processes(project, debug_config, processes, window, cx)
}
pub(super) fn with_processes(
project: Entity<project::Project>,
debug_config: task::DebugTaskDefinition,
processes: Vec<Candidate>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let adapter = project
.read(cx)
.debug_adapters()
.adapter(&debug_config.adapter);
let filter = LazyCell::new(|| adapter.map(|adapter| adapter.attach_processes_filter()));
let processes = processes
.into_iter()
.filter(|process| {
filter
.as_ref()
.map_or(false, |filter| filter.is_match(&process.name))
})
.collect();
let picker = cx.new(|cx| {
Picker::uniform_list(
AttachModalDelegate::new(project, debug_config, processes),
window,
cx,
)
Picker::uniform_list(AttachModalDelegate::new(project, debug_config), window, cx)
});
Self {
_subscription: cx.subscribe(&picker, |_, _, _, cx| {
@@ -164,7 +116,32 @@ impl PickerDelegate for AttachModalDelegate {
) -> gpui::Task<()> {
cx.spawn(async move |this, cx| {
let Some(processes) = this
.update(cx, |this, _| this.delegate.candidates.clone())
.update(cx, |this, _| {
if let Some(processes) = this.delegate.candidates.clone() {
processes
} else {
let system = System::new_all();
let processes =
attach_processes(&this.delegate.debug_config.kind, &system.processes());
let candidates = processes
.into_iter()
.map(|(pid, process)| Candidate {
pid: pid.as_u32(),
name: process.name().to_string_lossy().into_owned(),
command: process
.cmd()
.iter()
.map(|s| s.to_string_lossy().to_string())
.collect::<Vec<_>>(),
})
.collect::<Vec<Candidate>>();
let _ = this.delegate.candidates.insert(candidates.clone());
candidates
}
})
.ok()
else {
return;
@@ -199,6 +176,7 @@ impl PickerDelegate for AttachModalDelegate {
let delegate = &mut this.delegate;
delegate.matches = matches;
delegate.candidates = Some(processes);
if delegate.matches.is_empty() {
delegate.selected_index = 0;
@@ -217,7 +195,7 @@ impl PickerDelegate for AttachModalDelegate {
.get(self.selected_index())
.and_then(|current_match| {
let ix = current_match.candidate_id;
self.candidates.get(ix)
self.candidates.as_ref().map(|candidates| &candidates[ix])
});
let Some(candidate) = candidate else {
@@ -228,7 +206,7 @@ impl PickerDelegate for AttachModalDelegate {
DebugRequestType::Attach(config) => {
config.process_id = Some(candidate.pid);
}
DebugRequestType::Launch(_) => {
DebugRequestType::Launch => {
debug_panic!("Debugger attach modal used on launch debug config");
return;
}
@@ -236,13 +214,7 @@ impl PickerDelegate for AttachModalDelegate {
let config = self.debug_config.clone();
self.project
.update(cx, |project, cx| {
#[cfg(any(test, feature = "test-support"))]
let ret = project.fake_debug_session(config.request, None, false, cx);
#[cfg(not(any(test, feature = "test-support")))]
let ret = project.start_debug_session(config.into(), cx);
ret
})
.update(cx, |project, cx| project.start_debug_session(config, cx))
.detach_and_log_err(cx);
cx.emit(DismissEvent);
@@ -250,6 +222,7 @@ impl PickerDelegate for AttachModalDelegate {
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
self.selected_index = 0;
self.candidates.take();
cx.emit(DismissEvent);
}
@@ -261,8 +234,9 @@ impl PickerDelegate for AttachModalDelegate {
_window: &mut Window,
_: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
let candidates = self.candidates.as_ref()?;
let hit = &self.matches[ix];
let candidate = self.candidates.get(hit.candidate_id)?;
let candidate = &candidates.get(hit.candidate_id)?;
Some(
ListItem::new(SharedString::from(format!("process-entry-{ix}")))
@@ -305,8 +279,9 @@ impl PickerDelegate for AttachModalDelegate {
}
}
#[allow(dead_code)]
#[cfg(any(test, feature = "test-support"))]
pub(crate) fn _process_names(modal: &AttachModal, cx: &mut Context<AttachModal>) -> Vec<String> {
pub(crate) fn process_names(modal: &AttachModal, cx: &mut Context<AttachModal>) -> Vec<String> {
modal.picker.update(cx, |picker, _| {
picker
.delegate

View File

@@ -3,8 +3,8 @@ use anyhow::{anyhow, Result};
use collections::HashMap;
use command_palette_hooks::CommandPaletteFilter;
use dap::{
client::SessionId, debugger_settings::DebuggerSettings, ContinuedEvent, LoadedSourceEvent,
ModuleEvent, OutputEvent, StoppedEvent, ThreadEvent,
client::SessionId, debugger_settings::DebuggerSettings, ContinuedEvent, DebugAdapterConfig,
LoadedSourceEvent, ModuleEvent, OutputEvent, StoppedEvent, ThreadEvent,
};
use futures::{channel::mpsc, SinkExt as _};
use gpui::{
@@ -19,7 +19,6 @@ use project::{
use rpc::proto::{self};
use settings::Settings;
use std::{any::TypeId, path::PathBuf};
use task::DebugTaskDefinition;
use terminal_view::terminal_panel::TerminalPanel;
use ui::prelude::*;
use util::ResultExt;
@@ -53,7 +52,7 @@ pub struct DebugPanel {
project: WeakEntity<Project>,
workspace: WeakEntity<Workspace>,
_subscriptions: Vec<Subscription>,
pub(crate) last_inert_config: Option<DebugTaskDefinition>,
pub(crate) last_inert_config: Option<DebugAdapterConfig>,
}
impl DebugPanel {

View File

@@ -6,6 +6,7 @@ mod starting;
use std::time::Duration;
use dap::client::SessionId;
use dap::DebugAdapterConfig;
use failed::FailedState;
use gpui::{
percentage, Animation, AnimationExt, AnyElement, App, Entity, EventEmitter, FocusHandle,
@@ -18,7 +19,6 @@ use project::Project;
use rpc::proto::{self, PeerId};
use running::RunningState;
use starting::{StartingEvent, StartingState};
use task::DebugTaskDefinition;
use ui::{prelude::*, Indicator};
use util::ResultExt;
use workspace::{
@@ -73,7 +73,7 @@ impl DebugSession {
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
debug_panel: WeakEntity<DebugPanel>,
config: Option<DebugTaskDefinition>,
config: Option<DebugAdapterConfig>,
window: &mut Window,
cx: &mut App,
) -> Entity<Self> {
@@ -171,7 +171,7 @@ impl DebugSession {
.flatten()
.expect("worktree-less project");
let Ok((new_session_id, task)) = dap_store.update(cx, |store, cx| {
store.new_session(config.into(), &worktree, None, cx)
store.new_session(config, &worktree, None, cx)
}) else {
return;
};

View File

@@ -1,10 +1,10 @@
use std::path::PathBuf;
use dap::DebugRequestType;
use dap::{DebugAdapterConfig, DebugAdapterKind, DebugRequestType};
use editor::{Editor, EditorElement, EditorStyle};
use gpui::{App, AppContext, Entity, EventEmitter, FocusHandle, Focusable, TextStyle, WeakEntity};
use settings::Settings as _;
use task::{DebugTaskDefinition, LaunchConfig, TCPHost};
use task::TCPHost;
use theme::ThemeSettings;
use ui::{
div, h_flex, relative, v_flex, ActiveTheme as _, ButtonCommon, ButtonLike, Clickable, Context,
@@ -35,7 +35,7 @@ impl SpawnMode {
impl From<DebugRequestType> for SpawnMode {
fn from(request: DebugRequestType) -> Self {
match request {
DebugRequestType::Launch(_) => SpawnMode::Launch,
DebugRequestType::Launch => SpawnMode::Launch,
DebugRequestType::Attach(_) => SpawnMode::Attach,
}
}
@@ -55,13 +55,18 @@ impl InertState {
pub(super) fn new(
workspace: WeakEntity<Workspace>,
default_cwd: &str,
debug_config: Option<DebugTaskDefinition>,
debug_config: Option<DebugAdapterConfig>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let selected_debugger = debug_config
.as_ref()
.map(|config| SharedString::from(config.adapter.clone()));
let selected_debugger = debug_config.as_ref().and_then(|config| match config.kind {
DebugAdapterKind::Lldb => Some("LLDB".into()),
DebugAdapterKind::Go(_) => Some("Delve".into()),
DebugAdapterKind::Php(_) => Some("PHP".into()),
DebugAdapterKind::Javascript(_) => Some("JavaScript".into()),
DebugAdapterKind::Python(_) => Some("Debugpy".into()),
_ => None,
});
let spawn_mode = debug_config
.as_ref()
@@ -70,10 +75,7 @@ impl InertState {
let program = debug_config
.as_ref()
.and_then(|config| match &config.request {
DebugRequestType::Attach(_) => None,
DebugRequestType::Launch(launch_config) => Some(launch_config.program.clone()),
});
.and_then(|config| config.program.to_owned());
let program_editor = cx.new(|cx| {
let mut editor = Editor::single_line(window, cx);
@@ -86,10 +88,7 @@ impl InertState {
});
let cwd = debug_config
.and_then(|config| match &config.request {
DebugRequestType::Attach(_) => None,
DebugRequestType::Launch(launch_config) => launch_config.cwd.clone(),
})
.and_then(|config| config.cwd.map(|cwd| cwd.to_owned()))
.unwrap_or_else(|| PathBuf::from(default_cwd));
let cwd_editor = cx.new(|cx| {
@@ -117,7 +116,7 @@ impl Focusable for InertState {
}
pub(crate) enum InertEvent {
Spawned { config: DebugTaskDefinition },
Spawned { config: DebugAdapterConfig },
}
impl EventEmitter<InertEvent> for InertState {}
@@ -131,7 +130,6 @@ impl Render for InertState {
cx: &mut ui::Context<'_, Self>,
) -> impl ui::IntoElement {
let weak = cx.weak_entity();
let workspace = self.workspace.clone();
let disable_buttons = self.selected_debugger.is_none();
let spawn_button = ButtonLike::new_rounded_left("spawn-debug-session")
.child(Label::new(self.spawn_mode.label()).size(LabelSize::Small))
@@ -139,26 +137,21 @@ impl Render for InertState {
if this.spawn_mode == SpawnMode::Launch {
let program = this.program_editor.read(cx).text(cx);
let cwd = PathBuf::from(this.cwd_editor.read(cx).text(cx));
let kind = this
.selected_debugger
.as_deref()
.unwrap_or_else(|| {
let kind =
kind_for_label(this.selected_debugger.as_deref().unwrap_or_else(|| {
unimplemented!(
"Automatic selection of a debugger based on users project"
)
})
.to_string();
}));
cx.emit(InertEvent::Spawned {
config: DebugTaskDefinition {
config: DebugAdapterConfig {
label: "hard coded".into(),
adapter: kind,
request: DebugRequestType::Launch(LaunchConfig {
program,
cwd: Some(cwd),
}),
tcp_connection: Some(TCPHost::default()),
kind,
request: DebugRequestType::Launch,
program: Some(program),
cwd: Some(cwd),
initialize_args: None,
supports_attach: false,
},
});
} else {
@@ -166,7 +159,6 @@ impl Render for InertState {
}
}))
.disabled(disable_buttons);
v_flex()
.track_focus(&self.focus_handle)
.size_full()
@@ -187,36 +179,28 @@ impl Render for InertState {
.as_ref()
.unwrap_or_else(|| &SELECT_DEBUGGER_LABEL)
.clone(),
ContextMenu::build(window, cx, move |mut this, _, cx| {
let setter_for_name = |name: SharedString| {
ContextMenu::build(window, cx, move |this, _, _| {
let setter_for_name = |name: &'static str| {
let weak = weak.clone();
move |_: &mut Window, cx: &mut App| {
let name = name.clone();
weak.update(cx, move |this, cx| {
this.selected_debugger = Some(name.clone());
cx.notify();
})
.ok();
let name = name;
(&weak)
.update(cx, move |this, _| {
this.selected_debugger = Some(name.into());
})
.ok();
}
};
let available_adapters = workspace
.update(cx, |this, cx| {
this.project()
.read(cx)
.debug_adapters()
.enumerate_adapters()
})
.ok()
.unwrap_or_default();
for adapter in available_adapters {
this = this.entry(
adapter.0.clone(),
this.entry("GDB", None, setter_for_name("GDB"))
.entry("Delve", None, setter_for_name("Delve"))
.entry("LLDB", None, setter_for_name("LLDB"))
.entry("PHP", None, setter_for_name("PHP"))
.entry(
"JavaScript",
None,
setter_for_name(adapter.0.clone()),
);
}
this
setter_for_name("JavaScript"),
)
.entry("Debugpy", None, setter_for_name("Debugpy"))
}),
)),
),
@@ -281,6 +265,18 @@ impl Render for InertState {
}
}
fn kind_for_label(label: &str) -> DebugAdapterKind {
match label {
"LLDB" => DebugAdapterKind::Lldb,
"Debugpy" => DebugAdapterKind::Python(TCPHost::default()),
"JavaScript" => DebugAdapterKind::Javascript(TCPHost::default()),
"PHP" => DebugAdapterKind::Php(TCPHost::default()),
"Delve" => DebugAdapterKind::Go(TCPHost::default()),
_ => {
unimplemented!()
} // Maybe we should set a toast notification here
}
}
impl InertState {
fn render_editor(editor: &Entity<Editor>, cx: &Context<Self>) -> impl IntoElement {
let settings = ThemeSettings::get_global(cx);
@@ -306,20 +302,19 @@ impl InertState {
}
fn attach(&self, window: &mut Window, cx: &mut Context<Self>) {
let kind = self
.selected_debugger
.as_deref()
.map(|s| s.to_string())
.unwrap_or_else(|| {
unimplemented!("Automatic selection of a debugger based on users project")
});
let cwd = PathBuf::from(self.cwd_editor.read(cx).text(cx));
let kind = kind_for_label(self.selected_debugger.as_deref().unwrap_or_else(|| {
unimplemented!("Automatic selection of a debugger based on users project")
}));
let config = DebugTaskDefinition {
let config = DebugAdapterConfig {
label: "hard coded attach".into(),
adapter: kind,
kind,
request: DebugRequestType::Attach(task::AttachConfig { process_id: None }),
program: None,
cwd: Some(cwd),
initialize_args: None,
tcp_connection: Some(TCPHost::default()),
supports_attach: true,
};
let _ = self.workspace.update(cx, |workspace, cx| {

View File

@@ -1,11 +1,11 @@
use crate::{attach_modal::Candidate, *};
use crate::*;
use attach_modal::AttachModal;
use dap::{client::SessionId, FakeAdapter};
use dap::client::SessionId;
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use menu::Confirm;
use project::{FakeFs, Project};
use serde_json::json;
use task::{AttachConfig, DebugTaskDefinition, TCPHost};
use task::AttachConfig;
use tests::{init_test, init_test_workspace};
#[gpui::test]
@@ -27,12 +27,14 @@ async fn test_direct_attach_to_process(executor: BackgroundExecutor, cx: &mut Te
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Attach(AttachConfig {
process_id: Some(10),
}),
None,
false,
project.start_debug_session(
dap::test_config(
dap::DebugRequestType::Attach(AttachConfig {
process_id: Some(10),
}),
None,
None,
),
cx,
)
});
@@ -81,32 +83,13 @@ async fn test_show_attach_modal_and_select_process(
let attach_modal = workspace
.update(cx, |workspace, window, cx| {
workspace.toggle_modal(window, cx, |window, cx| {
AttachModal::with_processes(
AttachModal::new(
project.clone(),
DebugTaskDefinition {
adapter: FakeAdapter::ADAPTER_NAME.into(),
request: dap::DebugRequestType::Attach(AttachConfig::default()),
label: "attach example".into(),
initialize_args: None,
tcp_connection: Some(TCPHost::default()),
},
vec![
Candidate {
pid: 0,
name: "fake-binary-1".into(),
command: vec![],
},
Candidate {
pid: 3,
name: "non-fake-binary-1".into(),
command: vec![],
},
Candidate {
pid: 1,
name: "fake-binary-2".into(),
command: vec![],
},
],
dap::test_config(
dap::DebugRequestType::Attach(AttachConfig { process_id: None }),
None,
None,
),
window,
cx,
)
@@ -122,10 +105,10 @@ async fn test_show_attach_modal_and_select_process(
workspace
.update(cx, |_, _, cx| {
let names =
attach_modal.update(cx, |modal, cx| attach_modal::_process_names(&modal, cx));
attach_modal.update(cx, |modal, cx| attach_modal::process_names(&modal, cx));
// we filtered out all processes that are not starting with `fake-binary`
assert_eq!(2, names.len());
// we filtered out all processes that are not the current process (zed itself)
assert_eq!(1, names.len());
})
.unwrap();

View File

@@ -3,7 +3,6 @@ use dap::requests::StackTrace;
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Project};
use serde_json::json;
use task::LaunchConfig;
use tests::{init_test, init_test_workspace};
#[gpui::test]
@@ -30,10 +29,8 @@ async fn test_handle_output_event(executor: BackgroundExecutor, cx: &mut TestApp
.unwrap();
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});

View File

@@ -5,8 +5,8 @@ use dap::{
Continue, Disconnect, Launch, Next, RunInTerminal, SetBreakpoints, StackTrace,
StartDebugging, StepBack, StepIn, StepOut, Threads,
},
ErrorResponse, RunInTerminalRequestArguments, SourceBreakpoint, StartDebuggingRequestArguments,
StartDebuggingRequestArgumentsRequest,
DebugRequestType, ErrorResponse, RunInTerminalRequestArguments, SourceBreakpoint,
StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
};
use editor::{
actions::{self},
@@ -25,7 +25,6 @@ use std::{
Arc,
},
};
use task::LaunchConfig;
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
use tests::{active_debug_session_panel, init_test, init_test_workspace};
use util::path;
@@ -50,12 +49,7 @@ async fn test_basic_show_debug_panel(executor: BackgroundExecutor, cx: &mut Test
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -207,12 +201,7 @@ async fn test_we_can_only_have_one_panel_per_debug_session(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -396,12 +385,7 @@ async fn test_handle_successful_run_in_terminal_reverse_request(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -491,12 +475,7 @@ async fn test_handle_error_run_in_terminal_reverse_request(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -576,12 +555,7 @@ async fn test_handle_start_debugging_reverse_request(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -694,12 +668,7 @@ async fn test_shutdown_children_when_parent_session_shutdown(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let parent_session = task.await.unwrap();
@@ -807,12 +776,7 @@ async fn test_shutdown_parent_session_if_all_children_are_shutdown(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let parent_session = task.await.unwrap();
@@ -927,13 +891,15 @@ async fn test_debug_panel_item_thread_status_reset_on_failure(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
Some(dap::Capabilities {
supports_step_back: Some(true),
..Default::default()
}),
false,
project.start_debug_session(
dap::test_config(
DebugRequestType::Launch,
None,
Some(dap::Capabilities {
supports_step_back: Some(true),
..Default::default()
}),
),
cx,
)
});
@@ -1156,12 +1122,7 @@ async fn test_send_breakpoints_when_editor_has_been_saved(
.unwrap();
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -1386,12 +1347,7 @@ async fn test_unsetting_breakpoints_on_clear_breakpoint_action(
});
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
cx,
)
project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
});
let session = task.await.unwrap();
@@ -1463,10 +1419,8 @@ async fn test_debug_session_is_shutdown_when_attach_and_launch_request_fails(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
true,
project.start_debug_session(
dap::test_config(DebugRequestType::Launch, Some(true), None),
cx,
)
});

View File

@@ -5,7 +5,7 @@ use crate::{
};
use dap::{
requests::{Modules, StackTrace, Threads},
StoppedEvent,
DebugRequestType, StoppedEvent,
};
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Project};
@@ -13,7 +13,6 @@ use std::sync::{
atomic::{AtomicBool, AtomicI32, Ordering},
Arc,
};
use task::LaunchConfig;
#[gpui::test]
async fn test_module_list(executor: BackgroundExecutor, cx: &mut TestAppContext) {
@@ -31,13 +30,15 @@ async fn test_module_list(executor: BackgroundExecutor, cx: &mut TestAppContext)
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
Some(dap::Capabilities {
supports_modules_request: Some(true),
..Default::default()
}),
false,
project.start_debug_session(
dap::test_config(
DebugRequestType::Launch,
None,
Some(dap::Capabilities {
supports_modules_request: Some(true),
..Default::default()
}),
),
cx,
)
});

View File

@@ -12,7 +12,6 @@ use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Project};
use serde_json::json;
use std::sync::Arc;
use task::LaunchConfig;
use unindent::Unindent as _;
use util::path;
@@ -53,10 +52,8 @@ async fn test_fetch_initial_stack_frames_and_go_to_stack_frame(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});
@@ -243,10 +240,8 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});
@@ -518,10 +513,8 @@ async fn test_collapsed_entries(executor: BackgroundExecutor, cx: &mut TestAppCo
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});

View File

@@ -17,7 +17,6 @@ use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use menu::{SelectFirst, SelectNext, SelectPrevious};
use project::{FakeFs, Project};
use serde_json::json;
use task::LaunchConfig;
use unindent::Unindent as _;
use util::path;
@@ -57,10 +56,8 @@ async fn test_basic_fetch_initial_scope_and_variables(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});
@@ -286,10 +283,8 @@ async fn test_fetch_variables_for_multiple_scopes(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});
@@ -567,10 +562,8 @@ async fn test_keyboard_navigation(executor: BackgroundExecutor, cx: &mut TestApp
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});
@@ -1369,10 +1362,8 @@ async fn test_variable_list_only_sends_requests_when_rendering(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});
@@ -1648,10 +1639,8 @@ async fn test_it_fetches_scopes_variables_when_you_select_a_stack_frame(
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let task = project.update(cx, |project, cx| {
project.fake_debug_session(
dap::DebugRequestType::Launch(LaunchConfig::default()),
None,
false,
project.start_debug_session(
dap::test_config(dap::DebugRequestType::Launch, None, None),
cx,
)
});

View File

@@ -20,8 +20,8 @@ use crate::hover_popover::hover_markdown_style;
#[derive(Clone, Debug)]
pub struct CommitDetails {
pub sha: SharedString,
pub author_name: SharedString,
pub author_email: SharedString,
pub committer_name: SharedString,
pub committer_email: SharedString,
pub commit_time: OffsetDateTime,
pub message: Option<ParsedCommitMessage>,
}
@@ -133,12 +133,16 @@ impl CommitTooltip {
CommitDetails {
sha: blame.sha.to_string().into(),
commit_time,
author_name: blame
.author
committer_name: blame
.committer_name
.clone()
.unwrap_or("<no name>".to_string())
.into(),
author_email: blame.author_mail.clone().unwrap_or("".to_string()).into(),
committer_email: blame
.committer_email
.clone()
.unwrap_or("".to_string())
.into(),
message: details,
},
window,
@@ -176,9 +180,9 @@ impl Render for CommitTooltip {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let avatar = CommitAvatar::new(&self.commit).render(window, cx);
let author = self.commit.author_name.clone();
let author = self.commit.committer_name.clone();
let author_email = self.commit.author_email.clone();
let author_email = self.commit.committer_email.clone();
let short_commit_id = self
.commit

View File

@@ -1338,8 +1338,7 @@ impl EditorElement {
fn layout_scrollbars(
&self,
snapshot: &EditorSnapshot,
scrollbar_layout_information: ScrollbarLayoutInformation,
content_offset: gpui::Point<Pixels>,
scrollbar_range_data: ScrollbarLayoutInformation,
scroll_position: gpui::Point<f32>,
non_visible_cursors: bool,
window: &mut Window,
@@ -1391,8 +1390,7 @@ impl EditorElement {
Some(EditorScrollbars::from_scrollbar_axes(
scrollbar_settings.axes,
&scrollbar_layout_information,
content_offset,
&scrollbar_range_data,
scroll_position,
self.style.scrollbar_width,
show_scrollbars,
@@ -6615,13 +6613,13 @@ impl Element for EditorElement {
// Offset the content_bounds from the text_bounds by the gutter margin (which
// is roughly half a character wide) to make hit testing work more like how we want.
let content_offset = point(gutter_dimensions.margin, Pixels::ZERO);
let content_origin = text_hitbox.origin + content_offset;
let content_origin =
text_hitbox.origin + point(gutter_dimensions.margin, Pixels::ZERO);
let editor_text_bounds =
let scrollbar_bounds =
Bounds::from_corners(content_origin, bounds.bottom_right());
let height_in_lines = editor_text_bounds.size.height / line_height;
let height_in_lines = scrollbar_bounds.size.height / line_height;
let max_row = snapshot.max_point().row().as_f32();
@@ -6949,7 +6947,7 @@ impl Element for EditorElement {
.width;
let scrollbar_layout_information = ScrollbarLayoutInformation::new(
text_hitbox.bounds,
scrollbar_bounds,
glyph_grid_cell,
size(longest_line_width, max_row.as_f32() * line_height),
longest_line_blame_width,
@@ -7021,7 +7019,7 @@ impl Element for EditorElement {
MultiBufferRow(end_anchor.to_point(&snapshot.buffer_snapshot).row);
let scroll_max = point(
((scroll_width - editor_text_bounds.size.width) / em_width).max(0.0),
((scroll_width - scrollbar_bounds.size.width) / em_width).max(0.0),
max_scroll_top,
);
@@ -7227,7 +7225,6 @@ impl Element for EditorElement {
let scrollbars_layout = self.layout_scrollbars(
&snapshot,
scrollbar_layout_information,
content_offset,
scroll_position,
non_visible_cursors,
window,
@@ -7597,8 +7594,8 @@ pub(super) fn gutter_bounds(
/// Holds information required for layouting the editor scrollbars.
struct ScrollbarLayoutInformation {
/// The bounds of the editor area (excluding the content offset).
editor_bounds: Bounds<Pixels>,
/// The bounds of the editor text area.
editor_text_bounds: Bounds<Pixels>,
/// The available range to scroll within the document.
scroll_range: Size<Pixels>,
/// The space available for one glyph in the editor.
@@ -7607,7 +7604,7 @@ struct ScrollbarLayoutInformation {
impl ScrollbarLayoutInformation {
pub fn new(
editor_bounds: Bounds<Pixels>,
scrollbar_bounds: Bounds<Pixels>,
glyph_grid_cell: Size<Pixels>,
document_size: Size<Pixels>,
longest_line_blame_width: Pixels,
@@ -7616,7 +7613,7 @@ impl ScrollbarLayoutInformation {
settings: &EditorSettings,
) -> Self {
let vertical_overscroll = match settings.scroll_beyond_last_line {
ScrollBeyondLastLine::OnePage => editor_bounds.size.height,
ScrollBeyondLastLine::OnePage => scrollbar_bounds.size.height,
ScrollBeyondLastLine::Off => glyph_grid_cell.height,
ScrollBeyondLastLine::VerticalScrollMargin => {
(1.0 + settings.vertical_scroll_margin) * glyph_grid_cell.height
@@ -7634,7 +7631,7 @@ impl ScrollbarLayoutInformation {
let scroll_range = document_size + overscroll;
ScrollbarLayoutInformation {
editor_bounds,
editor_text_bounds: scrollbar_bounds,
scroll_range,
glyph_grid_cell,
}
@@ -7740,14 +7737,13 @@ impl EditorScrollbars {
pub fn from_scrollbar_axes(
settings_visibility: ScrollbarAxes,
layout_information: &ScrollbarLayoutInformation,
content_offset: gpui::Point<Pixels>,
scroll_position: gpui::Point<f32>,
scrollbar_width: Pixels,
show_scrollbars: bool,
window: &mut Window,
) -> Self {
let ScrollbarLayoutInformation {
editor_bounds,
editor_text_bounds,
scroll_range,
glyph_grid_cell,
} = layout_information;
@@ -7755,20 +7751,20 @@ impl EditorScrollbars {
let scrollbar_bounds_for = |axis: ScrollbarAxis| match axis {
ScrollbarAxis::Horizontal => Bounds::from_corner_and_size(
Corner::BottomLeft,
editor_bounds.bottom_left(),
editor_text_bounds.bottom_left(),
size(
if settings_visibility.vertical {
editor_bounds.size.width - scrollbar_width
editor_text_bounds.size.width - scrollbar_width
} else {
editor_bounds.size.width
editor_text_bounds.size.width
},
scrollbar_width,
),
),
ScrollbarAxis::Vertical => Bounds::from_corner_and_size(
Corner::TopRight,
editor_bounds.top_right(),
size(scrollbar_width, editor_bounds.size.height),
editor_text_bounds.top_right(),
size(scrollbar_width, editor_text_bounds.size.height),
),
};
@@ -7777,24 +7773,23 @@ impl EditorScrollbars {
.along(axis)
.then(|| {
(
editor_bounds.size.along(axis) - content_offset.along(axis),
editor_text_bounds.size.along(axis),
scroll_range.along(axis),
)
})
.filter(|(editor_content_size, scroll_range)| {
.filter(|(editor_size, scroll_range)| {
// The scrollbar should only be rendered if the content does
// not entirely fit into the editor
// However, this only applies to the horizontal scrollbar, as information about the
// vertical scrollbar layout is always needed for scrollbar diagnostics.
axis != ScrollbarAxis::Horizontal || editor_content_size < scroll_range
axis != ScrollbarAxis::Horizontal || editor_size < scroll_range
})
.map(|(editor_content_size, scroll_range)| {
.map(|(editor_size, scroll_range)| {
ScrollbarLayout::new(
window.insert_hitbox(scrollbar_bounds_for(axis), false),
editor_content_size,
editor_size,
scroll_range,
glyph_grid_cell.along(axis),
content_offset.along(axis),
scroll_position.along(axis),
axis,
)
@@ -7829,7 +7824,6 @@ struct ScrollbarLayout {
hitbox: Hitbox,
visible_range: Range<f32>,
text_unit_size: Pixels,
content_offset: Pixels,
thumb_size: Pixels,
axis: ScrollbarAxis,
}
@@ -7842,34 +7836,30 @@ impl ScrollbarLayout {
fn new(
scrollbar_track_hitbox: Hitbox,
editor_content_size: Pixels,
editor_size: Pixels,
scroll_range: Pixels,
glyph_space: Pixels,
content_offset: Pixels,
scroll_position: f32,
axis: ScrollbarAxis,
) -> Self {
let track_bounds = scrollbar_track_hitbox.bounds;
// The length of the track available to the scrollbar thumb. We deliberately
// exclude the content size here so that the thumb aligns with the content.
let track_length = track_bounds.size.along(axis) - content_offset;
let scrollbar_track_bounds = scrollbar_track_hitbox.bounds;
let scrollbar_track_length = scrollbar_track_bounds.size.along(axis);
let text_units_per_page = editor_content_size / glyph_space;
let text_units_per_page = editor_size / glyph_space;
let visible_range = scroll_position..scroll_position + text_units_per_page;
let total_text_units = scroll_range / glyph_space;
let thumb_percentage = text_units_per_page / total_text_units;
let thumb_size = (track_length * thumb_percentage)
let thumb_size = (scrollbar_track_length * thumb_percentage)
.max(ScrollbarLayout::MIN_THUMB_SIZE)
.min(track_length);
let text_unit_size =
(track_length - thumb_size) / (total_text_units - text_units_per_page).max(0.);
.min(scrollbar_track_length);
let text_unit_size = (scrollbar_track_length - thumb_size)
/ (total_text_units - text_units_per_page).max(0.);
ScrollbarLayout {
hitbox: scrollbar_track_hitbox,
visible_range,
text_unit_size,
content_offset,
thumb_size,
axis,
}
@@ -7888,7 +7878,7 @@ impl ScrollbarLayout {
}
fn thumb_origin(&self, origin: Pixels) -> Pixels {
origin + self.content_offset + self.visible_range.start * self.text_unit_size
origin + self.visible_range.start * self.text_unit_size
}
fn marker_quads_for_ranges(

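A worked example, not part of the diff, of the simplified thumb math above; the pixel values are invented, and only the formulas come from `ScrollbarLayout::new` (plain `f32` stands in for `Pixels`, and 20.0 stands in for `MIN_THUMB_SIZE`):
let scrollbar_track_length = 600.0_f32; // vertical track height in px
let glyph_space = 20.0_f32;             // line height in px
let scroll_range = 6000.0_f32;          // total scrollable height in px
let editor_size = 600.0_f32;            // visible editor height in px
let text_units_per_page = editor_size / glyph_space; // 30 visible lines
let total_text_units = scroll_range / glyph_space;   // 300 total lines
let thumb_size = (scrollbar_track_length * (text_units_per_page / total_text_units))
    .max(20.0)
    .min(scrollbar_track_length);                     // 60 px thumb
let text_unit_size = (scrollbar_track_length - thumb_size)
    / (total_text_units - text_units_per_page).max(0.0); // 540 px / 270 lines = 2 px per line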
View File

@@ -240,7 +240,7 @@ impl EditorTestContext {
// unlike cx.simulate_keystrokes(), this does not run_until_parked
// so you can use it to test detailed timing
pub fn simulate_keystroke(&mut self, keystroke_text: &str) {
let keystroke = Keystroke::parse_case_insensitive(keystroke_text).unwrap();
let keystroke = Keystroke::parse(keystroke_text).unwrap();
self.cx.dispatch_keystroke(self.window, keystroke);
}

View File

@@ -19,7 +19,6 @@ clap.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
dap.workspace = true
env_logger.workspace = true
feature_flags.workspace = true
fs.workspace = true

View File

@@ -4,7 +4,6 @@ use clap::Parser;
use client::{Client, UserStore};
use clock::RealSystemClock;
use collections::BTreeMap;
use dap::DapRegistry;
use feature_flags::FeatureFlagAppExt as _;
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Entity};
use http_client::{HttpClient, Method};
@@ -303,7 +302,6 @@ async fn run_evaluation(
));
let language_registry = Arc::new(LanguageRegistry::new(executor.clone()));
let debug_adapters = Arc::new(DapRegistry::default());
cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx))
.unwrap();
@@ -348,7 +346,6 @@ async fn run_evaluation(
node_runtime.clone(),
user_store.clone(),
language_registry.clone(),
debug_adapters.clone(),
fs.clone(),
None,
cx,

View File

@@ -837,19 +837,6 @@ impl ExtensionStore {
}
});
fs.remove_dir(
&extension_dir,
RemoveOptions {
recursive: true,
ignore_if_not_exists: true,
},
)
.await?;
// todo(windows)
// Stop the server here.
this.update(cx, |this, cx| this.reload(None, cx))?.await;
fs.remove_dir(
&work_dir,
RemoveOptions {
@@ -859,6 +846,16 @@ impl ExtensionStore {
)
.await?;
fs.remove_dir(
&extension_dir,
RemoveOptions {
recursive: true,
ignore_if_not_exists: true,
},
)
.await?;
this.update(cx, |this, cx| this.reload(None, cx))?.await;
anyhow::Ok(())
})
.detach_and_log_err(cx)

View File

@@ -64,6 +64,17 @@ impl FeatureFlag for PredictEditsRateCompletionsFeatureFlag {
const NAME: &'static str = "predict-edits-rate-completions";
}
/// A feature flag that controls whether "non eager mode" (holding `alt` to preview) is publicized.
pub struct PredictEditsNonEagerModeFeatureFlag;
impl FeatureFlag for PredictEditsNonEagerModeFeatureFlag {
const NAME: &'static str = "predict-edits-non-eager-mode";
fn enabled_for_staff() -> bool {
// Don't show to staff so it doesn't leak into media for the launch.
false
}
}
pub struct Remoting {}
impl FeatureFlag for Remoting {
const NAME: &'static str = "remoting";
@@ -90,6 +101,15 @@ impl FeatureFlag for NotebookFeatureFlag {
const NAME: &'static str = "notebooks";
}
pub struct AutoCommand {}
impl FeatureFlag for AutoCommand {
const NAME: &'static str = "auto-command";
fn enabled_for_staff() -> bool {
false
}
}
pub struct Debugger {}
impl FeatureFlag for Debugger {
const NAME: &'static str = "debugger";

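For completeness, a sketch, not part of the diff, of the pattern both new flags follow; the flag name here is hypothetical:
pub struct MyHiddenFeature {}
impl FeatureFlag for MyHiddenFeature {
    const NAME: &'static str = "my-hidden-feature";
    // Overriding the default keeps the flag off even for staff accounts.
    fn enabled_for_staff() -> bool {
        false
    }
}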
View File

@@ -953,28 +953,38 @@ impl FileFinderDelegate {
let path = &path_match.path;
let path_string = path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let mut path_positions = path_match.positions.clone();
let positions = path_match.positions.clone();
let file_name = path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
// Calculate where the filename starts in the full path
let file_name_start = path_match.path_prefix.len() + path_string.len() - file_name.len();
let file_name_positions = path_positions
.iter()
.filter_map(|pos| {
if pos >= &file_name_start {
Some(pos - file_name_start)
} else {
None
}
})
.collect();
// Create a copy of the full path without the filename (this is the parent directory)
let parent_path = full_path[..full_path.len() - file_name.len()].to_string();
// Process each highlight position
let mut file_name_positions = Vec::new();
let mut parent_path_positions = Vec::new();
for &pos in &positions {
// For the filename part
if pos >= file_name_start && pos < full_path.len() {
// This position is in the filename part
file_name_positions.push(pos - file_name_start);
}
// For the parent path part
if pos < parent_path.len() {
// This position is in the parent path part
parent_path_positions.push(pos);
}
}
let full_path = full_path.trim_end_matches(&file_name).to_string();
path_positions.retain(|idx| *idx < full_path.len());
(file_name, file_name_positions, full_path, path_positions)
(file_name, file_name_positions, parent_path, parent_path_positions)
}
fn lookup_absolute_path(
@@ -1339,7 +1349,119 @@ impl PickerDelegate for FileFinderDelegate {
.size(IconSize::Small.rems())
.into_any_element(),
};
let (file_name_label, full_path_label) = self.labels_for_match(path_match, window, cx, ix);
// Get the path information
let path_info = match &path_match {
Match::History {
path: entry_path,
panel_match,
} => {
let worktree_id = entry_path.project.worktree_id;
let project_relative_path = &entry_path.project.path;
let has_worktree = self
.project
.read(cx)
.worktree_for_id(worktree_id, cx)
.is_some();
// Use window to avoid unused variable warning
let _ = window;
if let Some(absolute_path) =
entry_path.absolute.as_ref().filter(|_| !has_worktree)
{
(
absolute_path
.file_name()
.map_or_else(
|| project_relative_path.to_string_lossy(),
|file_name| file_name.to_string_lossy(),
)
.to_string(),
absolute_path.to_string_lossy().to_string(),
Vec::new(),
)
} else {
let mut path = Arc::clone(project_relative_path);
if project_relative_path.as_ref() == Path::new("") {
if let Some(absolute_path) = &entry_path.absolute {
path = Arc::from(absolute_path.as_path());
}
}
let mut path_match = PathMatch {
score: ix as f64,
positions: Vec::new(),
worktree_id: worktree_id.to_usize(),
path,
is_dir: false, // File finder doesn't support directories
path_prefix: "".into(),
distance_to_relative_ancestor: usize::MAX,
};
if let Some(found_path_match) = &panel_match {
path_match
.positions
.extend(found_path_match.0.positions.iter())
}
let path_string = path_match.path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let positions = path_match.positions.clone();
let file_name = path_match.path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
(file_name, full_path, positions)
}
}
Match::Search(path_match) => {
let path_string = path_match.0.path.to_string_lossy();
let full_path = [path_match.0.path_prefix.as_ref(), path_string.as_ref()].join("");
let positions = path_match.0.positions.clone();
let file_name = path_match.0.path.file_name().map_or_else(
|| path_match.0.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
(file_name, full_path, positions)
}
};
let (file_name, full_path, positions) = path_info;
// Calculate where the filename starts in the full path
let file_name_start = full_path.len() - file_name.len();
// Create a parent path
let parent_path = full_path[..file_name_start].to_string();
// Create parent path label with highlighting
let parent_highlight_positions: Vec<usize> = positions
.iter()
.filter(|&&pos| pos < parent_path.len())
.copied()
.collect();
let parent_path_label = HighlightedLabel::new(parent_path, parent_highlight_positions)
.size(LabelSize::Small)
.color(Color::Muted);
// Create filename label with highlighting
let file_highlight_positions: Vec<usize> = positions
.iter()
.filter_map(|&pos| {
if pos >= file_name_start {
Some(pos - file_name_start)
} else {
None
}
})
.collect();
let file_name_label = HighlightedLabel::new(file_name.clone(), file_highlight_positions);
let file_icon = maybe!({
if !settings.file_icons {
@@ -1362,7 +1484,7 @@ impl PickerDelegate for FileFinderDelegate {
.gap_2()
.py_px()
.child(file_name_label)
.child(full_path_label),
.child(parent_path_label),
),
)
}
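A worked example, not part of the diff, of how the match positions are split between the parent path and the file name; the strings and positions are invented, but the index arithmetic matches the code above:
let full_path = "src/main.rs";
let file_name = "main.rs";
let positions = vec![0usize, 4, 9]; // highlights on 's', 'm', and 'r'
let file_name_start = full_path.len() - file_name.len(); // 11 - 7 = 4
let parent_path = &full_path[..file_name_start];          // "src/"
// Positions before the split belong to the parent path label...
let parent_path_positions: Vec<usize> =
    positions.iter().copied().filter(|&p| p < parent_path.len()).collect(); // [0]
// ...and positions after it are re-based onto the file name label.
let file_name_positions: Vec<usize> = positions
    .iter()
    .copied()
    .filter_map(|p| (p >= file_name_start).then(|| p - file_name_start))
    .collect(); // [0, 5] -> 'm' and 'r' in "main.rs"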

View File

@@ -3951,8 +3951,8 @@ impl GitPanelMessageTooltip {
let commit_details = editor::commit_tooltip::CommitDetails {
sha: details.sha.clone(),
author_name: details.committer_name.clone(),
author_email: details.committer_email.clone(),
committer_name: details.committer_name.clone(),
committer_email: details.committer_email.clone(),
commit_time: OffsetDateTime::from_unix_timestamp(details.commit_timestamp)?,
message: Some(editor::commit_tooltip::ParsedCommitMessage {
message: details.message.clone(),

View File

@@ -399,7 +399,7 @@ impl TestAppContext {
pub fn simulate_keystrokes(&mut self, window: AnyWindowHandle, keystrokes: &str) {
for keystroke in keystrokes
.split(' ')
.map(Keystroke::parse_case_insensitive)
.map(Keystroke::parse)
.map(Result::unwrap)
{
self.dispatch_keystroke(window, keystroke);
@@ -413,11 +413,7 @@ impl TestAppContext {
/// will type abc into your current editor
/// This will also run the background executor until it's parked.
pub fn simulate_input(&mut self, window: AnyWindowHandle, input: &str) {
for keystroke in input
.split("")
.map(Keystroke::parse_case_insensitive)
.map(Result::unwrap)
{
for keystroke in input.split("").map(Keystroke::parse).map(Result::unwrap) {
self.dispatch_keystroke(window, keystroke);
}
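
For context, a hedged usage sketch of the two helpers in this hunk; `cx` and `window` come from the surrounding test harness and are not constructed here:

use gpui::{AnyWindowHandle, TestAppContext};

// Usage sketch only; assumes a test that already owns a TestAppContext and a window handle.
fn drive_editor(cx: &mut TestAppContext, window: AnyWindowHandle) {
    // With the case-sensitive `Keystroke::parse`, modifiers and keys are spelled in lowercase
    // and separated by `-`; multiple keystrokes are separated by spaces.
    cx.simulate_keystrokes(window, "cmd-shift-p enter");
    // `simulate_input` splits its argument into one keystroke per character, typing "abc".
    cx.simulate_input(window, "abc");
}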

View File

@@ -42,9 +42,9 @@ impl Display for InvalidKeystrokeError {
}
/// Sentence explaining what keystroke parser expects, starting with "Expected ..."
pub const KEYSTROKE_PARSE_EXPECTED_MESSAGE: &str = "Expected a sequence of lowercase modifiers \
pub const KEYSTROKE_PARSE_EXPECTED_MESSAGE: &str = "Expected a sequence of modifiers \
(`ctrl`, `alt`, `shift`, `fn`, `cmd`, `super`, or `win`) \
followed by a lowercase key, separated by `-`.";
followed by a key, separated by `-`.";
impl Keystroke {
/// When matching a key we cannot know whether the user intended to type
@@ -81,28 +81,6 @@ impl Keystroke {
/// secondary means "cmd" on macOS and "ctrl" on other platforms
/// when matching, a key with a key_char set will be matched without it.
pub fn parse(source: &str) -> std::result::Result<Self, InvalidKeystrokeError> {
return Self::parse_impl(source, true);
}
/// Parse a keystroke case-insensitively. This means
/// keystrokes like `ctrl-T` will not be rejected.
/// Useful in tests to allow more concise keystroke inputs,
/// e.g., `simulate_keystrokes("ctrl-T")` instead of `simulate_keystrokes("ctrl-shift-t")`.
/// This also allows `simulate_input` style functions to support capital letters,
/// e.g., `simulate_input("Title Case")` can work by just parsing each character as a keystroke
/// and dispatching it, instead of needing to parse something like
/// `simulate_input("shift-title shift-case")`.
#[cfg(any(test, feature = "test-support"))]
pub fn parse_case_insensitive(
source: &str,
) -> std::result::Result<Self, InvalidKeystrokeError> {
return Self::parse_impl(source, false);
}
fn parse_impl(
source: &str,
case_sensitive: bool,
) -> std::result::Result<Self, InvalidKeystrokeError> {
let mut control = false;
let mut alt = false;
let mut shift = false;
@@ -113,74 +91,38 @@ impl Keystroke {
let mut components = source.split('-').peekable();
while let Some(component) = components.next() {
if component.eq_ignore_ascii_case("ctrl") {
control = true;
continue;
}
if component.eq_ignore_ascii_case("alt") {
alt = true;
continue;
}
if component.eq_ignore_ascii_case("shift") {
shift = true;
continue;
}
if component.eq_ignore_ascii_case("fn") {
function = true;
continue;
}
if component.eq_ignore_ascii_case("secondary") {
if cfg!(target_os = "macos") {
platform = true;
} else {
control = true;
};
continue;
}
let is_platform = component.eq_ignore_ascii_case("cmd")
|| component.eq_ignore_ascii_case("super")
|| component.eq_ignore_ascii_case("win");
if is_platform {
platform = true;
continue;
}
let mut key_str = component.to_string();
if let Some(next) = components.peek() {
if next.is_empty() && source.ends_with('-') {
key = Some(String::from("-"));
break;
} else if next.len() > 1 && next.starts_with('>') {
key = Some(key_str);
key_char = Some(String::from(&next[1..]));
components.next();
} else {
return Err(InvalidKeystrokeError {
keystroke: source.to_owned(),
});
match component {
"ctrl" => control = true,
"alt" => alt = true,
"shift" => shift = true,
"fn" => function = true,
"secondary" => {
if cfg!(target_os = "macos") {
platform = true
} else {
control = true
};
}
continue;
}
if component.len() == 1 && component.as_bytes()[0].is_ascii_uppercase() {
if case_sensitive {
return Err(InvalidKeystrokeError {
keystroke: source.to_owned(),
});
} else {
// Convert to shift + lowercase char if parsing case insensitively
shift = true;
key_str.make_ascii_lowercase();
"cmd" | "super" | "win" => platform = true,
_ => {
if let Some(next) = components.peek() {
if next.is_empty() && source.ends_with('-') {
key = Some(String::from("-"));
break;
} else if next.len() > 1 && next.starts_with('>') {
key = Some(String::from(component));
key_char = Some(String::from(&next[1..]));
components.next();
} else {
return Err(InvalidKeystrokeError {
keystroke: source.to_owned(),
});
}
} else {
key = Some(String::from(component));
}
}
} else if case_sensitive {
// convert ascii chars to lowercase so that named keys like "tab" and "enter"
// are accepted case insensitively and stored how we expect so they are matched properly
key_str.make_ascii_lowercase()
}
key = Some(key_str);
}
// Allow for the user to specify a keystroke modifier as the key itself
@@ -217,7 +159,7 @@ impl Keystroke {
function,
},
key,
key_char,
key_char: key_char,
})
}
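
The simplified `match`-based parser above compares modifier names exactly, so the behavior sketched below follows from the hunk as shown; this is illustrative, not a test from the repository:

use gpui::Keystroke;

fn main() {
    // Lowercase modifiers followed by a key parse as before.
    assert!(Keystroke::parse("ctrl-shift-t").is_ok());
    // A capitalized modifier no longer matches any arm; it falls into the `_` branch with a
    // component still pending after it, so the keystroke is rejected.
    assert!(Keystroke::parse("Ctrl-t").is_err());
    // A trailing `-` still parses as the `-` key via the `next.is_empty()` branch.
    assert!(Keystroke::parse("ctrl--").is_ok());
}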

View File

@@ -231,6 +231,7 @@ pub enum IconName {
X,
XCircle,
ZedAssistant,
ZedAssistant2,
ZedAssistantFilled,
ZedPredict,
ZedPredictUp,

View File

@@ -586,11 +586,7 @@ pub fn into_anthropic(
model,
messages: new_messages,
max_tokens: max_output_tokens,
system: if system_message.is_empty() {
None
} else {
Some(anthropic::StringOrContents::String(system_message))
},
system: Some(system_message),
thinking: if let AnthropicModelMode::Thinking { budget_tokens } = mode {
Some(anthropic::Thinking::Enabled { budget_tokens })
} else {

View File

@@ -7,5 +7,3 @@
("\"" @open "\"" @close)
("'" @open "'" @close)
("`" @open "`" @close)
((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only))

View File

@@ -4,22 +4,17 @@ pub use pulldown_cmark::TagEnd as MarkdownTagEnd;
use pulldown_cmark::{Alignment, HeadingLevel, LinkType, MetadataBlockKind, Options, Parser};
use std::{collections::HashSet, ops::Range};
const PARSE_OPTIONS: Options = Options::ENABLE_TABLES
.union(Options::ENABLE_FOOTNOTES)
.union(Options::ENABLE_STRIKETHROUGH)
.union(Options::ENABLE_TASKLISTS)
.union(Options::ENABLE_SMART_PUNCTUATION)
.union(Options::ENABLE_HEADING_ATTRIBUTES)
.union(Options::ENABLE_PLUSES_DELIMITED_METADATA_BLOCKS)
.union(Options::ENABLE_OLD_FOOTNOTES)
.union(Options::ENABLE_GFM);
pub fn parse_markdown(text: &str) -> (Vec<(Range<usize>, MarkdownEvent)>, HashSet<SharedString>) {
let mut options = Options::all();
options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST);
options.remove(pulldown_cmark::Options::ENABLE_YAML_STYLE_METADATA_BLOCKS);
options.remove(pulldown_cmark::Options::ENABLE_MATH);
let mut events = Vec::new();
let mut languages = HashSet::new();
let mut within_link = false;
let mut within_metadata = false;
for (pulldown_event, mut range) in Parser::new_ext(text, PARSE_OPTIONS).into_offset_iter() {
for (pulldown_event, mut range) in Parser::new_ext(text, options).into_offset_iter() {
if within_metadata {
if let pulldown_cmark::Event::End(pulldown_cmark::TagEnd::MetadataBlock { .. }) =
pulldown_event
@@ -364,27 +359,3 @@ impl From<pulldown_cmark::Tag<'_>> for MarkdownTag {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
const UNWANTED_OPTIONS: Options = Options::ENABLE_YAML_STYLE_METADATA_BLOCKS
.union(Options::ENABLE_MATH)
.union(Options::ENABLE_DEFINITION_LIST);
#[test]
fn all_options_considered() {
// The purpose of this is to fail when new options are added to pulldown_cmark, so that they
// can be evaluated for inclusion.
assert_eq!(PARSE_OPTIONS.union(UNWANTED_OPTIONS), Options::all());
}
#[test]
fn wanted_and_unwanted_options_disjoint() {
assert_eq!(
PARSE_OPTIONS.intersection(UNWANTED_OPTIONS),
Options::empty()
);
}
}
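
The removed tests established that `PARSE_OPTIONS` and `UNWANTED_OPTIONS` were disjoint and together covered `Options::all()`, so stripping the three unwanted flags from `Options::all()` yields the same set as the old constant. A small sketch of that equivalence, using only names and calls that appear in this hunk:

use pulldown_cmark::Options;

// The three flags removed at runtime, matching `UNWANTED_OPTIONS` from the tests deleted above.
const UNWANTED: Options = Options::ENABLE_YAML_STYLE_METADATA_BLOCKS
    .union(Options::ENABLE_MATH)
    .union(Options::ENABLE_DEFINITION_LIST);

fn main() {
    let mut options = Options::all();
    options.remove(Options::ENABLE_DEFINITION_LIST);
    options.remove(Options::ENABLE_YAML_STYLE_METADATA_BLOCKS);
    options.remove(Options::ENABLE_MATH);
    // Together with the removed invariants (union covers Options::all(), intersection empty),
    // this pins `options` to the same set the old `PARSE_OPTIONS` constant described.
    assert_eq!(options.union(UNWANTED), Options::all());
    assert_eq!(options.intersection(UNWANTED), Options::empty());
}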

View File

@@ -37,6 +37,7 @@ client.workspace = true
clock.workspace = true
collections.workspace = true
dap.workspace = true
dap_adapters.workspace = true
extension.workspace = true
fancy-regex.workspace = true
fs.workspace = true

View File

@@ -15,3 +15,5 @@ pub mod breakpoint_store;
pub mod dap_command;
pub mod dap_store;
pub mod session;
pub use dap_adapters::attach_processes;

View File

@@ -20,9 +20,10 @@ use dap::{
Completions, Evaluate, Request as _, RunInTerminal, SetExpression, SetVariable,
StartDebugging,
},
Capabilities, CompletionItem, CompletionsArguments, DapRegistry, ErrorResponse,
EvaluateArguments, EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments,
Capabilities, CompletionItem, CompletionsArguments, ErrorResponse, EvaluateArguments,
EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments,
SetExpressionArguments, SetVariableArguments, Source, StartDebuggingRequestArguments,
StartDebuggingRequestArgumentsRequest,
};
use fs::Fs;
use futures::{
@@ -50,7 +51,7 @@ use std::{
sync::{atomic::Ordering::SeqCst, Arc},
};
use std::{collections::VecDeque, sync::atomic::AtomicU32};
use task::{DebugAdapterConfig, DebugRequestDisposition};
use task::{AttachConfig, DebugAdapterConfig, DebugRequestType};
use util::ResultExt as _;
use worktree::Worktree;
@@ -88,7 +89,6 @@ pub struct LocalDapStore {
worktree_store: Entity<WorktreeStore>,
environment: Entity<ProjectEnvironment>,
language_registry: Arc<LanguageRegistry>,
debug_adapters: Arc<DapRegistry>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
start_debugging_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
_start_debugging_task: Task<()>,
@@ -138,7 +138,6 @@ impl DapStore {
node_runtime: NodeRuntime,
fs: Arc<dyn Fs>,
language_registry: Arc<LanguageRegistry>,
debug_adapters: Arc<DapRegistry>,
environment: Entity<ProjectEnvironment>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
breakpoint_store: Entity<BreakpointStore>,
@@ -179,7 +178,6 @@ impl DapStore {
worktree_store,
toolchain_store,
language_registry,
debug_adapters,
start_debugging_tx,
_start_debugging_task,
next_session_id: Default::default(),
@@ -366,63 +364,52 @@ impl DapStore {
config,
local_store.start_debugging_tx.clone(),
initialized_tx,
local_store.debug_adapters.clone(),
cx,
);
let task = create_new_session(session_id, initialized_rx, start_client_task, cx);
(session_id, task)
}
#[cfg(any(test, feature = "test-support"))]
pub fn new_fake_session(
&mut self,
config: DebugAdapterConfig,
worktree: &Entity<Worktree>,
parent_session: Option<Entity<Session>>,
caps: Capabilities,
fails: bool,
cx: &mut Context<Self>,
) -> (SessionId, Task<Result<Entity<Session>>>) {
let Some(local_store) = self.as_local() else {
unimplemented!("Starting session on remote side");
};
let task = cx.spawn(async move |this, cx| {
let session = match start_client_task.await {
Ok(session) => session,
Err(error) => {
this.update(cx, |_, cx| {
cx.emit(DapStoreEvent::Notification(error.to_string()));
})
.log_err();
let delegate = DapAdapterDelegate::new(
local_store.fs.clone(),
worktree.read(cx).id(),
local_store.node_runtime.clone(),
local_store.http_client.clone(),
local_store.language_registry.clone(),
local_store.toolchain_store.clone(),
local_store.environment.update(cx, |env, cx| {
let worktree = worktree.read(cx);
env.get_environment(Some(worktree.id()), Some(worktree.abs_path()), cx)
}),
);
let session_id = local_store.next_session_id();
return Err(error);
}
};
if let Some(session) = &parent_session {
session.update(cx, |session, _| {
session.add_child_session_id(session_id);
});
}
// we have to insert the session early, so we can handle reverse requests
// that need the session to be available
this.update(cx, |store, cx| {
store.sessions.insert(session_id, session.clone());
cx.emit(DapStoreEvent::DebugClientStarted(session_id));
cx.notify();
})?;
let (initialized_tx, initialized_rx) = oneshot::channel();
match session
.update(cx, |session, cx| {
session.initialize_sequence(initialized_rx, cx)
})?
.await
{
Ok(_) => {}
Err(error) => {
this.update(cx, |this, cx| {
cx.emit(DapStoreEvent::Notification(error.to_string()));
let start_client_task = Session::fake(
self.breakpoint_store.clone(),
session_id,
parent_session,
delegate,
config,
local_store.start_debugging_tx.clone(),
initialized_tx,
caps,
fails,
cx,
);
this.shutdown_session(session_id, cx)
})?
.await
.log_err();
let task = create_new_session(session_id, initialized_rx, start_client_task, cx);
return Err(error);
}
}
Ok(session)
});
(session_id, task)
}
@@ -444,6 +431,7 @@ impl DapStore {
request.arguments.unwrap_or_default(),
)
.expect("To parse StartDebuggingRequestArguments");
let worktree = local_store
.worktree_store
.update(cx, |this, _| this.worktrees().next())
@@ -453,30 +441,25 @@ impl DapStore {
unreachable!("there must be a config for local sessions");
};
let debug_config = DebugAdapterConfig {
label: config.label,
adapter: config.adapter,
request: DebugRequestDisposition::ReverseRequest(args),
initialize_args: config.initialize_args.clone(),
tcp_connection: config.tcp_connection.clone(),
};
#[cfg(any(test, feature = "test-support"))]
let new_session_task = {
let caps = parent_session.read(cx).capabilities.clone();
self.new_fake_session(
debug_config,
&worktree,
Some(parent_session.clone()),
caps,
false,
cx,
)
.1
};
#[cfg(not(any(test, feature = "test-support")))]
let new_session_task = self
.new_session(debug_config, &worktree, Some(parent_session.clone()), cx)
.1;
let (_, new_session_task) = self.new_session(
DebugAdapterConfig {
label: config.label,
kind: config.kind,
request: match &args.request {
StartDebuggingRequestArgumentsRequest::Launch => DebugRequestType::Launch,
StartDebuggingRequestArgumentsRequest::Attach => {
DebugRequestType::Attach(AttachConfig::default())
}
},
program: config.program,
cwd: config.cwd,
initialize_args: Some(args.configuration),
supports_attach: config.supports_attach,
},
&worktree,
Some(parent_session.clone()),
cx,
);
let request_seq = request.seq;
cx.spawn(async move |_, cx| {
@@ -847,58 +830,6 @@ impl DapStore {
}
}
fn create_new_session(
session_id: SessionId,
initialized_rx: oneshot::Receiver<()>,
start_client_task: Task<Result<Entity<Session>, anyhow::Error>>,
cx: &mut Context<'_, DapStore>,
) -> Task<Result<Entity<Session>>> {
let task = cx.spawn(async move |this, cx| {
let session = match start_client_task.await {
Ok(session) => session,
Err(error) => {
this.update(cx, |_, cx| {
cx.emit(DapStoreEvent::Notification(error.to_string()));
})
.log_err();
return Err(error);
}
};
// we have to insert the session early, so we can handle reverse requests
// that need the session to be available
this.update(cx, |store, cx| {
store.sessions.insert(session_id, session.clone());
cx.emit(DapStoreEvent::DebugClientStarted(session_id));
cx.notify();
})?;
match session
.update(cx, |session, cx| {
session.initialize_sequence(initialized_rx, cx)
})?
.await
{
Ok(_) => {}
Err(error) => {
this.update(cx, |this, cx| {
cx.emit(DapStoreEvent::Notification(error.to_string()));
this.shutdown_session(session_id, cx)
})?
.await
.log_err();
return Err(error);
}
}
Ok(session)
});
task
}
#[derive(Clone)]
pub struct DapAdapterDelegate {
fs: Arc<dyn Fs>,

View File

@@ -14,6 +14,7 @@ use anyhow::{anyhow, Result};
use collections::{HashMap, HashSet, IndexMap, IndexSet};
use dap::adapters::{DebugAdapter, DebugAdapterBinary};
use dap::messages::Response;
use dap::OutputEventCategory;
use dap::{
adapters::{DapDelegate, DapStatus},
client::{DebugAdapterClient, SessionId},
@@ -21,7 +22,7 @@ use dap::{
Capabilities, ContinueArguments, EvaluateArgumentsContext, Module, Source, StackFrameId,
SteppingGranularity, StoppedEvent, VariableReference,
};
use dap::{DapRegistry, DebugRequestType, OutputEventCategory};
use dap_adapters::build_adapter;
use futures::channel::oneshot;
use futures::{future::Shared, FutureExt};
use gpui::{
@@ -41,7 +42,7 @@ use std::{
path::Path,
sync::Arc,
};
use task::{DebugAdapterConfig, DebugTaskDefinition};
use task::DebugAdapterConfig;
use text::{PointUtf16, ToPointUtf16};
use util::{merge_json_value_into, ResultExt};
@@ -182,7 +183,6 @@ fn client_source(abs_path: &Path) -> dap::Source {
impl LocalMode {
fn new(
debug_adapters: Arc<DapRegistry>,
session_id: SessionId,
parent_session: Option<Entity<Session>>,
breakpoint_store: Entity<BreakpointStore>,
@@ -190,168 +190,9 @@ impl LocalMode {
delegate: DapAdapterDelegate,
messages_tx: futures::channel::mpsc::UnboundedSender<Message>,
cx: AsyncApp,
) -> Task<Result<(Self, Capabilities)>> {
Self::new_inner(
debug_adapters,
session_id,
parent_session,
breakpoint_store,
config,
delegate,
messages_tx,
async |_, _| {},
cx,
)
}
#[cfg(any(test, feature = "test-support"))]
fn new_fake(
session_id: SessionId,
parent_session: Option<Entity<Session>>,
breakpoint_store: Entity<BreakpointStore>,
config: DebugAdapterConfig,
delegate: DapAdapterDelegate,
messages_tx: futures::channel::mpsc::UnboundedSender<Message>,
caps: Capabilities,
fail: bool,
cx: AsyncApp,
) -> Task<Result<(Self, Capabilities)>> {
use task::DebugRequestDisposition;
let request = match config.request.clone() {
DebugRequestDisposition::UserConfigured(request) => request,
DebugRequestDisposition::ReverseRequest(reverse_request_args) => {
match reverse_request_args.request {
dap::StartDebuggingRequestArgumentsRequest::Launch => {
DebugRequestType::Launch(task::LaunchConfig {
program: "".to_owned(),
cwd: None,
})
}
dap::StartDebuggingRequestArgumentsRequest::Attach => {
DebugRequestType::Attach(task::AttachConfig {
process_id: Some(0),
})
}
}
}
};
let callback = async move |session: &mut LocalMode, cx: AsyncApp| {
session
.client
.on_request::<dap::requests::Initialize, _>(move |_, _| Ok(caps.clone()))
.await;
let paths = cx
.update(|cx| session.breakpoint_store.read(cx).breakpoint_paths())
.expect("Breakpoint store should exist in all tests that start debuggers");
session
.client
.on_request::<dap::requests::SetBreakpoints, _>(move |_, args| {
let p = Arc::from(Path::new(&args.source.path.unwrap()));
if !paths.contains(&p) {
panic!("Sent breakpoints for path without any")
}
Ok(dap::SetBreakpointsResponse {
breakpoints: Vec::default(),
})
})
.await;
match request {
dap::DebugRequestType::Launch(_) => {
if fail {
session
.client
.on_request::<dap::requests::Launch, _>(move |_, _| {
Err(dap::ErrorResponse {
error: Some(dap::Message {
id: 1,
format: "error".into(),
variables: None,
send_telemetry: None,
show_user: None,
url: None,
url_label: None,
}),
})
})
.await;
} else {
session
.client
.on_request::<dap::requests::Launch, _>(move |_, _| Ok(()))
.await;
}
}
dap::DebugRequestType::Attach(attach_config) => {
if fail {
session
.client
.on_request::<dap::requests::Attach, _>(move |_, _| {
Err(dap::ErrorResponse {
error: Some(dap::Message {
id: 1,
format: "error".into(),
variables: None,
send_telemetry: None,
show_user: None,
url: None,
url_label: None,
}),
})
})
.await;
} else {
session
.client
.on_request::<dap::requests::Attach, _>(move |_, args| {
assert_eq!(
json!({"request": "attach", "process_id": attach_config.process_id.unwrap()}),
args.raw
);
Ok(())
})
.await;
}
}
}
session
.client
.on_request::<dap::requests::Disconnect, _>(move |_, _| Ok(()))
.await;
session.client.fake_event(Events::Initialized(None)).await;
};
Self::new_inner(
DapRegistry::fake().into(),
session_id,
parent_session,
breakpoint_store,
config,
delegate,
messages_tx,
callback,
cx,
)
}
fn new_inner(
registry: Arc<DapRegistry>,
session_id: SessionId,
parent_session: Option<Entity<Session>>,
breakpoint_store: Entity<BreakpointStore>,
config: DebugAdapterConfig,
delegate: DapAdapterDelegate,
messages_tx: futures::channel::mpsc::UnboundedSender<Message>,
on_initialized: impl AsyncFnOnce(&mut LocalMode, AsyncApp) + 'static,
cx: AsyncApp,
) -> Task<Result<(Self, Capabilities)>> {
cx.spawn(async move |cx| {
let (adapter, binary) =
Self::get_adapter_binary(&registry, &config, &delegate, cx).await?;
let (adapter, binary) = Self::get_adapter_binary(&config, &delegate, cx).await?;
let message_handler = Box::new(move |message| {
messages_tx.unbounded_send(message).ok();
@@ -378,14 +219,99 @@ impl LocalMode {
);
let adapter_id = adapter.name().to_string().to_owned();
let mut session = Self {
let session = Self {
client,
adapter,
breakpoint_store,
config: config.clone(),
};
on_initialized(&mut session, cx.clone()).await;
#[cfg(any(test, feature = "test-support"))]
{
let dap::DebugAdapterKind::Fake((fail, caps)) = session.config.kind.clone() else {
panic!("Only fake debug adapter configs should be used in tests");
};
session
.client
.on_request::<dap::requests::Initialize, _>(move |_, _| Ok(caps.clone()))
.await;
let paths = cx.update(|cx| session.breakpoint_store.read(cx).breakpoint_paths()).expect("Breakpoint store should exist in all tests that start debuggers");
session.client.on_request::<dap::requests::SetBreakpoints, _>(move |_, args| {
let p = Arc::from(Path::new(&args.source.path.unwrap()));
if !paths.contains(&p) {
panic!("Sent breakpoints for path without any")
}
Ok(dap::SetBreakpointsResponse {
breakpoints: Vec::default(),
})
}).await;
match config.request.clone() {
dap::DebugRequestType::Launch if fail => {
session
.client
.on_request::<dap::requests::Launch, _>(move |_, _| {
Err(dap::ErrorResponse {
error: Some(dap::Message {
id: 1,
format: "error".into(),
variables: None,
send_telemetry: None,
show_user: None,
url: None,
url_label: None,
}),
})
})
.await;
}
dap::DebugRequestType::Launch => {
session
.client
.on_request::<dap::requests::Launch, _>(move |_, _| Ok(()))
.await;
}
dap::DebugRequestType::Attach(_) if fail => {
session
.client
.on_request::<dap::requests::Attach, _>(move |_, _| {
Err(dap::ErrorResponse {
error: Some(dap::Message {
id: 1,
format: "error".into(),
variables: None,
send_telemetry: None,
show_user: None,
url: None,
url_label: None,
}),
})
})
.await;
}
dap::DebugRequestType::Attach(attach_config) => {
session
.client
.on_request::<dap::requests::Attach, _>(move |_, args| {
assert_eq!(
json!({"request": "attach", "process_id": attach_config.process_id.unwrap()}),
args.raw
);
Ok(())
})
.await;
}
}
session.client.on_request::<dap::requests::Disconnect, _>(move |_, _| Ok(())).await;
session.client.fake_event(Events::Initialized(None)).await;
}
let capabilities = session
.request(Initialize { adapter_id }, cx.background_executor().clone())
.await?;
@@ -494,14 +420,11 @@ impl LocalMode {
}
async fn get_adapter_binary(
registry: &Arc<DapRegistry>,
config: &DebugAdapterConfig,
delegate: &DapAdapterDelegate,
cx: &mut AsyncApp,
) -> Result<(Arc<dyn DebugAdapter>, DebugAdapterBinary)> {
let adapter = registry
.adapter(&config.adapter)
.ok_or_else(|| anyhow!("Debug adapter with name `{}` was not found", config.adapter))?;
let adapter = build_adapter(&config.kind).await?;
let binary = cx.update(|cx| {
ProjectSettings::get_global(cx)
@@ -542,36 +465,20 @@ impl LocalMode {
initialized_rx: oneshot::Receiver<()>,
cx: &App,
) -> Task<Result<()>> {
let (mut raw, is_launch) = match &self.config.request {
task::DebugRequestDisposition::UserConfigured(_) => {
let Ok(raw) = DebugTaskDefinition::try_from(self.config.clone()) else {
debug_assert!(false, "This part of code should be unreachable in practice");
return Task::ready(Err(anyhow!(
"Expected debug config conversion to succeed"
)));
};
let is_launch = matches!(raw.request, DebugRequestType::Launch(_));
let raw = self.adapter.request_args(&raw);
(raw, is_launch)
}
task::DebugRequestDisposition::ReverseRequest(start_debugging_request_arguments) => (
start_debugging_request_arguments.configuration.clone(),
matches!(
start_debugging_request_arguments.request,
dap::StartDebuggingRequestArgumentsRequest::Launch
),
),
};
let mut raw = self.adapter.request_args(&self.config);
merge_json_value_into(
self.config.initialize_args.clone().unwrap_or(json!({})),
&mut raw,
);
// Of relevance: https://github.com/microsoft/vscode/issues/4902#issuecomment-368583522
let launch = if is_launch {
self.request(Launch { raw }, cx.background_executor().clone())
} else {
self.request(Attach { raw }, cx.background_executor().clone())
let launch = match &self.config.request {
dap::DebugRequestType::Launch => {
self.request(Launch { raw }, cx.background_executor().clone())
}
dap::DebugRequestType::Attach(_) => {
self.request(Attach { raw }, cx.background_executor().clone())
}
};
let configuration_done_supported = ConfigurationDone::is_supported(capabilities);
@@ -838,14 +745,12 @@ impl Session {
config: DebugAdapterConfig,
start_debugging_requests_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
initialized_tx: oneshot::Sender<()>,
debug_adapters: Arc<DapRegistry>,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
let (message_tx, message_rx) = futures::channel::mpsc::unbounded();
let (message_tx, mut message_rx) = futures::channel::mpsc::unbounded();
cx.spawn(async move |cx| {
let (mode, capabilities) = LocalMode::new(
debug_adapters,
session_id,
parent_session.clone(),
breakpoint_store.clone(),
@@ -857,62 +762,74 @@ impl Session {
.await?;
cx.new(|cx| {
create_local_session(
breakpoint_store,
session_id,
parent_session,
start_debugging_requests_tx,
initialized_tx,
message_rx,
mode,
let _background_tasks = vec![cx.spawn(async move |this: WeakEntity<Self>, cx| {
let mut initialized_tx = Some(initialized_tx);
while let Some(message) = message_rx.next().await {
if let Message::Event(event) = message {
if let Events::Initialized(_) = *event {
if let Some(tx) = initialized_tx.take() {
tx.send(()).ok();
}
} else {
let Ok(_) = this.update(cx, |session, cx| {
session.handle_dap_event(event, cx);
}) else {
break;
};
}
} else {
let Ok(_) =
start_debugging_requests_tx.unbounded_send((session_id, message))
else {
break;
};
}
}
})];
cx.subscribe(&breakpoint_store, |this, _, event, cx| match event {
BreakpointStoreEvent::BreakpointsUpdated(path, reason) => {
if let Some(local) = (!this.ignore_breakpoints)
.then(|| this.as_local_mut())
.flatten()
{
local
.send_breakpoints_from_path(path.clone(), *reason, cx)
.detach();
};
}
BreakpointStoreEvent::BreakpointsCleared(paths) => {
if let Some(local) = (!this.ignore_breakpoints)
.then(|| this.as_local_mut())
.flatten()
{
local.unset_breakpoints_from_paths(paths, cx).detach();
}
}
BreakpointStoreEvent::ActiveDebugLineChanged => {}
})
.detach();
Self {
mode: Mode::Local(mode),
id: session_id,
child_session_ids: HashSet::default(),
parent_id: parent_session.map(|session| session.read(cx).id),
variables: Default::default(),
capabilities,
cx,
)
})
})
}
#[cfg(any(test, feature = "test-support"))]
pub(crate) fn fake(
breakpoint_store: Entity<BreakpointStore>,
session_id: SessionId,
parent_session: Option<Entity<Session>>,
delegate: DapAdapterDelegate,
config: DebugAdapterConfig,
start_debugging_requests_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
initialized_tx: oneshot::Sender<()>,
caps: Capabilities,
fails: bool,
cx: &mut App,
) -> Task<Result<Entity<Session>>> {
let (message_tx, message_rx) = futures::channel::mpsc::unbounded();
cx.spawn(async move |cx| {
let (mode, capabilities) = LocalMode::new_fake(
session_id,
parent_session.clone(),
breakpoint_store.clone(),
config.clone(),
delegate,
message_tx,
caps,
fails,
cx.clone(),
)
.await?;
cx.new(|cx| {
create_local_session(
breakpoint_store,
session_id,
parent_session,
start_debugging_requests_tx,
initialized_tx,
message_rx,
mode,
capabilities,
cx,
)
thread_states: ThreadStates::default(),
output_token: OutputToken(0),
ignore_breakpoints: false,
output: circular_buffer::CircularBuffer::boxed(),
requests: HashMap::default(),
modules: Vec::default(),
loaded_sources: Vec::default(),
threads: IndexMap::default(),
stack_frames: IndexMap::default(),
locations: Default::default(),
_background_tasks,
is_session_terminated: false,
}
})
})
}
@@ -1921,83 +1838,3 @@ impl Session {
}
}
}
fn create_local_session(
breakpoint_store: Entity<BreakpointStore>,
session_id: SessionId,
parent_session: Option<Entity<Session>>,
start_debugging_requests_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
initialized_tx: oneshot::Sender<()>,
mut message_rx: futures::channel::mpsc::UnboundedReceiver<Message>,
mode: LocalMode,
capabilities: Capabilities,
cx: &mut Context<'_, Session>,
) -> Session {
let _background_tasks = vec![cx.spawn(async move |this: WeakEntity<Session>, cx| {
let mut initialized_tx = Some(initialized_tx);
while let Some(message) = message_rx.next().await {
if let Message::Event(event) = message {
if let Events::Initialized(_) = *event {
if let Some(tx) = initialized_tx.take() {
tx.send(()).ok();
}
} else {
let Ok(_) = this.update(cx, |session, cx| {
session.handle_dap_event(event, cx);
}) else {
break;
};
}
} else {
let Ok(_) = start_debugging_requests_tx.unbounded_send((session_id, message))
else {
break;
};
}
}
})];
cx.subscribe(&breakpoint_store, |this, _, event, cx| match event {
BreakpointStoreEvent::BreakpointsUpdated(path, reason) => {
if let Some(local) = (!this.ignore_breakpoints)
.then(|| this.as_local_mut())
.flatten()
{
local
.send_breakpoints_from_path(path.clone(), *reason, cx)
.detach();
};
}
BreakpointStoreEvent::BreakpointsCleared(paths) => {
if let Some(local) = (!this.ignore_breakpoints)
.then(|| this.as_local_mut())
.flatten()
{
local.unset_breakpoints_from_paths(paths, cx).detach();
}
}
BreakpointStoreEvent::ActiveDebugLineChanged => {}
})
.detach();
Session {
mode: Mode::Local(mode),
id: session_id,
child_session_ids: HashSet::default(),
parent_id: parent_session.map(|session| session.read(cx).id),
variables: Default::default(),
capabilities,
thread_states: ThreadStates::default(),
output_token: OutputToken(0),
ignore_breakpoints: false,
output: circular_buffer::CircularBuffer::boxed(),
requests: HashMap::default(),
modules: Vec::default(),
loaded_sources: Vec::default(),
threads: IndexMap::default(),
stack_frames: IndexMap::default(),
locations: Default::default(),
_background_tasks,
is_session_terminated: false,
}
}

View File

@@ -2092,16 +2092,11 @@ impl LspCommand for GetCompletions {
completions.retain(|lsp_completion| {
let lsp_edit = lsp_completion.text_edit.clone().or_else(|| {
let default_text_edit = lsp_defaults.as_deref()?.edit_range.as_ref()?;
let new_text = lsp_completion
.insert_text
.as_ref()
.unwrap_or(&lsp_completion.label)
.clone();
match default_text_edit {
CompletionListItemDefaultsEditRange::Range(range) => {
Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text,
new_text: lsp_completion.label.clone(),
}))
}
CompletionListItemDefaultsEditRange::InsertAndReplace {
@@ -2109,7 +2104,7 @@ impl LspCommand for GetCompletions {
replace,
} => Some(lsp::CompletionTextEdit::InsertAndReplace(
lsp::InsertReplaceEdit {
new_text,
new_text: lsp_completion.label.clone(),
insert: *insert,
replace: *replace,
},
@@ -2172,7 +2167,6 @@ impl LspCommand for GetCompletions {
.clone()
};
// We already know text_edit is None here
let text = lsp_completion
.insert_text
.as_ref()
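
The disagreement in this hunk is which string becomes `new_text` when the server sends no `text_edit`: one side always uses the completion's `label`, the other prefers `insert_text` and only falls back to `label`. A standalone sketch of the latter precedence, assuming the `lsp_types` crate (the `lsp::` items in this hunk have matching fields):

use lsp_types::CompletionItem;

// When a completion carries no `text_edit`, prefer `insert_text`, falling back to `label`.
fn new_text_without_text_edit(item: &CompletionItem) -> String {
    item.insert_text
        .clone()
        .unwrap_or_else(|| item.label.clone())
}

fn main() {
    let item = CompletionItem {
        label: "labelText".into(),
        insert_text: Some("insertText".into()),
        ..Default::default()
    };
    assert_eq!(new_text_without_text_edit(&item), "insertText");
}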

View File

@@ -3891,7 +3891,7 @@ impl LspStore {
*refcount += 1;
}
if ignore_refcounts || *refcount == 1 {
if !ignore_refcounts || *refcount == 1 {
local.register_buffer_with_language_servers(buffer, cx);
}
if !ignore_refcounts {

View File

@@ -38,7 +38,7 @@ use client::{
};
use clock::ReplicaId;
use dap::{client::DebugAdapterClient, DapRegistry, DebugAdapterConfig};
use dap::{client::DebugAdapterClient, DebugAdapterConfig};
use collections::{BTreeSet, HashMap, HashSet};
use debounced_delay::DebouncedDelay;
@@ -163,7 +163,6 @@ pub struct Project {
active_entry: Option<ProjectEntryId>,
buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
languages: Arc<LanguageRegistry>,
debug_adapters: Arc<DapRegistry>,
dap_store: Entity<DapStore>,
breakpoint_store: Entity<BreakpointStore>,
client: Arc<client::Client>,
@@ -819,7 +818,6 @@ impl Project {
node: NodeRuntime,
user_store: Entity<UserStore>,
languages: Arc<LanguageRegistry>,
debug_adapters: Arc<DapRegistry>,
fs: Arc<dyn Fs>,
env: Option<HashMap<String, String>>,
cx: &mut App,
@@ -856,7 +854,6 @@ impl Project {
node.clone(),
fs.clone(),
languages.clone(),
debug_adapters.clone(),
environment.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
breakpoint_store.clone(),
@@ -943,7 +940,6 @@ impl Project {
active_entry: None,
snippets,
languages,
debug_adapters,
client,
task_store,
user_store,
@@ -1106,7 +1102,6 @@ impl Project {
active_entry: None,
snippets,
languages,
debug_adapters: Arc::new(DapRegistry::default()),
client,
task_store,
user_store,
@@ -1244,6 +1239,7 @@ impl Project {
let breakpoint_store =
cx.new(|_| BreakpointStore::remote(remote_id, client.clone().into()))?;
let dap_store = cx.new(|_cx| {
DapStore::new_remote(remote_id, client.clone().into(), breakpoint_store.clone())
})?;
@@ -1330,7 +1326,6 @@ impl Project {
collaborators: Default::default(),
join_project_response_message_id: response.message_id,
languages,
debug_adapters: Arc::new(DapRegistry::default()),
user_store: user_store.clone(),
task_store,
snippets,
@@ -1455,7 +1450,13 @@ impl Project {
config: DebugAdapterConfig,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Session>>> {
let worktree = maybe!({ self.worktrees(cx).next() });
let worktree = maybe!({
if let Some(cwd) = &config.cwd {
Some(self.find_worktree(cwd.as_path(), cx)?.0)
} else {
self.worktrees(cx).next()
}
});
let Some(worktree) = &worktree else {
return Task::ready(Err(anyhow!("Failed to find a worktree")));
@@ -1468,40 +1469,6 @@ impl Project {
.1
}
#[cfg(any(test, feature = "test-support"))]
pub fn fake_debug_session(
&mut self,
request: task::DebugRequestType,
caps: Option<dap::Capabilities>,
fails: bool,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Session>>> {
use dap::{Capabilities, FakeAdapter};
use task::DebugRequestDisposition;
let worktree = maybe!({ self.worktrees(cx).next() });
let Some(worktree) = &worktree else {
return Task::ready(Err(anyhow!("Failed to find a worktree")));
};
let config = DebugAdapterConfig {
label: "test config".into(),
adapter: FakeAdapter::ADAPTER_NAME.into(),
request: DebugRequestDisposition::UserConfigured(request),
initialize_args: None,
tcp_connection: None,
};
let caps = caps.unwrap_or(Capabilities {
supports_step_back: Some(false),
..Default::default()
});
self.dap_store
.update(cx, |dap_store, cx| {
dap_store.new_fake_session(config, worktree, None, caps, fails, cx)
})
.1
}
#[cfg(any(test, feature = "test-support"))]
pub async fn example(
root_paths: impl IntoIterator<Item = &Path>,
@@ -1511,7 +1478,6 @@ impl Project {
let fs = Arc::new(RealFs::new(None, cx.background_executor().clone()));
let languages = LanguageRegistry::test(cx.background_executor().clone());
let debug_adapters = DapRegistry::default().into();
let clock = Arc::new(FakeSystemClock::new());
let http_client = http_client::FakeHttpClient::with_404_response();
let client = cx
@@ -1525,7 +1491,6 @@ impl Project {
node_runtime::NodeRuntime::unavailable(),
user_store,
Arc::new(languages),
debug_adapters,
fs,
None,
cx,
@@ -1556,7 +1521,6 @@ impl Project {
use clock::FakeSystemClock;
let languages = LanguageRegistry::test(cx.executor());
let debug_adapters = DapRegistry::fake();
let clock = Arc::new(FakeSystemClock::new());
let http_client = http_client::FakeHttpClient::with_404_response();
let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
@@ -1567,7 +1531,6 @@ impl Project {
node_runtime::NodeRuntime::unavailable(),
user_store,
Arc::new(languages),
Arc::new(debug_adapters),
fs,
None,
cx,
@@ -1611,10 +1574,6 @@ impl Project {
&self.languages
}
pub fn debug_adapters(&self) -> &Arc<DapRegistry> {
&self.debug_adapters
}
pub fn client(&self) -> Arc<Client> {
self.client.clone()
}

View File

@@ -2776,210 +2776,6 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
}
}
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({
"a.ts": "",
}),
)
.await;
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(typescript_lang());
let mut fake_language_servers = language_registry.register_fake_lsp(
"TypeScript",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
completion_provider: Some(lsp::CompletionOptions {
trigger_characters: Some(vec![".".to_string()]),
..Default::default()
}),
..Default::default()
},
..Default::default()
},
);
let (buffer, _handle) = project
.update(cx, |p, cx| {
p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
})
.await
.unwrap();
let fake_server = fake_language_servers.next().await.unwrap();
// When text_edit exists, it takes precedence over insert_text and label
let text = "let a = obj.fqn";
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let completions = project.update(cx, |project, cx| {
project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
});
fake_server
.set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
Ok(Some(lsp::CompletionResponse::Array(vec![
lsp::CompletionItem {
label: "labelText".into(),
insert_text: Some("insertText".into()),
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: lsp::Range::new(
lsp::Position::new(0, text.len() as u32 - 3),
lsp::Position::new(0, text.len() as u32),
),
new_text: "textEditText".into(),
})),
..Default::default()
},
])))
})
.next()
.await;
let completions = completions.await.unwrap().unwrap();
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
assert_eq!(completions.len(), 1);
assert_eq!(completions[0].new_text, "textEditText");
assert_eq!(
completions[0].old_range.to_offset(&snapshot),
text.len() - 3..text.len()
);
}
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({
"a.ts": "",
}),
)
.await;
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(typescript_lang());
let mut fake_language_servers = language_registry.register_fake_lsp(
"TypeScript",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
completion_provider: Some(lsp::CompletionOptions {
trigger_characters: Some(vec![".".to_string()]),
..Default::default()
}),
..Default::default()
},
..Default::default()
},
);
let (buffer, _handle) = project
.update(cx, |p, cx| {
p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
})
.await
.unwrap();
let fake_server = fake_language_servers.next().await.unwrap();
let text = "let a = obj.fqn";
// Test 1: When text_edit is None but insert_text exists with default edit_range
{
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let completions = project.update(cx, |project, cx| {
project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
});
fake_server
.set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
is_incomplete: false,
item_defaults: Some(lsp::CompletionListItemDefaults {
edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
lsp::Range::new(
lsp::Position::new(0, text.len() as u32 - 3),
lsp::Position::new(0, text.len() as u32),
),
)),
..Default::default()
}),
items: vec![lsp::CompletionItem {
label: "labelText".into(),
insert_text: Some("insertText".into()),
text_edit: None,
..Default::default()
}],
})))
})
.next()
.await;
let completions = completions.await.unwrap().unwrap();
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
assert_eq!(completions.len(), 1);
assert_eq!(completions[0].new_text, "insertText");
assert_eq!(
completions[0].old_range.to_offset(&snapshot),
text.len() - 3..text.len()
);
}
// Test 2: When both text_edit and insert_text are None with default edit_range
{
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let completions = project.update(cx, |project, cx| {
project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
});
fake_server
.set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
is_incomplete: false,
item_defaults: Some(lsp::CompletionListItemDefaults {
edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
lsp::Range::new(
lsp::Position::new(0, text.len() as u32 - 3),
lsp::Position::new(0, text.len() as u32),
),
)),
..Default::default()
}),
items: vec![lsp::CompletionItem {
label: "labelText".into(),
insert_text: None,
text_edit: None,
..Default::default()
}],
})))
})
.next()
.await;
let completions = completions.await.unwrap().unwrap();
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
assert_eq!(completions.len(), 1);
assert_eq!(completions[0].new_text, "labelText");
assert_eq!(
completions[0].old_range.to_offset(&snapshot),
text.len() - 3..text.len()
);
}
}
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -3020,7 +2816,6 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
let fake_server = fake_language_servers.next().await.unwrap();
// Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
let text = "let a = b.fqn";
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let completions = project.update(cx, |project, cx| {
@@ -3048,7 +2843,6 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
text.len() - 3..text.len()
);
// Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
let text = "let a = \"atoms/cmp\"";
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let completions = project.update(cx, |project, cx| {

View File

@@ -557,7 +557,6 @@ pub async fn open_ssh_project(
app_state.node_runtime.clone(),
app_state.user_store.clone(),
app_state.languages.clone(),
app_state.debug_adapters.clone(),
app_state.fs.clone(),
None,
cx,

View File

@@ -28,7 +28,6 @@ backtrace = "0.3"
chrono.workspace = true
clap.workspace = true
client.workspace = true
dap.workspace = true
env_logger.workspace = true
extension.workspace = true
extension_host.workspace = true
@@ -70,7 +69,6 @@ libc.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
clock = { workspace = true, features = ["test-support"] }
dap = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }

View File

@@ -1,6 +1,5 @@
use ::proto::{FromProto, ToProto};
use anyhow::{anyhow, Result};
use dap::DapRegistry;
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
@@ -53,7 +52,6 @@ pub struct HeadlessAppState {
pub http_client: Arc<dyn HttpClient>,
pub node_runtime: NodeRuntime,
pub languages: Arc<LanguageRegistry>,
pub debug_adapters: Arc<DapRegistry>,
pub extension_host_proxy: Arc<ExtensionHostProxy>,
}
@@ -71,7 +69,6 @@ impl HeadlessProject {
http_client,
node_runtime,
languages,
debug_adapters,
extension_host_proxy: proxy,
}: HeadlessAppState,
cx: &mut Context<Self>,
@@ -111,7 +108,6 @@ impl HeadlessProject {
node_runtime.clone(),
fs.clone(),
languages.clone(),
debug_adapters.clone(),
environment.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
breakpoint_store.clone(),

View File

@@ -4,7 +4,6 @@
use crate::headless_project::HeadlessProject;
use client::{Client, UserStore};
use clock::FakeSystemClock;
use dap::DapRegistry;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs};
use gpui::{AppContext as _, Entity, SemanticVersion, TestAppContext};
@@ -1446,7 +1445,6 @@ pub async fn init_test(
let http_client = Arc::new(BlockedHttpClient);
let node_runtime = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(cx.executor()));
let debug_adapters = DapRegistry::default().into();
let proxy = Arc::new(ExtensionHostProxy::new());
server_cx.update(HeadlessProject::init);
let headless = server_cx.new(|cx| {
@@ -1459,7 +1457,6 @@ pub async fn init_test(
http_client,
node_runtime,
languages,
debug_adapters,
extension_host_proxy: proxy,
},
cx,

Some files were not shown because too many files have changed in this diff.