Compare commits


1 Commit

Author: Ben Brandt
SHA1: 4ee0fa561a
Message: agent: Add action to close agent panel

Adds an action to close the Agent panel and binds it to cmd-w when the panel is focused.

Date: 2025-05-09 12:00:16 +02:00
137 changed files with 1782 additions and 3933 deletions
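
For orientation before the raw hunks: the commit introduces a new agent::Close action, a workspace handler that forwards it to the Agent panel, and default keymap entries that bind it to cmd-w (macOS) and ctrl-w (Linux) while the panel is focused. The sketch below is assembled from the relevant hunks further down; module paths, imports, and the surrounding registration chain are elided, so it is an illustration of the wiring rather than a compilable file.

// New action, declared alongside the panel's other actions.
// (The "agent" namespace is inferred from the agent::Close keymap entries.)
actions!(agent, [/* existing actions… */ Close]);

// The panel closes itself by emitting the dock's close event.
impl AgentPanel {
    fn close_panel(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
        cx.emit(PanelEvent::Close);
    }
}

// During panel init, the workspace forwards agent::Close to the panel, if one exists.
workspace.register_action(|workspace, _: &Close, window, cx| {
    if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
        panel.update(cx, |panel, cx| panel.close_panel(window, cx));
    }
});

In the default keymaps, cmd-w / ctrl-w now resolve to agent::Close in the agent binding section, and the old workspace::CloseActiveDock binding on the same keys is removed from the dock section.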

Cargo.lock (generated, 51 changed lines)

@@ -3068,7 +3068,6 @@ dependencies = [
"gpui",
"http_client",
"language",
"log",
"menu",
"notifications",
"picker",
@@ -3184,6 +3183,32 @@ dependencies = [
"workspace-hack",
]
[[package]]
name = "component_preview"
version = "0.1.0"
dependencies = [
"agent",
"anyhow",
"assistant_tool",
"client",
"collections",
"component",
"db",
"futures 0.3.31",
"gpui",
"languages",
"log",
"notifications",
"project",
"prompt_store",
"serde",
"ui",
"ui_input",
"util",
"workspace",
"workspace-hack",
]
[[package]]
name = "concurrent-queue"
version = "2.5.0"
@@ -3309,7 +3334,6 @@ dependencies = [
"http_client",
"indoc",
"inline_completion",
"itertools 0.14.0",
"language",
"log",
"lsp",
@@ -3319,9 +3343,11 @@ dependencies = [
"paths",
"project",
"rpc",
"schemars",
"serde",
"serde_json",
"settings",
"strum 0.27.1",
"task",
"theme",
"ui",
@@ -3507,9 +3533,9 @@ dependencies = [
[[package]]
name = "cosmic-text"
version = "0.14.0"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e1ecbb5db9a4c2ee642df67bcfa8f044dd867dbbaa21bfab139cbc204ffbf67"
checksum = "e418dd4f5128c3e93eab12246391c54a20c496811131f85754dc8152ee207892"
dependencies = [
"bitflags 2.9.0",
"fontdb 0.16.2",
@@ -4182,7 +4208,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
"shlex",
"sysinfo",
"task",
"tasks_ui",
@@ -7225,6 +7250,7 @@ dependencies = [
"lsp",
"paths",
"project",
"proto",
"regex",
"serde_json",
"settings",
@@ -7232,6 +7258,7 @@ dependencies = [
"telemetry",
"theme",
"ui",
"util",
"workspace",
"workspace-hack",
"zed_actions",
@@ -7786,12 +7813,9 @@ version = "0.1.0"
dependencies = [
"collections",
"feature_flags",
"futures 0.3.31",
"fuzzy",
"gpui",
"language_model",
"log",
"ordered-float 2.10.1",
"picker",
"proto",
"ui",
@@ -18535,7 +18559,6 @@ dependencies = [
"assets",
"assistant_context_editor",
"assistant_settings",
"assistant_tool",
"assistant_tools",
"async-watch",
"audio",
@@ -18552,7 +18575,7 @@ dependencies = [
"collab_ui",
"collections",
"command_palette",
"component",
"component_preview",
"copilot",
"dap",
"dap_adapters",
@@ -18578,7 +18601,6 @@ dependencies = [
"gpui_tokio",
"http_client",
"image_viewer",
"indoc",
"inline_completion_button",
"install_cli",
"journal",
@@ -18591,7 +18613,6 @@ dependencies = [
"languages",
"libc",
"log",
"markdown",
"markdown_preview",
"menu",
"migrator",
@@ -18643,7 +18664,6 @@ dependencies = [
"tree-sitter-md",
"tree-sitter-rust",
"ui",
"ui_input",
"ui_prompt",
"url",
"urlencoding",
@@ -18718,9 +18738,9 @@ dependencies = [
[[package]]
name = "zed_llm_client"
version = "0.8.1"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16d993fc42f9ec43ab76fa46c6eb579a66e116bb08cd2bc9a67f3afcaa05d39d"
checksum = "a23b2fd00776b0c55072f389654910ceb501eb0083d7f78905ab0e5cc86949ec"
dependencies = [
"anyhow",
"serde",
@@ -18928,7 +18948,6 @@ dependencies = [
"paths",
"postage",
"project",
"proto",
"regex",
"release_channel",
"reqwest_client",


@@ -31,6 +31,7 @@ members = [
"crates/command_palette",
"crates/command_palette_hooks",
"crates/component",
"crates/component_preview",
"crates/context_server",
"crates/copilot",
"crates/credentials_provider",
@@ -237,6 +238,7 @@ collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" }
component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
copilot = { path = "crates/copilot" }
credentials_provider = { path = "crates/credentials_provider" }
@@ -606,7 +608,7 @@ wasmtime-wasi = "29"
which = "6.0.0"
wit-component = "0.221"
workspace-hack = "0.1.0"
zed_llm_client = "0.8.1"
zed_llm_client = "0.8.0"
zstd = "0.11"
[workspace.dependencies.async-stripe]


@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-loader-circle-icon lucide-loader-circle"><path d="M21 12a9 9 0 1 1-6.219-8.56"/></svg>



@@ -242,9 +242,10 @@
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-alt-/": "agent::ToggleModelSelector",
"ctrl-shift-a": "agent::ToggleContextPicker",
"ctrl-w": "agent::Close",
"ctrl-shift-o": "agent::ToggleNavigationMenu",
"ctrl-shift-i": "agent::ToggleOptionsMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"shift-escape": "agent::ExpandMessageEditor",
"ctrl-alt-e": "agent::RemoveAllContext",
"ctrl-shift-e": "project_panel::ToggleFocus"
}
@@ -538,7 +539,6 @@
"ctrl-alt-b": "workspace::ToggleRightDock",
"ctrl-b": "workspace::ToggleLeftDock",
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-w": "workspace::CloseActiveDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
"shift-find": "pane::DeploySearch",
"ctrl-shift-f": "pane::DeploySearch",


@@ -288,9 +288,10 @@
"cmd-i": "agent::ToggleProfileSelector",
"cmd-alt-/": "agent::ToggleModelSelector",
"cmd-shift-a": "agent::ToggleContextPicker",
"cmd-w": "agent::Close",
"cmd-shift-o": "agent::ToggleNavigationMenu",
"cmd-shift-i": "agent::ToggleOptionsMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"shift-escape": "agent::ExpandMessageEditor",
"cmd-alt-e": "agent::RemoveAllContext",
"cmd-shift-e": "project_panel::ToggleFocus"
}
@@ -608,7 +609,6 @@
"cmd-b": "workspace::ToggleLeftDock",
"cmd-r": "workspace::ToggleRightDock",
"cmd-j": "workspace::ToggleBottomDock",
"cmd-w": "workspace::CloseActiveDock",
"alt-cmd-y": "workspace::CloseAllDocks",
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],


@@ -49,9 +49,10 @@ And here's the section to rewrite based on that prompt again for reference:
</rewrite_this>
{{#if diagnostic_errors}}
{{#each diagnostic_errors}}
Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to.
{{#each diagnostic_errors}}
<diagnostic_error>
<line_number>{{line_number}}</line_number>
<error_message>{{error_message}}</error_message>


@@ -328,10 +328,6 @@
"title_bar": {
// Whether to show the branch icon beside branch switcher in the titlebar.
"show_branch_icon": false,
// Whether to show the branch name button in the titlebar.
"show_branch_name": true,
// Whether to show the project host and name in the titlebar.
"show_project_items": true,
// Whether to show onboarding banners in the titlebar.
"show_onboarding_banner": true,
// Whether to show user picture in the titlebar.
@@ -474,8 +470,6 @@
"search_wrap": true,
// Search options to enable by default when opening new project and buffer searches.
"search": {
// Whether to show the project search button in the status bar.
"button": true,
"whole_word": false,
"case_sensitive": false,
"include_ignored": false,
@@ -1008,8 +1002,6 @@
"auto_update": true,
// Diagnostics configuration.
"diagnostics": {
// Whether to show the project diagnostics button in the status bar.
"button": true,
// Whether to show warnings or not by default.
"include_warnings": true,
// Settings for inline diagnostics
@@ -1305,22 +1297,21 @@
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json"],
"Shell Script": [".env.*"]
},
// Settings for which version of Node.js and NPM to use when installing
// language servers and Copilot.
//
// Note: changing this setting currently requires restarting Zed.
"node": {
// By default, Zed will look for `node` and `npm` on your `$PATH`, and use the
// existing executables if their version is recent enough. Set this to `true`
// to prevent this, and force Zed to always download and install its own
// version of Node.
"ignore_system_version": false,
// You can also specify alternative paths to Node and NPM. If you specify
// `path`, but not `npm_path`, Zed will assume that `npm` is located at
// `${path}/../npm`.
"path": null,
"npm_path": null
},
// By default use a recent system version of node, or install our own.
// You can override this to use a version of node that is not in $PATH with:
// {
// "node": {
// "path": "/path/to/node"
// "npm_path": "/path/to/npm" (defaults to node_path/../npm)
// }
// }
// or to ensure Zed always downloads and installs an isolated version of node:
// {
// "node": {
// "ignore_system_version": true,
// }
// NOTE: changing this setting currently requires restarting Zed.
"node": {},
// The extensions that Zed should automatically install on startup.
//
// If you don't want any of these extensions, add this field to your settings


@@ -3,10 +3,9 @@ use crate::context::{AgentContextHandle, RULES_ICON};
use crate::context_picker::{ContextPicker, MentionLink};
use crate::context_store::ContextStore;
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
use crate::message_editor::insert_message_creases;
use crate::thread::{
LastRestoreCheckpoint, MessageCrease, MessageId, MessageSegment, Thread, ThreadError,
ThreadEvent, ThreadFeedback, ThreadSummary,
LastRestoreCheckpoint, MessageId, MessageSegment, Thread, ThreadError, ThreadEvent,
ThreadFeedback,
};
use crate::thread_store::{RulesLoadingError, TextThreadStore, ThreadStore};
use crate::tool_use::{PendingToolUseStatus, ToolUse};
@@ -328,7 +327,6 @@ fn tool_use_markdown_style(window: &Window, cx: &mut App) -> MarkdownStyle {
}
}
const CODEBLOCK_CONTAINER_GROUP: &str = "codeblock_container";
const MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK: usize = 10;
fn render_markdown_code_block(
@@ -487,13 +485,12 @@ fn render_markdown_code_block(
.copied_code_block_ids
.contains(&(message_id, ix));
let can_expand = metadata.line_count >= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;
let is_expanded = if can_expand {
active_thread.read(cx).is_codeblock_expanded(message_id, ix)
} else {
false
};
let is_expanded = active_thread
.read(cx)
.expanded_code_blocks
.get(&(message_id, ix))
.copied()
.unwrap_or(true);
let codeblock_header_bg = cx
.theme()
@@ -514,7 +511,7 @@ fn render_markdown_code_block(
.children(label)
.child(
h_flex()
.visible_on_hover(CODEBLOCK_CONTAINER_GROUP)
.visible_on_hover("codeblock_container")
.gap_1()
.child(
IconButton::new(
@@ -556,38 +553,45 @@ fn render_markdown_code_block(
}
}),
)
.when(can_expand, |header| {
header.child(
IconButton::new(
("expand-collapse-code", ix),
if is_expanded {
IconName::ChevronUp
.when(
metadata.line_count > MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK,
|header| {
header.child(
IconButton::new(
("expand-collapse-code", ix),
if is_expanded {
IconName::ChevronUp
} else {
IconName::ChevronDown
},
)
.icon_color(Color::Muted)
.shape(ui::IconButtonShape::Square)
.tooltip(Tooltip::text(if is_expanded {
"Collapse Code"
} else {
IconName::ChevronDown
},
"Expand Code"
}))
.on_click({
let active_thread = active_thread.clone();
move |_event, _window, cx| {
active_thread.update(cx, |this, cx| {
let is_expanded = this
.expanded_code_blocks
.entry((message_id, ix))
.or_insert(true);
*is_expanded = !*is_expanded;
cx.notify();
});
}
}),
)
.icon_color(Color::Muted)
.shape(ui::IconButtonShape::Square)
.tooltip(Tooltip::text(if is_expanded {
"Collapse Code"
} else {
"Expand Code"
}))
.on_click({
let active_thread = active_thread.clone();
move |_event, _window, cx| {
active_thread.update(cx, |this, cx| {
this.toggle_codeblock_expanded(message_id, ix);
cx.notify();
});
}
}),
)
}),
},
),
);
v_flex()
.group(CODEBLOCK_CONTAINER_GROUP)
.group("codeblock_container")
.my_2()
.overflow_hidden()
.rounded_lg()
@@ -595,7 +599,16 @@ fn render_markdown_code_block(
.border_color(cx.theme().colors().border.opacity(0.6))
.bg(cx.theme().colors().editor_background)
.child(codeblock_header)
.when(can_expand && !is_expanded, |this| this.max_h_80())
.when(
metadata.line_count > MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK,
|this| {
if is_expanded {
this.h_full()
} else {
this.max_h_80()
}
},
)
}
fn render_code_language(
@@ -814,12 +827,12 @@ impl ActiveThread {
self.messages.is_empty()
}
pub fn summary<'a>(&'a self, cx: &'a App) -> &'a ThreadSummary {
pub fn summary(&self, cx: &App) -> Option<SharedString> {
self.thread.read(cx).summary()
}
pub fn regenerate_summary(&self, cx: &mut App) {
self.thread.update(cx, |thread, cx| thread.summarize(cx))
pub fn summary_or_default(&self, cx: &App) -> SharedString {
self.thread.read(cx).summary_or_default()
}
pub fn cancel_last_completion(&mut self, window: &mut Window, cx: &mut App) -> bool {
@@ -1125,7 +1138,11 @@ impl ActiveThread {
return;
}
let title = self.thread.read(cx).summary().unwrap_or("Agent Panel");
let title = self
.thread
.read(cx)
.summary()
.unwrap_or("Agent Panel".into());
match AssistantSettings::get_global(cx).notify_when_agent_waiting {
NotifyWhenAgentWaiting::PrimaryScreen => {
@@ -1255,7 +1272,6 @@ impl ActiveThread {
&mut self,
message_id: MessageId,
message_segments: &[MessageSegment],
message_creases: &[MessageCrease],
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -1275,7 +1291,6 @@ impl ActiveThread {
);
editor.update(cx, |editor, cx| {
editor.set_text(message_text.clone(), window, cx);
insert_message_creases(editor, message_creases, &self.context_store, window, cx);
editor.focus_handle(cx).focus(window);
editor.move_to_end(&editor::actions::MoveToEnd, window, cx);
});
@@ -1730,7 +1745,6 @@ impl ActiveThread {
let Some(message) = self.thread.read(cx).message(message_id) else {
return Empty.into_any();
};
let message_creases = message.creases.clone();
let Some(rendered_message) = self.rendered_messages_by_id.get(&message_id) else {
return Empty.into_any();
@@ -2022,7 +2036,6 @@ impl ActiveThread {
this.start_editing_message(
message_id,
&message_segments,
&message_creases,
window,
cx,
);
@@ -2227,7 +2240,7 @@ impl ActiveThread {
// Backdrop to dim out the whole thread below the editing user message
parent.relative().child(
div()
.stop_mouse_events_except_scroll()
.occlude()
.absolute()
.inset_0()
.size_full()
@@ -2346,19 +2359,19 @@ impl ActiveThread {
let editor_bg = cx.theme().colors().editor_background;
move |el, range, metadata, _, cx| {
let can_expand = metadata.line_count
>= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;
if !can_expand {
return el;
}
let is_expanded = active_thread
.read(cx)
.is_codeblock_expanded(message_id, range.start);
if is_expanded {
.expanded_code_blocks
.get(&(message_id, range.start))
.copied()
.unwrap_or(true);
if is_expanded
|| metadata.line_count
<= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK
{
return el;
}
el.child(
div()
.absolute()
@@ -2384,7 +2397,6 @@ impl ActiveThread {
markdown_element.code_block_renderer(
markdown::CodeBlockRenderer::Default {
copy_button: false,
copy_button_on_hover: false,
border: true,
},
)
@@ -2704,7 +2716,6 @@ impl ActiveThread {
)
.code_block_renderer(markdown::CodeBlockRenderer::Default {
copy_button: false,
copy_button_on_hover: false,
border: false,
})
.on_url_click({
@@ -2735,7 +2746,6 @@ impl ActiveThread {
)
.code_block_renderer(markdown::CodeBlockRenderer::Default {
copy_button: false,
copy_button_on_hover: false,
border: false,
})
.on_url_click({
@@ -3372,21 +3382,6 @@ impl ActiveThread {
.log_err();
}))
}
pub fn is_codeblock_expanded(&self, message_id: MessageId, ix: usize) -> bool {
self.expanded_code_blocks
.get(&(message_id, ix))
.copied()
.unwrap_or(false)
}
pub fn toggle_codeblock_expanded(&mut self, message_id: MessageId, ix: usize) {
let is_expanded = self
.expanded_code_blocks
.entry((message_id, ix))
.or_insert(false);
*is_expanded = !*is_expanded;
}
}
pub enum ActiveThreadEvent {
@@ -3441,7 +3436,10 @@ pub(crate) fn open_active_thread_as_markdown(
workspace.update_in(cx, |workspace, window, cx| {
let thread = thread.read(cx);
let markdown = thread.to_markdown(cx)?;
let thread_summary = thread.summary().or_default().to_string();
let thread_summary = thread
.summary()
.map(|summary| summary.to_string())
.unwrap_or_else(|| "Thread".to_string());
let project = workspace.project().clone();


@@ -85,6 +85,7 @@ actions!(
KeepAll,
Follow,
ResetTrialUpsell,
Close,
]
);


@@ -36,7 +36,6 @@ pub struct AgentConfiguration {
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
context_server_store: Entity<ContextServerStore>,
expanded_context_server_tools: HashMap<ContextServerId, bool>,
expanded_provider_configurations: HashMap<LanguageModelProviderId, bool>,
tools: Entity<ToolWorkingSet>,
_registry_subscription: Subscription,
scroll_handle: ScrollHandle,
@@ -79,7 +78,6 @@ impl AgentConfiguration {
configuration_views_by_provider: HashMap::default(),
context_server_store,
expanded_context_server_tools: HashMap::default(),
expanded_provider_configurations: HashMap::default(),
tools,
_registry_subscription: registry_subscription,
scroll_handle,
@@ -98,7 +96,6 @@ impl AgentConfiguration {
fn remove_provider_configuration_view(&mut self, provider_id: &LanguageModelProviderId) {
self.configuration_views_by_provider.remove(provider_id);
self.expanded_provider_configurations.remove(provider_id);
}
fn add_provider_configuration_view(
@@ -138,14 +135,9 @@ impl AgentConfiguration {
.get(&provider.id())
.cloned();
let is_expanded = self
.expanded_provider_configurations
.get(&provider.id())
.copied()
.unwrap_or(true);
v_flex()
.pt_3()
.pb_1()
.gap_1p5()
.border_t_1()
.border_color(cx.theme().colors().border.opacity(0.6))
@@ -160,59 +152,32 @@ impl AgentConfiguration {
.size(IconSize::Small)
.color(Color::Muted),
)
.child(Label::new(provider_name.clone()).size(LabelSize::Large))
.when(provider.is_authenticated(cx) && !is_expanded, |parent| {
parent.child(Icon::new(IconName::Check).color(Color::Success))
}),
.child(Label::new(provider_name.clone()).size(LabelSize::Large)),
)
.child(
h_flex()
.gap_1()
.when(provider.is_authenticated(cx), |parent| {
parent.child(
Button::new(
SharedString::from(format!("new-thread-{provider_id}")),
"Start New Thread",
)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.layer(ElevationIndex::ModalSurface)
.label_size(LabelSize::Small)
.on_click(cx.listener({
let provider = provider.clone();
move |_this, _event, _window, cx| {
cx.emit(AssistantConfigurationEvent::NewThread(
provider.clone(),
))
}
})),
)
})
.child(
Disclosure::new(
SharedString::from(format!(
"provider-disclosure-{provider_id}"
)),
is_expanded,
)
.opened_icon(IconName::ChevronUp)
.closed_icon(IconName::ChevronDown)
.on_click(cx.listener({
let provider_id = provider.id().clone();
move |this, _event, _window, _cx| {
let is_open = this
.expanded_provider_configurations
.entry(provider_id.clone())
.or_insert(true);
*is_open = !*is_open;
}
})),
),
),
.when(provider.is_authenticated(cx), |parent| {
parent.child(
Button::new(
SharedString::from(format!("new-thread-{provider_id}")),
"Start New Thread",
)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.label_size(LabelSize::Small)
.on_click(cx.listener({
let provider = provider.clone();
move |_this, _event, _window, cx| {
cx.emit(AssistantConfigurationEvent::NewThread(
provider.clone(),
))
}
})),
)
}),
)
.when(is_expanded, |parent| match configuration_view {
.map(|parent| match configuration_view {
Some(configuration_view) => parent.child(configuration_view),
None => parent.child(div().child(Label::new(format!(
"No configuration view for {provider_name}",


@@ -1,4 +1,6 @@
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll, Thread, ThreadEvent};
use crate::{
Keep, KeepAll, OpenAgentDiff, Reject, RejectAll, Thread, ThreadEvent, ui::AnimatedLabel,
};
use anyhow::Result;
use assistant_settings::AssistantSettings;
use buffer_diff::DiffHunkStatus;
@@ -9,9 +11,8 @@ use editor::{
scroll::Autoscroll,
};
use gpui::{
Action, Animation, AnimationExt, AnyElement, AnyView, App, AppContext, Empty, Entity,
EventEmitter, FocusHandle, Focusable, Global, SharedString, Subscription, Task, Transformation,
WeakEntity, Window, percentage, prelude::*,
Action, AnyElement, AnyView, App, AppContext, Empty, Entity, EventEmitter, FocusHandle,
Focusable, Global, SharedString, Subscription, Task, WeakEntity, Window, prelude::*,
};
use language::{Buffer, Capability, DiskState, OffsetRangeExt, Point};
@@ -24,7 +25,6 @@ use std::{
collections::hash_map::Entry,
ops::Range,
sync::Arc,
time::Duration,
};
use ui::{IconButtonShape, KeyBinding, Tooltip, prelude::*, vertical_divider};
use util::ResultExt;
@@ -215,7 +215,11 @@ impl AgentDiffPane {
}
fn update_title(&mut self, cx: &mut Context<Self>) {
let new_title = self.thread.read(cx).summary().unwrap_or("Agent Changes");
let new_title = self
.thread
.read(cx)
.summary()
.unwrap_or("Agent Changes".into());
if new_title != self.title {
self.title = new_title;
cx.emit(EditorEvent::TitleChanged);
@@ -465,7 +469,11 @@ impl Item for AgentDiffPane {
}
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
let summary = self.thread.read(cx).summary().unwrap_or("Agent Changes");
let summary = self
.thread
.read(cx)
.summary()
.unwrap_or("Agent Changes".into());
Label::new(format!("Review: {}", summary))
.color(if params.selected {
Color::Default
@@ -970,20 +978,9 @@ impl ToolbarItemView for AgentDiffToolbar {
impl Render for AgentDiffToolbar {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let spinner_icon = div()
.px_0p5()
.id("generating")
.tooltip(Tooltip::text("Generating Changes…"))
.child(
Icon::new(IconName::LoadCircle)
.size(IconSize::Small)
.color(Color::Accent)
.with_animation(
"load_circle",
Animation::new(Duration::from_secs(3)).repeat(),
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
),
)
let generating_label = div()
.w(rems_from_px(110.)) // Arbitrary size so the label doesn't dance around
.child(AnimatedLabel::new("Generating"))
.into_any();
let Some(active_item) = self.active_item.as_ref() else {
@@ -1000,7 +997,7 @@ impl Render for AgentDiffToolbar {
let content = match state {
EditorState::Idle => return Empty.into_any(),
EditorState::Generating => vec![spinner_icon],
EditorState::Generating => vec![generating_label],
EditorState::Reviewing => vec![
h_flex()
.child(
@@ -1118,7 +1115,7 @@ impl Render for AgentDiffToolbar {
let is_generating = agent_diff.read(cx).thread.read(cx).is_generating();
if is_generating {
return div().px_2().child(spinner_icon).into_any();
return div().px_2().child(generating_label).into_any();
}
let is_empty = agent_diff.read(cx).multibuffer.read(cx).is_empty();


@@ -10,8 +10,8 @@ use serde::{Deserialize, Serialize};
use anyhow::{Result, anyhow};
use assistant_context_editor::{
AgentPanelDelegate, AssistantContext, ConfigurationError, ContextEditor, ContextEvent,
ContextSummary, SlashCommandCompletionProvider, humanize_token_count,
make_lsp_adapter_delegate, render_remaining_tokens,
SlashCommandCompletionProvider, humanize_token_count, make_lsp_adapter_delegate,
render_remaining_tokens,
};
use assistant_settings::{AssistantDockPosition, AssistantSettings};
use assistant_slash_command::SlashCommandWorkingSet;
@@ -46,9 +46,7 @@ use ui::{
};
use util::{ResultExt as _, maybe};
use workspace::dock::{DockPosition, Panel, PanelEvent};
use workspace::{
CollaboratorId, DraggedSelection, DraggedTab, ToggleZoom, ToolbarItemView, Workspace,
};
use workspace::{CollaboratorId, DraggedSelection, DraggedTab, ToolbarItemView, Workspace};
use zed_actions::agent::{OpenConfiguration, OpenOnboardingModal, ResetOnboarding};
use zed_actions::assistant::{OpenRulesLibrary, ToggleFocus};
use zed_actions::{DecreaseBufferFontSize, IncreaseBufferFontSize, ResetBufferFontSize};
@@ -57,17 +55,17 @@ use zed_llm_client::UsageLimit;
use crate::active_thread::{self, ActiveThread, ActiveThreadEvent};
use crate::agent_configuration::{AgentConfiguration, AssistantConfigurationEvent};
use crate::agent_diff::AgentDiff;
use crate::history_store::{HistoryStore, RecentEntry};
use crate::history_store::{HistoryEntry, HistoryStore, RecentEntry};
use crate::message_editor::{MessageEditor, MessageEditorEvent};
use crate::thread::{Thread, ThreadError, ThreadId, ThreadSummary, TokenUsageRatio};
use crate::thread_history::{HistoryEntryElement, ThreadHistory};
use crate::thread::{Thread, ThreadError, ThreadId, TokenUsageRatio};
use crate::thread_history::{EntryTimeFormat, PastContext, PastThread, ThreadHistory};
use crate::thread_store::ThreadStore;
use crate::ui::AgentOnboardingModal;
use crate::{
AddContextServer, AgentDiffPane, ContextStore, DeleteRecentlyOpenThread, ExpandMessageEditor,
Follow, InlineAssistant, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff,
OpenHistory, ResetTrialUpsell, TextThreadStore, ThreadEvent, ToggleContextPicker,
ToggleNavigationMenu, ToggleOptionsMenu,
AddContextServer, AgentDiffPane, Close, ContextStore, DeleteRecentlyOpenThread,
ExpandMessageEditor, Follow, InlineAssistant, NewTextThread, NewThread,
OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialUpsell, TextThreadStore,
ThreadEvent, ToggleContextPicker, ToggleNavigationMenu, ToggleOptionsMenu,
};
const AGENT_PANEL_KEY: &str = "agent_panel";
@@ -158,6 +156,11 @@ pub fn init(cx: &mut App) {
})
.register_action(|_workspace, _: &ResetTrialUpsell, _window, cx| {
set_trial_upsell_dismissed(false, cx);
})
.register_action(|workspace, _: &Close, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
panel.update(cx, |panel, cx| panel.close_panel(window, cx));
}
});
},
)
@@ -196,7 +199,7 @@ impl ActiveView {
}
pub fn thread(thread: Entity<Thread>, window: &mut Window, cx: &mut App) -> Self {
let summary = thread.read(cx).summary().or_default();
let summary = thread.read(cx).summary_or_default();
let editor = cx.new(|cx| {
let mut editor = Editor::single_line(window, cx);
@@ -218,7 +221,7 @@ impl ActiveView {
}
EditorEvent::Blurred => {
if editor.read(cx).text(cx).is_empty() {
let summary = thread.read(cx).summary().or_default();
let summary = thread.read(cx).summary_or_default();
editor.update(cx, |editor, cx| {
editor.set_text(summary, window, cx);
@@ -233,7 +236,7 @@ impl ActiveView {
let editor = editor.clone();
move |thread, event, window, cx| match event {
ThreadEvent::SummaryGenerated => {
let summary = thread.read(cx).summary().or_default();
let summary = thread.read(cx).summary_or_default();
editor.update(cx, |editor, cx| {
editor.set_text(summary, window, cx);
@@ -296,8 +299,7 @@ impl ActiveView {
.read(cx)
.context()
.read(cx)
.summary()
.or_default();
.summary_or_default();
editor.update(cx, |editor, cx| {
editor.set_text(summary, window, cx);
@@ -312,7 +314,7 @@ impl ActiveView {
let editor = editor.clone();
move |assistant_context, event, window, cx| match event {
ContextEvent::SummaryGenerated => {
let summary = assistant_context.read(cx).summary().or_default();
let summary = assistant_context.read(cx).summary_or_default();
editor.update(cx, |editor, cx| {
editor.set_text(summary, window, cx);
@@ -359,19 +361,21 @@ pub struct AgentPanel {
previous_view: Option<ActiveView>,
history_store: Entity<HistoryStore>,
history: Entity<ThreadHistory>,
hovered_recent_history_item: Option<usize>,
assistant_dropdown_menu_handle: PopoverMenuHandle<ContextMenu>,
assistant_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
assistant_navigation_menu: Option<Entity<ContextMenu>>,
width: Option<Pixels>,
height: Option<Pixels>,
zoomed: bool,
pending_serialization: Option<Task<Result<()>>>,
hide_trial_upsell: bool,
_trial_markdown: Entity<Markdown>,
}
impl AgentPanel {
fn close_panel(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
cx.emit(PanelEvent::Close);
}
fn serialize(&mut self, cx: &mut Context<Self>) {
let width = self.width;
self.pending_serialization = Some(cx.background_spawn(async move {
@@ -701,13 +705,11 @@ impl AgentPanel {
previous_view: None,
history_store: history_store.clone(),
history: cx.new(|cx| ThreadHistory::new(weak_self, history_store, window, cx)),
hovered_recent_history_item: None,
assistant_dropdown_menu_handle: PopoverMenuHandle::default(),
assistant_navigation_menu_handle: PopoverMenuHandle::default(),
assistant_navigation_menu: None,
width: None,
height: None,
zoomed: false,
pending_serialization: None,
hide_trial_upsell: false,
_trial_markdown: trial_markdown,
@@ -1149,17 +1151,6 @@ impl AgentPanel {
}
}
pub fn toggle_zoom(&mut self, _: &ToggleZoom, window: &mut Window, cx: &mut Context<Self>) {
if self.zoomed {
cx.emit(PanelEvent::ZoomOut);
} else {
if !self.focus_handle(cx).contains_focused(window, cx) {
cx.focus_self(window);
}
cx.emit(PanelEvent::ZoomIn);
}
}
pub fn open_agent_diff(
&mut self,
_: &OpenAgentDiff,
@@ -1432,15 +1423,6 @@ impl Panel for AgentPanel {
fn enabled(&self, cx: &App) -> bool {
AssistantSettings::get_global(cx).enabled
}
fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
self.zoomed
}
fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context<Self>) {
self.zoomed = zoomed;
cx.notify();
}
}
impl AgentPanel {
@@ -1453,45 +1435,23 @@ impl AgentPanel {
..
} => {
let active_thread = self.thread.read(cx);
let state = if active_thread.is_empty() {
&ThreadSummary::Pending
} else {
active_thread.summary(cx)
};
let is_empty = active_thread.is_empty();
match state {
ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT.clone())
let summary = active_thread.summary(cx);
if is_empty {
Label::new(Thread::DEFAULT_SUMMARY.clone())
.truncate()
.into_any_element(),
ThreadSummary::Generating => Label::new(LOADING_SUMMARY_PLACEHOLDER)
.into_any_element()
} else if summary.is_none() {
Label::new(LOADING_SUMMARY_PLACEHOLDER)
.truncate()
.into_any_element(),
ThreadSummary::Ready(_) => div()
.into_any_element()
} else {
div()
.w_full()
.child(change_title_editor.clone())
.into_any_element(),
ThreadSummary::Error => h_flex()
.w_full()
.child(change_title_editor.clone())
.child(
ui::IconButton::new("retry-summary-generation", IconName::RotateCcw)
.on_click({
let active_thread = self.thread.clone();
move |_, _window, cx| {
active_thread.update(cx, |thread, cx| {
thread.regenerate_summary(cx);
});
}
})
.tooltip(move |_window, cx| {
cx.new(|_| {
Tooltip::new("Failed to generate title")
.meta("Click to try again")
})
.into()
}),
)
.into_any_element(),
.into_any_element()
}
}
ActiveView::PromptEditor {
@@ -1499,13 +1459,14 @@ impl AgentPanel {
context_editor,
..
} => {
let summary = context_editor.read(cx).context().read(cx).summary();
let context_editor = context_editor.read(cx);
let summary = context_editor.context().read(cx).summary();
match summary {
ContextSummary::Pending => Label::new(ContextSummary::DEFAULT)
None => Label::new(AssistantContext::DEFAULT_SUMMARY.clone())
.truncate()
.into_any_element(),
ContextSummary::Content(summary) => {
Some(summary) => {
if summary.done {
div()
.w_full()
@@ -1517,28 +1478,6 @@ impl AgentPanel {
.into_any_element()
}
}
ContextSummary::Error => h_flex()
.w_full()
.child(title_editor.clone())
.child(
ui::IconButton::new("retry-summary-generation", IconName::RotateCcw)
.on_click({
let context_editor = context_editor.clone();
move |_, _window, cx| {
context_editor.update(cx, |context_editor, cx| {
context_editor.regenerate_summary(cx);
});
}
})
.tooltip(move |_window, cx| {
cx.new(|_| {
Tooltip::new("Failed to generate title")
.meta("Click to try again")
})
.into()
}),
)
.into_any_element(),
}
}
ActiveView::History => Label::new("History").truncate().into_any_element(),
@@ -1648,12 +1587,6 @@ impl AgentPanel {
}),
);
let zoom_in_label = if self.is_zoomed(window, cx) {
"Zoom Out"
} else {
"Zoom In"
};
let agent_extra_menu = PopoverMenu::new("agent-options-menu")
.trigger_with_tooltip(
IconButton::new("agent-options-menu", IconName::Ellipsis)
@@ -1740,8 +1673,7 @@ impl AgentPanel {
menu = menu
.action("Rules…", Box::new(OpenRulesLibrary::default()))
.action("Settings", Box::new(OpenConfiguration))
.action(zoom_in_label, Box::new(ToggleZoom));
.action("Settings", Box::new(OpenConfiguration));
menu
}))
});
@@ -2289,7 +2221,7 @@ impl AgentPanel {
.border_b_1()
.border_color(cx.theme().colors().border_variant)
.child(
Label::new("Recent")
Label::new("Past Interactions")
.size(LabelSize::Small)
.color(Color::Muted),
)
@@ -2314,20 +2246,18 @@ impl AgentPanel {
v_flex()
.gap_1()
.children(
recent_history.into_iter().enumerate().map(|(index, entry)| {
recent_history.into_iter().map(|entry| {
// TODO: Add keyboard navigation.
let is_hovered = self.hovered_recent_history_item == Some(index);
HistoryEntryElement::new(entry.clone(), cx.entity().downgrade())
.hovered(is_hovered)
.on_hover(cx.listener(move |this, is_hovered, _window, cx| {
if *is_hovered {
this.hovered_recent_history_item = Some(index);
} else if this.hovered_recent_history_item == Some(index) {
this.hovered_recent_history_item = None;
}
cx.notify();
}))
.into_any_element()
match entry {
HistoryEntry::Thread(thread) => {
PastThread::new(thread, cx.entity().downgrade(), false, vec![], EntryTimeFormat::DateAndTime)
.into_any_element()
}
HistoryEntry::Context(context) => {
PastContext::new(context, cx.entity().downgrade(), false, vec![], EntryTimeFormat::DateAndTime)
.into_any_element()
}
}
}),
)
)
@@ -2855,7 +2785,6 @@ impl Render for AgentPanel {
.on_action(cx.listener(Self::increase_font_size))
.on_action(cx.listener(Self::decrease_font_size))
.on_action(cx.listener(Self::reset_font_size))
.on_action(cx.listener(Self::toggle_zoom))
.child(self.render_toolbar(window, cx))
.children(self.render_trial_upsell(window, cx))
.map(|parent| match &self.active_view {


@@ -586,7 +586,10 @@ impl ThreadContextHandle {
}
pub fn title(&self, cx: &App) -> SharedString {
self.thread.read(cx).summary().or_default()
self.thread
.read(cx)
.summary()
.unwrap_or_else(|| "New thread".into())
}
fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
@@ -594,7 +597,9 @@ impl ThreadContextHandle {
let text = Thread::wait_for_detailed_summary_or_text(&self.thread, cx).await?;
let title = self
.thread
.read_with(cx, |thread, _cx| thread.summary().or_default())
.read_with(cx, |thread, _cx| {
thread.summary().unwrap_or_else(|| "New thread".into())
})
.ok()?;
let context = AgentContext::Thread(ThreadContext {
title,
@@ -637,7 +642,7 @@ impl TextThreadContextHandle {
}
pub fn title(&self, cx: &App) -> SharedString {
self.context.read(cx).summary().or_default()
self.context.read(cx).summary_or_default()
}
fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
@@ -825,25 +830,29 @@ pub fn load_context(
prompt_store: &Option<Entity<PromptStore>>,
cx: &mut App,
) -> Task<ContextLoadResult> {
let load_tasks: Vec<_> = contexts
.into_iter()
.map(|context| match context {
AgentContextHandle::File(context) => context.load(cx),
AgentContextHandle::Directory(context) => context.load(project.clone(), cx),
AgentContextHandle::Symbol(context) => context.load(cx),
AgentContextHandle::Selection(context) => context.load(cx),
AgentContextHandle::FetchedUrl(context) => context.load(),
AgentContextHandle::Thread(context) => context.load(cx),
AgentContextHandle::TextThread(context) => context.load(cx),
AgentContextHandle::Rules(context) => context.load(prompt_store, cx),
AgentContextHandle::Image(context) => context.load(cx),
})
.collect();
let mut load_tasks = Vec::new();
for context in contexts.iter().cloned() {
match context {
AgentContextHandle::File(context) => load_tasks.push(context.load(cx)),
AgentContextHandle::Directory(context) => {
load_tasks.push(context.load(project.clone(), cx))
}
AgentContextHandle::Symbol(context) => load_tasks.push(context.load(cx)),
AgentContextHandle::Selection(context) => load_tasks.push(context.load(cx)),
AgentContextHandle::FetchedUrl(context) => load_tasks.push(context.load()),
AgentContextHandle::Thread(context) => load_tasks.push(context.load(cx)),
AgentContextHandle::TextThread(context) => load_tasks.push(context.load(cx)),
AgentContextHandle::Rules(context) => load_tasks.push(context.load(prompt_store, cx)),
AgentContextHandle::Image(context) => load_tasks.push(context.load(cx)),
}
}
cx.background_spawn(async move {
let load_results = future::join_all(load_tasks).await;
let mut contexts = Vec::new();
let mut text = String::new();
let mut referenced_buffers = HashSet::default();
for context in load_results {
let Some((context, buffers)) = context else {
@@ -862,18 +871,10 @@ pub fn load_context(
let mut text_thread_context = Vec::new();
let mut rules_context = Vec::new();
let mut images = Vec::new();
let mut loaded_files = Vec::new();
let mut loaded_dirs = Vec::new();
for context in &contexts {
match context {
AgentContext::File(context) => {
file_context.push(context);
loaded_files.push(context.full_path.clone());
}
AgentContext::Directory(context) => {
directory_context.push(context);
loaded_dirs.push(context.full_path.clone());
}
AgentContext::File(context) => file_context.push(context),
AgentContext::Directory(context) => directory_context.push(context),
AgentContext::Symbol(context) => symbol_context.push(context),
AgentContext::Selection(context) => selection_context.push(context),
AgentContext::FetchedUrl(context) => fetched_url_context.push(context),
@@ -898,17 +899,18 @@ pub fn load_context(
return ContextLoadResult {
loaded_context: LoadedContext {
contexts,
text: String::new(),
text,
images,
},
referenced_buffers,
};
}
let mut text = "\n<context>\n\
text.push_str(
"\n<context>\n\
The following items were attached by the user. \
They are up-to-date and don't need to be re-read.\n\n"
.to_string();
They are up-to-date and don't need to be re-read.\n\n",
);
if !file_context.is_empty() {
text.push_str("<files>");


@@ -381,16 +381,6 @@ impl ContextPicker {
cx.focus_self(window);
}
pub fn select_first(&mut self, window: &mut Window, cx: &mut Context<Self>) {
match &self.mode {
ContextPickerState::Default(entity) => entity.update(cx, |entity, cx| {
entity.select_first(&Default::default(), window, cx)
}),
// Other variants already select their first entry on open automatically
_ => {}
}
}
fn recent_menu_item(
&self,
context_picker: Entity<ContextPicker>,

View File

@@ -160,7 +160,7 @@ impl ContextStrip {
}
Some(SuggestedContext::Thread {
name: active_thread.summary().or_default(),
name: active_thread.summary_or_default(),
thread: weak_active_thread,
})
} else if let Some(active_context_editor) = panel.active_context_editor() {
@@ -174,7 +174,7 @@ impl ContextStrip {
}
Some(SuggestedContext::TextThread {
name: context.summary().or_default(),
name: context.summary_or_default(),
context: weak_context,
})
} else {
@@ -420,25 +420,12 @@ impl Render for ContextStrip {
})
.child(
PopoverMenu::new("context-picker")
.menu({
let context_picker = context_picker.clone();
move |window, cx| {
context_picker.update(cx, |this, cx| {
this.init(window, cx);
});
.menu(move |window, cx| {
context_picker.update(cx, |this, cx| {
this.init(window, cx);
});
Some(context_picker.clone())
}
})
.on_open({
let context_picker = context_picker.downgrade();
Rc::new(move |window, cx| {
context_picker
.update(cx, |context_picker, cx| {
context_picker.select_first(window, cx);
})
.ok();
})
Some(context_picker.clone())
})
.trigger_with_tooltip(
IconButton::new("add-context", IconName::Plus)


@@ -75,7 +75,7 @@ impl Default for DebugAccountState {
Self {
enabled: false,
trial_expired: false,
plan: Plan::ZedFree,
plan: Plan::Free,
custom_prompt_usage: RequestUsage {
limit: UsageLimit::Unlimited,
amount: 0,


@@ -1,4 +1,4 @@
use std::{collections::VecDeque, path::Path, sync::Arc};
use std::{collections::VecDeque, path::Path};
use anyhow::{Context as _, anyhow};
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
@@ -34,20 +34,6 @@ impl HistoryEntry {
HistoryEntry::Context(context) => context.mtime.to_utc(),
}
}
pub fn id(&self) -> HistoryEntryId {
match self {
HistoryEntry::Thread(thread) => HistoryEntryId::Thread(thread.id.clone()),
HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()),
}
}
}
/// Generic identifier for a history entry.
#[derive(Clone, PartialEq, Eq)]
pub enum HistoryEntryId {
Thread(ThreadId),
Context(Arc<Path>),
}
#[derive(Clone, Debug)]
@@ -71,8 +57,8 @@ impl Eq for RecentEntry {}
impl RecentEntry {
pub(crate) fn summary(&self, cx: &App) -> SharedString {
match self {
RecentEntry::Thread(_, thread) => thread.read(cx).summary().or_default(),
RecentEntry::Context(context) => context.read(cx).summary().or_default(),
RecentEntry::Thread(_, thread) => thread.read(cx).summary_or_default(),
RecentEntry::Context(context) => context.read(cx).summary_or_default(),
}
}
}


@@ -338,27 +338,13 @@ impl InlineAssistant {
window: &mut Window,
cx: &mut App,
) {
let (snapshot, initial_selections, newest_selection) = editor.update(cx, |editor, cx| {
let selections = editor.selections.all::<Point>(cx);
let newest_selection = editor.selections.newest::<Point>(cx);
(editor.snapshot(window, cx), selections, newest_selection)
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
(
editor.snapshot(window, cx),
editor.selections.all::<Point>(cx),
)
});
// Check if there is already an inline assistant that contains the
// newest selection, if there is, focus it
if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) {
for assist_id in &editor_assists.assist_ids {
let assist = &self.assists[assist_id];
let range = assist.range.to_point(&snapshot.buffer_snapshot);
if range.start.row <= newest_selection.start.row
&& newest_selection.end.row <= range.end.row
{
self.focus_assist(*assist_id, window, cx);
return;
}
}
}
let mut selections = Vec::<Selection<Point>>::new();
let mut newest_selection = None;
for mut selection in initial_selections {


@@ -200,13 +200,7 @@ impl MessageEditor {
});
let profile_selector = cx.new(|cx| {
ProfileSelector::new(
fs,
thread.clone(),
thread_store,
editor.focus_handle(cx),
cx,
)
ProfileSelector::new(thread.clone(), thread_store, editor.focus_handle(cx), cx)
});
Self {
@@ -1085,11 +1079,11 @@ impl MessageEditor {
let plan = user_store
.current_plan()
.map(|plan| match plan {
Plan::Free => zed_llm_client::Plan::ZedFree,
Plan::Free => zed_llm_client::Plan::Free,
Plan::ZedPro => zed_llm_client::Plan::ZedPro,
Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
})
.unwrap_or(zed_llm_client::Plan::ZedFree);
.unwrap_or(zed_llm_client::Plan::Free);
let usage = self.thread.read(cx).last_usage().or_else(|| {
maybe!({
let amount = user_store.model_request_usage_amount()?;


@@ -1,13 +1,10 @@
use std::sync::Arc;
use assistant_settings::{
AgentProfile, AgentProfileId, AssistantDockPosition, AssistantSettings, GroupedAgentProfiles,
builtin_profiles,
};
use fs::Fs;
use gpui::{Action, Entity, FocusHandle, Subscription, WeakEntity, prelude::*};
use language_model::LanguageModelRegistry;
use settings::{Settings as _, SettingsStore, update_settings_file};
use settings::{Settings as _, SettingsStore};
use ui::{
ContextMenu, ContextMenuEntry, DocumentationSide, PopoverMenu, PopoverMenuHandle, Tooltip,
prelude::*,
@@ -18,7 +15,6 @@ use crate::{ManageProfiles, Thread, ThreadStore, ToggleProfileSelector};
pub struct ProfileSelector {
profiles: GroupedAgentProfiles,
fs: Arc<dyn Fs>,
thread: Entity<Thread>,
thread_store: WeakEntity<ThreadStore>,
menu_handle: PopoverMenuHandle<ContextMenu>,
@@ -28,7 +24,6 @@ pub struct ProfileSelector {
impl ProfileSelector {
pub fn new(
fs: Arc<dyn Fs>,
thread: Entity<Thread>,
thread_store: WeakEntity<ThreadStore>,
focus_handle: FocusHandle,
@@ -40,7 +35,6 @@ impl ProfileSelector {
Self {
profiles: GroupedAgentProfiles::from_settings(AssistantSettings::get_global(cx)),
fs,
thread,
thread_store,
menu_handle: PopoverMenuHandle::default(),
@@ -101,7 +95,7 @@ impl ProfileSelector {
profile_id: AgentProfileId,
profile: &AgentProfile,
settings: &AssistantSettings,
_cx: &App,
cx: &App,
) -> ContextMenuEntry {
let documentation = match profile.name.to_lowercase().as_str() {
builtin_profiles::WRITE => Some("Get help to write anything."),
@@ -110,8 +104,12 @@ impl ProfileSelector {
_ => None,
};
let entry = ContextMenuEntry::new(profile.name.clone())
.toggleable(IconPosition::End, profile_id == settings.default_profile);
let current_profile_id = self.thread.read(cx).configured_profile_id();
let entry = ContextMenuEntry::new(profile.name.clone()).toggleable(
IconPosition::End,
Some(profile_id.clone()) == current_profile_id,
);
let entry = if let Some(doc_text) = documentation {
entry.documentation_aside(documentation_side(settings.dock), move |_| {
@@ -122,15 +120,13 @@ impl ProfileSelector {
};
entry.handler({
let fs = self.fs.clone();
let thread_store = self.thread_store.clone();
let profile_id = profile_id.clone();
let thread = self.thread.clone();
move |_window, cx| {
update_settings_file::<AssistantSettings>(fs.clone(), cx, {
let profile_id = profile_id.clone();
move |settings, _cx| {
settings.set_profile(profile_id.clone());
}
thread.update(cx, |thread, cx| {
thread.set_configured_profile_id(Some(profile_id.clone()), cx);
});
thread_store
@@ -146,8 +142,12 @@ impl ProfileSelector {
impl Render for ProfileSelector {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let settings = AssistantSettings::get_global(cx);
let profile_id = &settings.default_profile;
let profile = settings.profiles.get(profile_id);
let profile_id = self
.thread
.read(cx)
.configured_profile_id()
.unwrap_or(settings.default_profile.clone());
let profile = settings.profiles.get(&profile_id).cloned();
let selected_profile = profile
.map(|profile| profile.name.clone())


@@ -5,7 +5,7 @@ use std::sync::Arc;
use std::time::Instant;
use anyhow::{Result, anyhow};
use assistant_settings::{AssistantSettings, CompletionMode};
use assistant_settings::{AgentProfileId, AssistantSettings, CompletionMode};
use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet};
use chrono::{DateTime, Utc};
use collections::HashMap;
@@ -36,7 +36,7 @@ use serde::{Deserialize, Serialize};
use settings::Settings;
use thiserror::Error;
use ui::Window;
use util::{ResultExt as _, post_inc};
use util::{ResultExt as _, TryFutureExt as _, post_inc};
use uuid::Uuid;
use zed_llm_client::CompletionRequestStatus;
@@ -324,7 +324,7 @@ pub enum QueueState {
pub struct Thread {
id: ThreadId,
updated_at: DateTime<Utc>,
summary: ThreadSummary,
summary: Option<SharedString>,
pending_summary: Task<Option<()>>,
detailed_summary_task: Task<Option<()>>,
detailed_summary_tx: postage::watch::Sender<DetailedSummaryState>,
@@ -359,33 +359,7 @@ pub struct Thread {
>,
remaining_turns: u32,
configured_model: Option<ConfiguredModel>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ThreadSummary {
Pending,
Generating,
Ready(SharedString),
Error,
}
impl ThreadSummary {
pub const DEFAULT: SharedString = SharedString::new_static("New Thread");
pub fn or_default(&self) -> SharedString {
self.unwrap_or(Self::DEFAULT)
}
pub fn unwrap_or(&self, message: impl Into<SharedString>) -> SharedString {
self.ready().unwrap_or_else(|| message.into())
}
pub fn ready(&self) -> Option<SharedString> {
match self {
ThreadSummary::Ready(summary) => Some(summary.clone()),
ThreadSummary::Pending | ThreadSummary::Generating | ThreadSummary::Error => None,
}
}
configured_profile_id: Option<AgentProfileId>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -406,11 +380,13 @@ impl Thread {
) -> Self {
let (detailed_summary_tx, detailed_summary_rx) = postage::watch::channel();
let configured_model = LanguageModelRegistry::read_global(cx).default_model();
let assistant_settings = AssistantSettings::get_global(cx);
let configured_profile_id = assistant_settings.default_profile.clone();
Self {
id: ThreadId::new(),
updated_at: Utc::now(),
summary: ThreadSummary::Pending,
summary: None,
pending_summary: Task::ready(None),
detailed_summary_task: Task::ready(None),
detailed_summary_tx,
@@ -448,6 +424,7 @@ impl Thread {
request_callback: None,
remaining_turns: u32::MAX,
configured_model,
configured_profile_id: Some(configured_profile_id),
}
}
@@ -495,10 +472,12 @@ impl Thread {
.completion_mode
.unwrap_or_else(|| AssistantSettings::get_global(cx).preferred_completion_mode);
let configured_profile_id = serialized.profile.clone();
Self {
id,
updated_at: serialized.updated_at,
summary: ThreadSummary::Ready(serialized.summary),
summary: Some(serialized.summary),
pending_summary: Task::ready(None),
detailed_summary_task: Task::ready(None),
detailed_summary_tx,
@@ -568,6 +547,7 @@ impl Thread {
request_callback: None,
remaining_turns: u32::MAX,
configured_model,
configured_profile_id,
}
}
@@ -599,6 +579,10 @@ impl Thread {
self.last_prompt_id = PromptId::new();
}
pub fn summary(&self) -> Option<SharedString> {
self.summary.clone()
}
pub fn project_context(&self) -> SharedProjectContext {
self.project_context.clone()
}
@@ -619,25 +603,39 @@ impl Thread {
cx.notify();
}
pub fn summary(&self) -> &ThreadSummary {
&self.summary
pub fn configured_profile_id(&self) -> Option<AgentProfileId> {
self.configured_profile_id.clone()
}
pub fn set_configured_profile_id(
&mut self,
id: Option<AgentProfileId>,
cx: &mut Context<Self>,
) {
self.configured_profile_id = id;
cx.notify();
}
pub const DEFAULT_SUMMARY: SharedString = SharedString::new_static("New Thread");
pub fn summary_or_default(&self) -> SharedString {
self.summary.clone().unwrap_or(Self::DEFAULT_SUMMARY)
}
pub fn set_summary(&mut self, new_summary: impl Into<SharedString>, cx: &mut Context<Self>) {
let current_summary = match &self.summary {
ThreadSummary::Pending | ThreadSummary::Generating => return,
ThreadSummary::Ready(summary) => summary,
ThreadSummary::Error => &ThreadSummary::DEFAULT,
let Some(current_summary) = &self.summary else {
// Don't allow setting summary until generated
return;
};
let mut new_summary = new_summary.into();
if new_summary.is_empty() {
new_summary = ThreadSummary::DEFAULT;
new_summary = Self::DEFAULT_SUMMARY;
}
if current_summary != &new_summary {
self.summary = ThreadSummary::Ready(new_summary);
self.summary = Some(new_summary);
cx.emit(ThreadEvent::SummaryChanged);
}
}
@@ -1051,7 +1049,7 @@ impl Thread {
let initial_project_snapshot = initial_project_snapshot.await;
this.read_with(cx, |this, cx| SerializedThread {
version: SerializedThread::VERSION.to_string(),
summary: this.summary().or_default(),
summary: this.summary_or_default(),
updated_at: this.updated_at(),
messages: this
.messages()
@@ -1122,6 +1120,7 @@ impl Thread {
provider: model.provider.id().0.to_string(),
model: model.model.id().0.to_string(),
}),
profile: this.configured_profile_id.clone(),
completion_mode: Some(this.completion_mode),
})
})
@@ -1647,7 +1646,7 @@ impl Thread {
// If there is a response without tool use, summarize the message. Otherwise,
// allow two tool uses before summarizing.
if matches!(thread.summary, ThreadSummary::Pending)
if thread.summary.is_none()
&& thread.messages.len() >= 2
&& (!thread.has_pending_tool_uses() || thread.messages.len() >= 6)
{
@@ -1761,7 +1760,6 @@ impl Thread {
pub fn summarize(&mut self, cx: &mut Context<Self>) {
let Some(model) = LanguageModelRegistry::read_global(cx).thread_summary_model() else {
println!("No thread summary model");
return;
};
@@ -1776,17 +1774,13 @@ impl Thread {
let request = self.to_summarize_request(&model.model, added_user_message.into(), cx);
self.summary = ThreadSummary::Generating;
self.pending_summary = cx.spawn(async move |this, cx| {
let result = async {
async move {
let mut messages = model.model.stream_completion(request, &cx).await?;
let mut new_summary = String::new();
while let Some(event) = messages.next().await {
let Ok(event) = event else {
continue;
};
let event = event?;
let text = match event {
LanguageModelCompletionEvent::Text(text) => text,
LanguageModelCompletionEvent::StatusUpdate(
@@ -1812,29 +1806,18 @@ impl Thread {
}
}
anyhow::Ok(new_summary)
this.update(cx, |this, cx| {
if !new_summary.is_empty() {
this.summary = Some(new_summary.into());
}
cx.emit(ThreadEvent::SummaryGenerated);
})?;
anyhow::Ok(())
}
.await;
this.update(cx, |this, cx| {
match result {
Ok(new_summary) => {
if new_summary.is_empty() {
this.summary = ThreadSummary::Error;
} else {
this.summary = ThreadSummary::Ready(new_summary.into());
}
}
Err(err) => {
this.summary = ThreadSummary::Error;
log::error!("Failed to generate thread summary: {}", err);
}
}
cx.emit(ThreadEvent::SummaryGenerated);
})
.log_err()?;
Some(())
.log_err()
.await
});
}
@@ -2241,7 +2224,7 @@ impl Thread {
.read(cx)
.enabled_tools(cx)
.iter()
.map(|tool| tool.name())
.map(|tool| tool.name().to_string())
.collect();
self.message_feedback.insert(message_id, feedback);
@@ -2444,8 +2427,9 @@ impl Thread {
pub fn to_markdown(&self, cx: &App) -> Result<String> {
let mut markdown = Vec::new();
let summary = self.summary().or_default();
writeln!(markdown, "# {summary}\n")?;
if let Some(summary) = self.summary() {
writeln!(markdown, "# {summary}\n")?;
};
for message in self.messages() {
writeln!(
@@ -2762,7 +2746,7 @@ mod tests {
use assistant_tool::ToolRegistry;
use editor::EditorSettings;
use gpui::TestAppContext;
use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider};
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project};
use prompt_store::PromptBuilder;
use serde_json::json;
@@ -3263,196 +3247,6 @@ fn main() {{
assert_eq!(request.temperature, None);
}
#[gpui::test]
async fn test_thread_summary(cx: &mut TestAppContext) {
init_test_settings(cx);
let project = create_test_project(cx, json!({})).await;
let (_, _thread_store, thread, _context_store, model) =
setup_test_environment(cx, project.clone()).await;
// Initial state should be pending
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Pending));
assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT);
});
// Manually setting the summary should not be allowed in this state
thread.update(cx, |thread, cx| {
thread.set_summary("This should not work", cx);
});
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Pending));
});
// Send a message
thread.update(cx, |thread, cx| {
thread.insert_user_message("Hi!", ContextLoadResult::default(), None, vec![], cx);
thread.send_to_model(model.clone(), None, cx);
});
let fake_model = model.as_fake();
simulate_successful_response(&fake_model, cx);
// Should start generating summary when there are >= 2 messages
thread.read_with(cx, |thread, _| {
assert_eq!(*thread.summary(), ThreadSummary::Generating);
});
// Should not be able to set the summary while generating
thread.update(cx, |thread, cx| {
thread.set_summary("This should not work either", cx);
});
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Generating));
assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT);
});
cx.run_until_parked();
fake_model.stream_last_completion_response("Brief".into());
fake_model.stream_last_completion_response(" Introduction".into());
fake_model.end_last_completion_stream();
cx.run_until_parked();
// Summary should be set
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Ready(_)));
assert_eq!(thread.summary().or_default(), "Brief Introduction");
});
// Now we should be able to set a summary
thread.update(cx, |thread, cx| {
thread.set_summary("Brief Intro", cx);
});
thread.read_with(cx, |thread, _| {
assert_eq!(thread.summary().or_default(), "Brief Intro");
});
// Test setting an empty summary (should default to DEFAULT)
thread.update(cx, |thread, cx| {
thread.set_summary("", cx);
});
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Ready(_)));
assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT);
});
}
#[gpui::test]
async fn test_thread_summary_error_set_manually(cx: &mut TestAppContext) {
init_test_settings(cx);
let project = create_test_project(cx, json!({})).await;
let (_, _thread_store, thread, _context_store, model) =
setup_test_environment(cx, project.clone()).await;
test_summarize_error(&model, &thread, cx);
// Now we should be able to set a summary
thread.update(cx, |thread, cx| {
thread.set_summary("Brief Intro", cx);
});
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Ready(_)));
assert_eq!(thread.summary().or_default(), "Brief Intro");
});
}
#[gpui::test]
async fn test_thread_summary_error_retry(cx: &mut TestAppContext) {
init_test_settings(cx);
let project = create_test_project(cx, json!({})).await;
let (_, _thread_store, thread, _context_store, model) =
setup_test_environment(cx, project.clone()).await;
test_summarize_error(&model, &thread, cx);
// Sending another message should not trigger another summarize request
thread.update(cx, |thread, cx| {
thread.insert_user_message(
"How are you?",
ContextLoadResult::default(),
None,
vec![],
cx,
);
thread.send_to_model(model.clone(), None, cx);
});
let fake_model = model.as_fake();
simulate_successful_response(&fake_model, cx);
thread.read_with(cx, |thread, _| {
// State is still Error, not Generating
assert!(matches!(thread.summary(), ThreadSummary::Error));
});
// But the summarize request can be invoked manually
thread.update(cx, |thread, cx| {
thread.summarize(cx);
});
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Generating));
});
cx.run_until_parked();
fake_model.stream_last_completion_response("A successful summary".into());
fake_model.end_last_completion_stream();
cx.run_until_parked();
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Ready(_)));
assert_eq!(thread.summary().or_default(), "A successful summary");
});
}
fn test_summarize_error(
model: &Arc<dyn LanguageModel>,
thread: &Entity<Thread>,
cx: &mut TestAppContext,
) {
thread.update(cx, |thread, cx| {
thread.insert_user_message("Hi!", ContextLoadResult::default(), None, vec![], cx);
thread.send_to_model(model.clone(), None, cx);
});
let fake_model = model.as_fake();
simulate_successful_response(&fake_model, cx);
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Generating));
assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT);
});
// Simulate summary request ending
cx.run_until_parked();
fake_model.end_last_completion_stream();
cx.run_until_parked();
// State is set to Error and the default message is used
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Error));
assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT);
});
}
fn simulate_successful_response(fake_model: &FakeLanguageModel, cx: &mut TestAppContext) {
cx.run_until_parked();
fake_model.stream_last_completion_response("Assistant response".into());
fake_model.end_last_completion_stream();
cx.run_until_parked();
}
fn init_test_settings(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
@@ -3509,29 +3303,9 @@ fn main() {{
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None));
let provider = Arc::new(FakeLanguageModelProvider);
let model = provider.test_model();
let model = FakeLanguageModel::default();
let model: Arc<dyn LanguageModel> = Arc::new(model);
cx.update(|_, cx| {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model(
Some(ConfiguredModel {
provider: provider.clone(),
model: model.clone(),
}),
cx,
);
registry.set_thread_summary_model(
Some(ConfiguredModel {
provider,
model: model.clone(),
}),
cx,
);
})
});
(workspace, thread_store, thread, context_store, model)
}

View File

@@ -2,11 +2,12 @@ use std::fmt::Display;
use std::ops::Range;
use std::sync::Arc;
use assistant_context_editor::SavedContextMetadata;
use chrono::{Datelike as _, Local, NaiveDate, TimeDelta};
use editor::{Editor, EditorEvent};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
App, ClickEvent, Empty, Entity, FocusHandle, Focusable, ScrollStrategy, Stateful, Task,
App, Empty, Entity, FocusHandle, Focusable, ScrollStrategy, Stateful, Task,
UniformListScrollHandle, WeakEntity, Window, uniform_list,
};
use time::{OffsetDateTime, UtcOffset};
@@ -17,6 +18,7 @@ use ui::{
use util::ResultExt;
use crate::history_store::{HistoryEntry, HistoryStore};
use crate::thread_store::SerializedThreadMetadata;
use crate::{AgentPanel, RemoveSelectedThread};
pub struct ThreadHistory {
@@ -24,12 +26,11 @@ pub struct ThreadHistory {
history_store: Entity<HistoryStore>,
scroll_handle: UniformListScrollHandle,
selected_index: usize,
hovered_index: Option<usize>,
search_editor: Entity<Editor>,
all_entries: Arc<Vec<HistoryEntry>>,
// When the search is empty, we display date separators between history entries
// This vector contains an enum of either a separator or an actual entry
separated_items: Vec<ListItemType>,
separated_items: Vec<HistoryListItem>,
// Maps entry indexes to list item indexes
separated_item_indexes: Vec<u32>,
_separated_items_task: Option<Task<()>>,
@@ -51,7 +52,7 @@ enum SearchState {
},
}
enum ListItemType {
enum HistoryListItem {
BucketSeparator(TimeBucket),
Entry {
index: usize,
@@ -59,11 +60,11 @@ enum ListItemType {
},
}
impl ListItemType {
impl HistoryListItem {
fn entry_index(&self) -> Option<usize> {
match self {
ListItemType::BucketSeparator(_) => None,
ListItemType::Entry { index, .. } => Some(*index),
HistoryListItem::BucketSeparator(_) => None,
HistoryListItem::Entry { index, .. } => Some(*index),
}
}
}
@@ -101,7 +102,6 @@ impl ThreadHistory {
history_store,
scroll_handle,
selected_index: 0,
hovered_index: None,
search_state: SearchState::Empty,
all_entries: Default::default(),
separated_items: Default::default(),
@@ -117,21 +117,40 @@ impl ThreadHistory {
}
fn update_all_entries(&mut self, cx: &mut Context<Self>) {
let new_entries: Arc<Vec<HistoryEntry>> = self
self.all_entries = self
.history_store
.update(cx, |store, cx| store.entries(cx))
.into();
self._separated_items_task.take();
self.set_selected_entry_index(0, cx);
self.update_separated_items(cx);
let mut items = Vec::with_capacity(new_entries.len() + 1);
let mut indexes = Vec::with_capacity(new_entries.len() + 1);
match &self.search_state {
SearchState::Empty => {}
SearchState::Searching { query, .. } | SearchState::Searched { query, .. } => {
self.search(query.clone(), cx);
}
}
cx.notify();
}
fn update_separated_items(&mut self, cx: &mut Context<Self>) {
self._separated_items_task.take();
let all_entries = self.all_entries.clone();
let mut items = std::mem::take(&mut self.separated_items);
let mut indexes = std::mem::take(&mut self.separated_item_indexes);
items.clear();
indexes.clear();
// We know there's going to be at least one bucket separator
items.reserve(all_entries.len() + 1);
indexes.reserve(all_entries.len() + 1);
let bg_task = cx.background_spawn(async move {
let mut bucket = None;
let today = Local::now().naive_local().date();
for (index, entry) in new_entries.iter().enumerate() {
for (index, entry) in all_entries.iter().enumerate() {
let entry_date = entry
.updated_at()
.with_timezone(&Local)
@@ -141,50 +160,23 @@ impl ThreadHistory {
if Some(entry_bucket) != bucket {
bucket = Some(entry_bucket);
items.push(ListItemType::BucketSeparator(entry_bucket));
items.push(HistoryListItem::BucketSeparator(entry_bucket));
}
indexes.push(items.len() as u32);
items.push(ListItemType::Entry {
items.push(HistoryListItem::Entry {
index,
format: entry_bucket.into(),
});
}
(new_entries, items, indexes)
(items, indexes)
});
let task = cx.spawn(async move |this, cx| {
let (new_entries, items, indexes) = bg_task.await;
let (items, indexes) = bg_task.await;
this.update(cx, |this, cx| {
let previously_selected_entry =
this.all_entries.get(this.selected_index).map(|e| e.id());
this.all_entries = new_entries;
this.separated_items = items;
this.separated_item_indexes = indexes;
match &this.search_state {
SearchState::Empty => {
if this.selected_index >= this.all_entries.len() {
this.set_selected_entry_index(
this.all_entries.len().saturating_sub(1),
cx,
);
} else if let Some(prev_id) = previously_selected_entry {
if let Some(new_ix) = this
.all_entries
.iter()
.position(|probe| probe.id() == prev_id)
{
this.set_selected_entry_index(new_ix, cx);
}
}
}
SearchState::Searching { query, .. } | SearchState::Searched { query, .. } => {
this.search(query.clone(), cx);
}
}
cx.notify();
})
.log_err();
@@ -475,7 +467,7 @@ impl ThreadHistory {
.map(|(ix, m)| {
self.render_list_item(
Some(range_start + ix),
&ListItemType::Entry {
&HistoryListItem::Entry {
index: m.candidate_id,
format: EntryTimeFormat::DateAndTime,
},
@@ -493,36 +485,25 @@ impl ThreadHistory {
fn render_list_item(
&self,
list_entry_ix: Option<usize>,
item: &ListItemType,
item: &HistoryListItem,
highlight_positions: Vec<usize>,
cx: &Context<Self>,
cx: &App,
) -> AnyElement {
match item {
ListItemType::Entry { index, format } => match self.all_entries.get(*index) {
HistoryListItem::Entry { index, format } => match self.all_entries.get(*index) {
Some(entry) => h_flex()
.w_full()
.pb_1()
.child(
HistoryEntryElement::new(entry.clone(), self.agent_panel.clone())
.highlight_positions(highlight_positions)
.timestamp_format(*format)
.selected(list_entry_ix == Some(self.selected_index))
.hovered(list_entry_ix == self.hovered_index)
.on_hover(cx.listener(move |this, is_hovered, _window, cx| {
if *is_hovered {
this.hovered_index = list_entry_ix;
} else if this.hovered_index == list_entry_ix {
this.hovered_index = None;
}
cx.notify();
}))
.into_any_element(),
)
.child(self.render_history_entry(
entry,
list_entry_ix == Some(self.selected_index),
highlight_positions,
*format,
))
.into_any(),
None => Empty.into_any_element(),
},
ListItemType::BucketSeparator(bucket) => div()
HistoryListItem::BucketSeparator(bucket) => div()
.px(DynamicSpacing::Base06.rems(cx))
.pt_2()
.pb_1()
@@ -534,6 +515,33 @@ impl ThreadHistory {
.into_any_element(),
}
}
fn render_history_entry(
&self,
entry: &HistoryEntry,
is_active: bool,
highlight_positions: Vec<usize>,
format: EntryTimeFormat,
) -> AnyElement {
match entry {
HistoryEntry::Thread(thread) => PastThread::new(
thread.clone(),
self.agent_panel.clone(),
is_active,
highlight_positions,
format,
)
.into_any_element(),
HistoryEntry::Context(context) => PastContext::new(
context.clone(),
self.agent_panel.clone(),
is_active,
highlight_positions,
format,
)
.into_any_element(),
}
}
}
impl Focusable for ThreadHistory {
@@ -615,97 +623,155 @@ impl Render for ThreadHistory {
}
#[derive(IntoElement)]
pub struct HistoryEntryElement {
entry: HistoryEntry,
pub struct PastThread {
thread: SerializedThreadMetadata,
agent_panel: WeakEntity<AgentPanel>,
selected: bool,
hovered: bool,
highlight_positions: Vec<usize>,
timestamp_format: EntryTimeFormat,
on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
}
impl HistoryEntryElement {
pub fn new(entry: HistoryEntry, agent_panel: WeakEntity<AgentPanel>) -> Self {
impl PastThread {
pub fn new(
thread: SerializedThreadMetadata,
agent_panel: WeakEntity<AgentPanel>,
selected: bool,
highlight_positions: Vec<usize>,
timestamp_format: EntryTimeFormat,
) -> Self {
Self {
entry,
thread,
agent_panel,
selected: false,
hovered: false,
highlight_positions: vec![],
timestamp_format: EntryTimeFormat::DateAndTime,
on_hover: Box::new(|_, _, _| {}),
selected,
highlight_positions,
timestamp_format,
}
}
pub fn selected(mut self, selected: bool) -> Self {
self.selected = selected;
self
}
pub fn hovered(mut self, hovered: bool) -> Self {
self.hovered = hovered;
self
}
pub fn highlight_positions(mut self, positions: Vec<usize>) -> Self {
self.highlight_positions = positions;
self
}
pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self {
self.on_hover = Box::new(on_hover);
self
}
pub fn timestamp_format(mut self, format: EntryTimeFormat) -> Self {
self.timestamp_format = format;
self
}
}
impl RenderOnce for HistoryEntryElement {
impl RenderOnce for PastThread {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let (id, summary, timestamp) = match &self.entry {
HistoryEntry::Thread(thread) => (
thread.id.to_string(),
thread.summary.clone(),
thread.updated_at.timestamp(),
),
HistoryEntry::Context(context) => (
context.path.to_string_lossy().to_string(),
context.title.clone().into(),
context.mtime.timestamp(),
),
};
let summary = self.thread.summary;
let thread_timestamp =
self.timestamp_format
.format_timestamp(&self.agent_panel, timestamp, cx);
let thread_timestamp = self.timestamp_format.format_timestamp(
&self.agent_panel,
self.thread.updated_at.timestamp(),
cx,
);
ListItem::new(SharedString::from(id))
ListItem::new(SharedString::from(self.thread.id.to_string()))
.rounded()
.toggle_state(self.selected)
.spacing(ListItemSpacing::Sparse)
.start_slot(
div().max_w_4_5().child(
HighlightedLabel::new(summary, self.highlight_positions)
.size(LabelSize::Small)
.truncate(),
),
)
.end_slot(
h_flex()
.w_full()
.gap_2()
.justify_between()
.child(
HighlightedLabel::new(summary, self.highlight_positions)
.size(LabelSize::Small)
.truncate(),
)
.gap_1p5()
.child(
Label::new(thread_timestamp)
.color(Color::Muted)
.size(LabelSize::XSmall),
)
.child(
IconButton::new("delete", IconName::TrashAlt)
.shape(IconButtonShape::Square)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.tooltip(move |window, cx| {
Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx)
})
.on_click({
let agent_panel = self.agent_panel.clone();
let id = self.thread.id.clone();
move |_event, _window, cx| {
agent_panel
.update(cx, |this, cx| {
this.delete_thread(&id, cx).detach_and_log_err(cx);
})
.ok();
}
}),
),
)
.on_hover(self.on_hover)
.end_slot::<IconButton>(if self.hovered || self.selected {
Some(
.on_click({
let agent_panel = self.agent_panel.clone();
let id = self.thread.id.clone();
move |_event, window, cx| {
agent_panel
.update(cx, |this, cx| {
this.open_thread_by_id(&id, window, cx)
.detach_and_log_err(cx);
})
.ok();
}
})
}
}
#[derive(IntoElement)]
pub struct PastContext {
context: SavedContextMetadata,
agent_panel: WeakEntity<AgentPanel>,
selected: bool,
highlight_positions: Vec<usize>,
timestamp_format: EntryTimeFormat,
}
impl PastContext {
pub fn new(
context: SavedContextMetadata,
agent_panel: WeakEntity<AgentPanel>,
selected: bool,
highlight_positions: Vec<usize>,
timestamp_format: EntryTimeFormat,
) -> Self {
Self {
context,
agent_panel,
selected,
highlight_positions,
timestamp_format,
}
}
}
impl RenderOnce for PastContext {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let summary = self.context.title;
let context_timestamp = self.timestamp_format.format_timestamp(
&self.agent_panel,
self.context.mtime.timestamp(),
cx,
);
ListItem::new(SharedString::from(
self.context.path.to_string_lossy().to_string(),
))
.rounded()
.toggle_state(self.selected)
.spacing(ListItemSpacing::Sparse)
.start_slot(
div().max_w_4_5().child(
HighlightedLabel::new(summary, self.highlight_positions)
.size(LabelSize::Small)
.truncate(),
),
)
.end_slot(
h_flex()
.gap_1p5()
.child(
Label::new(context_timestamp)
.color(Color::Muted)
.size(LabelSize::XSmall),
)
.child(
IconButton::new("delete", IconName::TrashAlt)
.shape(IconButtonShape::Square)
.icon_size(IconSize::XSmall)
@@ -715,70 +781,30 @@ impl RenderOnce for HistoryEntryElement {
})
.on_click({
let agent_panel = self.agent_panel.clone();
let f: Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static> =
match &self.entry {
HistoryEntry::Thread(thread) => {
let id = thread.id.clone();
Box::new(move |_event, _window, cx| {
agent_panel
.update(cx, |this, cx| {
this.delete_thread(&id, cx)
.detach_and_log_err(cx);
})
.ok();
})
}
HistoryEntry::Context(context) => {
let path = context.path.clone();
Box::new(move |_event, _window, cx| {
agent_panel
.update(cx, |this, cx| {
this.delete_context(path.clone(), cx)
.detach_and_log_err(cx);
})
.ok();
})
}
};
f
let path = self.context.path.clone();
move |_event, _window, cx| {
agent_panel
.update(cx, |this, cx| {
this.delete_context(path.clone(), cx)
.detach_and_log_err(cx);
})
.ok();
}
}),
)
} else {
None
})
.on_click({
let agent_panel = self.agent_panel.clone();
let f: Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static> = match &self.entry
{
HistoryEntry::Thread(thread) => {
let id = thread.id.clone();
Box::new(move |_event, window, cx| {
agent_panel
.update(cx, |this, cx| {
this.open_thread_by_id(&id, window, cx)
.detach_and_log_err(cx);
})
.ok();
})
}
HistoryEntry::Context(context) => {
let path = context.path.clone();
Box::new(move |_event, window, cx| {
agent_panel
.update(cx, |this, cx| {
this.open_saved_prompt_editor(path.clone(), window, cx)
.detach_and_log_err(cx);
})
.ok();
})
}
};
f
})
),
)
.on_click({
let agent_panel = self.agent_panel.clone();
let path = self.context.path.clone();
move |_event, window, cx| {
agent_panel
.update(cx, |this, cx| {
this.open_saved_prompt_editor(path.clone(), window, cx)
.detach_and_log_err(cx);
})
.ok();
}
})
}
}

View File

@@ -486,8 +486,8 @@ impl ThreadStore {
ToolSource::Native,
&profile
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool))
.iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(),
cx,
);
@@ -511,32 +511,32 @@ impl ThreadStore {
});
}
// Enable all the tools from all context servers, but disable the ones that are explicitly disabled
for (context_server_id, preset) in profile.context_servers {
for (context_server_id, preset) in &profile.context_servers {
self.tools.update(cx, |tools, cx| {
tools.disable(
ToolSource::ContextServer {
id: context_server_id.into(),
id: context_server_id.clone().into(),
},
&preset
.tools
.into_iter()
.filter_map(|(tool, enabled)| (!enabled).then(|| tool))
.iter()
.filter_map(|(tool, enabled)| (!enabled).then(|| tool.clone()))
.collect::<Vec<_>>(),
cx,
)
})
}
} else {
for (context_server_id, preset) in profile.context_servers {
for (context_server_id, preset) in &profile.context_servers {
self.tools.update(cx, |tools, cx| {
tools.enable(
ToolSource::ContextServer {
id: context_server_id.into(),
id: context_server_id.clone().into(),
},
&preset
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool))
.iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(),
cx,
)
@@ -657,6 +657,8 @@ pub struct SerializedThread {
pub model: Option<SerializedLanguageModel>,
#[serde(default)]
pub completion_mode: Option<CompletionMode>,
#[serde(default)]
pub profile: Option<AgentProfileId>,
}
#[derive(Serialize, Deserialize, Debug)]
@@ -802,6 +804,7 @@ impl LegacySerializedThread {
exceeded_window_error: None,
model: None,
completion_mode: None,
profile: None,
}
}
}

View File

@@ -39,7 +39,7 @@ impl RenderOnce for UsageCallout {
let (title, message, button_text, url) = if is_limit_reached {
match self.plan {
Plan::ZedFree => (
Plan::Free => (
"Out of free prompts",
"Upgrade to continue, wait for the next reset, or switch to API key."
.to_string(),
@@ -61,7 +61,7 @@ impl RenderOnce for UsageCallout {
}
} else {
match self.plan {
Plan::ZedFree => (
Plan::Free => (
"Reaching free plan limit soon",
format!(
"{remaining} remaining - Upgrade to increase limit, or switch providers",
@@ -120,7 +120,7 @@ impl Component for UsageCallout {
single_example(
"Approaching limit (90%)",
UsageCallout::new(
Plan::ZedFree,
Plan::Free,
RequestUsage {
limit: UsageLimit::Limited(50),
amount: 45, // 90% of limit
@@ -131,7 +131,7 @@ impl Component for UsageCallout {
single_example(
"Limit reached (100%)",
UsageCallout::new(
Plan::ZedFree,
Plan::Free,
RequestUsage {
limit: UsageLimit::Limited(50),
amount: 50, // 100% of limit

View File

@@ -167,20 +167,16 @@ fn get_shell_safe_zed_path() -> anyhow::Result<String> {
.to_string_lossy()
.to_string();
// NOTE: this was previously enabled; however, it caused errors when it shouldn't have
// (see https://github.com/zed-industries/zed/issues/29819)
// The zed path failing to execute within the askpass script results in very vague ssh
// authentication failed errors, so this was done to try to surface a better error
//
// use std::os::unix::fs::MetadataExt;
// let metadata = std::fs::metadata(&zed_path)
// .context("Failed to check metadata of Zed executable path for use in askpass")?;
// let is_executable = metadata.is_file() && metadata.mode() & 0o111 != 0;
// anyhow::ensure!(
// is_executable,
// "Failed to verify Zed executable path for use in askpass"
// );
// sanity check on unix systems that the path exists and is executable
// todo(windows): implement this check for windows (or just use `is-executable` crate)
use std::os::unix::fs::MetadataExt;
let metadata = std::fs::metadata(&zed_path)
.context("Failed to check metadata of Zed executable path for use in askpass")?;
let is_executable = metadata.is_file() && metadata.mode() & 0o111 != 0;
anyhow::ensure!(
is_executable,
"Failed to verify Zed executable path for use in askpass"
);
// As of writing, this can only fail if the path contains a null byte, which shouldn't be possible
// but shlex has annotated the error as #[non_exhaustive] so we can't make it a compile error if other
// errors are introduced in the future :(
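For illustration only (not part of the diff): a minimal sketch of the quoting step described in the comment above, assuming the `shlex` and `anyhow` crates; the helper name is hypothetical, not the actual Zed function.
// Hedged sketch: shell-quote an executable path before interpolating it into the
// askpass script. `shlex::try_quote` only fails when the input contains a NUL byte,
// and its error type is #[non_exhaustive], so we fold it into an anyhow error here.
fn shell_safe_path(path: &str) -> anyhow::Result<String> {
    let quoted = shlex::try_quote(path)
        .map_err(|err| anyhow::anyhow!("failed to shell-quote Zed path: {err}"))?;
    Ok(quoted.into_owned())
}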

View File

@@ -8,8 +8,7 @@ mod slash_command_picker;
use std::sync::Arc;
use client::Client;
use gpui::{App, Context};
use workspace::Workspace;
use gpui::App;
pub use crate::context::*;
pub use crate::context_editor::*;
@@ -17,18 +16,6 @@ pub use crate::context_history::*;
pub use crate::context_store::*;
pub use crate::slash_command::*;
pub fn init(client: Arc<Client>, cx: &mut App) {
pub fn init(client: Arc<Client>, _cx: &mut App) {
context_store::init(&client.into());
workspace::FollowableViewRegistry::register::<ContextEditor>(cx);
cx.observe_new(
|workspace: &mut Workspace, _window, _cx: &mut Context<Workspace>| {
workspace
.register_action(ContextEditor::quote_selection)
.register_action(ContextEditor::insert_selection)
.register_action(ContextEditor::copy_code)
.register_action(ContextEditor::handle_insert_dragged_files);
},
)
.detach();
}

View File

@@ -1,7 +1,7 @@
#[cfg(test)]
mod context_tests;
use anyhow::{Context as _, Result, anyhow, bail};
use anyhow::{Context as _, Result, anyhow};
use assistant_settings::AssistantSettings;
use assistant_slash_command::{
SlashCommandContent, SlashCommandEvent, SlashCommandLine, SlashCommandOutputSection,
@@ -133,7 +133,7 @@ pub enum ContextOperation {
version: clock::Global,
},
UpdateSummary {
summary: ContextSummaryContent,
summary: ContextSummary,
version: clock::Global,
},
SlashCommandStarted {
@@ -203,7 +203,7 @@ impl ContextOperation {
version: language::proto::deserialize_version(&update.version),
}),
proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
summary: ContextSummaryContent {
summary: ContextSummary {
text: update.summary,
done: update.done,
timestamp: language::proto::deserialize_timestamp(
@@ -467,73 +467,11 @@ pub enum ContextEvent {
Operation(ContextOperation),
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ContextSummary {
Pending,
Content(ContextSummaryContent),
Error,
}
#[derive(Default, Clone, Debug, Eq, PartialEq)]
pub struct ContextSummaryContent {
#[derive(Clone, Default, Debug)]
pub struct ContextSummary {
pub text: String,
pub done: bool,
pub timestamp: clock::Lamport,
}
impl ContextSummary {
pub const DEFAULT: &str = "New Text Thread";
pub fn or_default(&self) -> SharedString {
self.unwrap_or(Self::DEFAULT)
}
pub fn unwrap_or(&self, message: impl Into<SharedString>) -> SharedString {
self.content()
.map_or_else(|| message.into(), |content| content.text.clone().into())
}
pub fn content(&self) -> Option<&ContextSummaryContent> {
match self {
ContextSummary::Content(content) => Some(content),
ContextSummary::Pending | ContextSummary::Error => None,
}
}
fn content_as_mut(&mut self) -> Option<&mut ContextSummaryContent> {
match self {
ContextSummary::Content(content) => Some(content),
ContextSummary::Pending | ContextSummary::Error => None,
}
}
fn content_or_set_empty(&mut self) -> &mut ContextSummaryContent {
match self {
ContextSummary::Content(content) => content,
ContextSummary::Pending | ContextSummary::Error => {
let content = ContextSummaryContent::default();
*self = ContextSummary::Content(content);
self.content_as_mut().unwrap()
}
}
}
pub fn is_pending(&self) -> bool {
matches!(self, ContextSummary::Pending)
}
fn timestamp(&self) -> Option<clock::Lamport> {
match self {
ContextSummary::Content(content) => Some(content.timestamp),
ContextSummary::Pending | ContextSummary::Error => None,
}
}
}
impl PartialOrd for ContextSummary {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.timestamp().partial_cmp(&other.timestamp())
}
timestamp: clock::Lamport,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -669,7 +607,7 @@ pub struct AssistantContext {
message_anchors: Vec<MessageAnchor>,
contents: Vec<Content>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
summary: ContextSummary,
summary: Option<ContextSummary>,
summary_task: Task<Option<()>>,
completion_count: usize,
pending_completions: Vec<PendingCompletion>,
@@ -756,7 +694,7 @@ impl AssistantContext {
slash_command_output_sections: Vec::new(),
thought_process_output_sections: Vec::new(),
edits_since_last_parse: edits_since_last_slash_command_parse,
summary: ContextSummary::Pending,
summary: None,
summary_task: Task::ready(None),
completion_count: Default::default(),
pending_completions: Default::default(),
@@ -815,7 +753,7 @@ impl AssistantContext {
.collect(),
summary: self
.summary
.content()
.as_ref()
.map(|summary| summary.text.clone())
.unwrap_or_default(),
slash_command_output_sections: self
@@ -1001,10 +939,12 @@ impl AssistantContext {
summary: new_summary,
..
} => {
if self.summary.timestamp().map_or(true, |current_timestamp| {
new_summary.timestamp > current_timestamp
}) {
self.summary = ContextSummary::Content(new_summary);
if self
.summary
.as_ref()
.map_or(true, |summary| new_summary.timestamp > summary.timestamp)
{
self.summary = Some(new_summary);
summary_generated = true;
}
}
@@ -1162,8 +1102,8 @@ impl AssistantContext {
self.path.as_ref()
}
pub fn summary(&self) -> &ContextSummary {
&self.summary
pub fn summary(&self) -> Option<&ContextSummary> {
self.summary.as_ref()
}
pub fn parsed_slash_commands(&self) -> &[ParsedSlashCommand] {
@@ -2636,7 +2576,7 @@ impl AssistantContext {
return;
};
if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_pending()) {
if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
if !model.provider.is_authenticated(cx) {
return;
}
@@ -2653,20 +2593,17 @@ impl AssistantContext {
// If there is no summary, it is set with `done: false` so that "Loading Summary…" can
// be displayed.
match self.summary {
ContextSummary::Pending | ContextSummary::Error => {
self.summary = ContextSummary::Content(ContextSummaryContent {
text: "".to_string(),
done: false,
timestamp: clock::Lamport::default(),
});
replace_old = true;
}
ContextSummary::Content(_) => {}
if self.summary.is_none() {
self.summary = Some(ContextSummary {
text: "".to_string(),
done: false,
timestamp: clock::Lamport::default(),
});
replace_old = true;
}
self.summary_task = cx.spawn(async move |this, cx| {
let result = async {
async move {
let stream = model.model.stream_completion_text(request, &cx);
let mut messages = stream.await?;
@@ -2677,7 +2614,7 @@ impl AssistantContext {
this.update(cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
let summary = this.summary.content_or_set_empty();
let summary = this.summary.get_or_insert(ContextSummary::default());
if !replaced && replace_old {
summary.text.clear();
replaced = true;
@@ -2699,19 +2636,10 @@ impl AssistantContext {
}
}
this.read_with(cx, |this, _cx| {
if let Some(summary) = this.summary.content() {
if summary.text.is_empty() {
bail!("Model generated an empty summary");
}
}
Ok(())
})??;
this.update(cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
if let Some(summary) = this.summary.content_as_mut() {
if let Some(summary) = this.summary.as_mut() {
summary.done = true;
summary.timestamp = timestamp;
let operation = ContextOperation::UpdateSummary {
@@ -2726,18 +2654,8 @@ impl AssistantContext {
anyhow::Ok(())
}
.await;
if let Err(err) = result {
this.update(cx, |this, cx| {
this.summary = ContextSummary::Error;
cx.emit(ContextEvent::SummaryChanged);
})
.log_err();
log::error!("Error generating context summary: {}", err);
}
Some(())
.log_err()
.await
});
}
}
@@ -2851,7 +2769,7 @@ impl AssistantContext {
let (old_path, summary) = this.read_with(cx, |this, _| {
let path = this.path.clone();
let summary = if let Some(summary) = this.summary.content() {
let summary = if let Some(summary) = this.summary.as_ref() {
if summary.done {
Some(summary.text.clone())
} else {
@@ -2905,12 +2823,21 @@ impl AssistantContext {
pub fn set_custom_summary(&mut self, custom_summary: String, cx: &mut Context<Self>) {
let timestamp = self.next_timestamp();
let summary = self.summary.content_or_set_empty();
let summary = self.summary.get_or_insert(ContextSummary::default());
summary.timestamp = timestamp;
summary.done = true;
summary.text = custom_summary;
cx.emit(ContextEvent::SummaryChanged);
}
pub const DEFAULT_SUMMARY: SharedString = SharedString::new_static("New Text Thread");
pub fn summary_or_default(&self) -> SharedString {
self.summary
.as_ref()
.map(|summary| summary.text.clone().into())
.unwrap_or(Self::DEFAULT_SUMMARY)
}
}
#[derive(Debug, Default)]
@@ -3126,7 +3053,7 @@ impl SavedContext {
let timestamp = next_timestamp.tick();
operations.push(ContextOperation::UpdateSummary {
summary: ContextSummaryContent {
summary: ContextSummary {
text: self.summary,
done: true,
timestamp,

View File

@@ -1,5 +1,5 @@
use crate::{
AssistantContext, CacheStatus, ContextEvent, ContextId, ContextOperation, ContextSummary,
AssistantContext, CacheStatus, ContextEvent, ContextId, ContextOperation,
InvokedSlashCommandId, MessageCacheMetadata, MessageId, MessageStatus,
};
use anyhow::Result;
@@ -16,10 +16,7 @@ use futures::{
};
use gpui::{App, Entity, SharedString, Task, TestAppContext, WeakEntity, prelude::*};
use language::{Buffer, BufferSnapshot, LanguageRegistry, LspAdapterDelegate};
use language_model::{
ConfiguredModel, LanguageModelCacheConfiguration, LanguageModelRegistry, Role,
fake_provider::{FakeLanguageModel, FakeLanguageModelProvider},
};
use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Role};
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use project::Project;
@@ -1180,187 +1177,6 @@ fn test_mark_cache_anchors(cx: &mut App) {
);
}
#[gpui::test]
async fn test_summarization(cx: &mut TestAppContext) {
let (context, fake_model) = setup_context_editor_with_fake_model(cx);
// Initial state should be pending
context.read_with(cx, |context, _| {
assert!(matches!(context.summary(), ContextSummary::Pending));
assert_eq!(context.summary().or_default(), ContextSummary::DEFAULT);
});
let message_1 = context.read_with(cx, |context, _cx| context.message_anchors[0].clone());
context.update(cx, |context, cx| {
context
.insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx)
.unwrap();
});
// Send a message
context.update(cx, |context, cx| {
context.assist(cx);
});
simulate_successful_response(&fake_model, cx);
// Should start generating summary when there are >= 2 messages
context.read_with(cx, |context, _| {
assert!(!context.summary().content().unwrap().done);
});
cx.run_until_parked();
fake_model.stream_last_completion_response("Brief".into());
fake_model.stream_last_completion_response(" Introduction".into());
fake_model.end_last_completion_stream();
cx.run_until_parked();
// Summary should be set
context.read_with(cx, |context, _| {
assert_eq!(context.summary().or_default(), "Brief Introduction");
});
// We should be able to manually set a summary
context.update(cx, |context, cx| {
context.set_custom_summary("Brief Intro".into(), cx);
});
context.read_with(cx, |context, _| {
assert_eq!(context.summary().or_default(), "Brief Intro");
});
}
#[gpui::test]
async fn test_thread_summary_error_set_manually(cx: &mut TestAppContext) {
let (context, fake_model) = setup_context_editor_with_fake_model(cx);
test_summarize_error(&fake_model, &context, cx);
// Now we should be able to set a summary
context.update(cx, |context, cx| {
context.set_custom_summary("Brief Intro".into(), cx);
});
context.read_with(cx, |context, _| {
assert_eq!(context.summary().or_default(), "Brief Intro");
});
}
#[gpui::test]
async fn test_thread_summary_error_retry(cx: &mut TestAppContext) {
let (context, fake_model) = setup_context_editor_with_fake_model(cx);
test_summarize_error(&fake_model, &context, cx);
// Sending another message should not trigger another summarize request
context.update(cx, |context, cx| {
context.assist(cx);
});
simulate_successful_response(&fake_model, cx);
context.read_with(cx, |context, _| {
// State is still Error, not Generating
assert!(matches!(context.summary(), ContextSummary::Error));
});
// But the summarize request can be invoked manually
context.update(cx, |context, cx| {
context.summarize(true, cx);
});
context.read_with(cx, |context, _| {
assert!(!context.summary().content().unwrap().done);
});
cx.run_until_parked();
fake_model.stream_last_completion_response("A successful summary".into());
fake_model.end_last_completion_stream();
cx.run_until_parked();
context.read_with(cx, |context, _| {
assert_eq!(context.summary().or_default(), "A successful summary");
});
}
fn test_summarize_error(
model: &Arc<FakeLanguageModel>,
context: &Entity<AssistantContext>,
cx: &mut TestAppContext,
) {
let message_1 = context.read_with(cx, |context, _cx| context.message_anchors[0].clone());
context.update(cx, |context, cx| {
context
.insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx)
.unwrap();
});
// Send a message
context.update(cx, |context, cx| {
context.assist(cx);
});
simulate_successful_response(&model, cx);
context.read_with(cx, |context, _| {
assert!(!context.summary().content().unwrap().done);
});
// Simulate summary request ending
cx.run_until_parked();
model.end_last_completion_stream();
cx.run_until_parked();
// State is set to Error and the default message is used
context.read_with(cx, |context, _| {
assert_eq!(*context.summary(), ContextSummary::Error);
assert_eq!(context.summary().or_default(), ContextSummary::DEFAULT);
});
}
fn setup_context_editor_with_fake_model(
cx: &mut TestAppContext,
) -> (Entity<AssistantContext>, Arc<FakeLanguageModel>) {
let registry = Arc::new(LanguageRegistry::test(cx.executor().clone()));
let fake_provider = Arc::new(FakeLanguageModelProvider);
let fake_model = Arc::new(fake_provider.test_model());
cx.update(|cx| {
init_test(cx);
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model(
Some(ConfiguredModel {
provider: fake_provider.clone(),
model: fake_model.clone(),
}),
cx,
)
})
});
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
let context = cx.new(|cx| {
AssistantContext::local(
registry,
None,
None,
prompt_builder.clone(),
Arc::new(SlashCommandWorkingSet::default()),
cx,
)
});
(context, fake_model)
}
fn simulate_successful_response(fake_model: &FakeLanguageModel, cx: &mut TestAppContext) {
cx.run_until_parked();
fake_model.stream_last_completion_response("Assistant response".into());
fake_model.end_last_completion_stream();
cx.run_until_parked();
}
fn messages(context: &Entity<AssistantContext>, cx: &App) -> Vec<(MessageId, Role, Range<usize>)> {
context
.read(cx)

View File

@@ -1860,12 +1860,7 @@ impl ContextEditor {
}
pub fn title(&self, cx: &App) -> SharedString {
self.context.read(cx).summary().or_default()
}
pub fn regenerate_summary(&mut self, cx: &mut Context<Self>) {
self.context
.update(cx, |context, cx| context.summarize(true, cx));
self.context.read(cx).summary_or_default()
}
fn render_notice(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
@@ -1899,24 +1894,11 @@ impl ContextEditor {
.log_err();
if let Some(client) = client {
cx.spawn(async move |context_editor, cx| {
match client.authenticate_and_connect(true, cx).await {
util::ConnectionResult::Timeout => {
log::error!("Authentication timeout")
}
util::ConnectionResult::ConnectionReset => {
log::error!("Connection reset")
}
util::ConnectionResult::Result(r) => {
if r.log_err().is_some() {
context_editor
.update(cx, |_, cx| cx.notify())
.ok();
}
}
}
cx.spawn(async move |this, cx| {
client.authenticate_and_connect(true, cx).await?;
this.update(cx, |_, cx| cx.notify())
})
.detach()
.detach_and_log_err(cx)
}
})),
)

View File

@@ -648,10 +648,7 @@ impl ContextStore {
if context.replica_id() == ReplicaId::default() {
Some(proto::ContextMetadata {
context_id: context.id().to_proto(),
summary: context
.summary()
.content()
.map(|summary| summary.text.clone()),
summary: context.summary().map(|summary| summary.text.clone()),
})
} else {
None

View File

@@ -278,8 +278,8 @@ impl CompletionProvider for SlashCommandCompletionProvider {
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
let arguments = call
.arguments
.into_iter()
.filter_map(|argument| Some(line.get(argument)?.to_string()))
.iter()
.filter_map(|argument| Some(line.get(argument.clone())?.to_string()))
.collect::<Vec<_>>();
let argument_range = first_arg_start..buffer_position;
(

View File

@@ -637,7 +637,7 @@ impl ToolCard for EditFileToolCard {
.p_3()
.gap_1()
.border_t_1()
.rounded_b_md()
.rounded_md()
.border_color(border_color)
.bg(cx.theme().colors().editor_background);

View File

@@ -2,18 +2,13 @@ use crate::schema::json_schema_for;
use anyhow::{Context as _, Result, anyhow, bail};
use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus};
use futures::{FutureExt as _, future::Shared};
use gpui::{
AnyWindowHandle, App, AppContext, Empty, Entity, EntityId, Task, TextStyleRefinement,
WeakEntity, Window,
};
use gpui::{AnyWindowHandle, App, AppContext, Empty, Entity, EntityId, Task, WeakEntity, Window};
use language::LineEnding;
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
use project::{Project, terminals::TerminalKind};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{
env,
path::{Path, PathBuf},
@@ -22,7 +17,6 @@ use std::{
time::{Duration, Instant},
};
use terminal_view::TerminalView;
use theme::ThemeSettings;
use ui::{Disclosure, Tooltip, prelude::*};
use util::{
get_system_shell, markdown::MarkdownInlineCode, size::format_file_size,
@@ -217,21 +211,8 @@ impl Tool for TerminalTool {
}
});
let command_markdown = cx.new(|cx| {
Markdown::new(
format!("```bash\n{}\n```", input.command).into(),
None,
None,
cx,
)
});
let card = cx.new(|cx| {
TerminalToolCard::new(
command_markdown.clone(),
working_dir.clone(),
cx.entity_id(),
)
TerminalToolCard::new(input.command.clone(), working_dir.clone(), cx.entity_id())
});
let output = cx.spawn({
@@ -407,7 +388,7 @@ fn working_dir(
}
struct TerminalToolCard {
input_command: Entity<Markdown>,
input_command: String,
working_dir: Option<PathBuf>,
entity_id: EntityId,
exit_status: Option<ExitStatus>,
@@ -423,11 +404,7 @@ struct TerminalToolCard {
}
impl TerminalToolCard {
pub fn new(
input_command: Entity<Markdown>,
working_dir: Option<PathBuf>,
entity_id: EntityId,
) -> Self {
pub fn new(input_command: String, working_dir: Option<PathBuf>, entity_id: EntityId) -> Self {
Self {
input_command,
working_dir,
@@ -450,7 +427,7 @@ impl ToolCard for TerminalToolCard {
fn render(
&mut self,
status: &ToolUseStatus,
window: &mut Window,
_window: &mut Window,
_workspace: WeakEntity<Workspace>,
cx: &mut Context<Self>,
) -> impl IntoElement {
@@ -594,25 +571,11 @@ impl ToolCard for TerminalToolCard {
.rounded_lg()
.overflow_hidden()
.child(
v_flex()
.p_2()
.gap_0p5()
.bg(header_bg)
.text_xs()
.child(header)
.child(
MarkdownElement::new(
self.input_command.clone(),
markdown_style(window, cx),
)
.code_block_renderer(
markdown::CodeBlockRenderer::Default {
copy_button: false,
copy_button_on_hover: true,
border: false,
},
),
),
v_flex().p_2().gap_0p5().bg(header_bg).child(header).child(
Label::new(self.input_command.clone())
.buffer_font(cx)
.size(LabelSize::Small),
),
)
.when(self.preview_expanded && !should_hide_terminal, |this| {
this.child(
@@ -631,27 +594,6 @@ impl ToolCard for TerminalToolCard {
}
}
fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
let theme_settings = ThemeSettings::get_global(cx);
let buffer_font_size = TextSize::Default.rems(cx);
let mut text_style = window.text_style();
text_style.refine(&TextStyleRefinement {
font_family: Some(theme_settings.buffer_font.family.clone()),
font_fallbacks: theme_settings.buffer_font.fallbacks.clone(),
font_features: Some(theme_settings.buffer_font.features.clone()),
font_size: Some(buffer_font_size.into()),
color: Some(cx.theme().colors().text),
..Default::default()
});
MarkdownStyle {
base_text_style: text_style.clone(),
selection_background_color: cx.theme().players().local().selection,
..Default::default()
}
}
#[cfg(test)]
mod tests {
use editor::EditorSettings;

View File

@@ -38,7 +38,6 @@ pub enum Model {
AmazonNovaLite,
AmazonNovaMicro,
AmazonNovaPro,
AmazonNovaPremier,
// AI21 models
AI21J2GrandeInstruct,
AI21J2JumboInstruct,
@@ -73,10 +72,6 @@ pub enum Model {
MistralMixtral8x7BInstructV0,
MistralMistralLarge2402V1,
MistralMistralSmall2402V1,
MistralPixtralLarge2502V1,
// Writer models
PalmyraWriterX5,
PalmyraWriterX4,
#[serde(rename = "custom")]
Custom {
name: String,
@@ -125,7 +120,6 @@ impl Model {
Model::AmazonNovaLite => "amazon.nova-lite-v1:0",
Model::AmazonNovaMicro => "amazon.nova-micro-v1:0",
Model::AmazonNovaPro => "amazon.nova-pro-v1:0",
Model::AmazonNovaPremier => "amazon.nova-premier-v1:0",
Model::DeepSeekR1 => "us.deepseek.r1-v1:0",
Model::AI21J2GrandeInstruct => "ai21.j2-grande-instruct",
Model::AI21J2JumboInstruct => "ai21.j2-jumbo-instruct",
@@ -155,9 +149,6 @@ impl Model {
Model::MistralMixtral8x7BInstructV0 => "mistral.mixtral-8x7b-instruct-v0:1",
Model::MistralMistralLarge2402V1 => "mistral.mistral-large-2402-v1:0",
Model::MistralMistralSmall2402V1 => "mistral.mistral-small-2402-v1:0",
Model::MistralPixtralLarge2502V1 => "mistral.pixtral-large-2502-v1:0",
Model::PalmyraWriterX4 => "writer.palmyra-x4-v1:0",
Model::PalmyraWriterX5 => "writer.palmyra-x5-v1:0",
Self::Custom { name, .. } => name,
}
}
@@ -175,7 +166,6 @@ impl Model {
Self::AmazonNovaLite => "Amazon Nova Lite",
Self::AmazonNovaMicro => "Amazon Nova Micro",
Self::AmazonNovaPro => "Amazon Nova Pro",
Self::AmazonNovaPremier => "Amazon Nova Premier",
Self::DeepSeekR1 => "DeepSeek R1",
Self::AI21J2GrandeInstruct => "AI21 Jurassic2 Grande Instruct",
Self::AI21J2JumboInstruct => "AI21 Jurassic2 Jumbo Instruct",
@@ -205,9 +195,6 @@ impl Model {
Self::MistralMixtral8x7BInstructV0 => "Mistral Mixtral 8x7B Instruct V0",
Self::MistralMistralLarge2402V1 => "Mistral Large 2402 V1",
Self::MistralMistralSmall2402V1 => "Mistral Small 2402 V1",
Self::MistralPixtralLarge2502V1 => "Pixtral Large 25.02 V1",
Self::PalmyraWriterX5 => "Writer Palmyra X5",
Self::PalmyraWriterX4 => "Writer Palmyra X4",
Self::Custom {
display_name, name, ..
} => display_name.as_deref().unwrap_or(name),
@@ -221,11 +208,8 @@ impl Model {
| Self::Claude3Sonnet
| Self::Claude3_5Haiku
| Self::Claude3_7Sonnet => 200_000,
Self::AmazonNovaPremier => 1_000_000,
Self::PalmyraWriterX5 => 1_000_000,
Self::PalmyraWriterX4 => 128_000,
Self::Custom { max_tokens, .. } => *max_tokens,
_ => 128_000,
_ => 200_000,
}
}
@@ -233,7 +217,7 @@ impl Model {
match self {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
Self::Claude3_5SonnetV2 | Self::PalmyraWriterX4 | Self::PalmyraWriterX5 => 8_192,
Self::Claude3_5SonnetV2 => 8_192,
Self::Custom {
max_output_tokens, ..
} => max_output_tokens.unwrap_or(4_096),
@@ -268,10 +252,7 @@ impl Model {
| Self::Claude3_5Haiku => true,
// Amazon Nova models (all support tool use)
Self::AmazonNovaPremier
| Self::AmazonNovaPro
| Self::AmazonNovaLite
| Self::AmazonNovaMicro => true,
Self::AmazonNovaPro | Self::AmazonNovaLite | Self::AmazonNovaMicro => true,
// AI21 Jamba 1.5 models support tool use
Self::AI21Jamba15LargeV1 | Self::AI21Jamba15MiniV1 => true,
@@ -324,11 +305,8 @@ impl Model {
// Models available only in US
(Model::Claude3Opus, "us")
| (Model::Claude3_5Haiku, "us")
| (Model::Claude3_7Sonnet, "us")
| (Model::Claude3_7SonnetThinking, "us")
| (Model::AmazonNovaPremier, "us")
| (Model::MistralPixtralLarge2502V1, "us") => {
| (Model::Claude3_7SonnetThinking, "us") => {
Ok(format!("{}.{}", region_group, model_id))
}
@@ -362,12 +340,6 @@ impl Model {
Ok(format!("{}.{}", region_group, model_id))
}
// Writer models only available in the US
(Model::PalmyraWriterX4, "us") | (Model::PalmyraWriterX5, "us") => {
// They have some goofiness
Ok(format!("{}.{}", region_group, model_id))
}
// Any other combination is not supported
_ => Ok(self.id().into()),
}
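For illustration only (not part of the diff): a minimal sketch of the region-prefixed ID construction used in the hunk above, with illustrative values; the helper name is hypothetical.
// Hedged sketch: a cross-region inference profile ID is the Bedrock model ID
// prefixed with a region group, matching the `format!("{}.{}", ...)` calls above.
fn cross_region_id(region_group: &str, model_id: &str) -> String {
    format!("{}.{}", region_group, model_id)
}
// e.g. cross_region_id("us", "amazon.nova-premier-v1:0") == "us.amazon.nova-premier-v1:0"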

View File

@@ -49,7 +49,7 @@ use telemetry::Telemetry;
use thiserror::Error;
use tokio::net::TcpStream;
use url::Url;
use util::{ConnectionResult, ResultExt};
use util::{ResultExt, TryFutureExt};
pub use rpc::*;
pub use telemetry_events::Event;
@@ -151,19 +151,9 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &SignIn, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(
async move |cx| match client.authenticate_and_connect(true, &cx).await {
ConnectionResult::Timeout => {
log::error!("Initial authentication timed out");
}
ConnectionResult::ConnectionReset => {
log::error!("Initial authentication connection reset");
}
ConnectionResult::Result(r) => {
r.log_err();
}
},
)
cx.spawn(async move |cx| {
client.authenticate_and_connect(true, &cx).log_err().await
})
.detach();
}
}
@@ -668,7 +658,7 @@ impl Client {
state._reconnect_task = None;
}
Status::ConnectionLost => {
let client = self.clone();
let this = self.clone();
state._reconnect_task = Some(cx.spawn(async move |cx| {
#[cfg(any(test, feature = "test-support"))]
let mut rng = StdRng::seed_from_u64(0);
@@ -676,25 +666,10 @@ impl Client {
let mut rng = StdRng::from_entropy();
let mut delay = INITIAL_RECONNECTION_DELAY;
loop {
match client.authenticate_and_connect(true, &cx).await {
ConnectionResult::Timeout => {
log::error!("client connect attempt timed out")
}
ConnectionResult::ConnectionReset => {
log::error!("client connect attempt reset")
}
ConnectionResult::Result(r) => {
if let Err(error) = r {
log::error!("failed to connect: {error}");
} else {
break;
}
}
}
if matches!(*client.status().borrow(), Status::ConnectionError) {
client.set_status(
while let Err(error) = this.authenticate_and_connect(true, &cx).await {
log::error!("failed to connect {}", error);
if matches!(*this.status().borrow(), Status::ConnectionError) {
this.set_status(
Status::ReconnectionError {
next_reconnection: Instant::now() + delay,
},
@@ -852,7 +827,7 @@ impl Client {
self: &Arc<Self>,
try_provider: bool,
cx: &AsyncApp,
) -> ConnectionResult<()> {
) -> anyhow::Result<()> {
let was_disconnected = match *self.status().borrow() {
Status::SignedOut => true,
Status::ConnectionError
@@ -861,14 +836,9 @@ impl Client {
| Status::Reauthenticating { .. }
| Status::ReconnectionError { .. } => false,
Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => {
return ConnectionResult::Result(Ok(()));
}
Status::UpgradeRequired => {
return ConnectionResult::Result(
Err(EstablishConnectionError::UpgradeRequired)
.context("client auth and connect"),
);
return Ok(());
}
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
};
if was_disconnected {
self.set_status(Status::Authenticating, cx);
@@ -892,12 +862,12 @@ impl Client {
Ok(creds) => credentials = Some(creds),
Err(err) => {
self.set_status(Status::ConnectionError, cx);
return ConnectionResult::Result(Err(err));
return Err(err);
}
}
}
_ = status_rx.next().fuse() => {
return ConnectionResult::Result(Err(anyhow!("authentication canceled")));
return Err(anyhow!("authentication canceled"));
}
}
}
@@ -922,10 +892,10 @@ impl Client {
}
futures::select_biased! {
result = self.set_connection(conn, cx).fuse() => ConnectionResult::Result(result.context("client auth and connect")),
result = self.set_connection(conn, cx).fuse() => result,
_ = timeout => {
self.set_status(Status::ConnectionError, cx);
ConnectionResult::Timeout
Err(anyhow!("timed out waiting on hello message from server"))
}
}
}
@@ -937,22 +907,22 @@ impl Client {
self.authenticate_and_connect(false, cx).await
} else {
self.set_status(Status::ConnectionError, cx);
ConnectionResult::Result(Err(EstablishConnectionError::Unauthorized).context("client auth and connect"))
Err(EstablishConnectionError::Unauthorized)?
}
}
Err(EstablishConnectionError::UpgradeRequired) => {
self.set_status(Status::UpgradeRequired, cx);
ConnectionResult::Result(Err(EstablishConnectionError::UpgradeRequired).context("client auth and connect"))
Err(EstablishConnectionError::UpgradeRequired)?
}
Err(error) => {
self.set_status(Status::ConnectionError, cx);
ConnectionResult::Result(Err(error).context("client auth and connect"))
Err(error)?
}
}
}
_ = &mut timeout => {
self.set_status(Status::ConnectionError, cx);
ConnectionResult::Timeout
Err(anyhow!("timed out trying to establish connection"))
}
}
}
@@ -968,7 +938,10 @@ impl Client {
let peer_id = async {
log::debug!("waiting for server hello");
let message = incoming.next().await.context("no hello message received")?;
let message = incoming
.next()
.await
.ok_or_else(|| anyhow!("no hello message received"))?;
log::debug!("got server hello");
let hello_message_type_name = message.payload_type_name().to_string();
let hello = message
@@ -1770,7 +1743,7 @@ mod tests {
status.next().await,
Some(Status::ConnectionError { .. })
));
auth_and_connect.await.into_response().unwrap_err();
auth_and_connect.await.unwrap_err();
// Allow the connection to be established.
let server = FakeServer::for_client(user_id, &client, cx).await;

View File

@@ -107,7 +107,6 @@ impl FakeServer {
client
.authenticate_and_connect(false, &cx.to_async())
.await
.into_response()
.unwrap();
server

View File

@@ -1137,12 +1137,6 @@ async fn handle_customer_subscription_event(
.await?;
}
// When the user's subscription changes, push down any changes to their plan.
rpc_server
.update_plan_for_user(billing_customer.user_id)
.await
.trace_err();
// When the user's subscription changes, we want to refresh their LLM tokens
// to either grant or revoke access.
rpc_server
@@ -1280,7 +1274,7 @@ async fn get_current_usage(
subscription
.kind
.map(Into::into)
.unwrap_or(zed_llm_client::Plan::ZedFree)
.unwrap_or(zed_llm_client::Plan::Free)
});
let model_requests_limit = match plan.model_requests_limit() {

View File

@@ -99,7 +99,7 @@ impl From<SubscriptionKind> for zed_llm_client::Plan {
match value {
SubscriptionKind::ZedPro => Self::ZedPro,
SubscriptionKind::ZedProTrial => Self::ZedProTrial,
SubscriptionKind::ZedFree => Self::ZedFree,
SubscriptionKind::ZedFree => Self::Free,
}
}
}

View File

@@ -25,12 +25,18 @@ pub struct LlmTokenClaims {
pub is_staff: bool,
pub has_llm_closed_beta_feature_flag: bool,
pub bypass_account_age_check: bool,
#[serde(default)]
pub use_llm_request_queue: bool,
pub plan: Plan,
#[serde(default)]
pub has_extended_trial: bool,
pub subscription_period: (NaiveDateTime, NaiveDateTime),
#[serde(default)]
pub subscription_period: Option<(NaiveDateTime, NaiveDateTime)>,
#[serde(default)]
pub enable_model_request_overages: bool,
#[serde(default)]
pub model_request_overages_spend_limit_in_cents: u32,
#[serde(default)]
pub can_use_web_search_tool: bool,
}
@@ -51,23 +57,6 @@ impl LlmTokenClaims {
.as_ref()
.ok_or_else(|| anyhow!("no LLM API secret"))?;
let plan = if is_staff {
Plan::ZedPro
} else {
subscription
.as_ref()
.and_then(|subscription| subscription.kind)
.map_or(Plan::ZedFree, |kind| match kind {
SubscriptionKind::ZedFree => Plan::ZedFree,
SubscriptionKind::ZedPro => Plan::ZedPro,
SubscriptionKind::ZedProTrial => Plan::ZedProTrial,
})
};
let subscription_period =
billing_subscription::Model::current_period(subscription, is_staff)
.map(|(start, end)| (start.naive_utc(), end.naive_utc()))
.ok_or_else(|| anyhow!("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started."))?;
let now = Utc::now();
let claims = Self {
iat: now.timestamp() as u64,
@@ -87,11 +76,26 @@ impl LlmTokenClaims {
.any(|flag| flag == "bypass-account-age-check"),
can_use_web_search_tool: true,
use_llm_request_queue: feature_flags.iter().any(|flag| flag == "llm-request-queue"),
plan,
plan: if is_staff {
Plan::ZedPro
} else {
subscription
.as_ref()
.and_then(|subscription| subscription.kind)
.map_or(Plan::Free, |kind| match kind {
SubscriptionKind::ZedFree => Plan::Free,
SubscriptionKind::ZedPro => Plan::ZedPro,
SubscriptionKind::ZedProTrial => Plan::ZedProTrial,
})
},
has_extended_trial: feature_flags
.iter()
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG),
subscription_period,
subscription_period: billing_subscription::Model::current_period(
subscription,
is_staff,
)
.map(|(start, end)| (start.naive_utc(), end.naive_utc())),
enable_model_request_overages: billing_preferences
.as_ref()
.map_or(false, |preferences| {

View File

@@ -2,7 +2,6 @@ mod connection_pool;
use crate::api::{CloudflareIpCountryHeader, SystemIdHeader};
use crate::db::billing_subscription::SubscriptionKind;
use crate::llm::db::LlmDatabase;
use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, LlmTokenClaims};
use crate::{
AppState, Error, Result, auth,
@@ -68,7 +67,7 @@ use std::{
time::{Duration, Instant},
};
use time::OffsetDateTime;
use tokio::sync::{Semaphore, watch};
use tokio::sync::{MutexGuard, Semaphore, watch};
use tower::ServiceBuilder;
use tracing::{
Instrument,
@@ -167,6 +166,29 @@ impl Session {
}
}
pub async fn current_plan(&self, db: &MutexGuard<'_, DbHandle>) -> anyhow::Result<proto::Plan> {
if self.is_staff() {
return Ok(proto::Plan::ZedPro);
}
let user_id = self.user_id();
let subscription = db.get_active_billing_subscription(user_id).await?;
let subscription_kind = subscription.and_then(|subscription| subscription.kind);
let plan = if let Some(subscription_kind) = subscription_kind {
match subscription_kind {
SubscriptionKind::ZedPro => proto::Plan::ZedPro,
SubscriptionKind::ZedProTrial => proto::Plan::ZedProTrial,
SubscriptionKind::ZedFree => proto::Plan::Free,
}
} else {
proto::Plan::Free
};
Ok(plan)
}
fn user_id(&self) -> UserId {
match &self.principal {
Principal::User(user) => user.id,
@@ -931,32 +953,6 @@ impl Server {
Ok(())
}
pub async fn update_plan_for_user(self: &Arc<Self>, user_id: UserId) -> Result<()> {
let user = self
.app_state
.db
.get_user_by_id(user_id)
.await?
.ok_or_else(|| anyhow!("user not found"))?;
let update_user_plan = make_update_user_plan_message(
&self.app_state.db,
self.app_state.llm_db.clone(),
user_id,
user.admin,
)
.await?;
let pool = self.connection_pool.lock();
for connection_id in pool.user_connection_ids(user_id) {
self.peer
.send(connection_id, update_user_plan.clone())
.trace_err();
}
Ok(())
}
pub async fn refresh_llm_tokens_for_user(self: &Arc<Self>, user_id: UserId) {
let pool = self.connection_pool.lock();
for connection_id in pool.user_connection_ids(user_id) {
@@ -2692,43 +2688,21 @@ fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool {
version.0.minor() < 139
}
async fn current_plan(db: &Arc<Database>, user_id: UserId, is_staff: bool) -> Result<proto::Plan> {
if is_staff {
return Ok(proto::Plan::ZedPro);
}
async fn update_user_plan(user_id: UserId, session: &Session) -> Result<()> {
let db = session.db().await;
let subscription = db.get_active_billing_subscription(user_id).await?;
let subscription_kind = subscription.and_then(|subscription| subscription.kind);
let plan = if let Some(subscription_kind) = subscription_kind {
match subscription_kind {
SubscriptionKind::ZedPro => proto::Plan::ZedPro,
SubscriptionKind::ZedProTrial => proto::Plan::ZedProTrial,
SubscriptionKind::ZedFree => proto::Plan::Free,
}
} else {
proto::Plan::Free
};
Ok(plan)
}
async fn make_update_user_plan_message(
db: &Arc<Database>,
llm_db: Option<Arc<LlmDatabase>>,
user_id: UserId,
is_staff: bool,
) -> Result<proto::UpdateUserPlan> {
let feature_flags = db.get_user_flags(user_id).await?;
let plan = current_plan(db, user_id, is_staff).await?;
let plan = session.current_plan(&db).await?;
let billing_customer = db.get_billing_customer_by_user_id(user_id).await?;
let billing_preferences = db.get_billing_preferences(user_id).await?;
let (subscription_period, usage) = if let Some(llm_db) = llm_db {
let (subscription_period, usage) = if let Some(llm_db) = session.app_state.llm_db.clone() {
let subscription = db.get_active_billing_subscription(user_id).await?;
let subscription_period =
crate::db::billing_subscription::Model::current_period(subscription, is_staff);
let subscription_period = crate::db::billing_subscription::Model::current_period(
subscription,
session.is_staff(),
);
let usage = if let Some((period_start_at, period_end_at)) = subscription_period {
llm_db
@@ -2743,92 +2717,92 @@ async fn make_update_user_plan_message(
(None, None)
};
Ok(proto::UpdateUserPlan {
plan: plan.into(),
trial_started_at: billing_customer
.and_then(|billing_customer| billing_customer.trial_started_at)
.map(|trial_started_at| trial_started_at.and_utc().timestamp() as u64),
is_usage_based_billing_enabled: if is_staff {
Some(true)
} else {
billing_preferences.map(|preferences| preferences.model_request_overages_enabled)
},
subscription_period: subscription_period.map(|(started_at, ended_at)| {
proto::SubscriptionPeriod {
started_at: started_at.timestamp() as u64,
ended_at: ended_at.timestamp() as u64,
}
}),
usage: usage.map(|usage| {
let plan = match plan {
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
};
let model_requests_limit = match plan.model_requests_limit() {
zed_llm_client::UsageLimit::Limited(limit) => {
let limit = if plan == zed_llm_client::Plan::ZedProTrial
&& feature_flags
.iter()
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
{
1_000
} else {
limit
};
zed_llm_client::UsageLimit::Limited(limit)
}
zed_llm_client::UsageLimit::Unlimited => zed_llm_client::UsageLimit::Unlimited,
};
proto::SubscriptionUsage {
model_requests_usage_amount: usage.model_requests as u32,
model_requests_usage_limit: Some(proto::UsageLimit {
variant: Some(match model_requests_limit {
zed_llm_client::UsageLimit::Limited(limit) => {
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
limit: limit as u32,
})
}
zed_llm_client::UsageLimit::Unlimited => {
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
}
}),
}),
edit_predictions_usage_amount: usage.edit_predictions as u32,
edit_predictions_usage_limit: Some(proto::UsageLimit {
variant: Some(match plan.edit_predictions_limit() {
zed_llm_client::UsageLimit::Limited(limit) => {
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
limit: limit as u32,
})
}
zed_llm_client::UsageLimit::Unlimited => {
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
}
}),
}),
}
}),
})
}
async fn update_user_plan(user_id: UserId, session: &Session) -> Result<()> {
let db = session.db().await;
let update_user_plan = make_update_user_plan_message(
&db.0,
session.app_state.llm_db.clone(),
user_id,
session.is_staff(),
)
.await?;
session
.peer
.send(session.connection_id, update_user_plan)
.send(
session.connection_id,
proto::UpdateUserPlan {
plan: plan.into(),
trial_started_at: billing_customer
.and_then(|billing_customer| billing_customer.trial_started_at)
.map(|trial_started_at| trial_started_at.and_utc().timestamp() as u64),
is_usage_based_billing_enabled: if session.is_staff() {
Some(true)
} else {
billing_preferences
.map(|preferences| preferences.model_request_overages_enabled)
},
subscription_period: subscription_period.map(|(started_at, ended_at)| {
proto::SubscriptionPeriod {
started_at: started_at.timestamp() as u64,
ended_at: ended_at.timestamp() as u64,
}
}),
usage: usage.map(|usage| {
let plan = match plan {
proto::Plan::Free => zed_llm_client::Plan::Free,
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
};
let model_requests_limit = match plan.model_requests_limit() {
zed_llm_client::UsageLimit::Limited(limit) => {
let limit = if plan == zed_llm_client::Plan::ZedProTrial
&& feature_flags
.iter()
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
{
1_000
} else {
limit
};
zed_llm_client::UsageLimit::Limited(limit)
}
zed_llm_client::UsageLimit::Unlimited => {
zed_llm_client::UsageLimit::Unlimited
}
};
proto::SubscriptionUsage {
model_requests_usage_amount: usage.model_requests as u32,
model_requests_usage_limit: Some(proto::UsageLimit {
variant: Some(match model_requests_limit {
zed_llm_client::UsageLimit::Limited(limit) => {
proto::usage_limit::Variant::Limited(
proto::usage_limit::Limited {
limit: limit as u32,
},
)
}
zed_llm_client::UsageLimit::Unlimited => {
proto::usage_limit::Variant::Unlimited(
proto::usage_limit::Unlimited {},
)
}
}),
}),
edit_predictions_usage_amount: usage.edit_predictions as u32,
edit_predictions_usage_limit: Some(proto::UsageLimit {
variant: Some(match plan.edit_predictions_limit() {
zed_llm_client::UsageLimit::Limited(limit) => {
proto::usage_limit::Variant::Limited(
proto::usage_limit::Limited {
limit: limit as u32,
},
)
}
zed_llm_client::UsageLimit::Unlimited => {
proto::usage_limit::Variant::Unlimited(
proto::usage_limit::Unlimited {},
)
}
}),
}),
}
}),
},
)
.trace_err();
Ok(())

View File

@@ -248,8 +248,6 @@ impl StripeBilling {
let mut params = stripe::CreateCheckoutSession::new();
params.mode = Some(stripe::CheckoutSessionMode::Subscription);
params.payment_method_collection =
Some(stripe::CheckoutSessionPaymentMethodCollection::IfRequired);
params.customer = Some(customer_id);
params.client_reference_id = Some(github_login);
params.line_items = Some(vec![stripe::CreateCheckoutSessionLineItems {

View File

@@ -1740,7 +1740,6 @@ async fn test_mutual_editor_inlay_hint_cache_update(
fake_language_server
.request::<lsp::request::InlayHintRefreshRequest>(())
.await
.into_response()
.expect("inlay refresh request failed");
executor.run_until_parked();
@@ -1931,7 +1930,6 @@ async fn test_inlay_hint_refresh_is_forwarded(
fake_language_server
.request::<lsp::request::InlayHintRefreshRequest>(())
.await
.into_response()
.expect("inlay refresh request failed");
executor.run_until_parked();
editor_a.update(cx_a, |editor, _| {

View File

@@ -1253,7 +1253,6 @@ async fn test_calls_on_multiple_connections(
client_b1
.authenticate_and_connect(false, &cx_b1.to_async())
.await
.into_response()
.unwrap();
// User B hangs up, and user A calls them again.
@@ -1634,7 +1633,6 @@ async fn test_project_reconnect(
client_a
.authenticate_and_connect(false, &cx_a.to_async())
.await
.into_response()
.unwrap();
executor.run_until_parked();
@@ -1763,7 +1761,6 @@ async fn test_project_reconnect(
client_b
.authenticate_and_connect(false, &cx_b.to_async())
.await
.into_response()
.unwrap();
executor.run_until_parked();
@@ -4320,7 +4317,6 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
})
.await
.into_response()
.unwrap();
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
@@ -5703,7 +5699,6 @@ async fn test_contacts(
client_c
.authenticate_and_connect(false, &cx_c.to_async())
.await
.into_response()
.unwrap();
executor.run_until_parked();
@@ -6234,7 +6229,6 @@ async fn test_contact_requests(
client
.authenticate_and_connect(false, &cx.to_async())
.await
.into_response()
.unwrap();
}
}

View File

@@ -313,7 +313,6 @@ impl TestServer {
client
.authenticate_and_connect(false, &cx.to_async())
.await
.into_response()
.unwrap();
let client = TestClient {

View File

@@ -42,7 +42,6 @@ futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
menu.workspace = true
notifications.workspace = true
picker.workspace = true

View File

@@ -2227,7 +2227,6 @@ impl CollabPanel {
client
.authenticate_and_connect(true, &cx)
.await
.into_response()
.notify_async_err(cx);
})
.detach()

View File

@@ -646,20 +646,10 @@ impl Render for NotificationPanel {
let client = client.clone();
window
.spawn(cx, async move |cx| {
match client
client
.authenticate_and_connect(true, &cx)
.await
{
util::ConnectionResult::Timeout => {
log::error!("Connection timeout");
}
util::ConnectionResult::ConnectionReset => {
log::error!("Connection reset");
}
util::ConnectionResult::Result(r) => {
r.log_err();
}
}
.log_err()
.await;
})
.detach()
}

View File

@@ -0,0 +1,37 @@
[package]
name = "component_preview"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/component_preview.rs"
[features]
default = []
[dependencies]
agent.workspace = true
anyhow.workspace = true
client.workspace = true
collections.workspace = true
component.workspace = true
db.workspace = true
futures.workspace = true
gpui.workspace = true
languages.workspace = true
log.workspace = true
notifications.workspace = true
project.workspace = true
prompt_store.workspace = true
serde.workspace = true
ui.workspace = true
ui_input.workspace = true
util.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
assistant_tool.workspace = true

View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -1,12 +1,12 @@
use languages::LanguageRegistry;
use project::Project;
use std::sync::Arc;
use agent::{ActiveThread, ContextStore, MessageSegment, TextThreadStore, ThreadStore};
use anyhow::{Result, anyhow};
use assistant_tool::ToolWorkingSet;
use gpui::{AppContext, AsyncApp, Entity, Task, WeakEntity};
use indoc::indoc;
use languages::LanguageRegistry;
use project::Project;
use prompt_store::PromptBuilder;
use std::sync::Arc;
use ui::{App, Window};
use workspace::Workspace;
@@ -60,30 +60,16 @@ pub fn static_active_thread(
let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
thread.update(cx, |thread, cx| {
thread.insert_assistant_message(vec![
MessageSegment::Text(indoc! {"
I'll help you fix the lifetime error in your `cx.spawn` call. When working with async operations in GPUI, there are specific patterns to follow for proper lifetime management.
Let's look at what's happening in your code:
---
Let's check the current state of the active_thread.rs file to understand what might have changed:
---
Looking at the implementation of `load_preview_thread_store` and understanding GPUI's async patterns, here's the issue:
1. `load_preview_thread_store` returns a `Task<anyhow::Result<Entity<ThreadStore>>>`, which means it's already a task.
2. When you call this function inside another `spawn` call, you're nesting tasks incorrectly.
Here's the correct way to implement this:
---
The problem is in how you're setting up the async closure and trying to reference variables like `window` and `language_registry` that aren't accessible in that scope.
Here's how to fix it:
"}.to_string()),
MessageSegment::Text("I'll help you fix the lifetime error in your `cx.spawn` call. When working with async operations in GPUI, there are specific patterns to follow for proper lifetime management.".to_string()),
MessageSegment::Text("\n\nLet's look at what's happening in your code:".to_string()),
MessageSegment::Text("\n\n---\n\nLet's check the current state of the active_thread.rs file to understand what might have changed:".to_string()),
MessageSegment::Text("\n\n---\n\nLooking at the implementation of `load_preview_thread_store` and understanding GPUI's async patterns, here's the issue:".to_string()),
MessageSegment::Text("\n\n1. `load_preview_thread_store` returns a `Task<anyhow::Result<Entity<ThreadStore>>>`, which means it's already a task".to_string()),
MessageSegment::Text("\n2. When you call this function inside another `spawn` call, you're nesting tasks incorrectly".to_string()),
MessageSegment::Text("\n3. The `this` parameter you're trying to use in your closure has the wrong context".to_string()),
MessageSegment::Text("\n\nHere's the correct way to implement this:".to_string()),
MessageSegment::Text("\n\n---\n\nThe problem is in how you're setting up the async closure and trying to reference variables like `window` and `language_registry` that aren't accessible in that scope.".to_string()),
MessageSegment::Text("\n\nHere's how to fix it:".to_string()),
], cx);
});
cx.new(|cx| {
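The assistant-message text above walks through a GPUI lifetime fix: because `load_preview_thread_store` already returns a `Task`, it should be awaited from within a single `cx.spawn` rather than wrapped in another spawn. Below is a minimal, hedged sketch of that shape, mirroring the spawn/update/detach pattern used elsewhere in this diff; the helper's exact signature and the surrounding types are assumptions, not the actual component_preview code.

```rust
// Minimal sketch (not Zed's actual code): await the Task returned by a helper
// inside one spawn instead of nesting a second spawn around it.
// Assumes `load_preview_thread_store` returns Task<anyhow::Result<Entity<ThreadStore>>>,
// as described in the message text above.
fn open_static_preview(cx: &mut gpui::App) {
    cx.spawn(async move |cx| {
        // Awaiting the returned Task directly avoids the nested-task lifetime error.
        let thread_store = load_preview_thread_store(cx).await?;
        cx.update(|_cx| {
            // Build the preview entity here, now that the store is loaded.
            let _store = &thread_store;
        })?;
        anyhow::Ok(())
    })
    .detach_and_log_err(cx);
}
```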

View File

@@ -14,6 +14,7 @@ doctest = false
[features]
default = []
schemars = ["dep:schemars"]
test-support = [
"collections/test-support",
"gpui/test-support",
@@ -42,15 +43,16 @@ node_runtime.workspace = true
parking_lot.workspace = true
paths.workspace = true
project.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
strum.workspace = true
task.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
workspace-hack.workspace = true
itertools.workspace = true
[target.'cfg(windows)'.dependencies]
async-std = { version = "1.12.0", features = ["unstable"] }

View File

@@ -5,7 +5,7 @@ mod sign_in;
use crate::sign_in::initiate_sign_in_within_workspace;
use ::fs::Fs;
use anyhow::{Context as _, Result, anyhow};
use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use command_palette_hooks::CommandPaletteFilter;
use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared};
@@ -531,15 +531,11 @@ impl Copilot {
.request::<request::CheckStatus>(request::CheckStatusParams {
local_checks_only: false,
})
.await
.into_response()
.context("copilot: check status")?;
.await?;
server
.request::<request::SetEditorInfo>(editor_info)
.await
.into_response()
.context("copilot: set editor info")?;
.await?;
anyhow::Ok((server, status))
};
@@ -585,9 +581,7 @@ impl Copilot {
.request::<request::SignInInitiate>(
request::SignInInitiateParams {},
)
.await
.into_response()
.context("copilot sign-in")?;
.await?;
match sign_in {
request::SignInInitiateResult::AlreadySignedIn { user } => {
Ok(request::SignInStatus::Ok { user: Some(user) })
@@ -615,9 +609,7 @@ impl Copilot {
user_code: flow.user_code,
},
)
.await
.into_response()
.context("copilot: sign in confirm")?;
.await?;
Ok(response)
}
}
@@ -664,9 +656,7 @@ impl Copilot {
cx.background_spawn(async move {
server
.request::<request::SignOut>(request::SignOutParams {})
.await
.into_response()
.context("copilot: sign in confirm")?;
.await?;
anyhow::Ok(())
})
}
@@ -883,10 +873,7 @@ impl Copilot {
uuid: completion.uuid.clone(),
});
cx.background_spawn(async move {
request
.await
.into_response()
.context("copilot: notify accepted")?;
request.await?;
Ok(())
})
}
@@ -910,10 +897,7 @@ impl Copilot {
.collect(),
});
cx.background_spawn(async move {
request
.await
.into_response()
.context("copilot: notify rejected")?;
request.await?;
Ok(())
})
}
@@ -973,9 +957,7 @@ impl Copilot {
version: version.try_into().unwrap(),
},
})
.await
.into_response()
.context("copilot: get completions")?;
.await?;
let completions = result
.completions
.into_iter()

View File

@@ -9,20 +9,13 @@ use fs::Fs;
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use gpui::{App, AsyncApp, Global, prelude::*};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use itertools::Itertools;
use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_dir;
use strum::EnumIter;
pub const COPILOT_CHAT_COMPLETION_URL: &str = "https://api.githubcopilot.com/chat/completions";
pub const COPILOT_CHAT_AUTH_URL: &str = "https://api.github.com/copilot_internal/v2/token";
pub const COPILOT_CHAT_MODELS_URL: &str = "https://api.githubcopilot.com/models";
// Copilot's base model; defined by Microsoft in the premium requests table
// This will be moved to the front of the Copilot model list, and will be used for
// 'fast' requests (e.g. title generation)
// https://docs.github.com/en/copilot/managing-copilot/monitoring-usage-and-entitlements/about-premium-requests
const DEFAULT_MODEL_ID: &str = "gpt-4.1";
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
@@ -32,130 +25,132 @@ pub enum Role {
System,
}
#[derive(Deserialize)]
struct ModelSchema {
#[serde(deserialize_with = "deserialize_models_skip_errors")]
data: Vec<Model>,
}
fn deserialize_models_skip_errors<'de, D>(deserializer: D) -> Result<Vec<Model>, D::Error>
where
D: serde::Deserializer<'de>,
{
let raw_values = Vec::<serde_json::Value>::deserialize(deserializer)?;
let models = raw_values
.into_iter()
.filter_map(|value| match serde_json::from_value::<Model>(value) {
Ok(model) => Some(model),
Err(err) => {
log::warn!("GitHub Copilot Chat model failed to deserialize: {:?}", err);
None
}
})
.collect();
Ok(models)
}
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Model {
capabilities: ModelCapabilities,
id: String,
name: String,
policy: Option<ModelPolicy>,
vendor: ModelVendor,
model_picker_enabled: bool,
}
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelCapabilities {
family: String,
#[serde(default)]
limits: ModelLimits,
supports: ModelSupportedFeatures,
}
#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelLimits {
#[serde(default)]
max_context_window_tokens: usize,
#[serde(default)]
max_output_tokens: usize,
#[serde(default)]
max_prompt_tokens: usize,
}
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelPolicy {
state: String,
}
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelSupportedFeatures {
#[serde(default)]
streaming: bool,
#[serde(default)]
tool_calls: bool,
#[serde(default)]
parallel_tool_calls: bool,
#[serde(default)]
vision: bool,
}
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub enum ModelVendor {
// Azure OpenAI should have no functional difference from OpenAI in Copilot Chat
#[serde(alias = "Azure OpenAI")]
OpenAI,
Google,
Anthropic,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
pub enum ChatMessageContent {
#[serde(rename = "text")]
Text { text: String },
#[serde(rename = "image_url")]
Image { image_url: ImageUrl },
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ImageUrl {
pub url: String,
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
#[default]
#[serde(alias = "gpt-4o", rename = "gpt-4o-2024-05-13")]
Gpt4o,
#[serde(alias = "gpt-4", rename = "gpt-4")]
Gpt4,
#[serde(alias = "gpt-4.1", rename = "gpt-4.1")]
Gpt4_1,
#[serde(alias = "gpt-3.5-turbo", rename = "gpt-3.5-turbo")]
Gpt3_5Turbo,
#[serde(alias = "o1", rename = "o1")]
O1,
#[serde(alias = "o1-mini", rename = "o3-mini")]
O3Mini,
#[serde(alias = "o3", rename = "o3")]
O3,
#[serde(alias = "o4-mini", rename = "o4-mini")]
O4Mini,
#[serde(alias = "claude-3-5-sonnet", rename = "claude-3.5-sonnet")]
Claude3_5Sonnet,
#[serde(alias = "claude-3-7-sonnet", rename = "claude-3.7-sonnet")]
Claude3_7Sonnet,
#[serde(
alias = "claude-3.7-sonnet-thought",
rename = "claude-3.7-sonnet-thought"
)]
Claude3_7SonnetThinking,
#[serde(alias = "gemini-2.0-flash", rename = "gemini-2.0-flash-001")]
Gemini20Flash,
#[serde(alias = "gemini-2.5-pro", rename = "gemini-2.5-pro")]
Gemini25Pro,
}
impl Model {
pub fn default_fast() -> Self {
Self::Claude3_7Sonnet
}
pub fn uses_streaming(&self) -> bool {
self.capabilities.supports.streaming
match self {
Self::Gpt4o
| Self::Gpt4
| Self::Gpt4_1
| Self::Gpt3_5Turbo
| Self::O3
| Self::O4Mini
| Self::Claude3_5Sonnet
| Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking => true,
Self::O3Mini | Self::O1 | Self::Gemini20Flash | Self::Gemini25Pro => false,
}
}
pub fn id(&self) -> &str {
self.id.as_str()
pub fn from_id(id: &str) -> Result<Self> {
match id {
"gpt-4o" => Ok(Self::Gpt4o),
"gpt-4" => Ok(Self::Gpt4),
"gpt-4.1" => Ok(Self::Gpt4_1),
"gpt-3.5-turbo" => Ok(Self::Gpt3_5Turbo),
"o1" => Ok(Self::O1),
"o3-mini" => Ok(Self::O3Mini),
"o3" => Ok(Self::O3),
"o4-mini" => Ok(Self::O4Mini),
"claude-3-5-sonnet" => Ok(Self::Claude3_5Sonnet),
"claude-3-7-sonnet" => Ok(Self::Claude3_7Sonnet),
"claude-3.7-sonnet-thought" => Ok(Self::Claude3_7SonnetThinking),
"gemini-2.0-flash-001" => Ok(Self::Gemini20Flash),
"gemini-2.5-pro" => Ok(Self::Gemini25Pro),
_ => Err(anyhow!("Invalid model id: {}", id)),
}
}
pub fn display_name(&self) -> &str {
self.name.as_str()
pub fn id(&self) -> &'static str {
match self {
Self::Gpt3_5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4_1 => "gpt-4.1",
Self::Gpt4o => "gpt-4o",
Self::O3Mini => "o3-mini",
Self::O1 => "o1",
Self::O3 => "o3",
Self::O4Mini => "o4-mini",
Self::Claude3_5Sonnet => "claude-3-5-sonnet",
Self::Claude3_7Sonnet => "claude-3-7-sonnet",
Self::Claude3_7SonnetThinking => "claude-3.7-sonnet-thought",
Self::Gemini20Flash => "gemini-2.0-flash-001",
Self::Gemini25Pro => "gemini-2.5-pro",
}
}
pub fn display_name(&self) -> &'static str {
match self {
Self::Gpt3_5Turbo => "GPT-3.5",
Self::Gpt4 => "GPT-4",
Self::Gpt4_1 => "GPT-4.1",
Self::Gpt4o => "GPT-4o",
Self::O3Mini => "o3-mini",
Self::O1 => "o1",
Self::O3 => "o3",
Self::O4Mini => "o4-mini",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
Self::Gemini20Flash => "Gemini 2.0 Flash",
Self::Gemini25Pro => "Gemini 2.5 Pro",
}
}
pub fn max_token_count(&self) -> usize {
self.capabilities.limits.max_prompt_tokens
}
pub fn supports_tools(&self) -> bool {
self.capabilities.supports.tool_calls
}
pub fn vendor(&self) -> ModelVendor {
self.vendor
}
pub fn supports_vision(&self) -> bool {
self.capabilities.supports.vision
}
pub fn supports_parallel_tool_calls(&self) -> bool {
self.capabilities.supports.parallel_tool_calls
match self {
Self::Gpt4o => 64_000,
Self::Gpt4 => 32_768,
Self::Gpt4_1 => 128_000,
Self::Gpt3_5Turbo => 12_288,
Self::O3Mini => 64_000,
Self::O1 => 20_000,
Self::O3 => 128_000,
Self::O4Mini => 128_000,
Self::Claude3_5Sonnet => 200_000,
Self::Claude3_7Sonnet => 90_000,
Self::Claude3_7SonnetThinking => 90_000,
Self::Gemini20Flash => 128_000,
Self::Gemini25Pro => 128_000,
}
}
}
@@ -165,7 +160,7 @@ pub struct Request {
pub n: usize,
pub stream: bool,
pub temperature: f32,
pub model: String,
pub model: Model,
pub messages: Vec<ChatMessage>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tools: Vec<Tool>,
@@ -203,7 +198,7 @@ pub enum ChatMessage {
tool_calls: Vec<ToolCall>,
},
User {
content: Vec<ChatMessageContent>,
content: String,
},
System {
content: String,
@@ -311,7 +306,6 @@ impl Global for GlobalCopilotChat {}
pub struct CopilotChat {
oauth_token: Option<String>,
api_token: Option<ApiToken>,
models: Option<Vec<Model>>,
client: Arc<dyn HttpClient>,
}
@@ -348,56 +342,31 @@ impl CopilotChat {
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
let dir_path = copilot_chat_config_dir();
cx.spawn({
let client = client.clone();
async move |cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),
dir_path.clone(),
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_token = extract_oauth_token(contents);
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.oauth_token = oauth_token.clone();
cx.notify();
});
}
})?;
if let Some(ref oauth_token) = oauth_token {
let api_token = request_api_token(oauth_token, client.clone()).await?;
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.api_token = Some(api_token.clone());
cx.notify();
});
}
})?;
let models = get_models(api_token.api_key, client.clone()).await?;
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.models = Some(models);
cx.notify();
});
}
})?;
cx.spawn(async move |cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),
dir_path.clone(),
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_token = extract_oauth_token(contents);
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.oauth_token = oauth_token;
cx.notify();
});
}
}
anyhow::Ok(())
})?;
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
Self {
oauth_token: None,
api_token: None,
models: None,
client,
}
}
@@ -406,10 +375,6 @@ impl CopilotChat {
self.oauth_token.is_some()
}
pub fn models(&self) -> Option<&[Model]> {
self.models.as_deref()
}
pub async fn stream_completion(
request: Request,
mut cx: AsyncApp,
@@ -444,61 +409,6 @@ impl CopilotChat {
}
}
async fn get_models(api_token: String, client: Arc<dyn HttpClient>) -> Result<Vec<Model>> {
let all_models = request_models(api_token, client).await?;
let mut models: Vec<Model> = all_models
.into_iter()
.filter(|model| {
// Ensure user has access to the model; Policy is present only for models that must be
// enabled in the GitHub dashboard
model.model_picker_enabled
&& model
.policy
.as_ref()
.is_none_or(|policy| policy.state == "enabled")
})
// The first model from the API response in any given family appears to be the non-tagged
// model, which is likely the best choice (e.g. gpt-4o rather than gpt-4o-2024-11-20)
.dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
.collect();
if let Some(default_model_position) =
models.iter().position(|model| model.id == DEFAULT_MODEL_ID)
{
let default_model = models.remove(default_model_position);
models.insert(0, default_model);
}
Ok(models)
}
async fn request_models(api_token: String, client: Arc<dyn HttpClient>) -> Result<Vec<Model>> {
let request_builder = HttpRequest::builder()
.method(Method::GET)
.uri(COPILOT_CHAT_MODELS_URL)
.header("Authorization", format!("Bearer {}", api_token))
.header("Content-Type", "application/json")
.header("Copilot-Integration-Id", "vscode-chat");
let request = request_builder.body(AsyncBody::empty())?;
let mut response = client.send(request).await?;
if response.status().is_success() {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
let models = serde_json::from_str::<ModelSchema>(body_str)?.data;
Ok(models)
} else {
Err(anyhow!("Failed to request models: {}", response.status()))
}
}
async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Result<ApiToken> {
let request_builder = HttpRequest::builder()
.method(Method::GET)
@@ -562,8 +472,7 @@ async fn stream_completion(
)
.header("Authorization", format!("Bearer {}", api_key))
.header("Content-Type", "application/json")
.header("Copilot-Integration-Id", "vscode-chat")
.header("Copilot-Vision-Request", "true");
.header("Copilot-Integration-Id", "vscode-chat");
let is_streaming = request.stream;
@@ -618,82 +527,3 @@ async fn stream_completion(
Ok(futures::stream::once(async move { Ok(response) }).boxed())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_resilient_model_schema_deserialize() {
let json = r#"{
"data": [
{
"capabilities": {
"family": "gpt-4",
"limits": {
"max_context_window_tokens": 32768,
"max_output_tokens": 4096,
"max_prompt_tokens": 32768
},
"object": "model_capabilities",
"supports": { "streaming": true, "tool_calls": true },
"tokenizer": "cl100k_base",
"type": "chat"
},
"id": "gpt-4",
"model_picker_enabled": false,
"name": "GPT 4",
"object": "model",
"preview": false,
"vendor": "Azure OpenAI",
"version": "gpt-4-0613"
},
{
"some-unknown-field": 123
},
{
"capabilities": {
"family": "claude-3.7-sonnet",
"limits": {
"max_context_window_tokens": 200000,
"max_output_tokens": 16384,
"max_prompt_tokens": 90000,
"vision": {
"max_prompt_image_size": 3145728,
"max_prompt_images": 1,
"supported_media_types": ["image/jpeg", "image/png", "image/webp"]
}
},
"object": "model_capabilities",
"supports": {
"parallel_tool_calls": true,
"streaming": true,
"tool_calls": true,
"vision": true
},
"tokenizer": "o200k_base",
"type": "chat"
},
"id": "claude-3.7-sonnet",
"model_picker_enabled": true,
"name": "Claude 3.7 Sonnet",
"object": "model",
"policy": {
"state": "enabled",
"terms": "Enable access to the latest Claude 3.7 Sonnet model from Anthropic. [Learn more about how GitHub Copilot serves Claude 3.7 Sonnet](https://docs.github.com/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot)."
},
"preview": false,
"vendor": "Anthropic",
"version": "claude-3.7-sonnet"
}
],
"object": "list"
}"#;
let schema: ModelSchema = serde_json::from_str(&json).unwrap();
assert_eq!(schema.data.len(), 2);
assert_eq!(schema.data[0].id, "gpt-4");
assert_eq!(schema.data[1].id, "claude-3.7-sonnet");
}
}
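The `get_models` hunk above filters the model list down to entries the user can actually use, keeps only the first model per family, and moves `DEFAULT_MODEL_ID` to the front. A small standard-library sketch of that shaping step, using plain `(family, id)` pairs instead of the real `Model` type (the sample data is illustrative, not Copilot's actual payload):

```rust
// Illustrative sketch of the list shaping done in `get_models` above.
fn shape_models<'a>(
    mut models: Vec<(&'a str, &'a str)>,
    default_id: &str,
) -> Vec<(&'a str, &'a str)> {
    // Keep only the first model of each consecutive family run
    // (mirrors the `dedup_by` over `capabilities.family`).
    models.dedup_by(|a, b| a.0 == b.0);

    // Move the preferred default model to the front, if present.
    if let Some(pos) = models.iter().position(|(_, id)| *id == default_id) {
        let default = models.remove(pos);
        models.insert(0, default);
    }
    models
}

fn main() {
    let models = vec![
        ("gpt-4o", "gpt-4o"),
        ("gpt-4o", "gpt-4o-2024-11-20"),
        ("gpt-4.1", "gpt-4.1"),
    ];
    let shaped = shape_models(models, "gpt-4.1");
    assert_eq!(shaped, vec![("gpt-4.1", "gpt-4.1"), ("gpt-4o", "gpt-4o")]);
}
```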

View File

@@ -42,9 +42,7 @@ impl CodeLldbDebugAdapter {
if !launch.args.is_empty() {
map.insert("args".into(), launch.args.clone().into());
}
if !launch.env.is_empty() {
map.insert("env".into(), launch.env_json());
}
if let Some(stop_on_entry) = config.stop_on_entry {
map.insert("stopOnEntry".into(), stop_on_entry.into());
}

View File

@@ -35,10 +35,6 @@ impl GdbDebugAdapter {
map.insert("args".into(), launch.args.clone().into());
}
if !launch.env.is_empty() {
map.insert("env".into(), launch.env_json());
}
if let Some(stop_on_entry) = config.stop_on_entry {
map.insert(
"stopAtBeginningOfMainSubprogram".into(),

View File

@@ -19,8 +19,7 @@ impl GoDebugAdapter {
dap::DebugRequest::Launch(launch_config) => json!({
"program": launch_config.program,
"cwd": launch_config.cwd,
"args": launch_config.args,
"env": launch_config.env_json()
"args": launch_config.args
}),
};

View File

@@ -36,9 +36,6 @@ impl JsDebugAdapter {
if !launch.args.is_empty() {
map.insert("args".into(), launch.args.clone().into());
}
if !launch.env.is_empty() {
map.insert("env".into(), launch.env_json());
}
if let Some(stop_on_entry) = config.stop_on_entry {
map.insert("stopOnEntry".into(), stop_on_entry.into());

View File

@@ -29,7 +29,6 @@ impl PhpDebugAdapter {
"program": launch_config.program,
"cwd": launch_config.cwd,
"args": launch_config.args,
"env": launch_config.env_json(),
"stopOnEntry": config.stop_on_entry.unwrap_or_default(),
}),
request: config.request.to_dap(),

View File

@@ -32,9 +32,6 @@ impl PythonDebugAdapter {
DebugRequest::Launch(launch) => {
map.insert("program".into(), launch.program.clone().into());
map.insert("args".into(), launch.args.clone().into());
if !launch.env.is_empty() {
map.insert("env".into(), launch.env_json());
}
if let Some(stop_on_entry) = config.stop_on_entry {
map.insert("stopOnEntry".into(), stop_on_entry.into());

View File

@@ -62,7 +62,7 @@ impl DebugAdapter for RubyDebugAdapter {
let tcp_connection = definition.tcp_connection.clone().unwrap_or_default();
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
let DebugRequest::Launch(launch) = definition.request.clone() else {
let DebugRequest::Launch(mut launch) = definition.request.clone() else {
anyhow::bail!("rdbg does not yet support attaching");
};
@@ -71,6 +71,12 @@ impl DebugAdapter for RubyDebugAdapter {
format!("--port={}", port),
format!("--host={}", host),
];
if launch.args.is_empty() {
let program = launch.program.clone();
let mut split = program.split(" ");
launch.program = split.next().unwrap().to_string();
launch.args = split.map(|s| s.to_string()).collect();
}
if delegate.which(launch.program.as_ref()).is_some() {
arguments.push("--command".to_string())
}

View File

@@ -51,7 +51,6 @@ rpc.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
shlex.workspace = true
sysinfo.workspace = true
task.workspace = true
tasks_ui.workspace = true

View File

@@ -22,7 +22,7 @@ use gpui::{
use language::Buffer;
use project::debugger::session::{Session, SessionStateEvent};
use project::{Fs, ProjectPath, WorktreeId};
use project::{Fs, WorktreeId};
use project::{Project, debugger::session::ThreadStatus};
use rpc::proto::{self};
use settings::Settings;
@@ -291,7 +291,7 @@ impl DebugPanel {
let (debug_session, workspace) = this.update_in(cx, |this, window, cx| {
this.sessions.retain(|session| {
!session
session
.read(cx)
.running_state()
.read(cx)
@@ -997,7 +997,7 @@ impl DebugPanel {
worktree_id: WorktreeId,
window: &mut Window,
cx: &mut App,
) -> Task<Result<ProjectPath>> {
) -> Task<Result<()>> {
self.workspace
.update(cx, |workspace, cx| {
let Some(mut path) = workspace.absolute_path_of_worktree(worktree_id, cx) else {
@@ -1006,20 +1006,14 @@ impl DebugPanel {
let serialized_scenario = serde_json::to_value(scenario);
path.push(paths::local_debug_file_relative_path());
cx.spawn_in(window, async move |workspace, cx| {
let serialized_scenario = serialized_scenario?;
let path = path.as_path();
let fs =
workspace.update(cx, |workspace, _| workspace.app_state().fs.clone())?;
path.push(paths::local_settings_folder_relative_path());
if !fs.is_dir(path.as_path()).await {
fs.create_dir(path.as_path()).await?;
}
path.pop();
path.push(paths::local_debug_file_relative_path());
let path = path.as_path();
if !fs.is_file(path).await {
let content =
serde_json::to_string_pretty(&serde_json::Value::Array(vec![
@@ -1040,19 +1034,21 @@ impl DebugPanel {
.await?;
}
workspace.update(cx, |workspace, cx| {
workspace.update_in(cx, |workspace, window, cx| {
if let Some(project_path) = workspace
.project()
.read(cx)
.project_path_for_absolute_path(&path, cx)
{
Ok(project_path)
workspace.open_path(project_path, None, true, window, cx)
} else {
Err(anyhow!(
Task::ready(Err(anyhow!(
"Couldn't get project path for .zed/debug.json in active worktree"
))
)))
}
})?
})?.await?;
anyhow::Ok(())
})
})
.unwrap_or_else(|err| Task::ready(Err(err)))

View File

@@ -1,14 +1,11 @@
use collections::FxHashMap;
use std::{
borrow::Cow,
ops::Not,
path::{Path, PathBuf},
sync::Arc,
time::Duration,
usize,
};
use anyhow::Result;
use dap::{
DapRegistry, DebugRequest,
adapters::{DebugAdapterName, DebugTaskDefinition},
@@ -16,32 +13,26 @@ use dap::{
use editor::{Editor, EditorElement, EditorStyle};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
Animation, AnimationExt as _, App, AppContext, DismissEvent, Entity, EventEmitter, FocusHandle,
Focusable, Render, Subscription, TextStyle, Transformation, WeakEntity, percentage,
App, AppContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render,
Subscription, TextStyle, WeakEntity,
};
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
use project::{ProjectPath, TaskContexts, TaskSourceKind, task_store::TaskStore};
use project::{TaskContexts, TaskSourceKind, task_store::TaskStore};
use settings::Settings;
use task::{DebugScenario, LaunchRequest};
use theme::ThemeSettings;
use ui::{
ActiveTheme, Button, ButtonCommon, ButtonSize, CheckboxWithLabel, Clickable, Color, Context,
ContextMenu, Disableable, DropdownMenu, FluentBuilder, Icon, IconButton, IconName, IconSize,
InteractiveElement, IntoElement, Label, LabelCommon as _, ListItem, ListItemSpacing,
ParentElement, RenderOnce, SharedString, Styled, StyledExt, ToggleButton, ToggleState,
Toggleable, Window, div, h_flex, relative, rems, v_flex,
ContextMenu, Disableable, DropdownMenu, FluentBuilder, Icon, IconName, InteractiveElement,
IntoElement, Label, LabelCommon as _, ListItem, ListItemSpacing, ParentElement, RenderOnce,
SharedString, Styled, StyledExt, ToggleButton, ToggleState, Toggleable, Window, div, h_flex,
relative, rems, v_flex,
};
use util::ResultExt;
use workspace::{ModalView, Workspace, pane};
use crate::{attach_modal::AttachModal, debugger_panel::DebugPanel};
enum SaveScenarioState {
Saving,
Saved(ProjectPath),
Failed(SharedString),
}
pub(super) struct NewSessionModal {
workspace: WeakEntity<Workspace>,
debug_panel: WeakEntity<DebugPanel>,
@@ -51,7 +42,6 @@ pub(super) struct NewSessionModal {
custom_mode: Entity<CustomMode>,
debugger: Option<DebugAdapterName>,
task_contexts: Arc<TaskContexts>,
save_scenario_state: Option<SaveScenarioState>,
_subscriptions: [Subscription; 2],
}
@@ -135,7 +125,6 @@ impl NewSessionModal {
debug_panel: debug_panel.downgrade(),
workspace: workspace_handle,
task_contexts,
save_scenario_state: None,
_subscriptions,
}
});
@@ -230,7 +219,7 @@ impl NewSessionModal {
cx.emit(DismissEvent);
})
.ok();
Result::<_, anyhow::Error>::Ok(())
anyhow::Result::<_, anyhow::Error>::Ok(())
})
.detach_and_log_err(cx);
}
@@ -390,8 +379,6 @@ impl Render for NewSessionModal {
window: &mut ui::Window,
cx: &mut ui::Context<Self>,
) -> impl ui::IntoElement {
let this = cx.weak_entity().clone();
v_flex()
.size_full()
.w(rems(34.))
@@ -497,148 +484,42 @@ impl Render for NewSessionModal {
}
}),
),
NewSessionMode::Custom => h_flex()
.child(
Button::new("new-session-modal-back", "Save to .zed/debug.json...")
.on_click(cx.listener(|this, _, window, cx| {
let Some(save_scenario) = this
.debugger
.as_ref()
.and_then(|debugger| this.debug_scenario(&debugger, cx))
.zip(this.task_contexts.worktree())
.and_then(|(scenario, worktree_id)| {
this.debug_panel
.update(cx, |panel, cx| {
panel.save_scenario(
&scenario,
worktree_id,
window,
cx,
)
})
.ok()
})
else {
return;
};
this.save_scenario_state = Some(SaveScenarioState::Saving);
cx.spawn(async move |this, cx| {
let res = save_scenario.await;
this.update(cx, |this, _| match res {
Ok(saved_file) => {
this.save_scenario_state =
Some(SaveScenarioState::Saved(saved_file))
}
Err(error) => {
this.save_scenario_state =
Some(SaveScenarioState::Failed(
error.to_string().into(),
))
}
})
.ok();
cx.background_executor()
.timer(Duration::from_secs(2))
.await;
this.update(cx, |this, _| {
this.save_scenario_state.take()
})
.ok();
NewSessionMode::Custom => div().child(
Button::new("new-session-modal-back", "Save to .zed/debug.json...")
.on_click(cx.listener(|this, _, window, cx| {
let Some(save_scenario_task) = this
.debugger
.as_ref()
.and_then(|debugger| this.debug_scenario(&debugger, cx))
.zip(this.task_contexts.worktree())
.and_then(|(scenario, worktree_id)| {
this.debug_panel
.update(cx, |panel, cx| {
panel.save_scenario(
&scenario,
worktree_id,
window,
cx,
)
})
.ok()
})
.detach();
}))
.disabled(
self.debugger.is_none()
|| self
.custom_mode
.read(cx)
.program
.read(cx)
.is_empty(cx)
|| self.save_scenario_state.is_some(),
),
)
.when_some(self.save_scenario_state.as_ref(), {
let this_entity = this.clone();
else {
return;
};
move |this, save_state| match save_state {
SaveScenarioState::Saved(saved_path) => this.child(
IconButton::new(
"new-session-modal-go-to-file",
IconName::ArrowUpRight,
)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.on_click({
let this_entity = this_entity.clone();
let saved_path = saved_path.clone();
move |_, window, cx| {
window
.spawn(cx, {
let this_entity = this_entity.clone();
let saved_path = saved_path.clone();
async move |cx| {
this_entity
.update_in(
cx,
|this, window, cx| {
this.workspace.update(
cx,
|workspace, cx| {
workspace.open_path(
saved_path
.clone(),
None,
true,
window,
cx,
)
},
)
},
)??
.await?;
this_entity
.update(cx, |_, cx| {
cx.emit(DismissEvent)
})
.ok();
anyhow::Ok(())
}
})
.detach();
}
}),
),
SaveScenarioState::Saving => this.child(
Icon::new(IconName::Spinner)
.size(IconSize::Small)
.color(Color::Muted)
.with_animation(
"Spinner",
Animation::new(Duration::from_secs(3)).repeat(),
|icon, delta| {
icon.transform(Transformation::rotate(
percentage(delta),
))
},
),
),
SaveScenarioState::Failed(error_msg) => this.child(
IconButton::new("Failed Scenario Saved", IconName::X)
.icon_size(IconSize::Small)
.icon_color(Color::Error)
.tooltip(ui::Tooltip::text(error_msg.clone())),
),
}
}),
cx.spawn(async move |this, cx| {
if save_scenario_task.await.is_ok() {
this.update(cx, |_, cx| cx.emit(DismissEvent)).ok();
}
})
.detach();
}))
.disabled(
self.debugger.is_none()
|| self.custom_mode.read(cx).program.read(cx).is_empty(cx),
),
),
})
.child(
Button::new("debugger-spawn", "Start")
@@ -714,7 +595,7 @@ impl CustomMode {
let program = cx.new(|cx| Editor::single_line(window, cx));
program.update(cx, |this, cx| {
this.set_placeholder_text("Run", cx);
this.set_placeholder_text("Program path", cx);
if let Some(past_program) = past_program {
this.set_text(past_program, window, cx);
@@ -736,29 +617,11 @@ impl CustomMode {
pub(super) fn debug_request(&self, cx: &App) -> task::LaunchRequest {
let path = self.cwd.read(cx).text(cx);
let command = self.program.read(cx).text(cx);
let mut args = shlex::split(&command).into_iter().flatten().peekable();
let mut env = FxHashMap::default();
while args.peek().is_some_and(|arg| arg.contains('=')) {
let arg = args.next().unwrap();
let (lhs, rhs) = arg.split_once('=').unwrap();
env.insert(lhs.to_string(), rhs.to_string());
}
let program = if let Some(program) = args.next() {
program
} else {
env = FxHashMap::default();
command
};
let args = args.collect::<Vec<_>>();
task::LaunchRequest {
program,
program: self.program.read(cx).text(cx),
cwd: path.is_empty().not().then(|| PathBuf::from(path)),
args,
env,
args: Default::default(),
env: Default::default(),
}
}
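One side of the `debug_request` hunk above treats the program field as a shell-like command line: leading `KEY=VALUE` tokens become environment variables, the next token is the program, and the rest are arguments. A short sketch of that parsing, assuming the `shlex` crate that this diff also touches in the debugger's Cargo.toml:

```rust
use std::collections::HashMap;

// Sketch of splitting "FOO=1 ./run --flag" into (env, program, args),
// mirroring the parsing shown in `CustomMode::debug_request` above.
fn parse_command(command: &str) -> (HashMap<String, String>, String, Vec<String>) {
    let mut args = shlex::split(command).unwrap_or_default().into_iter().peekable();

    // Leading KEY=VALUE tokens become environment variables.
    let mut env = HashMap::new();
    while args.peek().is_some_and(|arg| arg.contains('=')) {
        let arg = args.next().unwrap();
        let (key, value) = arg.split_once('=').unwrap();
        env.insert(key.to_string(), value.to_string());
    }

    let program = match args.next() {
        Some(program) => program,
        None => {
            // No program token at all: fall back to the raw input
            // and drop any KEY=VALUE pairs that were peeled off.
            env.clear();
            command.to_string()
        }
    };
    (env, program, args.collect())
}

fn main() {
    let (env, program, args) = parse_command("FOO=1 ./run --flag");
    assert_eq!(env.get("FOO").map(String::as_str), Some("1"));
    assert_eq!(program, "./run");
    assert_eq!(args, vec!["--flag".to_string()]);
}
```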

View File

@@ -5,7 +5,7 @@ use super::{
use anyhow::Result;
use collections::HashMap;
use dap::OutputEvent;
use editor::{Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId};
use editor::{CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId};
use fuzzy::StringMatchCandidate;
use gpui::{
Context, Entity, FocusHandle, Focusable, Render, Subscription, Task, TextStyle, WeakEntity,
@@ -401,21 +401,28 @@ impl ConsoleQueryBarCompletionProvider {
.as_ref()
.unwrap_or(&completion.label)
.to_owned();
let buffer_text = snapshot.text();
let buffer_bytes = buffer_text.as_bytes();
let new_bytes = new_text.as_bytes();
let mut word_bytes_length = 0;
for chunk in snapshot
.reversed_chunks_in_range(language::Anchor::MIN..buffer_position)
{
let mut processed_bytes = 0;
if let Some(_) = chunk.chars().rfind(|c| {
let is_whitespace = c.is_whitespace();
if !is_whitespace {
processed_bytes += c.len_utf8();
}
let mut prefix_len = 0;
for i in (0..new_bytes.len()).rev() {
if buffer_bytes.ends_with(&new_bytes[0..i]) {
prefix_len = i;
is_whitespace
}) {
word_bytes_length += processed_bytes;
break;
} else {
word_bytes_length += chunk.len();
}
}
let buffer_offset = buffer_position.to_offset(&snapshot);
let start = buffer_offset - prefix_len;
let start = snapshot.clip_offset(start, Bias::Left);
let start = buffer_offset - word_bytes_length;
let start = snapshot.anchor_before(start);
let replace_range = start..buffer_position;
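The completion hunk above needs the length of the word immediately before the cursor; one side finds it by walking backwards through the buffer until it reaches whitespace. A standard-library sketch of that backward scan over a plain string slice (the multibuffer/anchor plumbing is omitted):

```rust
// Count the bytes of the word immediately before `cursor` (a byte offset),
// analogous to the reversed-chunk scan in the console completion provider above.
fn word_prefix_len(text: &str, cursor: usize) -> usize {
    let mut len = 0;
    for ch in text[..cursor].chars().rev() {
        if ch.is_whitespace() {
            break;
        }
        len += ch.len_utf8();
    }
    len
}

fn main() {
    let text = "print foo.ba";
    let cursor = text.len();
    let len = word_prefix_len(text, cursor);
    // The replacement range would start 6 bytes before the cursor: "foo.ba".
    assert_eq!(len, 6);
    assert_eq!(&text[cursor - len..cursor], "foo.ba");
}
```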

View File

@@ -6,8 +6,6 @@ use gpui::{
WeakEntity, Window,
};
use language::Diagnostic;
use project::project_settings::ProjectSettings;
use settings::Settings;
use ui::{Button, ButtonLike, Color, Icon, IconName, Label, Tooltip, h_flex, prelude::*};
use workspace::{StatusItemView, ToolbarItemEvent, Workspace, item::ItemHandle};
@@ -24,11 +22,6 @@ pub struct DiagnosticIndicator {
impl Render for DiagnosticIndicator {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let indicator = h_flex().gap_2();
if !ProjectSettings::get_global(cx).diagnostics.button {
return indicator;
}
let diagnostic_indicator = match (self.summary.error_count, self.summary.warning_count) {
(0, 0) => h_flex().map(|this| {
this.child(
@@ -91,7 +84,8 @@ impl Render for DiagnosticIndicator {
None
};
indicator
h_flex()
.gap_2()
.child(
ButtonLike::new("diagnostic-indicator")
.child(diagnostic_indicator)

View File

@@ -640,7 +640,6 @@ impl CompletionsMenu {
MarkdownElement::new(markdown.clone(), hover_markdown_style(window, cx))
.code_block_renderer(markdown::CodeBlockRenderer::Default {
copy_button: false,
copy_button_on_hover: false,
border: false,
})
.on_url_click(open_markdown_url),

View File

@@ -8779,13 +8779,15 @@ impl Editor {
continue;
}
// If the selection is empty and the cursor is in the leading whitespace before the
// suggested indentation, then auto-indent the line.
let cursor = selection.head();
let current_indent = snapshot.indent_size_for_line(MultiBufferRow(cursor.row));
if let Some(suggested_indent) =
suggested_indents.get(&MultiBufferRow(cursor.row)).copied()
{
// Don't do anything if already at suggested indent
// and there is any other cursor which is not yet at its suggested indent
// If there exists an empty selection in the leading whitespace, then skip
// indent for selections at the boundary.
if has_some_cursor_in_whitespace
&& cursor.column == current_indent.len
&& current_indent.len == suggested_indent.len
@@ -8793,8 +8795,6 @@ impl Editor {
continue;
}
// Adjust line and move cursor to suggested indent
// if cursor is not at suggested indent
if cursor.column < suggested_indent.len
&& cursor.column <= current_indent.len
&& current_indent.len <= suggested_indent.len
@@ -8811,14 +8811,6 @@ impl Editor {
}
continue;
}
// If current indent is more than suggested indent
// only move cursor to current indent and skip indent
if cursor.column < current_indent.len && current_indent.len > suggested_indent.len {
selection.start = Point::new(cursor.row, current_indent.len);
selection.end = selection.start;
continue;
}
}
// Otherwise, insert a hard or soft tab.

View File

@@ -4,7 +4,6 @@ use project::project_settings::DiagnosticSeverity;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, VsCodeSettings};
use util::serde::default_true;
#[derive(Deserialize, Clone)]
pub struct EditorSettings {
@@ -277,9 +276,6 @@ pub enum ScrollBeyondLastLine {
/// Default options for buffer and project search items.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
pub struct SearchSettings {
/// Whether to show the project search button in the status bar.
#[serde(default = "default_true")]
pub button: bool,
#[serde(default)]
pub whole_word: bool,
#[serde(default)]

View File

@@ -2871,8 +2871,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_lines(cx: &mut TestAppConte
);
cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
// test when all cursors are not at suggested indent
// then simply move to their suggested indent location
// when all cursors are to the left of the suggested indent, then auto-indent all.
cx.set_state(indoc! {"
const a: B = (
c(
@@ -2889,8 +2888,9 @@ async fn test_tab_in_leading_whitespace_auto_indents_lines(cx: &mut TestAppConte
);
"});
// test cursor already at suggested indent not moving when
// other cursors are yet to reach their suggested indents
// cursors that are already at the suggested indent level do not move
// until other cursors that are to the left of the suggested indent
// auto-indent.
cx.set_state(indoc! {"
ˇ
const a: B = (
@@ -2914,7 +2914,8 @@ async fn test_tab_in_leading_whitespace_auto_indents_lines(cx: &mut TestAppConte
ˇ)
);
"});
// test when all cursors are at suggested indent then tab is inserted
// once all multi-cursors are at the suggested
// indent level, they all insert a soft tab together.
cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
cx.assert_editor_state(indoc! {"
ˇ
@@ -2928,112 +2929,6 @@ async fn test_tab_in_leading_whitespace_auto_indents_lines(cx: &mut TestAppConte
);
"});
// test when current indent is less than suggested indent,
// we adjust line to match suggested indent and move cursor to it
//
// when no other cursor is at word boundary, all of them should move
cx.set_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ )
ˇ )
);
"});
cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
cx.assert_editor_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ)
ˇ)
);
"});
// test when current indent is less than suggested indent,
// we adjust line to match suggested indent and move cursor to it
//
// when some other cursor is at word boundary, it should not move
cx.set_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ )
ˇ)
);
"});
cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
cx.assert_editor_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ)
ˇ)
);
"});
// test when current indent is more than suggested indent,
// we just move cursor to current indent instead of suggested indent
//
// when no other cursor is at word boundary, all of them should move
cx.set_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ )
ˇ )
);
"});
cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
cx.assert_editor_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ)
ˇ)
);
"});
cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
cx.assert_editor_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ)
ˇ)
);
"});
// test when current indent is more than suggested indent,
// we just move cursor to current indent instead of suggested indent
//
// when some other cursor is at word boundary, it doesn't move
cx.set_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ )
ˇ)
);
"});
cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
cx.assert_editor_state(indoc! {"
const a: B = (
c(
d(
ˇ
ˇ)
ˇ)
);
"});
// handle auto-indent when there are multiple cursors on the same line
cx.set_state(indoc! {"
const a: B = (
@@ -9005,7 +8900,6 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
},
})
.await
.into_response()
.unwrap();
Ok(Some(json!(null)))
}
@@ -19259,7 +19153,6 @@ async fn test_apply_code_lens_actions_with_commands(cx: &mut gpui::TestAppContex
},
)
.await
.into_response()
.unwrap();
Ok(Some(json!(null)))
}

View File

@@ -2882,7 +2882,8 @@ impl EditorElement {
text_x: Pixels,
rows: &Range<DisplayRow>,
line_layouts: &[LineWithInvisibles],
editor_margins: &EditorMargins,
gutter_dimensions: &GutterDimensions,
right_margin: Pixels,
line_height: Pixels,
em_width: Pixels,
text_hitbox: &Hitbox,
@@ -2942,6 +2943,11 @@ impl EditorElement {
})
.is_ok();
let margins = EditorMargins {
gutter: *gutter_dimensions,
right: right_margin,
};
div()
.size_full()
.children(
@@ -2950,7 +2956,7 @@ impl EditorElement {
window,
app: cx,
anchor_x,
margins: editor_margins,
margins: &margins,
line_height,
em_width,
block_id,
@@ -2969,7 +2975,7 @@ impl EditorElement {
..
} => {
let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id);
let result = v_flex().id(block_id).w_full().pr(editor_margins.right);
let result = v_flex().id(block_id).w_full();
let jump_data = header_jump_data(snapshot, block_row_start, *height, first_excerpt);
result
@@ -3000,10 +3006,8 @@ impl EditorElement {
if sticky_header_excerpt_id != Some(excerpt.id) {
let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
result = result.child(div().pr(editor_margins.right).child(
self.render_buffer_header(
excerpt, false, selected, false, jump_data, window, cx,
),
result = result.child(self.render_buffer_header(
excerpt, false, selected, false, jump_data, window, cx,
));
} else {
result =
@@ -3050,7 +3054,7 @@ impl EditorElement {
if let Some((x_target, line_width)) = x_position {
let margin = em_width * 2;
if line_width + final_size.width + margin
< editor_width + editor_margins.gutter.full_width()
< editor_width + gutter_dimensions.full_width()
&& !row_block_types.contains_key(&(row - 1))
&& element_height_in_lines == 1
{
@@ -3060,10 +3064,10 @@ impl EditorElement {
element_height_in_lines = 0;
row_block_types.insert(row, is_block);
} else {
let max_offset = editor_width + editor_margins.gutter.full_width()
- final_size.width;
let max_offset =
editor_width + gutter_dimensions.full_width() - final_size.width;
let min_offset = (x_target + em_width - final_size.width)
.max(editor_margins.gutter.full_width());
.max(gutter_dimensions.full_width());
x_offset = x_target.min(max_offset).max(min_offset);
}
}
@@ -3279,7 +3283,8 @@ impl EditorElement {
text_hitbox: &Hitbox,
editor_width: Pixels,
scroll_width: &mut Pixels,
editor_margins: &EditorMargins,
gutter_dimensions: &GutterDimensions,
right_margin: Pixels,
em_width: Pixels,
text_x: Pixels,
line_height: Pixels,
@@ -3319,7 +3324,8 @@ impl EditorElement {
text_x,
&rows,
line_layouts,
editor_margins,
gutter_dimensions,
right_margin,
line_height,
em_width,
text_hitbox,
@@ -3357,7 +3363,7 @@ impl EditorElement {
.size
.width
.max(fixed_block_max_width)
.max(editor_margins.gutter.width + *scroll_width)
.max(gutter_dimensions.width + *scroll_width)
.into(),
(BlockStyle::Fixed, _) => unreachable!(),
};
@@ -3376,7 +3382,8 @@ impl EditorElement {
text_x,
&rows,
line_layouts,
editor_margins,
gutter_dimensions,
right_margin,
line_height,
em_width,
text_hitbox,
@@ -3416,7 +3423,7 @@ impl EditorElement {
.size
.width
.max(fixed_block_max_width)
.max(editor_margins.gutter.width + *scroll_width),
.max(gutter_dimensions.width + *scroll_width),
),
BlockStyle::Sticky => AvailableSpace::Definite(hitbox.size.width),
};
@@ -3430,7 +3437,8 @@ impl EditorElement {
text_x,
&rows,
line_layouts,
editor_margins,
gutter_dimensions,
right_margin,
line_height,
em_width,
text_hitbox,
@@ -3462,8 +3470,7 @@ impl EditorElement {
}
if resized_blocks.is_empty() {
*scroll_width =
(*scroll_width).max(fixed_block_max_width - editor_margins.gutter.width);
*scroll_width = (*scroll_width).max(fixed_block_max_width - gutter_dimensions.width);
Ok((blocks, row_block_types))
} else {
Err(resized_blocks)
@@ -3516,7 +3523,6 @@ impl EditorElement {
StickyHeaderExcerpt { excerpt }: StickyHeaderExcerpt<'_>,
scroll_position: f32,
line_height: Pixels,
right_margin: Pixels,
snapshot: &EditorSnapshot,
hitbox: &Hitbox,
selected_buffer_ids: &Vec<BufferId>,
@@ -3535,13 +3541,11 @@ impl EditorElement {
let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
let available_width = hitbox.bounds.size.width - right_margin;
let mut header = v_flex()
.relative()
.child(
div()
.w(available_width)
.w(hitbox.bounds.size.width)
.h(FILE_HEADER_HEIGHT as f32 * line_height)
.bg(linear_gradient(
0.,
@@ -3578,7 +3582,7 @@ impl EditorElement {
}
let size = size(
AvailableSpace::Definite(available_width),
AvailableSpace::Definite(hitbox.size.width),
AvailableSpace::MinContent,
);
@@ -7173,14 +7177,9 @@ impl Element for EditorElement {
let editor_width =
text_width - gutter_dimensions.margin - 2 * em_width - right_margin;
let editor_margins = EditorMargins {
gutter: gutter_dimensions,
right: right_margin,
};
// Offset the content_bounds from the text_bounds by the gutter margin (which
// is roughly half a character wide) to make hit testing work more like how we want.
let content_offset = point(editor_margins.gutter.margin, Pixels::ZERO);
let content_offset = point(gutter_dimensions.margin, Pixels::ZERO);
let editor_content_width = editor_width - content_offset.x;
@@ -7636,7 +7635,8 @@ impl Element for EditorElement {
&text_hitbox,
editor_width,
&mut scroll_width,
&editor_margins,
&gutter_dimensions,
right_margin,
em_width,
gutter_dimensions.full_width(),
line_height,
@@ -7665,7 +7665,6 @@ impl Element for EditorElement {
sticky_header_excerpt,
scroll_position.y,
line_height,
right_margin,
&snapshot,
&hitbox,
&selected_buffer_ids,

View File

@@ -897,7 +897,6 @@ impl InfoPopover {
MarkdownElement::new(markdown, hover_markdown_style(window, cx))
.code_block_renderer(markdown::CodeBlockRenderer::Default {
copy_button: false,
copy_button_on_hover: false,
border: false,
})
.on_url_click(open_markdown_url),

View File

@@ -1409,7 +1409,6 @@ pub mod tests {
fake_server
.request::<lsp::request::InlayHintRefreshRequest>(())
.await
.into_response()
.expect("inlay refresh request failed");
cx.executor().run_until_parked();
editor
@@ -1493,7 +1492,6 @@ pub mod tests {
token: lsp::ProgressToken::String(progress_token.to_string()),
})
.await
.into_response()
.expect("work done progress create request failed");
cx.executor().run_until_parked();
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
@@ -1865,7 +1863,6 @@ pub mod tests {
fake_server
.request::<lsp::request::InlayHintRefreshRequest>(())
.await
.into_response()
.expect("inlay refresh request failed");
cx.executor().run_until_parked();
editor
@@ -2011,7 +2008,6 @@ pub mod tests {
fake_server
.request::<lsp::request::InlayHintRefreshRequest>(())
.await
.into_response()
.expect("inlay refresh request failed");
cx.executor().run_until_parked();
editor
@@ -2074,7 +2070,6 @@ pub mod tests {
fake_server
.request::<lsp::request::InlayHintRefreshRequest>(())
.await
.into_response()
.expect("inlay refresh request failed");
cx.executor().run_until_parked();
editor

View File

@@ -397,7 +397,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
cx.observe_global::<SettingsStore>(move |cx| {
let settings = &ProjectSettings::get_global(cx).node;
let options = NodeBinaryOptions {
allow_path_lookup: !settings.ignore_system_version,
allow_path_lookup: !settings.ignore_system_version.unwrap_or_default(),
allow_binary_download: true,
use_paths: settings.path.as_ref().map(|node_path| {
let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref());
@@ -417,7 +417,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
tx.send(Some(options)).log_err();
})
.detach();
let node_runtime = NodeRuntime::new(client.http_client(), None, rx);
let node_runtime = NodeRuntime::new(client.http_client(), rx);
let extension_host_proxy = ExtensionHostProxy::global(cx);

View File

@@ -1,19 +0,0 @@
url = "https://github.com/zed-industries/zed"
revision = "main"
require_lsp = false
prompt = """
I need to explore the codebase to understand what files are available in the project. What can you tell me about the structure of the codebase?
Please find all uses of the 'find_path' function in the src directory.
Also, can you tell me what the capital of France is? And how does garbage collection work in programming languages?
"""
profile_name = "minimal"
[thread_assertions]
no_hallucinated_tool_calls = """The agent should not hallucinate tool calls - for example, by writing markdown code blocks that simulate commands like `find`, `grep`, `ls`, etc. - since no tools are available. However, it is totally fine if the agent describes to the user what should be done, e.g. telling the user \"You can run `find` to...\" etc."""
doesnt_hallucinate_file_paths = """The agent should not make up file paths or pretend to know the structure of the project when tools are not available."""
correctly_answers_general_questions = """The agent should correctly answer general knowledge questions about the capital of France and garbage collection without asking for more context, demonstrating it can still be helpful with areas it knows about."""

View File

@@ -806,6 +806,6 @@ trait ToWasmtimeResult<T> {
impl<T> ToWasmtimeResult<T> for Result<T> {
fn to_wasmtime_result(self) -> wasmtime::Result<Result<T, String>> {
Ok(self.map_err(|error| format!("{error:?}")))
Ok(self.map_err(|error| error.to_string()))
}
}
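The one-line change above swaps Debug formatting (`format!("{error:?}")`) for Display formatting (`error.to_string()`) when flattening an `anyhow::Error` into the `String` handed back to the Wasm guest. For an error with attached context the two differ: Display prints only the outermost message, while Debug prints the whole cause chain. A standalone sketch, not part of this diff, with an illustrative file path:

use anyhow::{Context, Result};

fn read_config() -> Result<String> {
    // Illustrative path; .context() attaches an outer message to the io error.
    std::fs::read_to_string("/nonexistent/config.toml").context("failed to load config")
}

fn main() {
    if let Err(error) = read_config() {
        // Display / error.to_string(): only the outermost message, e.g.
        //   failed to load config
        eprintln!("{error}");
        // Debug / format!("{error:?}"): the full chain, e.g.
        //   failed to load config
        //
        //   Caused by:
        //       No such file or directory (os error 2)
        eprintln!("{error:?}");
    }
}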

View File

@@ -2340,19 +2340,15 @@ impl Fs for FakeFs {
fn chunks(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
rope.chunks().flat_map(move |chunk| {
let mut newline = false;
let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
chunk
.lines()
.flat_map(move |line| {
let ending = if newline {
Some(line_ending.as_str())
} else {
None
};
newline = true;
ending.into_iter().chain([line])
})
.chain(end_with_newline)
chunk.split('\n').flat_map(move |line| {
let ending = if newline {
Some(line_ending.as_str())
} else {
None
};
newline = true;
ending.into_iter().chain([line])
})
})
}
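Both versions of `chunks` above convert rope content to the requested line ending; they differ only in how a trailing newline is handled. `str::lines()` drops the empty segment after a final `\n` (and strips a trailing `\r`), so the longer form re-appends it through `end_with_newline`, whereas `str::split('\n')` keeps that empty segment and needs no fix-up. A standalone sketch of that difference using only the standard library:

fn main() {
    let text = "a\nb\n";

    // lines() yields ["a", "b"]: the empty piece after the final '\n' is dropped.
    assert_eq!(text.lines().count(), 2);

    // split('\n') yields ["a", "b", ""]: the trailing empty piece is kept.
    assert_eq!(text.split('\n').count(), 3);

    // Because the empty piece survives, rejoining with a new separator
    // preserves the final line ending, which the split-based version relies on.
    let rejoined = text.split('\n').collect::<Vec<_>>().join("\r\n");
    assert_eq!(rejoined, "a\r\nb\r\n");
}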

View File

@@ -306,7 +306,8 @@ impl PickerDelegate for BranchListDelegate {
cx.background_executor().clone(),
)
.await
.into_iter()
.iter()
.cloned()
.map(|candidate| BranchEntry {
branch: all_branches[candidate.candidate_id].clone(),
positions: candidate.positions,

View File

@@ -1051,8 +1051,8 @@ impl GitPanel {
repo.checkout_files(
"HEAD",
entries
.into_iter()
.map(|entries| entries.repo_path)
.iter()
.map(|entries| entries.repo_path.clone())
.collect(),
cx,
)
@@ -2765,9 +2765,9 @@ impl GitPanel {
let potential_co_authors = self.potential_co_authors(cx);
let (tooltip_label, icon) = if self.add_coauthors {
("Remove co-authored-by", IconName::Person)
} else {
("Add co-authored-by", IconName::UserCheck)
} else {
("Remove co-authored-by", IconName::Person)
};
if potential_co_authors.is_empty() {

View File

@@ -161,7 +161,7 @@ blade-graphics = { workspace = true, optional = true }
blade-macros = { workspace = true, optional = true }
blade-util = { workspace = true, optional = true }
bytemuck = { version = "1", optional = true }
cosmic-text = { version = "0.14.0", optional = true }
cosmic-text = { version = "0.13.2", optional = true }
font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "5474cfad4b719a72ec8ed2cb7327b2b01fd10568", features = [
"source-fontconfig-dlopen",
], optional = true }

View File

@@ -490,7 +490,7 @@ impl Interactivity {
/// Bind the given callback on the hover start and end events of this element. Note that the boolean
/// passed to the callback is true when the hover starts and false when it ends.
/// The imperative API equivalent to [`StatefulInteractiveElement::on_hover`]
/// The imperative API equivalent to [`StatefulInteractiveElement::on_drag`]
///
/// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback.
pub fn on_hover(&mut self, listener: impl Fn(&bool, &mut Window, &mut App) + 'static)
@@ -544,15 +544,6 @@ impl Interactivity {
pub fn occlude_mouse(&mut self) {
self.occlude_mouse = true;
}
/// Registers event handlers that stop propagation of mouse events for non-scroll events.
/// The imperative API equivalent to [`InteractiveElement::block_mouse_except_scroll`]
pub fn stop_mouse_events_except_scroll(&mut self) {
self.on_any_mouse_down(|_, _, cx| cx.stop_propagation());
self.on_any_mouse_up(|_, _, cx| cx.stop_propagation());
self.on_click(|_, _, cx| cx.stop_propagation());
self.on_hover(|_, _, cx| cx.stop_propagation());
}
}
/// A trait for elements that want to use the standard GPUI event handlers that don't
@@ -928,17 +919,11 @@ pub trait InteractiveElement: Sized {
self
}
/// Stops propagation of left mouse down event.
/// Block the mouse from interacting with this element or any of its children
/// The fluent API equivalent to [`Interactivity::occlude_mouse`]
fn block_mouse_down(mut self) -> Self {
self.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
}
/// Registers event handlers that stop propagation of mouse events for non-scroll events.
/// The fluent API equivalent to [`Interactivity::block_mouse_except_scroll`]
fn stop_mouse_events_except_scroll(mut self) -> Self {
self.interactivity().stop_mouse_events_except_scroll();
self
}
}
/// A trait for elements that want to use the standard GPUI interactivity features
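The hunks above document two pieces of the interactivity API: `on_hover`, whose listener receives `true` when the hover starts and `false` when it ends, and `stop_mouse_events_except_scroll`, which registers handlers that stop propagation of every mouse event except scrolling. A minimal usage sketch, assuming a gpui build where both fluent methods are available; the element id and the logging are illustrative, not taken from the diff:

use gpui::{div, prelude::*};

// Hypothetical helper: a stateful element that reacts to hover transitions and
// swallows non-scroll mouse events, per the doc comments in the hunks above.
fn hover_aware_overlay() -> impl IntoElement {
    div()
        .id("hover-overlay") // hover tracking needs a stateful (identified) element
        .on_hover(|hover_started, _window, _cx| {
            // true when the hover starts, false when it ends.
            if *hover_started {
                eprintln!("hover started");
            } else {
                eprintln!("hover ended");
            }
        })
        // Blocks mouse down/up, click, and hover from propagating further while
        // leaving scrolling untouched.
        .stop_mouse_events_except_scroll()
}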

View File

@@ -6,8 +6,8 @@ use crate::{
use anyhow::{Context as _, Ok, Result, anyhow};
use collections::HashMap;
use cosmic_text::{
Attrs, AttrsList, CacheKey, Family, Font as CosmicTextFont, FontFeatures as CosmicFontFeatures,
FontSystem, ShapeBuffer, ShapeLine, SwashCache,
Attrs, AttrsList, CacheKey, Family, Font as CosmicTextFont, FontSystem, ShapeBuffer, ShapeLine,
SwashCache,
};
use itertools::Itertools;
@@ -21,29 +21,15 @@ use std::{borrow::Cow, sync::Arc};
pub(crate) struct CosmicTextSystem(RwLock<CosmicTextSystemState>);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct FontKey {
family: SharedString,
features: FontFeatures,
}
impl FontKey {
fn new(family: SharedString, features: FontFeatures) -> Self {
Self { family, features }
}
}
struct CosmicTextSystemState {
swash_cache: SwashCache,
font_system: FontSystem,
scratch: ShapeBuffer,
/// Contains all already loaded fonts, including all faces. Indexed by `FontId`.
loaded_fonts_store: Vec<Arc<CosmicTextFont>>,
/// Contains enabled font features for each loaded font.
features_store: Vec<CosmicFontFeatures>,
/// Caches the `FontId`s associated with a specific family to avoid iterating the font database
/// for every font face in a family.
font_ids_by_family_cache: HashMap<FontKey, SmallVec<[FontId; 4]>>,
font_ids_by_family_cache: HashMap<SharedString, SmallVec<[FontId; 4]>>,
/// The name of each font associated with the given font id
postscript_names: HashMap<FontId, String>,
}
@@ -58,7 +44,6 @@ impl CosmicTextSystem {
swash_cache: SwashCache::new(),
scratch: ShapeBuffer::default(),
loaded_fonts_store: Vec::new(),
features_store: Vec::new(),
font_ids_by_family_cache: HashMap::default(),
postscript_names: HashMap::default(),
}))
@@ -93,13 +78,15 @@ impl PlatformTextSystem for CosmicTextSystem {
fn font_id(&self, font: &Font) -> Result<FontId> {
// todo(linux): Do we need to use CosmicText's Font APIs? Can we consolidate this to use font_kit?
let mut state = self.0.write();
let key = FontKey::new(font.family.clone(), font.features.clone());
let candidates = if let Some(font_ids) = state.font_ids_by_family_cache.get(&key) {
let candidates = if let Some(font_ids) = state.font_ids_by_family_cache.get(&font.family) {
font_ids.as_slice()
} else {
let font_ids = state.load_family(&font.family, &font.features)?;
state.font_ids_by_family_cache.insert(key.clone(), font_ids);
state.font_ids_by_family_cache[&key].as_ref()
state
.font_ids_by_family_cache
.insert(font.family.clone(), font_ids);
state.font_ids_by_family_cache[&font.family].as_ref()
};
// todo(linux) ideally we would make fontdb's `find_best_match` pub instead of using font-kit here
@@ -242,24 +229,9 @@ impl CosmicTextSystemState {
continue;
};
// Convert features into cosmic_text struct.
let mut font_features = CosmicFontFeatures::new();
for feature in _features.0.iter() {
let name_bytes: [u8; 4] = feature
.0
.as_bytes()
.try_into()
.map_err(|_| anyhow!("Incorrect feature flag format"))?;
let tag = cosmic_text::FeatureTag::new(&name_bytes);
font_features.set(tag, feature.1);
}
let font_id = FontId(self.loaded_fonts_store.len());
font_ids.push(font_id);
self.loaded_fonts_store.push(font);
self.features_store.push(font_features);
self.postscript_names.insert(font_id, postscript_name);
}
@@ -299,9 +271,6 @@ impl CosmicTextSystemState {
fn raster_bounds(&mut self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
let font = &self.loaded_fonts_store[params.font_id.0];
let subpixel_shift = params
.subpixel_variant
.map(|v| v as f32 / (SUBPIXEL_VARIANTS as f32 * params.scale_factor));
let image = self
.swash_cache
.get_image(
@@ -310,7 +279,7 @@ impl CosmicTextSystemState {
font.id(),
params.glyph_id.0 as u16,
(params.font_size * params.scale_factor).into(),
(subpixel_shift.x, subpixel_shift.y.trunc()),
(0.0, 0.0),
cosmic_text::CacheKeyFlags::empty(),
)
.0,
@@ -392,22 +361,18 @@ impl CosmicTextSystemState {
#[profiling::function]
fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout {
let mut attrs_list = AttrsList::new(&Attrs::new());
let mut attrs_list = AttrsList::new(Attrs::new());
let mut offs = 0;
for run in font_runs {
let font = &self.loaded_fonts_store[run.font_id.0];
let font = self.font_system.db().face(font.id()).unwrap();
let features = self.features_store[run.font_id.0].clone();
attrs_list.add_span(
offs..(offs + run.len),
&Attrs::new()
Attrs::new()
.family(Family::Name(&font.families.first().unwrap().0))
.stretch(font.stretch)
.style(font.style)
.weight(font.weight)
.font_features(features),
.weight(font.weight),
);
offs += run.len;
}

View File

@@ -5,21 +5,23 @@ use collections::{FxHashMap, FxHashSet};
use itertools::Itertools;
use util::ResultExt;
use windows::Win32::{
Foundation::{HANDLE, HGLOBAL},
Foundation::HANDLE,
System::{
DataExchange::{
CloseClipboard, CountClipboardFormats, EmptyClipboard, EnumClipboardFormats,
GetClipboardData, GetClipboardFormatNameW, IsClipboardFormatAvailable, OpenClipboard,
RegisterClipboardFormatW, SetClipboardData,
},
Memory::{GMEM_MOVEABLE, GlobalAlloc, GlobalLock, GlobalSize, GlobalUnlock},
Memory::{GMEM_MOVEABLE, GlobalAlloc, GlobalLock, GlobalUnlock},
Ole::{CF_HDROP, CF_UNICODETEXT},
},
UI::Shell::{DragQueryFileW, HDROP},
};
use windows_core::PCWSTR;
use crate::{ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, hash};
use crate::{
ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, SmartGlobal, hash,
};
// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew
const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF;
@@ -266,10 +268,13 @@ where
}
fn read_string_from_clipboard() -> Option<ClipboardEntry> {
let text = with_clipboard_data(CF_UNICODETEXT.0 as u32, |data_ptr| {
let pcwstr = PCWSTR(data_ptr as *const u16);
String::from_utf16_lossy(unsafe { pcwstr.as_wide() })
})?;
let text = {
let global = SmartGlobal::from_raw_ptr(
unsafe { GetClipboardData(CF_UNICODETEXT.0 as u32).log_err() }?.0,
);
let text = PCWSTR(global.lock() as *const u16);
String::from_utf16_lossy(unsafe { text.as_wide() })
};
let Some(hash) = read_hash_from_clipboard() else {
return Some(ClipboardEntry::String(ClipboardString::new(text)));
};
@@ -290,23 +295,25 @@ fn read_hash_from_clipboard() -> Option<u64> {
if unsafe { IsClipboardFormatAvailable(*CLIPBOARD_HASH_FORMAT).is_err() } {
return None;
}
with_clipboard_data(*CLIPBOARD_HASH_FORMAT, |data_ptr| {
let hash_bytes: [u8; 8] = unsafe {
std::slice::from_raw_parts(data_ptr.cast::<u8>(), 8)
.to_vec()
.try_into()
.log_err()
}?;
Some(u64::from_ne_bytes(hash_bytes))
})?
let global =
SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(*CLIPBOARD_HASH_FORMAT).log_err() }?.0);
let raw_ptr = global.lock() as *const u16;
let hash_bytes: [u8; 8] = unsafe {
std::slice::from_raw_parts(raw_ptr.cast::<u8>(), 8)
.to_vec()
.try_into()
.log_err()
}?;
Some(u64::from_ne_bytes(hash_bytes))
}
fn read_metadata_from_clipboard() -> Option<String> {
unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).log_err()? };
with_clipboard_data(*CLIPBOARD_METADATA_FORMAT, |data_ptr| {
let pcwstr = PCWSTR(data_ptr as *const u16);
String::from_utf16_lossy(unsafe { pcwstr.as_wide() })
})
let global = SmartGlobal::from_raw_ptr(
unsafe { GetClipboardData(*CLIPBOARD_METADATA_FORMAT).log_err() }?.0,
);
let text = PCWSTR(global.lock() as *const u16);
Some(String::from_utf16_lossy(unsafe { text.as_wide() }))
}
fn read_image_from_clipboard(format: u32) -> Option<ClipboardEntry> {
@@ -320,52 +327,29 @@ fn format_number_to_image_format(format_number: u32) -> Option<&'static ImageFor
}
fn read_image_for_type(format_number: u32, format: ImageFormat) -> Option<ClipboardEntry> {
let (bytes, id) = with_clipboard_data_and_size(format_number, |data_ptr, size| {
let bytes = unsafe { std::slice::from_raw_parts(data_ptr as *mut u8 as _, size).to_vec() };
let id = hash(&bytes);
(bytes, id)
})?;
let global = SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(format_number).log_err() }?.0);
let image_ptr = global.lock();
let image_size = global.size();
let bytes =
unsafe { std::slice::from_raw_parts(image_ptr as *mut u8 as _, image_size).to_vec() };
let id = hash(&bytes);
Some(ClipboardEntry::Image(Image { format, bytes, id }))
}
fn read_files_from_clipboard() -> Option<ClipboardEntry> {
let text = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr| {
let hdrop = HDROP(data_ptr);
let mut filenames = String::new();
with_file_names(hdrop, |file_name| {
filenames.push_str(&file_name);
});
filenames
})?;
let global =
SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(CF_HDROP.0 as u32).log_err() }?.0);
let hdrop = HDROP(global.lock());
let mut filenames = String::new();
with_file_names(hdrop, |file_name| {
filenames.push_str(&file_name);
});
Some(ClipboardEntry::String(ClipboardString {
text,
text: filenames,
metadata: None,
}))
}
fn with_clipboard_data<F, R>(format: u32, f: F) -> Option<R>
where
F: FnOnce(*mut std::ffi::c_void) -> R,
{
let global = HGLOBAL(unsafe { GetClipboardData(format).log_err() }?.0);
let data_ptr = unsafe { GlobalLock(global) };
let result = f(data_ptr);
unsafe { GlobalUnlock(global).log_err() };
Some(result)
}
fn with_clipboard_data_and_size<F, R>(format: u32, f: F) -> Option<R>
where
F: FnOnce(*mut std::ffi::c_void, usize) -> R,
{
let global = HGLOBAL(unsafe { GetClipboardData(format).log_err() }?.0);
let size = unsafe { GlobalSize(global) };
let data_ptr = unsafe { GlobalLock(global) };
let result = f(data_ptr, size);
unsafe { GlobalUnlock(global).log_err() };
Some(result)
}
impl From<ImageFormat> for image::ImageFormat {
fn from(value: ImageFormat) -> Self {
match value {

View File

@@ -1,6 +1,11 @@
use std::ops::Deref;
use windows::Win32::{Foundation::HANDLE, UI::WindowsAndMessaging::HCURSOR};
use util::ResultExt;
use windows::Win32::{
Foundation::{HANDLE, HGLOBAL},
System::Memory::{GlobalLock, GlobalSize, GlobalUnlock},
UI::WindowsAndMessaging::HCURSOR,
};
#[derive(Debug, Clone, Copy)]
pub(crate) struct SafeHandle {
@@ -45,3 +50,30 @@ impl Deref for SafeCursor {
&self.raw
}
}
#[derive(Debug, Clone)]
pub(crate) struct SmartGlobal {
raw: HGLOBAL,
}
impl SmartGlobal {
pub(crate) fn from_raw_ptr(ptr: *mut std::ffi::c_void) -> Self {
Self { raw: HGLOBAL(ptr) }
}
pub(crate) fn lock(&self) -> *mut std::ffi::c_void {
unsafe { GlobalLock(self.raw) }
}
pub(crate) fn size(&self) -> usize {
unsafe { GlobalSize(self.raw) }
}
}
impl Drop for SmartGlobal {
fn drop(&mut self) {
unsafe {
GlobalUnlock(self.raw).log_err();
}
}
}
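`SmartGlobal` is a small RAII wrapper around the `HGLOBAL` handed back by `GetClipboardData`: `lock()` maps to `GlobalLock`, `size()` to `GlobalSize`, and `Drop` guarantees the matching `GlobalUnlock`. That is what lets the clipboard readers in the previous file bail out early with `?` without leaking a lock. A hedged, Windows-only sketch of that read pattern, assuming it lives in the same crate as the wrapper and that the clipboard is already open; the helper name and null check are illustrative:

// Illustrative helper, not part of this diff.
#[cfg(target_os = "windows")]
fn read_clipboard_bytes(format: u32) -> Option<Vec<u8>> {
    use windows::Win32::System::DataExchange::GetClipboardData;

    let global = SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(format).ok() }?.0);
    let ptr = global.lock() as *const u8;
    if ptr.is_null() {
        // Early return is fine: Drop still runs GlobalUnlock on `global`.
        return None;
    }
    let bytes = unsafe { std::slice::from_raw_parts(ptr, global.size()).to_vec() };
    Some(bytes)
    // `global` drops here, calling GlobalUnlock exactly once.
}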

View File

@@ -467,7 +467,7 @@ impl TextStyle {
len,
font: Font {
family: self.font_family.clone(),
features: self.font_features.clone(),
features: Default::default(),
fallbacks: self.font_fallbacks.clone(),
weight: self.font_weight,
style: self.font_style,

View File

@@ -155,7 +155,6 @@ pub enum IconName {
ListCollapse,
ListTree,
ListX,
LoadCircle,
LockOutlined,
MagnifyingGlass,
MailOpen,

View File

@@ -24,11 +24,13 @@ indoc.workspace = true
inline_completion.workspace = true
language.workspace = true
paths.workspace = true
proto.workspace = true
regex.workspace = true
settings.workspace = true
supermaven.workspace = true
telemetry.workspace = true
ui.workspace = true
util.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true

View File

@@ -14,6 +14,7 @@ use gpui::{
pulsating_between,
};
use indoc::indoc;
use inline_completion::EditPredictionUsage;
use language::{
EditPredictionsMode, File, Language,
language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings},
@@ -29,6 +30,7 @@ use ui::{
Clickable, ContextMenu, ContextMenuEntry, DocumentationSide, IconButton, IconButtonShape,
Indicator, PopoverMenu, PopoverMenuHandle, ProgressBar, Tooltip, prelude::*,
};
use util::maybe;
use workspace::{
StatusItemView, Toast, Workspace, create_and_open_local_file, item::ItemHandle,
notifications::NotificationId,
@@ -235,17 +237,11 @@ impl Render for InlineCompletionButton {
let current_user_terms_accepted =
self.user_store.read(cx).current_user_has_accepted_terms();
let has_subscription = self.user_store.read(cx).current_plan().is_some()
&& self.user_store.read(cx).subscription_period().is_some();
if !has_subscription || !current_user_terms_accepted.unwrap_or(false) {
if !current_user_terms_accepted.unwrap_or(false) {
let signed_in = current_user_terms_accepted.is_some();
let tooltip_meta = if signed_in {
if has_subscription {
"Read Terms of Service"
} else {
"Choose a Plan"
}
"Read Terms of Service"
} else {
"Sign in to use"
};
@@ -403,44 +399,64 @@ impl InlineCompletionButton {
let fs = self.fs.clone();
let line_height = window.line_height();
if let Some(usage) = self
.edit_prediction_provider
.as_ref()
.and_then(|provider| provider.usage(cx))
{
menu = menu.header("Usage");
menu = menu
.custom_entry(
move |_window, cx| {
let used_percentage = match usage.limit {
UsageLimit::Limited(limit) => {
Some((usage.amount as f32 / limit as f32) * 100.)
}
UsageLimit::Unlimited => None,
};
if let Some(provider) = self.edit_prediction_provider.as_ref() {
let usage = provider.usage(cx).or_else(|| {
let user_store = self.user_store.read(cx);
h_flex()
.flex_1()
.gap_1p5()
.children(
used_percentage
.map(|percent| ProgressBar::new("usage", percent, 100., cx)),
)
.child(
Label::new(match usage.limit {
UsageLimit::Limited(limit) => {
format!("{} / {limit}", usage.amount)
}
UsageLimit::Unlimited => format!("{} / ∞", usage.amount),
})
.size(LabelSize::Small)
.color(Color::Muted),
)
.into_any_element()
},
move |_, cx| cx.open_url(&zed_urls::account_url(cx)),
)
.separator();
maybe!({
let amount = user_store.edit_predictions_usage_amount()?;
let limit = user_store.edit_predictions_usage_limit()?.variant?;
Some(EditPredictionUsage {
amount: amount as i32,
limit: match limit {
proto::usage_limit::Variant::Limited(limited) => {
zed_llm_client::UsageLimit::Limited(limited.limit as i32)
}
proto::usage_limit::Variant::Unlimited(_) => {
zed_llm_client::UsageLimit::Unlimited
}
},
})
})
});
if let Some(usage) = usage {
menu = menu.header("Usage");
menu = menu
.custom_entry(
move |_window, cx| {
let used_percentage = match usage.limit {
UsageLimit::Limited(limit) => {
Some((usage.amount as f32 / limit as f32) * 100.)
}
UsageLimit::Unlimited => None,
};
h_flex()
.flex_1()
.gap_1p5()
.children(
used_percentage.map(|percent| {
ProgressBar::new("usage", percent, 100., cx)
}),
)
.child(
Label::new(match usage.limit {
UsageLimit::Limited(limit) => {
format!("{} / {limit}", usage.amount)
}
UsageLimit::Unlimited => format!("{} / ∞", usage.amount),
})
.size(LabelSize::Small)
.color(Color::Muted),
)
.into_any_element()
},
move |_, cx| cx.open_url(&zed_urls::account_url(cx)),
)
.separator();
}
}
menu = menu.header("Show Edit Predictions For");
@@ -835,7 +851,7 @@ async fn open_disabled_globs_setting_in_editor(
});
if !edits.is_empty() {
item.edit(edits, cx);
item.edit(edits.iter().cloned(), cx);
}
let text = item.buffer().read(cx).snapshot(cx).text();

View File

@@ -104,10 +104,6 @@ impl LanguageModelImage {
// so this method is more of a rough guess.
(width * height) / 750
}
pub fn to_base64_url(&self) -> String {
format!("data:image/png;base64,{}", self.source)
}
}
fn encode_as_base64(data: Arc<Image>, image: image::DynamicImage) -> Result<Vec<u8>> {

View File

@@ -11,26 +11,14 @@ workspace = true
[lib]
path = "src/language_model_selector.rs"
[features]
test-support = [
"gpui/test-support",
]
[dependencies]
collections.workspace = true
feature_flags.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language_model.workspace = true
log.workspace = true
ordered-float.workspace = true
picker.workspace = true
proto.workspace = true
ui.workspace = true
workspace-hack.workspace = true
zed_actions.workspace = true
[dev-dependencies]
gpui = { workspace = true, "features" = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] }

View File

@@ -1,18 +1,15 @@
use std::{cmp::Reverse, sync::Arc};
use std::sync::Arc;
use collections::{HashSet, IndexMap};
use feature_flags::ZedProFeatureFlag;
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
use gpui::{
Action, AnyElement, AnyView, App, BackgroundExecutor, Corner, DismissEvent, Entity,
EventEmitter, FocusHandle, Focusable, Subscription, Task, WeakEntity,
action_with_deprecated_aliases,
Action, AnyElement, AnyView, App, Corner, DismissEvent, Entity, EventEmitter, FocusHandle,
Focusable, Subscription, Task, WeakEntity, action_with_deprecated_aliases,
};
use language_model::{
AuthenticateError, ConfiguredModel, LanguageModel, LanguageModelProviderId,
LanguageModelRegistry,
};
use ordered_float::OrderedFloat;
use picker::{Picker, PickerDelegate};
use proto::Plan;
use ui::{ListItem, ListItemSpacing, PopoverMenu, PopoverMenuHandle, PopoverTrigger, prelude::*};
@@ -325,23 +322,6 @@ struct GroupedModels {
}
impl GroupedModels {
pub fn new(other: Vec<ModelInfo>, recommended: Vec<ModelInfo>) -> Self {
let mut other_by_provider: IndexMap<_, Vec<ModelInfo>> = IndexMap::default();
for model in other {
let provider = model.model.provider_id();
if let Some(models) = other_by_provider.get_mut(&provider) {
models.push(model);
} else {
other_by_provider.insert(provider, vec![model]);
}
}
Self {
recommended,
other: other_by_provider,
}
}
fn entries(&self) -> Vec<LanguageModelPickerEntry> {
let mut entries = Vec::new();
@@ -369,20 +349,6 @@ impl GroupedModels {
}
entries
}
fn model_infos(&self) -> Vec<ModelInfo> {
let other = self
.other
.values()
.flat_map(|model| model.iter())
.cloned()
.collect::<Vec<_>>();
self.recommended
.iter()
.chain(&other)
.cloned()
.collect::<Vec<_>>()
}
}
enum LanguageModelPickerEntry {
@@ -390,78 +356,6 @@ enum LanguageModelPickerEntry {
Separator(SharedString),
}
struct ModelMatcher {
models: Vec<ModelInfo>,
bg_executor: BackgroundExecutor,
candidates: Vec<StringMatchCandidate>,
}
impl ModelMatcher {
fn new(models: Vec<ModelInfo>, bg_executor: BackgroundExecutor) -> ModelMatcher {
let candidates = Self::make_match_candidates(&models);
Self {
models,
bg_executor,
candidates,
}
}
pub fn fuzzy_search(&self, query: &str) -> Vec<ModelInfo> {
let mut matches = self.bg_executor.block(match_strings(
&self.candidates,
&query,
false,
100,
&Default::default(),
self.bg_executor.clone(),
));
let sorting_key = |mat: &StringMatch| {
let candidate = &self.candidates[mat.candidate_id];
(Reverse(OrderedFloat(mat.score)), candidate.id)
};
matches.sort_unstable_by_key(sorting_key);
let matched_models: Vec<_> = matches
.into_iter()
.map(|mat| self.models[mat.candidate_id].clone())
.collect();
matched_models
}
pub fn exact_search(&self, query: &str) -> Vec<ModelInfo> {
self.models
.iter()
.filter(|m| {
m.model
.name()
.0
.to_lowercase()
.contains(&query.to_lowercase())
})
.cloned()
.collect::<Vec<_>>()
}
fn make_match_candidates(model_infos: &Vec<ModelInfo>) -> Vec<StringMatchCandidate> {
model_infos
.iter()
.enumerate()
.map(|(index, model)| {
StringMatchCandidate::new(
index,
&format!(
"{}/{}",
&model.model.provider_name().0,
&model.model.name().0
),
)
})
.collect::<Vec<_>>()
}
}
impl PickerDelegate for LanguageModelPickerDelegate {
type ListItem = AnyElement;
@@ -502,45 +396,56 @@ impl PickerDelegate for LanguageModelPickerDelegate {
) -> Task<()> {
let all_models = self.all_models.clone();
let current_index = self.selected_index;
let bg_executor = cx.background_executor();
let language_model_registry = LanguageModelRegistry::global(cx);
let configured_providers = language_model_registry
.read(cx)
.providers()
.into_iter()
.filter(|provider| provider.is_authenticated(cx))
.collect::<Vec<_>>();
let configured_provider_ids = configured_providers
.iter()
.filter(|provider| provider.is_authenticated(cx))
.map(|provider| provider.id())
.collect::<Vec<_>>();
let recommended_models = all_models
.recommended
.iter()
.filter(|m| configured_provider_ids.contains(&m.model.provider_id()))
.cloned()
.collect::<Vec<_>>();
let available_models = all_models
.model_infos()
.iter()
.filter(|m| configured_provider_ids.contains(&m.model.provider_id()))
.cloned()
.collect::<Vec<_>>();
let matcher_rec = ModelMatcher::new(recommended_models, bg_executor.clone());
let matcher_all = ModelMatcher::new(available_models, bg_executor.clone());
let recommended = matcher_rec.exact_search(&query);
let all = matcher_all.fuzzy_search(&query);
let filtered_models = GroupedModels::new(all, recommended);
cx.spawn_in(window, async move |this, cx| {
let filtered_models = cx
.background_spawn(async move {
let matches = |info: &ModelInfo| {
info.model
.name()
.0
.to_lowercase()
.contains(&query.to_lowercase())
};
let recommended_models = all_models
.recommended
.iter()
.filter(|r| {
configured_providers.contains(&r.model.provider_id()) && matches(r)
})
.cloned()
.collect();
let mut other_models = IndexMap::default();
for (provider_id, models) in &all_models.other {
if configured_providers.contains(&provider_id) {
other_models.insert(
provider_id.clone(),
models
.iter()
.filter(|m| matches(m))
.cloned()
.collect::<Vec<_>>(),
);
}
}
GroupedModels {
recommended: recommended_models,
other: other_models,
}
})
.await;
this.update_in(cx, |this, window, cx| {
this.delegate.filtered_entries = filtered_models.entries();
// Preserve selection focus
@@ -702,187 +607,3 @@ impl PickerDelegate for LanguageModelPickerDelegate {
)
}
}
#[cfg(test)]
mod tests {
use super::*;
use futures::{future::BoxFuture, stream::BoxStream};
use gpui::{AsyncApp, TestAppContext, http_client};
use language_model::{
LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId,
LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
LanguageModelRequest, LanguageModelToolChoice,
};
use ui::IconName;
#[derive(Clone)]
struct TestLanguageModel {
name: LanguageModelName,
id: LanguageModelId,
provider_id: LanguageModelProviderId,
provider_name: LanguageModelProviderName,
}
impl TestLanguageModel {
fn new(name: &str, provider: &str) -> Self {
Self {
name: LanguageModelName::from(name.to_string()),
id: LanguageModelId::from(name.to_string()),
provider_id: LanguageModelProviderId::from(provider.to_string()),
provider_name: LanguageModelProviderName::from(provider.to_string()),
}
}
}
impl LanguageModel for TestLanguageModel {
fn id(&self) -> LanguageModelId {
self.id.clone()
}
fn name(&self) -> LanguageModelName {
self.name.clone()
}
fn provider_id(&self) -> LanguageModelProviderId {
self.provider_id.clone()
}
fn provider_name(&self) -> LanguageModelProviderName {
self.provider_name.clone()
}
fn supports_tools(&self) -> bool {
false
}
fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool {
false
}
fn telemetry_id(&self) -> String {
format!("{}/{}", self.provider_id.0, self.name.0)
}
fn max_token_count(&self) -> usize {
1000
}
fn count_tokens(
&self,
_: LanguageModelRequest,
_: &App,
) -> BoxFuture<'static, http_client::Result<usize>> {
unimplemented!()
}
fn stream_completion(
&self,
_: LanguageModelRequest,
_: &AsyncApp,
) -> BoxFuture<
'static,
http_client::Result<
BoxStream<
'static,
http_client::Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
>,
>,
> {
unimplemented!()
}
}
fn create_models(model_specs: Vec<(&str, &str)>) -> Vec<ModelInfo> {
model_specs
.into_iter()
.map(|(provider, name)| ModelInfo {
model: Arc::new(TestLanguageModel::new(name, provider)),
icon: IconName::Ai,
})
.collect()
}
fn assert_models_eq(result: Vec<ModelInfo>, expected: Vec<&str>) {
assert_eq!(
result.len(),
expected.len(),
"Number of models doesn't match"
);
for (i, expected_name) in expected.iter().enumerate() {
assert_eq!(
result[i].model.telemetry_id(),
*expected_name,
"Model at position {} doesn't match expected model",
i
);
}
}
#[gpui::test]
fn test_exact_match(cx: &mut TestAppContext) {
let models = create_models(vec![
("zed", "Claude 3.7 Sonnet"),
("zed", "Claude 3.7 Sonnet Thinking"),
("zed", "gpt-4.1"),
("zed", "gpt-4.1-nano"),
("openai", "gpt-3.5-turbo"),
("openai", "gpt-4.1"),
("openai", "gpt-4.1-nano"),
("ollama", "mistral"),
("ollama", "deepseek"),
]);
let matcher = ModelMatcher::new(models, cx.background_executor.clone());
// The order of models should be maintained, case doesn't matter
let results = matcher.exact_search("GPT-4.1");
assert_models_eq(
results,
vec![
"zed/gpt-4.1",
"zed/gpt-4.1-nano",
"openai/gpt-4.1",
"openai/gpt-4.1-nano",
],
);
}
#[gpui::test]
fn test_fuzzy_match(cx: &mut TestAppContext) {
let models = create_models(vec![
("zed", "Claude 3.7 Sonnet"),
("zed", "Claude 3.7 Sonnet Thinking"),
("zed", "gpt-4.1"),
("zed", "gpt-4.1-nano"),
("openai", "gpt-3.5-turbo"),
("openai", "gpt-4.1"),
("openai", "gpt-4.1-nano"),
("ollama", "mistral"),
("ollama", "deepseek"),
]);
let matcher = ModelMatcher::new(models, cx.background_executor.clone());
// Results should preserve models order whenever possible.
// In the case below, `zed/gpt-4.1` and `openai/gpt-4.1` have identical
// similarity scores, but `zed/gpt-4.1` was higher in the models list,
// so it should appear first in the results.
let results = matcher.fuzzy_search("41");
assert_models_eq(
results,
vec![
"zed/gpt-4.1",
"openai/gpt-4.1",
"zed/gpt-4.1-nano",
"openai/gpt-4.1-nano",
],
);
// Model provider should be searchable as well
let results = matcher.fuzzy_search("ol"); // meaning "ollama"
assert_models_eq(results, vec!["ollama/mistral", "ollama/deepseek"]);
// Fuzzy search
let results = matcher.fuzzy_search("z4n");
assert_models_eq(results, vec!["zed/gpt-4.1-nano"]);
}
}

View File

@@ -15,15 +15,13 @@ path = "src/language_models.rs"
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
aws-config = { workspace = true, features = ["behavior-version-latest"] }
aws-credential-types = { workspace = true, features = [
"hardcoded-credentials",
] }
aws-credential-types = { workspace = true, features = ["hardcoded-credentials"] }
aws_http_client.workspace = true
bedrock.workspace = true
client.workspace = true
collections.workspace = true
credentials_provider.workspace = true
copilot.workspace = true
copilot = { workspace = true, features = ["schemars"] }
deepseek = { workspace = true, features = ["schemars"] }
editor.workspace = true
feature_flags.workspace = true

View File

@@ -180,12 +180,9 @@ impl State {
fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(async move |state, cx| {
client
.authenticate_and_connect(true, &cx)
.await
.into_response()?;
state.update(cx, |_, cx| cx.notify())
cx.spawn(async move |this, cx| {
client.authenticate_and_connect(true, &cx).await?;
this.update(cx, |_, cx| cx.notify())
})
}
@@ -614,7 +611,7 @@ impl CloudLanguageModel {
.and_then(|plan| zed_llm_client::Plan::from_str(plan).ok())
{
let plan = match plan {
zed_llm_client::Plan::ZedFree => Plan::Free,
zed_llm_client::Plan::Free => Plan::Free,
zed_llm_client::Plan::ZedPro => Plan::ZedPro,
zed_llm_client::Plan::ZedProTrial => Plan::ZedProTrial,
};

View File

@@ -5,8 +5,8 @@ use std::sync::Arc;
use anyhow::{Result, anyhow};
use collections::HashMap;
use copilot::copilot_chat::{
ChatMessage, ChatMessageContent, CopilotChat, ImageUrl, Model as CopilotChatModel, ModelVendor,
Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall,
ChatMessage, CopilotChat, Model as CopilotChatModel, Request as CopilotChatRequest,
ResponseEvent, Tool, ToolCall,
};
use copilot::{Copilot, Status};
use futures::future::BoxFuture;
@@ -20,11 +20,12 @@ use language_model::{
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolSchemaFormat,
LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason,
LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolUse, MessageContent,
RateLimiter, Role, StopReason,
};
use settings::SettingsStore;
use std::time::Duration;
use strum::IntoEnumIterator;
use ui::prelude::*;
use super::anthropic::count_anthropic_tokens;
@@ -99,26 +100,17 @@ impl LanguageModelProvider for CopilotChatLanguageModelProvider {
IconName::Copilot
}
fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
let models = CopilotChat::global(cx).and_then(|m| m.read(cx).models())?;
models
.first()
.map(|model| self.create_language_model(model.clone()))
fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
Some(self.create_language_model(CopilotChatModel::default()))
}
fn default_fast_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
// The default model should be Copilot Chat's 'base model', which is likely a relatively fast
// model (e.g. 4o) and a sensible choice when considering premium requests
self.default_model(cx)
fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
Some(self.create_language_model(CopilotChatModel::default_fast()))
}
fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
let Some(models) = CopilotChat::global(cx).and_then(|m| m.read(cx).models()) else {
return Vec::new();
};
models
.iter()
.map(|model| self.create_language_model(model.clone()))
fn provided_models(&self, _cx: &App) -> Vec<Arc<dyn LanguageModel>> {
CopilotChatModel::iter()
.map(|model| self.create_language_model(model))
.collect()
}
@@ -195,15 +187,13 @@ impl LanguageModel for CopilotChatLanguageModel {
}
fn supports_tools(&self) -> bool {
self.model.supports_tools()
}
fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
match self.model.vendor() {
ModelVendor::OpenAI | ModelVendor::Anthropic => {
LanguageModelToolSchemaFormat::JsonSchema
}
ModelVendor::Google => LanguageModelToolSchemaFormat::JsonSchemaSubset,
match self.model {
CopilotChatModel::Gpt4o
| CopilotChatModel::Gpt4_1
| CopilotChatModel::O4Mini
| CopilotChatModel::Claude3_5Sonnet
| CopilotChatModel::Claude3_7Sonnet => true,
_ => false,
}
}
@@ -228,13 +218,25 @@ impl LanguageModel for CopilotChatLanguageModel {
request: LanguageModelRequest,
cx: &App,
) -> BoxFuture<'static, Result<usize>> {
match self.model.vendor() {
ModelVendor::Anthropic => count_anthropic_tokens(request, cx),
ModelVendor::Google => count_google_tokens(request, cx),
ModelVendor::OpenAI => {
let model = open_ai::Model::from_id(self.model.id()).unwrap_or_default();
count_open_ai_tokens(request, model, cx)
match self.model {
CopilotChatModel::Claude3_5Sonnet
| CopilotChatModel::Claude3_7Sonnet
| CopilotChatModel::Claude3_7SonnetThinking => count_anthropic_tokens(request, cx),
CopilotChatModel::Gemini20Flash | CopilotChatModel::Gemini25Pro => {
count_google_tokens(request, cx)
}
CopilotChatModel::Gpt4o => count_open_ai_tokens(request, open_ai::Model::FourOmni, cx),
CopilotChatModel::Gpt4 => count_open_ai_tokens(request, open_ai::Model::Four, cx),
CopilotChatModel::Gpt4_1 => {
count_open_ai_tokens(request, open_ai::Model::FourPointOne, cx)
}
CopilotChatModel::Gpt3_5Turbo => {
count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
}
CopilotChatModel::O1 => count_open_ai_tokens(request, open_ai::Model::O1, cx),
CopilotChatModel::O3Mini => count_open_ai_tokens(request, open_ai::Model::O3Mini, cx),
CopilotChatModel::O3 => count_open_ai_tokens(request, open_ai::Model::O3, cx),
CopilotChatModel::O4Mini => count_open_ai_tokens(request, open_ai::Model::O4Mini, cx),
}
}
@@ -366,34 +368,25 @@ pub fn map_to_language_model_completion_events(
}
Some("tool_calls") => {
events.extend(state.tool_calls_by_index.drain().map(
|(_, tool_call)| {
// The model can output an empty string
// to indicate the absence of arguments.
// When that happens, create an empty
// object instead.
let arguments = if tool_call.arguments.is_empty() {
Ok(serde_json::Value::Object(Default::default()))
} else {
serde_json::Value::from_str(&tool_call.arguments)
};
match arguments {
Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: tool_call.id.clone().into(),
name: tool_call.name.as_str().into(),
is_input_complete: true,
input,
raw_input: tool_call.arguments.clone(),
},
)),
Err(error) => {
Err(LanguageModelCompletionError::BadInputJson {
id: tool_call.id.into(),
tool_name: tool_call.name.as_str().into(),
raw_input: tool_call.arguments.into(),
json_parse_error: error.to_string(),
})
}
|(_, tool_call)| match serde_json::Value::from_str(
&tool_call.arguments,
) {
Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: tool_call.id.clone().into(),
name: tool_call.name.as_str().into(),
is_input_complete: true,
input,
raw_input: tool_call.arguments.clone(),
},
)),
Err(error) => {
Err(LanguageModelCompletionError::BadInputJson {
id: tool_call.id.into(),
tool_name: tool_call.name.as_str().into(),
raw_input: tool_call.arguments.into(),
json_parse_error: error.to_string(),
})
}
},
));
@@ -428,6 +421,8 @@ impl CopilotChatLanguageModel {
&self,
request: LanguageModelRequest,
) -> Result<CopilotChatRequest> {
let model = self.model.clone();
let mut request_messages: Vec<LanguageModelRequestMessage> = Vec::new();
for message in request.messages {
if let Some(last_message) = request_messages.last_mut() {
@@ -444,6 +439,23 @@ impl CopilotChatLanguageModel {
let mut tool_called = false;
let mut messages: Vec<ChatMessage> = Vec::new();
for message in request_messages {
let text_content = {
let mut buffer = String::new();
for string in message.content.iter().filter_map(|content| match content {
MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
Some(text.as_str())
}
MessageContent::ToolUse(_)
| MessageContent::RedactedThinking(_)
| MessageContent::ToolResult(_)
| MessageContent::Image(_) => None,
}) {
buffer.push_str(string);
}
buffer
};
match message.role {
Role::User => {
for content in &message.content {
@@ -455,36 +467,9 @@ impl CopilotChatLanguageModel {
}
}
let mut content_parts = Vec::new();
for content in &message.content {
match content {
MessageContent::Text(text) | MessageContent::Thinking { text, .. }
if !text.is_empty() =>
{
if let Some(ChatMessageContent::Text { text: text_content }) =
content_parts.last_mut()
{
text_content.push_str(text);
} else {
content_parts.push(ChatMessageContent::Text {
text: text.to_string(),
});
}
}
MessageContent::Image(image) if self.model.supports_vision() => {
content_parts.push(ChatMessageContent::Image {
image_url: ImageUrl {
url: image.to_base64_url(),
},
});
}
_ => {}
}
}
if !content_parts.is_empty() {
if !text_content.is_empty() {
messages.push(ChatMessage::User {
content: content_parts,
content: text_content,
});
}
}
@@ -505,23 +490,6 @@ impl CopilotChatLanguageModel {
}
}
let text_content = {
let mut buffer = String::new();
for string in message.content.iter().filter_map(|content| match content {
MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
Some(text.as_str())
}
MessageContent::ToolUse(_)
| MessageContent::RedactedThinking(_)
| MessageContent::ToolResult(_)
| MessageContent::Image(_) => None,
}) {
buffer.push_str(string);
}
buffer
};
messages.push(ChatMessage::Assistant {
content: if text_content.is_empty() {
None
@@ -568,9 +536,9 @@ impl CopilotChatLanguageModel {
Ok(CopilotChatRequest {
intent: true,
n: 1,
stream: self.model.uses_streaming(),
stream: model.uses_streaming(),
temperature: 0.1,
model: self.model.id().to_string(),
model,
messages,
tools,
tool_choice: request.tool_choice.map(|choice| match choice {

View File

@@ -38,10 +38,8 @@ impl IntoElement for InstructionListItem {
(self.button_label, self.button_link)
{
let link = button_link.clone();
let unique_id = SharedString::from(format!("{}-button", self.label));
h_flex().flex_wrap().child(Label::new(self.label)).child(
Button::new(unique_id, button_label)
Button::new("link-button", button_label)
.style(ButtonStyle::Subtle)
.icon(IconName::ArrowUpRight)
.icon_size(IconSize::XSmall)

View File

@@ -1238,12 +1238,12 @@ impl Render for LspLogToolbarItemView {
}
});
let available_language_servers: Vec<_> = menu_rows
.into_iter()
.iter()
.map(|row| {
(
row.server_id,
row.server_name,
row.worktree_root_name,
row.server_name.clone(),
row.worktree_root_name.clone(),
row.selected_entry,
)
})

View File

@@ -77,8 +77,6 @@
(comment) @comment
(hash_bang_line) @comment
[
(string)
(template_string)

View File

@@ -104,8 +104,6 @@
(comment) @comment
(hash_bang_line) @comment
[
(string)
(template_string)

Some files were not shown because too many files have changed in this diff