Compare commits

..

4 Commits

Author SHA1 Message Date
Antonio Scandurra
de8f1c3c84 WIP 2025-06-30 21:01:37 +02:00
Antonio Scandurra
9351e959a5 WIP 2025-06-30 19:32:42 +02:00
Antonio Scandurra
dac0838a80 WIP 2025-06-30 14:24:24 +02:00
Antonio Scandurra
38d5f36d38 WIP 2025-06-30 12:15:19 +02:00
200 changed files with 7295 additions and 14241 deletions

64
Cargo.lock generated
View File

@@ -78,6 +78,7 @@ dependencies = [
"language",
"language_model",
"log",
"markdown",
"parking_lot",
"paths",
"postage",
@@ -107,39 +108,6 @@ dependencies = [
"zstd",
]
[[package]]
name = "agent2"
version = "0.1.0"
dependencies = [
"anyhow",
"assistant_tool",
"assistant_tools",
"chrono",
"client",
"collections",
"ctor",
"env_logger 0.11.8",
"fs",
"futures 0.3.31",
"gpui",
"gpui_tokio",
"handlebars 4.5.0",
"language_model",
"language_models",
"parking_lot",
"project",
"reqwest_client",
"rust-embed",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"thiserror 2.0.12",
"util",
"worktree",
]
[[package]]
name = "agent_settings"
version = "0.1.0"
@@ -1944,6 +1912,7 @@ dependencies = [
"serde_json",
"strum 0.27.1",
"thiserror 2.0.12",
"tokio",
"workspace-hack",
]
@@ -4165,7 +4134,7 @@ dependencies = [
[[package]]
name = "dap-types"
version = "0.0.1"
source = "git+https://github.com/zed-industries/dap-types?rev=7f39295b441614ca9dbf44293e53c32f666897f9#7f39295b441614ca9dbf44293e53c32f666897f9"
source = "git+https://github.com/zed-industries/dap-types?rev=b40956a7f4d1939da67429d941389ee306a3a308#b40956a7f4d1939da67429d941389ee306a3a308"
dependencies = [
"schemars",
"serde",
@@ -4846,7 +4815,6 @@ dependencies = [
"pretty_assertions",
"project",
"rand 0.8.5",
"regex",
"release_channel",
"rpc",
"schemars",
@@ -8880,7 +8848,6 @@ dependencies = [
"http_client",
"imara-diff",
"indoc",
"inventory",
"itertools 0.14.0",
"log",
"lsp",
@@ -8979,10 +8946,8 @@ dependencies = [
"aws-credential-types",
"aws_http_client",
"bedrock",
"chrono",
"client",
"collections",
"component",
"copilot",
"credentials_provider",
"deepseek",
@@ -14089,13 +14054,12 @@ dependencies = [
[[package]]
name = "schemars"
version = "1.0.1"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe8c9d1c68d67dd9f97ecbc6f932b60eb289c5dbddd8aa1405484a8fd2fcd984"
checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
dependencies = [
"dyn-clone",
"indexmap",
"ref-cast",
"schemars_derive",
"serde",
"serde_json",
@@ -14103,9 +14067,9 @@ dependencies = [
[[package]]
name = "schemars_derive"
version = "1.0.1"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ca9fcb757952f8e8629b9ab066fc62da523c46c2b247b1708a3be06dd82530b"
checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d"
dependencies = [
"proc-macro2",
"quote",
@@ -14604,22 +14568,13 @@ dependencies = [
name = "settings_ui"
version = "0.1.0"
dependencies = [
"collections",
"command_palette",
"command_palette_hooks",
"component",
"db",
"editor",
"feature_flags",
"fs",
"fuzzy",
"gpui",
"log",
"menu",
"paths",
"project",
"schemars",
"search",
"serde",
"settings",
"theme",
@@ -16056,7 +16011,6 @@ dependencies = [
"futures 0.3.31",
"gpui",
"indexmap",
"inventory",
"log",
"palette",
"parking_lot",
@@ -20174,9 +20128,9 @@ dependencies = [
[[package]]
name = "zed_llm_client"
version = "0.8.5"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c740e29260b8797ad252c202ea09a255b3cbc13f30faaf92fb6b2490336106e0"
checksum = "de7d9523255f4e00ee3d0918e5407bd252d798a4a8e71f6d37f23317a1588203"
dependencies = [
"anyhow",
"serde",

View File

@@ -4,7 +4,6 @@ members = [
"crates/activity_indicator",
"crates/agent_ui",
"crates/agent",
"crates/agent2",
"crates/agent_settings",
"crates/anthropic",
"crates/askpass",
@@ -445,7 +444,7 @@ core-video = { version = "0.4.3", features = ["metal"] }
cpal = "0.16"
criterion = { version = "0.5", features = ["html_reports"] }
ctor = "0.4.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "7f39295b441614ca9dbf44293e53c32f666897f9" }
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "b40956a7f4d1939da67429d941389ee306a3a308" }
dashmap = "6.0"
derive_more = "0.99.17"
dirs = "4.0"
@@ -541,7 +540,7 @@ rustc-hash = "2.1.0"
rustls = { version = "0.23.26" }
rustls-platform-verifier = "0.5.0"
scap = { git = "https://github.com/zed-industries/scap", rev = "08f0a01417505cc0990b9931a37e5120db92e0d0", default-features = false }
schemars = { version = "1.0", features = ["indexmap2"] }
schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -626,7 +625,7 @@ wasmtime = { version = "29", default-features = false, features = [
wasmtime-wasi = "29"
which = "6.0.0"
workspace-hack = "0.1.0"
zed_llm_client = "0.8.5"
zed_llm_client = "0.8.4"
zstd = "0.11"
[workspace.dependencies.async-stripe]

View File

@@ -1067,12 +1067,5 @@
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
},
{
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"ctrl-f": "search::FocusSearch"
}
}
]

View File

@@ -1167,12 +1167,5 @@
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
},
{
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-f": "search::FocusSearch"
}
}
]

View File

@@ -210,8 +210,7 @@
"ctrl-w space": "editor::OpenExcerptsSplit",
"ctrl-w g space": "editor::OpenExcerptsSplit",
"ctrl-6": "pane::AlternateFile",
"ctrl-^": "pane::AlternateFile",
".": "vim::Repeat"
"ctrl-^": "pane::AlternateFile"
}
},
{
@@ -220,6 +219,7 @@
"ctrl-[": "editor::Cancel",
"escape": "editor::Cancel",
":": "command_palette::Toggle",
".": "vim::Repeat",
"c": "vim::PushChange",
"shift-c": "vim::ChangeToEndOfLine",
"d": "vim::PushDelete",
@@ -849,25 +849,6 @@
"shift-u": "git::UnstageAll"
}
},
{
"context": "Editor && mode == auto_height && VimControl",
"bindings": {
// TODO: Implement search
"/": null,
"?": null,
"#": null,
"*": null,
"n": null,
"shift-n": null
}
},
{
"context": "GitCommit > Editor && VimControl && vim_mode == normal",
"bindings": {
"ctrl-c": "menu::Cancel",
"escape": "menu::Cancel"
}
},
{
"context": "Editor && edit_prediction",
"bindings": {
@@ -879,7 +860,14 @@
{
"context": "MessageEditor > Editor && VimControl",
"bindings": {
"enter": "agent::Chat"
"enter": "agent::Chat",
// TODO: Implement search
"/": null,
"?": null,
"#": null,
"*": null,
"n": null,
"shift-n": null
}
},
{

View File

@@ -42,6 +42,7 @@ itertools.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
markdown.workspace = true
paths.workspace = true
postage.workspace = true
project.workspace = true

View File

@@ -1,3 +1,4 @@
mod agent2;
pub mod agent_profile;
pub mod context;
pub mod context_server_tool;
@@ -5,15 +6,17 @@ pub mod context_store;
pub mod history_store;
pub mod thread;
pub mod thread_store;
pub mod tool_use;
mod zed_agent;
pub use agent2::*;
pub use context::{AgentContext, ContextId, ContextLoadResult};
pub use context_store::ContextStore;
pub use thread::{
LastRestoreCheckpoint, Message, MessageCrease, MessageId, MessageSegment, Thread, ThreadError,
ThreadEvent, ThreadFeedback, ThreadId, ThreadSummary, TokenUsageRatio,
LastRestoreCheckpoint, Message, MessageCrease, MessageSegment, Thread, ThreadError,
ThreadEvent, ThreadFeedback, ThreadTitle, TokenUsageRatio,
};
pub use thread_store::{SerializedThread, TextThreadStore, ThreadStore};
pub use zed_agent::*;
pub fn init(cx: &mut gpui::App) {
thread_store::init(cx);

117
crates/agent/src/agent2.rs Normal file
View File

@@ -0,0 +1,117 @@
use anyhow::Result;
use assistant_tool::{Tool, ToolResultOutput};
use futures::{channel::oneshot, future::BoxFuture, stream::BoxStream};
use gpui::SharedString;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{
fmt::{self, Display},
sync::Arc,
};
/// Stable identifier for a conversation thread.
///
/// Wraps a [`SharedString`] so clones are cheap (reference-counted), and the
/// derived ordering/hashing/serde behavior all follow the underlying string.
#[derive(
    Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema,
)]
pub struct ThreadId(SharedString);

impl ThreadId {
    /// Borrows the id as a plain `&str`.
    pub fn as_str(&self) -> &str {
        &self.0
    }
    // Note: no inherent `to_string` — the `Display` impl below provides
    // `ThreadId::to_string()` via the blanket `ToString` impl, so defining one
    // here would only shadow it (clippy: inherent_to_string_shadow_display).
    // Callers using `.to_string()` are unaffected by its removal.
}

impl From<&str> for ThreadId {
    fn from(value: &str) -> Self {
        ThreadId(SharedString::from(value.to_string()))
    }
}

impl Display for ThreadId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// Position-style identifier of a message within a thread.
/// NOTE(review): presumably an index into the thread's message list (it is
/// `pub usize` and ordered) — confirm against the thread implementation.
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct MessageId(pub usize);
/// Opaque identifier correlating a tool call with its eventual result.
/// NOTE(review): derives only `Debug, Clone` — if this is meant to key maps
/// like other ids in this module, it will also need `PartialEq, Eq, Hash`;
/// likely a WIP omission — confirm.
#[derive(Debug, Clone)]
pub struct AgentThreadToolCallId(SharedString);
/// One item of the event stream produced while the assistant generates a turn
/// (see [`AgentThreadResponse::events`]).
pub enum AgentThreadResponseEvent {
    /// A chunk of plain assistant output text.
    Text(String),
    /// A chunk of the model's "thinking" / reasoning text.
    Thinking(String),
    /// NOTE(review): presumably an incremental update while a tool call's
    /// input is still streaming (it has no response channel) — confirm.
    ToolCallChunk {
        id: AgentThreadToolCallId,
        tool: Arc<dyn Tool>,
        input: serde_json::Value,
    },
    /// A finalized tool call; the consumer runs the tool and reports the
    /// outcome back through `response_tx`.
    ToolCall {
        id: AgentThreadToolCallId,
        tool: Arc<dyn Tool>,
        input: serde_json::Value,
        // Oneshot: exactly one result is expected per tool call.
        response_tx: oneshot::Sender<Result<ToolResultOutput>>,
    },
}
/// A message in a thread's transcript, tagged by author role.
/// Each variant carries its [`MessageId`] plus the ordered content chunks.
pub enum AgentThreadMessage {
    User {
        id: MessageId,
        chunks: Vec<AgentThreadUserMessageChunk>,
    },
    Assistant {
        id: MessageId,
        chunks: Vec<AgentThreadAssistantMessageChunk>,
    },
}
/// A piece of user-authored message content. Currently text-only.
pub enum AgentThreadUserMessageChunk {
    Text(String),
    // here's where we would put mentions, etc.
}
/// A piece of assistant-authored message content, mirroring the streamed
/// [`AgentThreadResponseEvent`]s after a turn has completed.
pub enum AgentThreadAssistantMessageChunk {
    /// Plain output text.
    Text(String),
    /// Reasoning / "thinking" text.
    Thinking(String),
    /// A tool call together with its recorded outcome.
    ToolResult {
        id: AgentThreadToolCallId,
        tool: Arc<dyn Tool>,
        input: serde_json::Value,
        output: Result<ToolResultOutput>,
    },
}
/// Handle returned when a turn starts: the ids of the user message that was
/// appended and of the assistant message being generated, plus the live
/// stream of generation events.
pub struct AgentThreadResponse {
    pub user_message_id: MessageId,
    pub assistant_message_id: MessageId,
    /// Stream of events for this turn; each item may itself be an error.
    pub events: BoxStream<'static, Result<AgentThreadResponseEvent>>,
}
/// Factory/registry for [`AgentThread`]s.
///
/// NOTE(review): `create_thread` takes no `&self` and `list_threads` takes no
/// `&self` and returns `()` — both look like WIP stubs (this file was added in
/// a WIP commit); confirm the intended signatures before implementing.
pub trait Agent {
    fn create_thread() -> BoxFuture<'static, Result<Arc<dyn AgentThread>>>;
    fn list_threads();
    /// Loads an existing thread by id.
    fn load_thread(&self, thread_id: ThreadId) -> BoxFuture<'static, Result<Arc<dyn AgentThread>>>;
}
/// A single conversation thread, independent of the backing agent
/// implementation. All accessors are async (boxed futures) so remote or
/// persisted backends can implement the trait.
pub trait AgentThread {
    /// Stable identifier of this thread.
    fn id(&self) -> ThreadId;
    /// Short human-readable title.
    fn title(&self) -> BoxFuture<'static, Result<String>>;
    /// Longer summary of the conversation.
    fn summary(&self) -> BoxFuture<'static, Result<String>>;
    /// Full transcript of the thread.
    fn messages(&self) -> BoxFuture<'static, Result<Vec<AgentThreadMessage>>>;
    /// NOTE(review): presumably discards messages from `message_id` onward —
    /// confirm whether the boundary is inclusive.
    fn truncate(&self, message_id: MessageId) -> BoxFuture<'static, Result<()>>;
    /// Replaces the content of an existing user message and regenerates the
    /// assistant response, running at most `max_iterations` tool-use rounds.
    fn edit(
        &self,
        message_id: MessageId,
        content: Vec<AgentThreadUserMessageChunk>,
        max_iterations: usize,
    ) -> BoxFuture<'static, Result<AgentThreadResponse>>;
    /// Appends a new user message and starts generating a response, running
    /// at most `max_iterations` tool-use rounds.
    fn send(
        &self,
        content: Vec<AgentThreadUserMessageChunk>,
        max_iterations: usize,
    ) -> BoxFuture<'static, Result<AgentThreadResponse>>;
}

View File

@@ -96,11 +96,16 @@ impl AgentProfile {
fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool {
match source {
ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false),
ToolSource::ContextServer { id } => settings
.context_servers
.get(id.as_ref())
.and_then(|preset| preset.tools.get(name.as_str()).copied())
.unwrap_or(settings.enable_all_context_servers),
ToolSource::ContextServer { id } => {
if settings.enable_all_context_servers {
return true;
}
let Some(preset) = settings.context_servers.get(id.as_ref()) else {
return false;
};
*preset.tools.get(name.as_str()).unwrap_or(&false)
}
}
}
}
@@ -111,7 +116,7 @@ mod tests {
use assistant_tool::ToolRegistry;
use collections::IndexMap;
use gpui::SharedString;
use gpui::TestAppContext;
use gpui::{AppContext, TestAppContext};
use http_client::FakeHttpClient;
use project::Project;
use settings::{Settings, SettingsStore};

View File

@@ -581,7 +581,7 @@ impl ThreadContextHandle {
}
pub fn title(&self, cx: &App) -> SharedString {
self.thread.read(cx).summary().or_default()
self.thread.read(cx).title().or_default()
}
fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
@@ -589,7 +589,7 @@ impl ThreadContextHandle {
let text = Thread::wait_for_detailed_summary_or_text(&self.thread, cx).await?;
let title = self
.thread
.read_with(cx, |thread, _cx| thread.summary().or_default())
.read_with(cx, |thread, _cx| thread.title().or_default())
.ok()?;
let context = AgentContext::Thread(ThreadContext {
title,

View File

@@ -1,10 +1,11 @@
use crate::{
MessageId, ThreadId,
context::{
AgentContextHandle, AgentContextKey, ContextId, ContextKind, DirectoryContextHandle,
FetchedUrlContext, FileContextHandle, ImageContext, RulesContextHandle,
SelectionContextHandle, SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle,
},
thread::{MessageId, Thread, ThreadId},
thread::Thread,
thread_store::ThreadStore,
};
use anyhow::{Context as _, Result, anyhow};
@@ -71,7 +72,8 @@ impl ContextStore {
) -> Vec<AgentContextHandle> {
let existing_context = thread
.messages()
.take_while(|message| exclude_messages_from_id.is_none_or(|id| message.id != id))
.iter()
.take_while(|message| exclude_messages_from_id.is_none_or(|id| message.id() != id))
.flat_map(|message| {
message
.loaded_context
@@ -441,7 +443,7 @@ impl ContextStore {
match context {
AgentContextHandle::Thread(thread_context) => {
self.context_thread_ids
.remove(thread_context.thread.read(cx).id());
.remove(&thread_context.thread.read(cx).id());
}
AgentContextHandle::TextThread(text_thread_context) => {
if let Some(path) = text_thread_context.context.read(cx).path() {

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +1,7 @@
use crate::{
MessageId, ThreadId,
context_server_tool::ContextServerTool,
thread::{
DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId,
},
thread::{DetailedSummaryState, ExceededWindowError, ProjectSnapshot, Thread},
};
use agent_settings::{AgentProfileId, CompletionMode};
use anyhow::{Context as _, Result, anyhow};
@@ -400,35 +399,17 @@ impl ThreadStore {
self.threads.iter()
}
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
cx.new(|cx| {
Thread::new(
self.project.clone(),
self.tools.clone(),
self.prompt_builder.clone(),
self.project_context.clone(),
cx,
)
})
}
pub fn create_thread_from_serialized(
&mut self,
serialized: SerializedThread,
cx: &mut Context<Self>,
) -> Entity<Thread> {
cx.new(|cx| {
Thread::deserialize(
ThreadId::new(),
serialized,
self.project.clone(),
self.tools.clone(),
self.prompt_builder.clone(),
self.project_context.clone(),
None,
cx,
)
})
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Thread>>> {
todo!()
// cx.new(|cx| {
// Thread::new(
// self.project.clone(),
// self.tools.clone(),
// self.prompt_builder.clone(),
// self.project_context.clone(),
// cx,
// )
// })
}
pub fn open_thread(
@@ -447,51 +428,53 @@ impl ThreadStore {
.await?
.with_context(|| format!("no thread found with ID: {id:?}"))?;
let thread = this.update_in(cx, |this, window, cx| {
cx.new(|cx| {
Thread::deserialize(
id.clone(),
thread,
this.project.clone(),
this.tools.clone(),
this.prompt_builder.clone(),
this.project_context.clone(),
Some(window),
cx,
)
})
})?;
Ok(thread)
todo!();
// let thread = this.update_in(cx, |this, window, cx| {
// cx.new(|cx| {
// Thread::deserialize(
// id.clone(),
// thread,
// this.project.clone(),
// this.tools.clone(),
// this.prompt_builder.clone(),
// this.project_context.clone(),
// Some(window),
// cx,
// )
// })
// })?;
// Ok(thread)
})
}
pub fn save_thread(&self, thread: &Entity<Thread>, cx: &mut Context<Self>) -> Task<Result<()>> {
let (metadata, serialized_thread) =
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
todo!()
// let (metadata, serialized_thread) =
// thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(async move |this, cx| {
let serialized_thread = serialized_thread.await?;
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.save_thread(metadata, serialized_thread).await?;
// let database_future = ThreadsDatabase::global_future(cx);
// cx.spawn(async move |this, cx| {
// let serialized_thread = serialized_thread.await?;
// let database = database_future.await.map_err(|err| anyhow!(err))?;
// database.save_thread(metadata, serialized_thread).await?;
this.update(cx, |this, cx| this.reload(cx))?.await
})
// this.update(cx, |this, cx| this.reload(cx))?.await
// })
}
pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(async move |this, cx| {
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.delete_thread(id.clone()).await?;
todo!()
// let id = id.clone();
// let database_future = ThreadsDatabase::global_future(cx);
// cx.spawn(async move |this, cx| {
// let database = database_future.await.map_err(|err| anyhow!(err))?;
// database.delete_thread(id.clone()).await?;
this.update(cx, |this, cx| {
this.threads.retain(|thread| thread.id != id);
cx.notify();
})
})
// this.update(cx, |this, cx| {
// this.threads.retain(|thread| thread.id != id);
// cx.notify();
// })
// })
}
pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
@@ -1067,7 +1050,7 @@ impl ThreadsDatabase {
#[cfg(test)]
mod tests {
use super::*;
use crate::thread::{DetailedSummaryState, MessageId};
use crate::{MessageId, thread::DetailedSummaryState};
use chrono::Utc;
use language_model::{Role, TokenUsage};
use pretty_assertions::assert_eq;

View File

@@ -1,567 +0,0 @@
use crate::{
thread::{MessageId, PromptId, ThreadId},
thread_store::SerializedMessage,
};
use anyhow::Result;
use assistant_tool::{
AnyToolCard, Tool, ToolResultContent, ToolResultOutput, ToolUseStatus, ToolWorkingSet,
};
use collections::HashMap;
use futures::{FutureExt as _, future::Shared};
use gpui::{App, Entity, SharedString, Task, Window};
use icons::IconName;
use language_model::{
ConfiguredModel, LanguageModel, LanguageModelRequest, LanguageModelToolResult,
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, Role,
};
use project::Project;
use std::sync::Arc;
use util::truncate_lines_to_byte_limit;
#[derive(Debug)]
pub struct ToolUse {
pub id: LanguageModelToolUseId,
pub name: SharedString,
pub ui_text: SharedString,
pub status: ToolUseStatus,
pub input: serde_json::Value,
pub icon: icons::IconName,
pub needs_confirmation: bool,
}
pub struct ToolUseState {
tools: Entity<ToolWorkingSet>,
tool_uses_by_assistant_message: HashMap<MessageId, Vec<LanguageModelToolUse>>,
tool_results: HashMap<LanguageModelToolUseId, LanguageModelToolResult>,
pending_tool_uses_by_id: HashMap<LanguageModelToolUseId, PendingToolUse>,
tool_result_cards: HashMap<LanguageModelToolUseId, AnyToolCard>,
tool_use_metadata_by_id: HashMap<LanguageModelToolUseId, ToolUseMetadata>,
}
impl ToolUseState {
pub fn new(tools: Entity<ToolWorkingSet>) -> Self {
Self {
tools,
tool_uses_by_assistant_message: HashMap::default(),
tool_results: HashMap::default(),
pending_tool_uses_by_id: HashMap::default(),
tool_result_cards: HashMap::default(),
tool_use_metadata_by_id: HashMap::default(),
}
}
/// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s.
///
/// Accepts a function to filter the tools that should be used to populate the state.
///
/// If `window` is `None` (e.g., when in headless mode or when running evals),
/// tool cards won't be deserialized
pub fn from_serialized_messages(
tools: Entity<ToolWorkingSet>,
messages: &[SerializedMessage],
project: Entity<Project>,
window: Option<&mut Window>, // None in headless mode
cx: &mut App,
) -> Self {
let mut this = Self::new(tools);
let mut tool_names_by_id = HashMap::default();
let mut window = window;
for message in messages {
match message.role {
Role::Assistant => {
if !message.tool_uses.is_empty() {
let tool_uses = message
.tool_uses
.iter()
.map(|tool_use| LanguageModelToolUse {
id: tool_use.id.clone(),
name: tool_use.name.clone().into(),
raw_input: tool_use.input.to_string(),
input: tool_use.input.clone(),
is_input_complete: true,
})
.collect::<Vec<_>>();
tool_names_by_id.extend(
tool_uses
.iter()
.map(|tool_use| (tool_use.id.clone(), tool_use.name.clone())),
);
this.tool_uses_by_assistant_message
.insert(message.id, tool_uses);
for tool_result in &message.tool_results {
let tool_use_id = tool_result.tool_use_id.clone();
let Some(tool_use) = tool_names_by_id.get(&tool_use_id) else {
log::warn!("no tool name found for tool use: {tool_use_id:?}");
continue;
};
this.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
tool_name: tool_use.clone(),
is_error: tool_result.is_error,
content: tool_result.content.clone(),
output: tool_result.output.clone(),
},
);
if let Some(window) = &mut window {
if let Some(tool) = this.tools.read(cx).tool(tool_use, cx) {
if let Some(output) = tool_result.output.clone() {
if let Some(card) = tool.deserialize_card(
output,
project.clone(),
window,
cx,
) {
this.tool_result_cards.insert(tool_use_id, card);
}
}
}
}
}
}
}
Role::System | Role::User => {}
}
}
this
}
pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
let mut cancelled_tool_uses = Vec::new();
self.pending_tool_uses_by_id
.retain(|tool_use_id, tool_use| {
if matches!(tool_use.status, PendingToolUseStatus::Error { .. }) {
return true;
}
let content = "Tool canceled by user".into();
self.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
tool_name: tool_use.name.clone(),
content,
output: None,
is_error: true,
},
);
cancelled_tool_uses.push(tool_use.clone());
false
});
cancelled_tool_uses
}
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
self.pending_tool_uses_by_id.values().collect()
}
pub fn tool_uses_for_message(&self, id: MessageId, cx: &App) -> Vec<ToolUse> {
let Some(tool_uses_for_message) = &self.tool_uses_by_assistant_message.get(&id) else {
return Vec::new();
};
let mut tool_uses = Vec::new();
for tool_use in tool_uses_for_message.iter() {
let tool_result = self.tool_results.get(&tool_use.id);
let status = (|| {
if let Some(tool_result) = tool_result {
let content = tool_result
.content
.to_str()
.map(|str| str.to_owned().into())
.unwrap_or_default();
return if tool_result.is_error {
ToolUseStatus::Error(content)
} else {
ToolUseStatus::Finished(content)
};
}
if let Some(pending_tool_use) = self.pending_tool_uses_by_id.get(&tool_use.id) {
match pending_tool_use.status {
PendingToolUseStatus::Idle => ToolUseStatus::Pending,
PendingToolUseStatus::NeedsConfirmation { .. } => {
ToolUseStatus::NeedsConfirmation
}
PendingToolUseStatus::Running { .. } => ToolUseStatus::Running,
PendingToolUseStatus::Error(ref err) => {
ToolUseStatus::Error(err.clone().into())
}
PendingToolUseStatus::InputStillStreaming => {
ToolUseStatus::InputStillStreaming
}
}
} else {
ToolUseStatus::Pending
}
})();
let (icon, needs_confirmation) =
if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) {
(tool.icon(), tool.needs_confirmation(&tool_use.input, cx))
} else {
(IconName::Cog, false)
};
tool_uses.push(ToolUse {
id: tool_use.id.clone(),
name: tool_use.name.clone().into(),
ui_text: self.tool_ui_label(
&tool_use.name,
&tool_use.input,
tool_use.is_input_complete,
cx,
),
input: tool_use.input.clone(),
status,
icon,
needs_confirmation,
})
}
tool_uses
}
pub fn tool_ui_label(
&self,
tool_name: &str,
input: &serde_json::Value,
is_input_complete: bool,
cx: &App,
) -> SharedString {
if let Some(tool) = self.tools.read(cx).tool(tool_name, cx) {
if is_input_complete {
tool.ui_text(input).into()
} else {
tool.still_streaming_ui_text(input).into()
}
} else {
format!("Unknown tool {tool_name:?}").into()
}
}
pub fn tool_results_for_message(
&self,
assistant_message_id: MessageId,
) -> Vec<&LanguageModelToolResult> {
let Some(tool_uses) = self
.tool_uses_by_assistant_message
.get(&assistant_message_id)
else {
return Vec::new();
};
tool_uses
.iter()
.filter_map(|tool_use| self.tool_results.get(&tool_use.id))
.collect()
}
pub fn message_has_tool_results(&self, assistant_message_id: MessageId) -> bool {
self.tool_uses_by_assistant_message
.get(&assistant_message_id)
.map_or(false, |results| !results.is_empty())
}
pub fn tool_result(
&self,
tool_use_id: &LanguageModelToolUseId,
) -> Option<&LanguageModelToolResult> {
self.tool_results.get(tool_use_id)
}
pub fn tool_result_card(&self, tool_use_id: &LanguageModelToolUseId) -> Option<&AnyToolCard> {
self.tool_result_cards.get(tool_use_id)
}
pub fn insert_tool_result_card(
&mut self,
tool_use_id: LanguageModelToolUseId,
card: AnyToolCard,
) {
self.tool_result_cards.insert(tool_use_id, card);
}
pub fn request_tool_use(
&mut self,
assistant_message_id: MessageId,
tool_use: LanguageModelToolUse,
metadata: ToolUseMetadata,
cx: &App,
) -> Arc<str> {
let tool_uses = self
.tool_uses_by_assistant_message
.entry(assistant_message_id)
.or_default();
let mut existing_tool_use_found = false;
for existing_tool_use in tool_uses.iter_mut() {
if existing_tool_use.id == tool_use.id {
*existing_tool_use = tool_use.clone();
existing_tool_use_found = true;
}
}
if !existing_tool_use_found {
tool_uses.push(tool_use.clone());
}
let status = if tool_use.is_input_complete {
self.tool_use_metadata_by_id
.insert(tool_use.id.clone(), metadata);
PendingToolUseStatus::Idle
} else {
PendingToolUseStatus::InputStillStreaming
};
let ui_text: Arc<str> = self
.tool_ui_label(
&tool_use.name,
&tool_use.input,
tool_use.is_input_complete,
cx,
)
.into();
let may_perform_edits = self
.tools
.read(cx)
.tool(&tool_use.name, cx)
.is_some_and(|tool| tool.may_perform_edits());
self.pending_tool_uses_by_id.insert(
tool_use.id.clone(),
PendingToolUse {
assistant_message_id,
id: tool_use.id,
name: tool_use.name.clone(),
ui_text: ui_text.clone(),
input: tool_use.input,
may_perform_edits,
status,
},
);
ui_text
}
pub fn run_pending_tool(
&mut self,
tool_use_id: LanguageModelToolUseId,
ui_text: SharedString,
task: Task<()>,
) {
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
tool_use.ui_text = ui_text.into();
tool_use.status = PendingToolUseStatus::Running {
_task: task.shared(),
};
}
}
pub fn confirm_tool_use(
&mut self,
tool_use_id: LanguageModelToolUseId,
ui_text: impl Into<Arc<str>>,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
tool: Arc<dyn Tool>,
) {
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
let ui_text = ui_text.into();
tool_use.ui_text = ui_text.clone();
let confirmation = Confirmation {
tool_use_id,
input,
request,
tool,
ui_text,
};
tool_use.status = PendingToolUseStatus::NeedsConfirmation(Arc::new(confirmation));
}
}
pub fn insert_tool_output(
&mut self,
tool_use_id: LanguageModelToolUseId,
tool_name: Arc<str>,
output: Result<ToolResultOutput>,
configured_model: Option<&ConfiguredModel>,
) -> Option<PendingToolUse> {
let metadata = self.tool_use_metadata_by_id.remove(&tool_use_id);
telemetry::event!(
"Agent Tool Finished",
model = metadata
.as_ref()
.map(|metadata| metadata.model.telemetry_id()),
model_provider = metadata
.as_ref()
.map(|metadata| metadata.model.provider_id().to_string()),
thread_id = metadata.as_ref().map(|metadata| metadata.thread_id.clone()),
prompt_id = metadata.as_ref().map(|metadata| metadata.prompt_id.clone()),
tool_name,
success = output.is_ok()
);
match output {
Ok(output) => {
let tool_result = output.content;
const BYTES_PER_TOKEN_ESTIMATE: usize = 3;
let old_use = self.pending_tool_uses_by_id.remove(&tool_use_id);
// Protect from overly large output
let tool_output_limit = configured_model
.map(|model| model.model.max_token_count() as usize * BYTES_PER_TOKEN_ESTIMATE)
.unwrap_or(usize::MAX);
let content = match tool_result {
ToolResultContent::Text(text) => {
let text = if text.len() < tool_output_limit {
text
} else {
let truncated = truncate_lines_to_byte_limit(&text, tool_output_limit);
format!(
"Tool result too long. The first {} bytes:\n\n{}",
truncated.len(),
truncated
)
};
LanguageModelToolResultContent::Text(text.into())
}
ToolResultContent::Image(language_model_image) => {
if language_model_image.estimate_tokens() < tool_output_limit {
LanguageModelToolResultContent::Image(language_model_image)
} else {
self.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
tool_name,
content: "Tool responded with an image that would exceeded the remaining tokens".into(),
is_error: true,
output: None,
},
);
return old_use;
}
}
};
self.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
tool_name,
content,
is_error: false,
output: output.output,
},
);
old_use
}
Err(err) => {
self.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
tool_name,
content: LanguageModelToolResultContent::Text(err.to_string().into()),
is_error: true,
output: None,
},
);
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
tool_use.status = PendingToolUseStatus::Error(err.to_string().into());
}
self.pending_tool_uses_by_id.get(&tool_use_id).cloned()
}
}
}
pub fn has_tool_results(&self, assistant_message_id: MessageId) -> bool {
self.tool_uses_by_assistant_message
.contains_key(&assistant_message_id)
}
pub fn tool_results(
&self,
assistant_message_id: MessageId,
) -> impl Iterator<Item = (&LanguageModelToolUse, Option<&LanguageModelToolResult>)> {
self.tool_uses_by_assistant_message
.get(&assistant_message_id)
.into_iter()
.flatten()
.map(|tool_use| (tool_use, self.tool_results.get(&tool_use.id)))
}
}
#[derive(Debug, Clone)]
pub struct PendingToolUse {
pub id: LanguageModelToolUseId,
/// The ID of the Assistant message in which the tool use was requested.
#[allow(unused)]
pub assistant_message_id: MessageId,
pub name: Arc<str>,
pub ui_text: Arc<str>,
pub input: serde_json::Value,
pub status: PendingToolUseStatus,
pub may_perform_edits: bool,
}
#[derive(Debug, Clone)]
pub struct Confirmation {
pub tool_use_id: LanguageModelToolUseId,
pub input: serde_json::Value,
pub ui_text: Arc<str>,
pub request: Arc<LanguageModelRequest>,
pub tool: Arc<dyn Tool>,
}
#[derive(Debug, Clone)]
pub enum PendingToolUseStatus {
InputStillStreaming,
Idle,
NeedsConfirmation(Arc<Confirmation>),
Running { _task: Shared<Task<()>> },
Error(#[allow(unused)] Arc<str>),
}
impl PendingToolUseStatus {
pub fn is_idle(&self) -> bool {
matches!(self, PendingToolUseStatus::Idle)
}
pub fn is_error(&self) -> bool {
matches!(self, PendingToolUseStatus::Error(_))
}
pub fn needs_confirmation(&self) -> bool {
matches!(self, PendingToolUseStatus::NeedsConfirmation { .. })
}
}
#[derive(Clone)]
pub struct ToolUseMetadata {
pub model: Arc<dyn LanguageModel>,
pub thread_id: ThreadId,
pub prompt_id: PromptId,
}

View File

@@ -0,0 +1 @@
/// Empty placeholder added in this WIP commit — presumably the future native
/// ("Zed") thread backend; no fields or impls yet. TODO confirm intent.
pub struct ZedAgentThread {}

View File

@@ -1,49 +0,0 @@
[package]
name = "agent2"
version = "0.1.0"
edition = "2021"
license = "GPL-3.0-or-later"
publish = false
[lib]
path = "src/agent2.rs"
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
chrono.workspace = true
collections.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
handlebars = { workspace = true, features = ["rust-embed"] }
language_model.workspace = true
language_models.workspace = true
parking_lot.workspace = true
project.workspace = true
rust-embed.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
thiserror.workspace = true
util.workspace = true
worktree.workspace = true
[dev-dependencies]
ctor.workspace = true
client = { workspace = true, "features" = ["test-support"] }
env_logger.workspace = true
fs = { workspace = true, "features" = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] }
gpui_tokio.workspace = true
language_model = { workspace = true, "features" = ["test-support"] }
project = { workspace = true, "features" = ["test-support"] }
reqwest_client.workspace = true
settings = { workspace = true, "features" = ["test-support"] }
worktree = { workspace = true, "features" = ["test-support"] }

View File

@@ -1 +0,0 @@
../../LICENSE-GPL

View File

@@ -1,6 +0,0 @@
mod prompts;
mod templates;
mod thread;
mod tools;
pub use thread::*;

View File

@@ -1,29 +0,0 @@
use crate::{
templates::{BaseTemplate, Template, Templates, WorktreeData},
thread::Prompt,
};
use anyhow::Result;
use gpui::{App, Entity};
use project::Project;
/// System prompt that renders the base template with host and project info.
struct BasePrompt {
    /// Project whose worktree root names are injected into the prompt.
    project: Entity<Project>,
}
impl Prompt for BasePrompt {
    /// Renders the base system prompt, filling in the host OS, the user's
    /// default shell, and the root name of every worktree in the project.
    fn render(&self, templates: &Templates, cx: &App) -> Result<String> {
        let worktrees = self
            .project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| WorktreeData {
                root_name: worktree.read(cx).root_name().to_string(),
            })
            .collect();
        let template = BaseTemplate {
            os: std::env::consts::OS.to_string(),
            shell: util::get_system_shell(),
            worktrees,
        };
        template.render(templates)
    }
}

View File

@@ -1,57 +0,0 @@
use std::sync::Arc;
use anyhow::Result;
use handlebars::Handlebars;
use rust_embed::RustEmbed;
use serde::Serialize;
/// Handlebars template files (`*.hbs`) embedded into the binary at build time.
#[derive(RustEmbed)]
#[folder = "src/templates"]
#[include = "*.hbs"]
struct Assets;
/// Registry of compiled handlebars templates, shared behind an `Arc`.
pub struct Templates(Handlebars<'static>);
impl Templates {
    /// Builds the registry by registering every embedded `*.hbs` asset.
    /// Panics if an embedded template fails to parse (a build-time defect,
    /// not a runtime condition).
    pub fn new() -> Arc<Self> {
        let mut handlebars = Handlebars::new();
        handlebars.register_embed_templates::<Assets>().unwrap();
        Arc::new(Self(handlebars))
    }
}
/// A serializable value that renders itself through a named handlebars
/// template in the [`Templates`] registry.
pub trait Template: Sized {
    /// Name of the embedded template file this type renders with.
    const TEMPLATE_NAME: &'static str;
    /// Renders `self` against `TEMPLATE_NAME`, returning the expanded string.
    fn render(&self, templates: &Templates) -> Result<String>
    where
        Self: Serialize + Sized,
    {
        Ok(templates.0.render(Self::TEMPLATE_NAME, self)?)
    }
}
/// Data for the base system prompt template (`base.hbs`).
#[derive(Serialize)]
pub struct BaseTemplate {
    /// Host operating system (`std::env::consts::OS`).
    pub os: String,
    /// The user's default shell.
    pub shell: String,
    /// Root entries for each of the project's worktrees.
    pub worktrees: Vec<WorktreeData>,
}
impl Template for BaseTemplate {
    const TEMPLATE_NAME: &'static str = "base.hbs";
}
/// A single worktree's root directory name, as rendered into prompts.
#[derive(Serialize)]
pub struct WorktreeData {
    pub root_name: String,
}
/// Data for the glob tool's dynamic description template (`glob.hbs`).
#[derive(Serialize)]
pub struct GlobTemplate {
    /// Newline-separated list of project root directory names.
    pub project_roots: String,
}
impl Template for GlobTemplate {
    const TEMPLATE_NAME: &'static str = "glob.hbs";
}

View File

@@ -1,56 +0,0 @@
You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.
## Communication
1. Be conversational but professional.
2. Refer to the USER in the second person and yourself in the first person.
3. Format your responses in markdown. Use backticks to format file, directory, function, and class names.
4. NEVER lie or make things up.
5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing.
## Tool Use
1. Make sure to adhere to the tools schema.
2. Provide every required argument.
3. DO NOT use tools to access items that are already available in the context section.
4. Use only the tools that are currently available.
5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
## Searching and Reading
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
If appropriate, use tool calls to explore the current project, which contains the following root directories:
{{#each worktrees}}
- `{{root_name}}`
{{/each}}
- When providing paths to tools, the path should always begin with a path that starts with a project root directory listed above.
- When looking for symbols in the project, prefer the `grep` tool.
- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
- Bias towards not asking the user for help if you can find the answer yourself.
## Fixing Diagnostics
1. Make 1-2 attempts at fixing diagnostics, then defer to the user.
2. Never simplify code you've written just to solve diagnostics. Complete, mostly correct code is more valuable than perfect code that doesn't solve the problem.
## Debugging
When debugging, only make code changes if you are certain that you can solve the problem.
Otherwise, follow debugging best practices:
1. Address the root cause instead of the symptoms.
2. Add descriptive logging statements and error messages to track variable and code state.
3. Add test functions and statements to isolate the problem.
## Calling External APIs
1. Unless explicitly requested by the user, use the best-suited external APIs and packages to solve the task. There is no need to ask the user for permission.
2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file. If no such file exists or if the package is not present, use the latest version that is in your training data.
3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed).
## System Information
Operating System: {{os}}
Default Shell: {{shell}}

View File

@@ -1,8 +0,0 @@
Find paths on disk with glob patterns.
Assume that all glob patterns are matched in a project directory with the following entries.
{{project_roots}}
When searching with patterns that begin with literal path components, e.g. `foo/bar/**/*.rs`, be
sure to anchor them with one of the directories listed above.

View File

@@ -1,420 +0,0 @@
use crate::templates::Templates;
use anyhow::{anyhow, Result};
use futures::{channel::mpsc, future};
use gpui::{App, Context, SharedString, Task};
use language_model::{
CompletionIntent, CompletionMode, LanguageModel, LanguageModelCompletionError,
LanguageModelCompletionEvent, LanguageModelRequest, LanguageModelRequestMessage,
LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolResultContent,
LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, Role, StopReason,
};
use schemars::{JsonSchema, Schema};
use serde::Deserialize;
use smol::stream::StreamExt;
use std::{collections::BTreeMap, sync::Arc};
use util::ResultExt;
/// A single message in the agent conversation: a role plus its ordered
/// content parts (text, tool uses, tool results, ...).
#[derive(Debug)]
pub struct AgentMessage {
    pub role: Role,
    pub content: Vec<MessageContent>,
}
/// Events streamed to callers of `Thread::send`; currently the raw
/// completion events coming from the language model.
pub type AgentResponseEvent = LanguageModelCompletionEvent;
/// A system prompt fragment that can render itself against the template
/// registry.
pub trait Prompt {
    fn render(&self, prompts: &Templates, cx: &App) -> Result<String>;
}
/// An agent conversation: the message history plus the machinery to stream
/// model responses and run tool calls until the turn completes.
pub struct Thread {
    /// Full history, including system, user, assistant, and tool messages.
    messages: Vec<AgentMessage>,
    completion_mode: CompletionMode,
    /// Holds the task that handles agent interaction until the end of the turn.
    /// Survives across multiple requests as the model performs tool calls and
    /// we run tools, report their results.
    running_turn: Option<Task<()>>,
    /// Prompts rendered into a leading `System` message on each send.
    system_prompts: Vec<Arc<dyn Prompt>>,
    /// Tools available to the model, keyed by tool name.
    tools: BTreeMap<SharedString, Arc<dyn AgentToolErased>>,
    templates: Arc<Templates>,
    // project: Entity<Project>,
    // action_log: Entity<ActionLog>,
}
impl Thread {
    /// Creates an empty thread with no tools or system prompts registered.
    pub fn new(templates: Arc<Templates>) -> Self {
        Self {
            messages: Vec::new(),
            completion_mode: CompletionMode::Normal,
            system_prompts: Vec::new(),
            running_turn: None,
            tools: BTreeMap::default(),
            templates,
        }
    }
    /// Sets the completion mode used for subsequent requests.
    pub fn set_mode(&mut self, mode: CompletionMode) {
        self.completion_mode = mode;
    }
    /// Returns the full message history, including system and tool messages.
    pub fn messages(&self) -> &[AgentMessage] {
        &self.messages
    }
    /// Registers a tool, replacing any existing tool with the same name.
    pub fn add_tool(&mut self, tool: impl AgentTool) {
        self.tools.insert(tool.name(), tool.erase());
    }
    /// Unregisters a tool by name; returns whether it was present.
    pub fn remove_tool(&mut self, name: &str) -> bool {
        self.tools.remove(name).is_some()
    }
    /// Sending a message results in the model streaming a response, which could include tool calls.
    /// After calling tools, the model stops and waits for any outstanding tool calls to be completed and their results sent.
    /// The returned channel will report all the occurrences in which the model stops before erroring or ending its turn.
    pub fn send(
        &mut self,
        model: Arc<dyn LanguageModel>,
        content: impl Into<MessageContent>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedReceiver<Result<AgentResponseEvent, LanguageModelCompletionError>> {
        cx.notify();
        let (events_tx, events_rx) =
            mpsc::unbounded::<Result<AgentResponseEvent, LanguageModelCompletionError>>();
        // Refresh the system message, then append the user's message before
        // starting the turn.
        let system_message = self.build_system_message(cx);
        self.messages.extend(system_message);
        self.messages.push(AgentMessage {
            role: Role::User,
            content: vec![content.into()],
        });
        // NOTE: replacing `running_turn` drops any previous turn's task.
        self.running_turn = Some(cx.spawn(async move |thread, cx| {
            let turn_result = async {
                // Perform one request, then keep looping if the model makes tool calls.
                let mut completion_intent = CompletionIntent::UserPrompt;
                loop {
                    let request = thread.update(cx, |thread, cx| {
                        thread.build_completion_request(completion_intent, cx)
                    })?;
                    // println!(
                    //     "request: {}",
                    //     serde_json::to_string_pretty(&request).unwrap()
                    // );
                    // Stream events, appending to messages and collecting up tool uses.
                    let mut events = model.stream_completion(request, cx).await?;
                    let mut tool_uses = Vec::new();
                    while let Some(event) = events.next().await {
                        match event {
                            Ok(event) => {
                                thread
                                    .update(cx, |thread, cx| {
                                        tool_uses.extend(thread.handle_streamed_completion_event(
                                            event,
                                            events_tx.clone(),
                                            cx,
                                        ));
                                    })
                                    .ok();
                            }
                            Err(error) => {
                                // Forward the stream error to the caller and
                                // stop consuming this response.
                                events_tx.unbounded_send(Err(error)).ok();
                                break;
                            }
                        }
                    }
                    // If there are no tool uses, the turn is done.
                    if tool_uses.is_empty() {
                        break;
                    }
                    // If there are tool uses, wait for their results to be
                    // computed, then send them together in a single message on
                    // the next loop iteration.
                    let tool_results = future::join_all(tool_uses).await;
                    thread
                        .update(cx, |thread, _cx| {
                            thread.messages.push(AgentMessage {
                                role: Role::User,
                                content: tool_results.into_iter().map(Into::into).collect(),
                            });
                        })
                        .ok();
                    completion_intent = CompletionIntent::ToolResults;
                }
                Ok(())
            }
            .await;
            if let Err(error) = turn_result {
                events_tx.unbounded_send(Err(error)).ok();
            }
        }));
        events_rx
    }
    /// Renders every registered system prompt and concatenates the results
    /// into a single `System` message; `None` when nothing rendered.
    pub fn build_system_message(&mut self, cx: &App) -> Option<AgentMessage> {
        let mut system_message = AgentMessage {
            role: Role::System,
            content: Vec::new(),
        };
        for prompt in &self.system_prompts {
            // A prompt that fails to render is logged and skipped rather than
            // aborting the whole message.
            if let Some(rendered_prompt) = prompt.render(&self.templates, cx).log_err() {
                system_message
                    .content
                    .push(MessageContent::Text(rendered_prompt));
            }
        }
        (!system_message.content.is_empty()).then_some(system_message)
    }
    /// A helper method that's called on every streamed completion event.
    /// Returns an optional tool result task, which the main agentic loop in
    /// send will send back to the model when it resolves.
    fn handle_streamed_completion_event(
        &mut self,
        event: LanguageModelCompletionEvent,
        events_tx: mpsc::UnboundedSender<Result<AgentResponseEvent, LanguageModelCompletionError>>,
        cx: &mut Context<Self>,
    ) -> Option<Task<LanguageModelToolResult>> {
        use LanguageModelCompletionEvent::*;
        // Forward every event to the caller before folding it into history.
        events_tx.unbounded_send(Ok(event.clone())).ok();
        match event {
            Text(new_text) => self.handle_text_event(new_text, cx),
            Thinking { text, signature } => {
                // TODO: thinking events are not yet folded into the history.
                todo!()
            }
            ToolUse(tool_use) => {
                return self.handle_tool_use_event(tool_use, cx);
            }
            StartMessage { role, .. } => {
                self.messages.push(AgentMessage {
                    role,
                    content: Vec::new(),
                });
            }
            UsageUpdate(_) => {}
            Stop(stop_reason) => self.handle_stop_event(stop_reason),
            StatusUpdate(_completion_request_status) => {}
            RedactedThinking { data } => todo!(),
            ToolUseJsonParseError {
                id,
                tool_name,
                raw_input,
                json_parse_error,
            } => todo!(),
        }
        None
    }
    /// Reacts to the model's stop reason; only normal end-of-turn and
    /// tool-use stops are handled so far.
    fn handle_stop_event(&mut self, stop_reason: StopReason) {
        match stop_reason {
            StopReason::EndTurn | StopReason::ToolUse => {}
            StopReason::MaxTokens => todo!(),
            StopReason::Refusal => todo!(),
        }
    }
    /// Appends streamed text to the last assistant message, merging with a
    /// trailing text part when one exists.
    fn handle_text_event(&mut self, new_text: String, cx: &mut Context<Self>) {
        let last_message = self.last_assistant_message();
        if let Some(MessageContent::Text(text)) = last_message.content.last_mut() {
            text.push_str(&new_text);
        } else {
            last_message.content.push(MessageContent::Text(new_text));
        }
        cx.notify();
    }
    /// Folds a (possibly partial) tool-use event into the history and, once
    /// the input is complete, kicks off the tool and returns the task that
    /// resolves to its result.
    fn handle_tool_use_event(
        &mut self,
        tool_use: LanguageModelToolUse,
        cx: &mut Context<Self>,
    ) -> Option<Task<LanguageModelToolResult>> {
        cx.notify();
        let last_message = self.last_assistant_message();
        // Ensure the last message ends in the current tool use
        let push_new_tool_use = last_message.content.last_mut().map_or(true, |content| {
            if let MessageContent::ToolUse(last_tool_use) = content {
                if last_tool_use.id == tool_use.id {
                    // Same tool use still streaming: overwrite in place.
                    *last_tool_use = tool_use.clone();
                    false
                } else {
                    true
                }
            } else {
                true
            }
        });
        if push_new_tool_use {
            last_message.content.push(tool_use.clone().into());
        }
        // Only run the tool once its input has fully streamed.
        if !tool_use.is_input_complete {
            return None;
        }
        if let Some(tool) = self.tools.get(tool_use.name.as_ref()) {
            let pending_tool_result = tool.clone().run(tool_use.input, cx);
            Some(cx.foreground_executor().spawn(async move {
                match pending_tool_result.await {
                    Ok(tool_output) => LanguageModelToolResult {
                        tool_use_id: tool_use.id,
                        tool_name: tool_use.name,
                        is_error: false,
                        content: LanguageModelToolResultContent::Text(Arc::from(tool_output)),
                        output: None,
                    },
                    Err(error) => LanguageModelToolResult {
                        tool_use_id: tool_use.id,
                        tool_name: tool_use.name,
                        is_error: true,
                        content: LanguageModelToolResultContent::Text(Arc::from(error.to_string())),
                        output: None,
                    },
                }
            }))
        } else {
            // Unknown tool: report an error result back to the model.
            Some(Task::ready(LanguageModelToolResult {
                content: LanguageModelToolResultContent::Text(Arc::from(format!(
                    "No tool named {} exists",
                    tool_use.name
                ))),
                tool_use_id: tool_use.id,
                tool_name: tool_use.name,
                is_error: true,
                output: None,
            }))
        }
    }
    /// Guarantees the last message is from the assistant and returns a mutable reference.
    fn last_assistant_message(&mut self) -> &mut AgentMessage {
        if self
            .messages
            .last()
            .map_or(true, |m| m.role != Role::Assistant)
        {
            self.messages.push(AgentMessage {
                role: Role::Assistant,
                content: Vec::new(),
            });
        }
        self.messages.last_mut().unwrap()
    }
    /// Assembles the request for the next completion, advertising every
    /// registered tool whose schema serializes successfully.
    fn build_completion_request(
        &self,
        completion_intent: CompletionIntent,
        cx: &mut App,
    ) -> LanguageModelRequest {
        LanguageModelRequest {
            thread_id: None,
            prompt_id: None,
            intent: Some(completion_intent),
            mode: Some(self.completion_mode),
            messages: self.build_request_messages(),
            tools: self
                .tools
                .values()
                .filter_map(|tool| {
                    Some(LanguageModelRequestTool {
                        name: tool.name().to_string(),
                        description: tool.description(cx).to_string(),
                        input_schema: tool
                            .input_schema(LanguageModelToolSchemaFormat::JsonSchema)
                            .log_err()?,
                    })
                })
                .collect(),
            tool_choice: None,
            stop: Vec::new(),
            temperature: None,
        }
    }
    /// Converts the stored history into request messages (no caching).
    fn build_request_messages(&self) -> Vec<LanguageModelRequestMessage> {
        self.messages
            .iter()
            .map(|message| LanguageModelRequestMessage {
                role: message.role,
                content: message.content.clone(),
                cache: false,
            })
            .collect()
    }
}
/// A strongly-typed agent tool: deserializes its own input type and produces
/// a textual result for the model.
pub trait AgentTool
where
    Self: 'static + Sized,
{
    /// Structured input the model must supply when invoking this tool.
    type Input: for<'de> Deserialize<'de> + JsonSchema;
    /// The name the model uses to invoke this tool.
    fn name(&self) -> SharedString;
    /// Default description: the `description` field of the input type's JSON
    /// schema (i.e. the doc comment on the input struct).
    fn description(&self, _cx: &mut App) -> SharedString {
        let schema = schemars::schema_for!(Self::Input);
        SharedString::new(
            schema
                .get("description")
                .and_then(|description| description.as_str())
                .unwrap_or_default(),
        )
    }
    /// Returns the JSON schema that describes the tool's input.
    fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Schema {
        assistant_tools::root_schema_for::<Self::Input>(format)
    }
    /// Runs the tool with the provided input.
    fn run(self: Arc<Self>, input: Self::Input, cx: &mut App) -> Task<Result<String>>;
    /// Type-erases this tool so it can be stored alongside other tools.
    fn erase(self) -> Arc<dyn AgentToolErased> {
        Arc::new(Erased(Arc::new(self)))
    }
}
/// Wrapper used to implement the object-safe [`AgentToolErased`] for any
/// strongly-typed [`AgentTool`].
pub struct Erased<T>(T);
/// Object-safe counterpart of [`AgentTool`] operating on raw JSON values.
pub trait AgentToolErased {
    fn name(&self) -> SharedString;
    fn description(&self, cx: &mut App) -> SharedString;
    fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value>;
    fn run(self: Arc<Self>, input: serde_json::Value, cx: &mut App) -> Task<Result<String>>;
}
impl<T> AgentToolErased for Erased<Arc<T>>
where
    T: AgentTool,
{
    /// Delegates to the typed tool's name.
    fn name(&self) -> SharedString {
        self.0.name()
    }

    /// Delegates to the typed tool's description.
    fn description(&self, cx: &mut App) -> SharedString {
        self.0.description(cx)
    }

    /// Serializes the typed tool's schema into a raw JSON value.
    fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
        let schema = self.0.input_schema(format);
        Ok(serde_json::to_value(schema)?)
    }

    /// Deserializes `input` into the tool's input type and runs it; a parse
    /// failure becomes an immediately-ready error task.
    fn run(self: Arc<Self>, input: serde_json::Value, cx: &mut App) -> Task<Result<String>> {
        match serde_json::from_value::<T::Input>(input) {
            Ok(parsed) => Arc::clone(&self.0).run(parsed, cx),
            Err(error) => Task::ready(Err(anyhow!(error))),
        }
    }
}

View File

@@ -1,254 +0,0 @@
use super::*;
use client::{proto::language_server_prompt_request, Client, UserStore};
use fs::FakeFs;
use gpui::{AppContext, Entity, TestAppContext};
use language_model::{
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelRegistry, MessageContent, StopReason,
};
use reqwest_client::ReqwestClient;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use smol::stream::StreamExt;
use std::{sync::Arc, time::Duration};
mod test_tools;
use test_tools::*;
// End-to-end smoke test: the model should reply with exactly the requested
// text and end its turn. NOTE(review): uses a live model via `setup` —
// requires network/credentials; confirm CI policy.
#[gpui::test]
async fn test_echo(cx: &mut TestAppContext) {
    let AgentTest { model, agent, .. } = setup(cx).await;
    let events = agent
        .update(cx, |agent, cx| {
            agent.send(model.clone(), "Testing: Reply with 'Hello'", cx)
        })
        .collect()
        .await;
    agent.update(cx, |agent, _cx| {
        assert_eq!(
            agent.messages.last().unwrap().content,
            vec![MessageContent::Text("Hello".to_string())]
        );
    });
    assert_eq!(stop_events(events), vec![StopReason::EndTurn]);
}
// Exercises fast and slow tool calls: each send should produce a ToolUse stop
// followed by an EndTurn stop, and the final reply should echo the delay
// tool's "Ding" output.
#[gpui::test]
async fn test_basic_tool_calls(cx: &mut TestAppContext) {
    let AgentTest { model, agent, .. } = setup(cx).await;
    // Test a tool call that's likely to complete *before* streaming stops.
    let events = agent
        .update(cx, |agent, cx| {
            agent.add_tool(EchoTool);
            agent.send(
                model.clone(),
                "Now test the echo tool with 'Hello'. Does it work? Say 'Yes' or 'No'.",
                cx,
            )
        })
        .collect()
        .await;
    assert_eq!(
        stop_events(events),
        vec![StopReason::ToolUse, StopReason::EndTurn]
    );
    // Test a tool call that's likely to complete *after* streaming stops.
    let events = agent
        .update(cx, |agent, cx| {
            agent.remove_tool(&AgentTool::name(&EchoTool));
            agent.add_tool(DelayTool);
            agent.send(
                model.clone(),
                "Now call the delay tool with 200ms. When the timer goes off, then you echo the output of the tool.",
                cx,
            )
        })
        .collect()
        .await;
    assert_eq!(
        stop_events(events),
        vec![StopReason::ToolUse, StopReason::EndTurn]
    );
    agent.update(cx, |agent, _cx| {
        // The delay tool returns "Ding"; the model should repeat it.
        assert!(agent
            .messages
            .last()
            .unwrap()
            .content
            .iter()
            .any(|content| {
                if let MessageContent::Text(text) = content {
                    text.contains("Ding")
                } else {
                    false
                }
            }));
    });
}
// Verifies that tool-use input streams incrementally: at some point the
// history must contain a partial word_list input (no "g" yet), and once
// input is complete both "a" and "g" must be present.
#[gpui::test]
async fn test_streaming_tool_calls(cx: &mut TestAppContext) {
    let AgentTest { model, agent, .. } = setup(cx).await;
    // Test a tool call that's likely to complete *before* streaming stops.
    let mut events = agent.update(cx, |agent, cx| {
        agent.add_tool(WordListTool);
        agent.send(model.clone(), "Test the word_list tool.", cx)
    });
    let mut saw_partial_tool_use = false;
    while let Some(event) = events.next().await {
        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use_event)) = event {
            agent.update(cx, |agent, _cx| {
                // Look for a tool use in the agent's last message
                let last_content = agent.messages().last().unwrap().content.last().unwrap();
                if let MessageContent::ToolUse(last_tool_use) = last_content {
                    assert_eq!(last_tool_use.name.as_ref(), "word_list");
                    if tool_use_event.is_input_complete {
                        last_tool_use
                            .input
                            .get("a")
                            .expect("'a' has streamed because input is now complete");
                        last_tool_use
                            .input
                            .get("g")
                            .expect("'g' has streamed because input is now complete");
                    } else {
                        if !last_tool_use.is_input_complete
                            && last_tool_use.input.get("g").is_none()
                        {
                            saw_partial_tool_use = true;
                        }
                    }
                } else {
                    panic!("last content should be a tool use");
                }
            });
        }
    }
    assert!(
        saw_partial_tool_use,
        "should see at least one partially streamed tool use in the history"
    );
}
// Two delay-tool calls in one turn: the model may issue them in one or two
// tool-use rounds, so either [ToolUse, EndTurn] or
// [ToolUse, ToolUse, EndTurn] is acceptable.
#[gpui::test]
async fn test_concurrent_tool_calls(cx: &mut TestAppContext) {
    let AgentTest { model, agent, .. } = setup(cx).await;
    // Test concurrent tool calls with different delay times
    let events = agent
        .update(cx, |agent, cx| {
            agent.add_tool(DelayTool);
            agent.send(
                model.clone(),
                "Call the delay tool twice in the same message. Once with 100ms. Once with 300ms. When both timers are complete, describe the outputs.",
                cx,
            )
        })
        .collect()
        .await;
    let stop_reasons = stop_events(events);
    if stop_reasons.len() == 2 {
        assert_eq!(stop_reasons, vec![StopReason::ToolUse, StopReason::EndTurn]);
    } else if stop_reasons.len() == 3 {
        assert_eq!(
            stop_reasons,
            vec![
                StopReason::ToolUse,
                StopReason::ToolUse,
                StopReason::EndTurn
            ]
        );
    } else {
        panic!("Expected either 1 or 2 tool uses followed by end turn");
    }
    agent.update(cx, |agent, _cx| {
        // The final reply should mention the delay tool's "Ding" output.
        let last_message = agent.messages.last().unwrap();
        let text = last_message
            .content
            .iter()
            .filter_map(|content| {
                if let MessageContent::Text(text) = content {
                    Some(text.as_str())
                } else {
                    None
                }
            })
            .collect::<String>();
        assert!(text.contains("Ding"));
    });
}
/// Filters out the stop events for asserting against in tests
/// (panics on any error event, which fails the test with its message).
fn stop_events(
    result_events: Vec<Result<AgentResponseEvent, LanguageModelCompletionError>>,
) -> Vec<StopReason> {
    result_events
        .into_iter()
        .filter_map(|event| match event.unwrap() {
            LanguageModelCompletionEvent::Stop(stop_reason) => Some(stop_reason),
            _ => None,
        })
        .collect()
}
/// Shared fixture for agent tests: a live model plus a fresh thread.
struct AgentTest {
    model: Arc<dyn LanguageModel>,
    agent: Entity<Thread>,
}
/// Builds the test fixture. NOTE(review): this authenticates against the
/// production client and selects "claude-3-7-sonnet-latest" — these tests
/// hit the network and require credentials; confirm they are gated in CI.
async fn setup(cx: &mut TestAppContext) -> AgentTest {
    cx.executor().allow_parking();
    cx.update(settings::init);
    let fs = FakeFs::new(cx.executor().clone());
    // let project = Project::test(fs.clone(), [], cx).await;
    // let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let templates = Templates::new();
    let agent = cx.new(|_| Thread::new(templates));
    let model = cx
        .update(|cx| {
            gpui_tokio::init(cx);
            let http_client = ReqwestClient::user_agent("agent tests").unwrap();
            cx.set_http_client(Arc::new(http_client));
            client::init_settings(cx);
            let client = Client::production(cx);
            let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
            language_model::init(client.clone(), cx);
            language_models::init(user_store.clone(), client.clone(), cx);
            let models = LanguageModelRegistry::read_global(cx);
            let model = models
                .available_models(cx)
                .find(|model| model.id().0 == "claude-3-7-sonnet-latest")
                .unwrap();
            let provider = models.provider(&model.provider_id()).unwrap();
            let authenticated = provider.authenticate(cx);
            cx.spawn(async move |cx| {
                authenticated.await.unwrap();
                model
            })
        })
        .await;
    AgentTest { model, agent }
}
/// Installs `env_logger` before any test runs, but only when `RUST_LOG`
/// is set, so test output stays quiet by default.
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

View File

@@ -1,83 +0,0 @@
use super::*;
/// A tool that echoes its input
#[derive(JsonSchema, Serialize, Deserialize)]
pub struct EchoToolInput {
    /// The text to echo.
    text: String,
}
// Test tool: returns its input text unchanged, immediately.
pub struct EchoTool;
impl AgentTool for EchoTool {
    type Input = EchoToolInput;
    fn name(&self) -> SharedString {
        "echo".into()
    }
    fn run(self: Arc<Self>, input: Self::Input, _cx: &mut App) -> Task<Result<String>> {
        Task::ready(Ok(input.text))
    }
}
/// A tool that waits for a specified delay
#[derive(JsonSchema, Serialize, Deserialize)]
pub struct DelayToolInput {
    /// The delay in milliseconds.
    ms: u64,
}
// Test tool: sleeps for the requested duration, then returns "Ding".
// Used to exercise tool results that arrive after streaming stops.
pub struct DelayTool;
impl AgentTool for DelayTool {
    type Input = DelayToolInput;
    fn name(&self) -> SharedString {
        "delay".into()
    }
    fn run(self: Arc<Self>, input: Self::Input, cx: &mut App) -> Task<Result<String>>
    where
        Self: Sized,
    {
        cx.foreground_executor().spawn(async move {
            smol::Timer::after(Duration::from_millis(input.ms)).await;
            Ok("Ding".to_string())
        })
    }
}
/// A tool that takes an object with map from letters to random words starting with that letter.
/// All fields are required! Pass a word for every letter!
#[derive(JsonSchema, Serialize, Deserialize)]
pub struct WordListInput {
    /// Provide a random word that starts with A.
    a: Option<String>,
    /// Provide a random word that starts with B.
    b: Option<String>,
    /// Provide a random word that starts with C.
    c: Option<String>,
    /// Provide a random word that starts with D.
    d: Option<String>,
    /// Provide a random word that starts with E.
    e: Option<String>,
    /// Provide a random word that starts with F.
    f: Option<String>,
    /// Provide a random word that starts with G.
    g: Option<String>,
}
// Test tool: ignores the supplied words and answers "ok"; the streaming test
// only cares about how the multi-field input streams, not the output.
pub struct WordListTool;
impl AgentTool for WordListTool {
    type Input = WordListInput;
    fn name(&self) -> SharedString {
        "word_list".into()
    }
    fn run(self: Arc<Self>, _input: Self::Input, _cx: &mut App) -> Task<Result<String>> {
        Task::ready(Ok("ok".to_string()))
    }
}

View File

@@ -1 +0,0 @@
mod glob;

View File

@@ -1,76 +0,0 @@
use anyhow::{anyhow, Result};
use gpui::{App, AppContext, Entity, SharedString, Task};
use project::Project;
use schemars::JsonSchema;
use serde::Deserialize;
use std::{path::PathBuf, sync::Arc};
use util::paths::PathMatcher;
use worktree::Snapshot as WorktreeSnapshot;
use crate::{
templates::{GlobTemplate, Template, Templates},
thread::AgentTool,
};
// Description is dynamic, see `fn description` below
#[derive(Deserialize, JsonSchema)]
struct GlobInput {
    /// A POSIX glob pattern
    glob: SharedString,
}
/// Tool that lists project paths matching a glob pattern.
struct GlobTool {
    /// Project whose worktrees are searched.
    project: Entity<Project>,
    /// Used to render the tool's dynamic description.
    templates: Arc<Templates>,
}
impl AgentTool for GlobTool {
    type Input = GlobInput;
    fn name(&self) -> SharedString {
        "glob".into()
    }
    /// Renders the glob description template with the current project root
    /// names so the model knows which directories anchor its patterns.
    fn description(&self, cx: &mut App) -> SharedString {
        let project_roots = self
            .project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| worktree.read(cx).root_name().into())
            .collect::<Vec<String>>()
            .join("\n");
        GlobTemplate { project_roots }
            .render(&self.templates)
            .expect("template failed to render")
            .into()
    }
    /// Matches the glob against every worktree entry (root name prepended)
    /// and returns the matching paths, one per line.
    fn run(self: Arc<Self>, input: Self::Input, cx: &mut App) -> Task<Result<String>> {
        let path_matcher = match PathMatcher::new([&input.glob]) {
            Ok(matcher) => matcher,
            Err(error) => return Task::ready(Err(anyhow!(error))),
        };
        // Snapshot the worktrees up front so matching can run off the main
        // thread without touching entities.
        let snapshots: Vec<WorktreeSnapshot> = self
            .project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| worktree.read(cx).snapshot())
            .collect();
        cx.background_spawn(async move {
            let paths = snapshots.iter().flat_map(|snapshot| {
                let root_name = PathBuf::from(snapshot.root_name());
                snapshot
                    .entries(false, 0)
                    .map(move |entry| root_name.join(&entry.path))
                    .filter(|path| path_matcher.is_match(&path))
            });
            let output = paths
                .map(|path| format!("{}\n", path.display()))
                .collect::<String>();
            Ok(output)
        })
    }
}

View File

@@ -6,10 +6,9 @@ use anyhow::{Result, bail};
use collections::IndexMap;
use gpui::{App, Pixels, SharedString};
use language_model::LanguageModel;
use schemars::{JsonSchema, json_schema};
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use std::borrow::Cow;
pub use crate::agent_profile::*;
@@ -50,7 +49,7 @@ pub struct AgentSettings {
pub dock: AgentDockPosition,
pub default_width: Pixels,
pub default_height: Pixels,
pub default_model: Option<LanguageModelSelection>,
pub default_model: LanguageModelSelection,
pub inline_assistant_model: Option<LanguageModelSelection>,
pub commit_message_model: Option<LanguageModelSelection>,
pub thread_summary_model: Option<LanguageModelSelection>,
@@ -212,6 +211,7 @@ impl AgentSettingsContent {
}
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AgentSettingsContent {
/// Whether the Agent is enabled.
///
@@ -321,27 +321,29 @@ pub struct LanguageModelSelection {
pub struct LanguageModelProviderSetting(pub String);
impl JsonSchema for LanguageModelProviderSetting {
fn schema_name() -> Cow<'static, str> {
fn schema_name() -> String {
"LanguageModelProviderSetting".into()
}
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
json_schema!({
"enum": [
"anthropic",
"amazon-bedrock",
"google",
"lmstudio",
"ollama",
"openai",
"zed.dev",
"copilot_chat",
"deepseek",
"openrouter",
"mistral",
"vercel"
]
})
fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
schemars::schema::SchemaObject {
enum_values: Some(vec![
"anthropic".into(),
"amazon-bedrock".into(),
"google".into(),
"lmstudio".into(),
"ollama".into(),
"openai".into(),
"zed.dev".into(),
"copilot_chat".into(),
"deepseek".into(),
"openrouter".into(),
"mistral".into(),
"vercel".into(),
]),
..Default::default()
}
.into()
}
}
@@ -357,6 +359,15 @@ impl From<&str> for LanguageModelProviderSetting {
}
}
impl Default for LanguageModelSelection {
fn default() -> Self {
Self {
provider: LanguageModelProviderSetting("openai".to_string()),
model: "gpt-4".to_string(),
}
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
pub name: Arc<str>,
@@ -400,10 +411,7 @@ impl Settings for AgentSettings {
&mut settings.default_height,
value.default_height.map(Into::into),
);
settings.default_model = value
.default_model
.clone()
.or(settings.default_model.take());
merge(&mut settings.default_model, value.default_model.clone());
settings.inline_assistant_model = value
.inline_assistant_model
.clone()

File diff suppressed because it is too large Load Diff

View File

@@ -16,9 +16,7 @@ use gpui::{
Focusable, ScrollHandle, Subscription, Task, Transformation, WeakEntity, percentage,
};
use language::LanguageRegistry;
use language_model::{
LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
};
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
@@ -88,14 +86,6 @@ impl AgentConfiguration {
let scroll_handle = ScrollHandle::new();
let scrollbar_state = ScrollbarState::new(scroll_handle.clone());
let mut expanded_provider_configurations = HashMap::default();
if LanguageModelRegistry::read_global(cx)
.provider(&ZED_CLOUD_PROVIDER_ID)
.map_or(false, |cloud_provider| cloud_provider.must_accept_terms(cx))
{
expanded_provider_configurations.insert(ZED_CLOUD_PROVIDER_ID, true);
}
let mut this = Self {
fs,
language_registry,
@@ -104,7 +94,7 @@ impl AgentConfiguration {
configuration_views_by_provider: HashMap::default(),
context_server_store,
expanded_context_server_tools: HashMap::default(),
expanded_provider_configurations,
expanded_provider_configurations: HashMap::default(),
tools,
_registry_subscription: registry_subscription,
scroll_handle,

View File

@@ -180,7 +180,7 @@ impl ConfigurationSource {
}
fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand)>) -> String {
let (name, command, args, env) = match existing {
let (name, path, args, env) = match existing {
Some((id, cmd)) => {
let args = serde_json::to_string(&cmd.args).unwrap();
let env = serde_json::to_string(&cmd.env.unwrap_or_default()).unwrap();
@@ -198,12 +198,14 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand)
r#"{{
/// The name of your MCP server
"{name}": {{
/// The command which runs the MCP server
"command": "{command}",
/// The arguments to pass to the MCP server
"args": {args},
/// The environment variables to set
"env": {env}
"command": {{
/// The path to the executable
"path": "{path}",
/// The arguments to pass to the executable
"args": {args},
/// The environment variables to set for the executable
"env": {env}
}}
}}
}}"#
)
@@ -437,7 +439,8 @@ fn parse_input(text: &str) -> Result<(ContextServerId, ContextServerCommand)> {
let object = value.as_object().context("Expected object")?;
anyhow::ensure!(object.len() == 1, "Expected exactly one key-value pair");
let (context_server_name, value) = object.into_iter().next().unwrap();
let command: ContextServerCommand = serde_json::from_value(value.clone())?;
let command = value.get("command").context("Expected command")?;
let command: ContextServerCommand = serde_json::from_value(command.clone())?;
Ok((ContextServerId(context_server_name.clone().into()), command))
}

View File

@@ -211,7 +211,7 @@ impl AgentDiffPane {
}
fn update_title(&mut self, cx: &mut Context<Self>) {
let new_title = self.thread.read(cx).summary().unwrap_or("Agent Changes");
let new_title = self.thread.read(cx).title().unwrap_or("Agent Changes");
if new_title != self.title {
self.title = new_title;
cx.emit(EditorEvent::TitleChanged);
@@ -461,7 +461,7 @@ impl Item for AgentDiffPane {
}
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
let summary = self.thread.read(cx).summary().unwrap_or("Agent Changes");
let summary = self.thread.read(cx).title().unwrap_or("Agent Changes");
Label::new(format!("Review: {}", summary))
.color(if params.selected {
Color::Default
@@ -1369,8 +1369,6 @@ impl AgentDiff {
| ThreadEvent::MessageDeleted(_)
| ThreadEvent::SummaryGenerated
| ThreadEvent::SummaryChanged
| ThreadEvent::UsePendingTools { .. }
| ThreadEvent::ToolFinished { .. }
| ThreadEvent::CheckpointChanged
| ThreadEvent::ToolConfirmationNeeded
| ThreadEvent::ToolUseLimitReached
@@ -1801,7 +1799,10 @@ mod tests {
})
.await
.unwrap();
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
let thread = thread_store
.update(cx, |store, cx| store.create_thread(cx))
.await
.unwrap();
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
let (workspace, cx) =
@@ -1966,7 +1967,10 @@ mod tests {
})
.await
.unwrap();
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
let thread = thread_store
.update(cx, |store, cx| store.create_thread(cx))
.await
.unwrap();
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
let (workspace, cx) =

View File

@@ -45,7 +45,7 @@ impl AgentModelSelector {
let registry = LanguageModelRegistry::read_global(cx);
if let Some(provider) = registry.provider(&model.provider_id())
{
thread.set_configured_model(
thread.set_model(
Some(ConfiguredModel {
provider,
model: model.clone(),

View File

@@ -26,7 +26,7 @@ use crate::{
ui::AgentOnboardingModal,
};
use agent::{
Thread, ThreadError, ThreadEvent, ThreadId, ThreadSummary, TokenUsageRatio,
Thread, ThreadError, ThreadEvent, ThreadId, ThreadTitle, TokenUsageRatio,
context_store::ContextStore,
history_store::{HistoryEntryId, HistoryStore},
thread_store::{TextThreadStore, ThreadStore},
@@ -41,7 +41,7 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use fs::Fs;
use gpui::{
Action, Animation, AnimationExt as _, AnyElement, App, AsyncWindowContext, ClipboardItem,
Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, Hsla,
Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, FontWeight,
KeyContext, Pixels, Subscription, Task, UpdateGlobal, WeakEntity, linear_color_stop,
linear_gradient, prelude::*, pulsating_between,
};
@@ -59,7 +59,7 @@ use theme::ThemeSettings;
use time::UtcOffset;
use ui::utils::WithRemSize;
use ui::{
Banner, Callout, CheckboxWithLabel, ContextMenu, ElevationIndex, KeyBinding, PopoverMenu,
Banner, CheckboxWithLabel, ContextMenu, ElevationIndex, KeyBinding, PopoverMenu,
PopoverMenuHandle, ProgressBar, Tab, Tooltip, Vector, VectorName, prelude::*,
};
use util::ResultExt as _;
@@ -72,7 +72,7 @@ use zed_actions::{
agent::{OpenConfiguration, OpenOnboardingModal, ResetOnboarding},
assistant::{OpenRulesLibrary, ToggleFocus},
};
use zed_llm_client::{CompletionIntent, UsageLimit};
use zed_llm_client::UsageLimit;
const AGENT_PANEL_KEY: &str = "agent_panel";
@@ -252,7 +252,7 @@ impl ActiveView {
thread.update(cx, |thread, cx| {
thread.thread().update(cx, |thread, cx| {
thread.set_summary(new_summary, cx);
thread.set_title(new_summary, cx);
});
})
}
@@ -278,7 +278,7 @@ impl ActiveView {
let editor = editor.clone();
move |_, thread, event, window, cx| match event {
ThreadEvent::SummaryGenerated => {
let summary = thread.read(cx).summary().or_default();
let summary = thread.read(cx).title().or_default();
editor.update(cx, |editor, cx| {
editor.set_text(summary, window, cx);
@@ -492,10 +492,15 @@ impl AgentPanel {
None
};
let thread = thread_store
.update(cx, |this, cx| this.create_thread(cx))?
.await?;
let panel = workspace.update_in(cx, |workspace, window, cx| {
let panel = cx.new(|cx| {
Self::new(
workspace,
thread,
thread_store,
context_store,
prompt_store,
@@ -518,13 +523,13 @@ impl AgentPanel {
fn new(
workspace: &Workspace,
thread: Entity<Thread>,
thread_store: Entity<ThreadStore>,
context_store: Entity<TextThreadStore>,
prompt_store: Option<Entity<PromptStore>>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
let fs = workspace.app_state().fs.clone();
let user_store = workspace.app_state().user_store.clone();
let project = workspace.project();
@@ -647,11 +652,12 @@ impl AgentPanel {
|this, _, event: &language_model::Event, cx| match event {
language_model::Event::DefaultModelChanged => match &this.active_view {
ActiveView::Thread { thread, .. } => {
thread
.read(cx)
.thread()
.clone()
.update(cx, |thread, cx| thread.get_or_init_configured_model(cx));
// todo!(do we need this?);
// thread
// .read(cx)
// .thread()
// .clone()
// .update(cx, |thread, cx| thread.get_or_init_configured_model(cx));
}
ActiveView::TextThread { .. }
| ActiveView::History
@@ -784,46 +790,61 @@ impl AgentPanel {
.detach_and_log_err(cx);
}
let active_thread = cx.new(|cx| {
ActiveThread::new(
thread.clone(),
self.thread_store.clone(),
self.context_store.clone(),
context_store.clone(),
self.language_registry.clone(),
self.workspace.clone(),
window,
cx,
)
});
let fs = self.fs.clone();
let user_store = self.user_store.clone();
let thread_store = self.thread_store.clone();
let text_thread_store = self.context_store.clone();
let prompt_store = self.prompt_store.clone();
let language_registry = self.language_registry.clone();
let workspace = self.workspace.clone();
let message_editor = cx.new(|cx| {
MessageEditor::new(
self.fs.clone(),
self.workspace.clone(),
self.user_store.clone(),
context_store.clone(),
self.prompt_store.clone(),
self.thread_store.downgrade(),
self.context_store.downgrade(),
thread.clone(),
window,
cx,
)
});
cx.spawn_in(window, async move |this, cx| {
let thread = thread.await?;
let active_thread = cx.new_window_entity(|window, cx| {
ActiveThread::new(
thread.clone(),
thread_store.clone(),
text_thread_store.clone(),
context_store.clone(),
language_registry.clone(),
workspace.clone(),
window,
cx,
)
})?;
if let Some(text) = preserved_text {
message_editor.update(cx, |editor, cx| {
editor.set_text(text, window, cx);
});
}
let message_editor = cx.new_window_entity(|window, cx| {
MessageEditor::new(
fs.clone(),
workspace.clone(),
user_store.clone(),
context_store.clone(),
prompt_store.clone(),
thread_store.downgrade(),
text_thread_store.downgrade(),
thread.clone(),
window,
cx,
)
})?;
message_editor.focus_handle(cx).focus(window);
if let Some(text) = preserved_text {
message_editor.update_in(cx, |editor, window, cx| {
editor.set_text(text, window, cx);
});
}
let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
self.set_active_view(thread_view, window, cx);
this.update_in(cx, |this, window, cx| {
message_editor.focus_handle(cx).focus(window);
AgentDiff::set_active_thread(&self.workspace, &thread, window, cx);
let thread_view =
ActiveView::thread(active_thread.clone(), message_editor, window, cx);
this.set_active_view(thread_view, window, cx);
AgentDiff::set_active_thread(&this.workspace, &thread, window, cx);
})
})
.detach_and_log_err(cx);
}
fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -1254,23 +1275,11 @@ impl AgentPanel {
return;
}
let model = thread_state.configured_model().map(|cm| cm.model.clone());
if let Some(model) = model {
thread.update(cx, |active_thread, cx| {
active_thread.thread().update(cx, |thread, cx| {
thread.insert_invisible_continue_message(cx);
thread.advance_prompt_id();
thread.send_to_model(
model,
CompletionIntent::UserPrompt,
Some(window.window_handle()),
cx,
);
});
});
} else {
log::warn!("No configured model available for continuation");
}
thread.update(cx, |active_thread, cx| {
active_thread
.thread()
.update(cx, |thread, cx| thread.resume(window, cx))
});
}
fn toggle_burn_mode(
@@ -1552,24 +1561,24 @@ impl AgentPanel {
let state = {
let active_thread = active_thread.read(cx);
if active_thread.is_empty() {
&ThreadSummary::Pending
&ThreadTitle::Pending
} else {
active_thread.summary(cx)
}
};
match state {
ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT.clone())
ThreadTitle::Pending => Label::new(ThreadTitle::DEFAULT.clone())
.truncate()
.into_any_element(),
ThreadSummary::Generating => Label::new(LOADING_SUMMARY_PLACEHOLDER)
ThreadTitle::Generating => Label::new(LOADING_SUMMARY_PLACEHOLDER)
.truncate()
.into_any_element(),
ThreadSummary::Ready(_) => div()
ThreadTitle::Ready(_) => div()
.w_full()
.child(change_title_editor.clone())
.into_any_element(),
ThreadSummary::Error => h_flex()
ThreadTitle::Error => h_flex()
.w_full()
.child(change_title_editor.clone())
.child(
@@ -2024,8 +2033,10 @@ impl AgentPanel {
.read(cx)
.thread()
.read(cx)
.configured_model()
.map_or(false, |model| model.provider.id() == ZED_CLOUD_PROVIDER_ID);
.model()
.map_or(false, |model| {
model.provider.id().0 == ZED_CLOUD_PROVIDER_ID
});
if !is_using_zed_provider {
return false;
@@ -2598,7 +2609,7 @@ impl AgentPanel {
Some(ConfigurationError::ProviderPendingTermsAcceptance(provider)) => {
parent.child(Banner::new().severity(ui::Severity::Warning).child(
h_flex().w_full().children(provider.render_accept_terms(
LanguageModelProviderTosView::ThreadEmptyState,
LanguageModelProviderTosView::ThreadtEmptyState,
cx,
)),
))
@@ -2627,7 +2638,7 @@ impl AgentPanel {
return None;
}
let model = thread.configured_model()?.model;
let model = thread.model()?.model;
let focus_handle = self.focus_handle(cx);
@@ -2689,90 +2700,58 @@ impl AgentPanel {
Some(div().px_2().pb_2().child(banner).into_any_element())
}
/// Builds a small muted icon button that copies `message` to the clipboard
/// when clicked; shown next to error callouts in the agent panel.
fn create_copy_button(&self, message: impl Into<String>) -> impl IntoElement {
    // Convert once up front so the click handler only has to clone the
    // owned String on each press.
    let message = message.into();
    IconButton::new("copy", IconName::Copy)
        .icon_size(IconSize::Small)
        .icon_color(Color::Muted)
        .tooltip(Tooltip::text("Copy Error Message"))
        .on_click(move |_, _, cx| {
            cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
        })
}
/// Builds an icon button that clears the active thread's last error and
/// triggers a re-render of the panel.
fn dismiss_error_button(
    &self,
    thread: &Entity<ActiveThread>,
    cx: &mut Context<Self>,
) -> impl IntoElement {
    IconButton::new("dismiss", IconName::Close)
        .icon_size(IconSize::Small)
        .icon_color(Color::Muted)
        .tooltip(Tooltip::text("Dismiss Error"))
        .on_click(cx.listener({
            // Clone the entity handle so the 'static listener closure can
            // own it independently of this borrow.
            let thread = thread.clone();
            move |_, _, _, cx| {
                thread.update(cx, |this, _cx| {
                    this.clear_last_error();
                });
                // Request a redraw so the error callout disappears.
                cx.notify();
            }
        }))
}
/// Builds an accent-tinted "Upgrade" button that dismisses the current
/// error and opens the user's Zed account page in the browser.
fn upgrade_button(
    &self,
    thread: &Entity<ActiveThread>,
    cx: &mut Context<Self>,
) -> impl IntoElement {
    Button::new("upgrade", "Upgrade")
        .label_size(LabelSize::Small)
        .style(ButtonStyle::Tinted(ui::TintColor::Accent))
        .on_click(cx.listener({
            // Clone the entity handle for the 'static listener closure.
            let thread = thread.clone();
            move |_, _, _, cx| {
                // Clear the error first so the callout is gone when the
                // user returns from the browser.
                thread.update(cx, |this, _cx| {
                    this.clear_last_error();
                });
                cx.open_url(&zed_urls::account_url(cx));
                cx.notify();
            }
        }))
}
/// Background color for error callouts: the theme's error status color at
/// low (8%) opacity so it reads as a tint rather than a solid fill.
fn error_callout_bg(&self, cx: &Context<Self>) -> Hsla {
    cx.theme().status().error.opacity(0.08)
}
fn render_payment_required_error(
&self,
thread: &Entity<ActiveThread>,
cx: &mut Context<Self>,
) -> AnyElement {
const ERROR_MESSAGE: &str =
"You reached your free usage limit. Upgrade to Zed Pro for more prompts.";
const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used.";
let icon = Icon::new(IconName::XCircle)
.size(IconSize::Small)
.color(Color::Error);
div()
.border_t_1()
.border_color(cx.theme().colors().border)
v_flex()
.gap_0p5()
.child(
Callout::new()
.icon(icon)
.title("Free Usage Exceeded")
.description(ERROR_MESSAGE)
.tertiary_action(self.upgrade_button(thread, cx))
.secondary_action(self.create_copy_button(ERROR_MESSAGE))
.primary_action(self.dismiss_error_button(thread, cx))
.bg_color(self.error_callout_bg(cx)),
h_flex()
.gap_1p5()
.items_center()
.child(Icon::new(IconName::XCircle).color(Color::Error))
.child(Label::new("Free Usage Exceeded").weight(FontWeight::MEDIUM)),
)
.into_any_element()
.child(
div()
.id("error-message")
.max_h_24()
.overflow_y_scroll()
.child(Label::new(ERROR_MESSAGE)),
)
.child(
h_flex()
.justify_end()
.mt_1()
.gap_1()
.child(self.create_copy_button(ERROR_MESSAGE))
.child(Button::new("subscribe", "Subscribe").on_click(cx.listener({
let thread = thread.clone();
move |_, _, _, cx| {
thread.update(cx, |this, _cx| {
this.clear_last_error();
});
cx.open_url(&zed_urls::account_url(cx));
cx.notify();
}
})))
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener({
let thread = thread.clone();
move |_, _, _, cx| {
thread.update(cx, |this, _cx| {
this.clear_last_error();
});
cx.notify();
}
}))),
)
.into_any()
}
fn render_model_request_limit_reached_error(
@@ -2782,28 +2761,67 @@ impl AgentPanel {
cx: &mut Context<Self>,
) -> AnyElement {
let error_message = match plan {
Plan::ZedPro => "Upgrade to usage-based billing for more prompts.",
Plan::ZedProTrial | Plan::Free => "Upgrade to Zed Pro for more prompts.",
Plan::ZedPro => {
"Model request limit reached. Upgrade to usage-based billing for more requests."
}
Plan::ZedProTrial => {
"Model request limit reached. Upgrade to Zed Pro for more requests."
}
Plan::Free => "Model request limit reached. Upgrade to Zed Pro for more requests.",
};
let call_to_action = match plan {
Plan::ZedPro => "Upgrade to usage-based billing",
Plan::ZedProTrial => "Upgrade to Zed Pro",
Plan::Free => "Upgrade to Zed Pro",
};
let icon = Icon::new(IconName::XCircle)
.size(IconSize::Small)
.color(Color::Error);
div()
.border_t_1()
.border_color(cx.theme().colors().border)
v_flex()
.gap_0p5()
.child(
Callout::new()
.icon(icon)
.title("Model Prompt Limit Reached")
.description(error_message)
.tertiary_action(self.upgrade_button(thread, cx))
.secondary_action(self.create_copy_button(error_message))
.primary_action(self.dismiss_error_button(thread, cx))
.bg_color(self.error_callout_bg(cx)),
h_flex()
.gap_1p5()
.items_center()
.child(Icon::new(IconName::XCircle).color(Color::Error))
.child(Label::new("Model Request Limit Reached").weight(FontWeight::MEDIUM)),
)
.into_any_element()
.child(
div()
.id("error-message")
.max_h_24()
.overflow_y_scroll()
.child(Label::new(error_message)),
)
.child(
h_flex()
.justify_end()
.mt_1()
.gap_1()
.child(self.create_copy_button(error_message))
.child(
Button::new("subscribe", call_to_action).on_click(cx.listener({
let thread = thread.clone();
move |_, _, _, cx| {
thread.update(cx, |this, _cx| {
this.clear_last_error();
});
cx.open_url(&zed_urls::account_url(cx));
cx.notify();
}
})),
)
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener({
let thread = thread.clone();
move |_, _, _, cx| {
thread.update(cx, |this, _cx| {
this.clear_last_error();
});
cx.notify();
}
}))),
)
.into_any()
}
fn render_error_message(
@@ -2814,24 +2832,40 @@ impl AgentPanel {
cx: &mut Context<Self>,
) -> AnyElement {
let message_with_header = format!("{}\n{}", header, message);
let icon = Icon::new(IconName::XCircle)
.size(IconSize::Small)
.color(Color::Error);
div()
.border_t_1()
.border_color(cx.theme().colors().border)
v_flex()
.gap_0p5()
.child(
Callout::new()
.icon(icon)
.title(header)
.description(message.clone())
.primary_action(self.dismiss_error_button(thread, cx))
.secondary_action(self.create_copy_button(message_with_header))
.bg_color(self.error_callout_bg(cx)),
h_flex()
.gap_1p5()
.items_center()
.child(Icon::new(IconName::XCircle).color(Color::Error))
.child(Label::new(header).weight(FontWeight::MEDIUM)),
)
.into_any_element()
.child(
div()
.id("error-message")
.max_h_32()
.overflow_y_scroll()
.child(Label::new(message.clone())),
)
.child(
h_flex()
.justify_end()
.mt_1()
.gap_1()
.child(self.create_copy_button(message_with_header))
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener({
let thread = thread.clone();
move |_, _, _, cx| {
thread.update(cx, |this, _cx| {
this.clear_last_error();
});
cx.notify();
}
}))),
)
.into_any()
}
fn render_prompt_editor(
@@ -2976,6 +3010,15 @@ impl AgentPanel {
}
}
/// Builds an icon button that copies `message` to the clipboard when
/// clicked; used for error messages rendered by the panel.
fn create_copy_button(&self, message: impl Into<String>) -> impl IntoElement {
    // Convert once so the click handler only clones the owned String.
    let message = message.into();
    IconButton::new("copy", IconName::Copy)
        .on_click(move |_, _, cx| {
            cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
        })
        .tooltip(Tooltip::text("Copy Error Message"))
}
fn key_context(&self) -> KeyContext {
let mut key_context = KeyContext::new_with_defaults();
key_context.add("AgentPanel");
@@ -3057,9 +3100,18 @@ impl Render for AgentPanel {
thread.clone().into_any_element()
})
.children(self.render_tool_use_limit_reached(window, cx))
.child(h_flex().child(message_editor.clone()))
.when_some(thread.read(cx).last_error(), |this, last_error| {
this.child(
div()
.absolute()
.right_3()
.bottom_12()
.max_w_96()
.py_2()
.px_3()
.elevation_2(cx)
.occlude()
.child(match last_error {
ThreadError::PaymentRequired => {
self.render_payment_required_error(thread, cx)
@@ -3073,7 +3125,6 @@ impl Render for AgentPanel {
.into_any(),
)
})
.child(h_flex().child(message_editor.clone()))
.child(self.render_drag_target(cx)),
ActiveView::History => parent.child(self.history.clone()),
ActiveView::TextThread {

View File

@@ -92,7 +92,6 @@ actions!(
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
#[serde(deny_unknown_fields)]
pub struct NewThread {
#[serde(default)]
from_thread_id: Option<ThreadId>,
@@ -100,7 +99,6 @@ pub struct NewThread {
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
#[serde(deny_unknown_fields)]
pub struct ManageProfiles {
#[serde(default)]
pub customize_tools: Option<AgentProfileId>,
@@ -123,7 +121,7 @@ pub(crate) enum ModelUsageContext {
impl ModelUsageContext {
pub fn configured_model(&self, cx: &App) -> Option<ConfiguredModel> {
match self {
Self::Thread(thread) => thread.read(cx).configured_model(),
Self::Thread(thread) => thread.read(cx).model(),
Self::InlineAssistant => {
LanguageModelRegistry::read_global(cx).inline_assistant_model()
}
@@ -211,7 +209,7 @@ fn update_active_language_model_from_settings(cx: &mut App) {
}
}
let default = settings.default_model.as_ref().map(to_selected_model);
let default = to_selected_model(&settings.default_model);
let inline_assistant = settings
.inline_assistant_model
.as_ref()
@@ -231,7 +229,7 @@ fn update_active_language_model_from_settings(cx: &mut App) {
.collect::<Vec<_>>();
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.select_default_model(default.as_ref(), cx);
registry.select_default_model(Some(&default), cx);
registry.select_inline_assistant_model(inline_assistant.as_ref(), cx);
registry.select_commit_message_model(commit_message.as_ref(), cx);
registry.select_thread_summary_model(thread_summary.as_ref(), cx);

View File

@@ -670,7 +670,7 @@ fn recent_context_picker_entries(
let mut threads = unordered_thread_entries(thread_store, text_thread_store, cx)
.filter(|(_, thread)| match thread {
ThreadContextEntry::Thread { id, .. } => {
Some(id) != active_thread_id && !current_threads.contains(id)
Some(id) != active_thread_id.as_ref() && !current_threads.contains(id)
}
ThreadContextEntry::Context { .. } => true,
})

View File

@@ -169,13 +169,13 @@ impl ContextStrip {
if self
.context_store
.read(cx)
.includes_thread(active_thread.id())
.includes_thread(&active_thread.id())
{
return None;
}
Some(SuggestedContext::Thread {
name: active_thread.summary().or_default(),
name: active_thread.title().or_default(),
thread: weak_active_thread,
})
} else if let Some(active_context_editor) = panel.active_context_editor() {

View File

@@ -399,7 +399,7 @@ impl PickerDelegate for LanguageModelPickerDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let all_models = self.all_models.clone();
let active_model = (self.get_active_model)(cx);
let current_index = self.selected_index;
let bg_executor = cx.background_executor();
let language_model_registry = LanguageModelRegistry::global(cx);
@@ -441,9 +441,12 @@ impl PickerDelegate for LanguageModelPickerDelegate {
cx.spawn_in(window, async move |this, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate.filtered_entries = filtered_models.entries();
// Finds the currently selected model in the list
let new_index =
Self::get_active_model_index(&this.delegate.filtered_entries, active_model);
// Preserve selection focus
let new_index = if current_index >= this.delegate.filtered_entries.len() {
0
} else {
current_index
};
this.set_selected_index(new_index, Some(picker::Direction::Down), true, window, cx);
cx.notify();
})

View File

@@ -387,25 +387,14 @@ impl MessageEditor {
thread
.update(cx, |thread, cx| {
thread.insert_user_message(
user_message,
loaded_context,
checkpoint.ok(),
user_message_creases,
cx,
);
})
.log_err();
thread
.update(cx, |thread, cx| {
thread.advance_prompt_id();
thread.send_to_model(
model,
CompletionIntent::UserPrompt,
Some(window_handle),
cx,
);
todo!();
// thread.send(
// user_message,
// loaded_context,
// checkpoint.ok(),
// user_message_creases,
// cx,
// );
})
.log_err();
})
@@ -1250,7 +1239,9 @@ impl MessageEditor {
self.thread
.read(cx)
.configured_model()
.map_or(false, |model| model.provider.id() == ZED_CLOUD_PROVIDER_ID)
.map_or(false, |model| {
model.provider.id().0 == ZED_CLOUD_PROVIDER_ID
})
}
fn render_usage_callout(&self, line_height: Pixels, cx: &mut Context<Self>) -> Option<Div> {

View File

@@ -156,7 +156,7 @@ impl Render for ProfileSelector {
.map(|profile| profile.name.clone())
.unwrap_or_else(|| "Unknown".into());
let configured_model = self.thread.read(cx).configured_model().or_else(|| {
let configured_model = self.thread.read(cx).model().or_else(|| {
let model_registry = LanguageModelRegistry::read_global(cx);
model_registry.default_model()
});

View File

@@ -498,7 +498,7 @@ impl AddedContext {
render_hover: {
let thread = handle.thread.clone();
Some(Rc::new(move |_, cx| {
let text = thread.read(cx).latest_detailed_summary_or_text();
let text = thread.read(cx).latest_detailed_summary_or_text(cx);
ContextPillHover::new_text(text.clone(), cx).into()
}))
},

View File

@@ -6,7 +6,7 @@ use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::http::{self, HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, StatusCode};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;
@@ -356,7 +356,7 @@ pub async fn complete(
.send(request)
.await
.map_err(AnthropicError::HttpSend)?;
let status_code = response.status();
let status = response.status();
let mut body = String::new();
response
.body_mut()
@@ -364,12 +364,12 @@ pub async fn complete(
.await
.map_err(AnthropicError::ReadResponse)?;
if status_code.is_success() {
if status.is_success() {
Ok(serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse)?)
} else {
Err(AnthropicError::HttpResponseError {
status_code,
message: body,
status: status.as_u16(),
body,
})
}
}
@@ -444,7 +444,11 @@ impl RateLimitInfo {
}
Self {
retry_after: parse_retry_after(headers),
retry_after: headers
.get("retry-after")
.and_then(|v| v.to_str().ok())
.and_then(|v| v.parse::<u64>().ok())
.map(Duration::from_secs),
requests: RateLimit::from_headers("requests", headers).ok(),
tokens: RateLimit::from_headers("tokens", headers).ok(),
input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
@@ -453,17 +457,6 @@ impl RateLimitInfo {
}
}
/// Parses the Retry-After header value as an integer number of seconds (anthropic always uses
/// seconds). Note that other services might specify an HTTP date or some other format for this
/// header. Returns `None` if the header is not present or cannot be parsed.
pub fn parse_retry_after(headers: &HeaderMap<HeaderValue>) -> Option<Duration> {
headers
.get("retry-after")
.and_then(|v| v.to_str().ok())
.and_then(|v| v.parse::<u64>().ok())
.map(Duration::from_secs)
}
fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> anyhow::Result<&'a str> {
Ok(headers
.get(key)
@@ -527,10 +520,6 @@ pub async fn stream_completion_with_rate_limit_info(
})
.boxed();
Ok((stream, Some(rate_limits)))
} else if response.status().as_u16() == 529 {
Err(AnthropicError::ServerOverloaded {
retry_after: rate_limits.retry_after,
})
} else if let Some(retry_after) = rate_limits.retry_after {
Err(AnthropicError::RateLimit { retry_after })
} else {
@@ -543,9 +532,10 @@ pub async fn stream_completion_with_rate_limit_info(
match serde_json::from_str::<Event>(&body) {
Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
Ok(_) | Err(_) => Err(AnthropicError::HttpResponseError {
status_code: response.status(),
message: body,
Ok(_) => Err(AnthropicError::UnexpectedResponseFormat(body)),
Err(_) => Err(AnthropicError::HttpResponseError {
status: response.status().as_u16(),
body: body,
}),
}
}
@@ -811,19 +801,16 @@ pub enum AnthropicError {
ReadResponse(io::Error),
/// HTTP error response from the API
HttpResponseError {
status_code: StatusCode,
message: String,
},
HttpResponseError { status: u16, body: String },
/// Rate limit exceeded
RateLimit { retry_after: Duration },
/// Server overloaded
ServerOverloaded { retry_after: Option<Duration> },
/// API returned an error response
ApiError(ApiError),
/// Unexpected response format
UnexpectedResponseFormat(String),
}
#[derive(Debug, Serialize, Deserialize, Error)]

View File

@@ -2140,8 +2140,7 @@ impl AssistantContext {
);
}
LanguageModelCompletionEvent::ToolUse(_) |
LanguageModelCompletionEvent::ToolUseJsonParseError { .. } |
LanguageModelCompletionEvent::UsageUpdate(_) => {}
LanguageModelCompletionEvent::UsageUpdate(_) => {}
}
});

View File

@@ -46,7 +46,6 @@ pub use find_path_tool::FindPathToolInput;
pub use grep_tool::{GrepTool, GrepToolInput};
pub use open_tool::OpenTool;
pub use read_file_tool::{ReadFileTool, ReadFileToolInput};
pub use schema::root_schema_for;
pub use terminal_tool::TerminalTool;
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {

View File

@@ -29,7 +29,6 @@ use std::{
path::Path,
str::FromStr,
sync::mpsc,
time::Duration,
};
use util::path;
@@ -1659,14 +1658,12 @@ async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) ->
match request().await {
Ok(result) => return Ok(result),
Err(err) => match err.downcast::<LanguageModelCompletionError>() {
Ok(err) => match &err {
LanguageModelCompletionError::RateLimitExceeded { retry_after, .. }
| LanguageModelCompletionError::ServerOverloaded { retry_after, .. } => {
let retry_after = retry_after.unwrap_or(Duration::from_secs(5));
Ok(err) => match err {
LanguageModelCompletionError::RateLimitExceeded { retry_after } => {
// Wait for the duration supplied, with some jitter to avoid all requests being made at the same time.
let jitter = retry_after.mul_f64(rand::thread_rng().gen_range(0.0..1.0));
eprintln!(
"Attempt #{attempt}: {err}. Retry after {retry_after:?} + jitter of {jitter:?}"
"Attempt #{attempt}: Rate limit exceeded. Retry after {retry_after:?} + jitter of {jitter:?}"
);
Timer::after(retry_after + jitter).await;
continue;

View File

@@ -1,328 +0,0 @@
use crate::commit::get_messages;
use crate::{GitRemote, Oid};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet};
use futures::AsyncWriteExt;
use gpui::SharedString;
use serde::{Deserialize, Serialize};
use std::process::Stdio;
use std::{ops::Range, path::Path};
use text::Rope;
use time::OffsetDateTime;
use time::UtcOffset;
use time::macros::format_description;
pub use git2 as libgit;
/// Aggregated result of running `git blame` over a single file.
#[derive(Debug, Clone, Default)]
pub struct Blame {
    /// One entry per contiguous run of lines attributed to a single commit,
    /// sorted by the start of each line range.
    pub entries: Vec<BlameEntry>,
    /// Commit message text keyed by commit SHA, for the SHAs appearing in
    /// `entries`.
    pub messages: HashMap<Oid, String>,
    /// Remote URL supplied by the caller of `for_path`, stored verbatim.
    pub remote_url: Option<String>,
}
/// A commit message together with optional links and metadata resolved from
/// the repository's hosting provider.
#[derive(Clone, Debug, Default)]
pub struct ParsedCommitMessage {
    /// The commit message text.
    pub message: SharedString,
    // Permalink to the commit on its hosting provider, when one could be
    // built — presumably resolved elsewhere from the remote; confirm at the
    // construction site.
    pub permalink: Option<url::Url>,
    // Pull request referenced by the message, if the hosting provider
    // recognized one — TODO confirm against the code that populates this.
    pub pull_request: Option<crate::hosting_provider::PullRequest>,
    /// The git remote this commit was resolved against, if known.
    pub remote: Option<GitRemote>,
}
impl Blame {
pub async fn for_path(
git_binary: &Path,
working_directory: &Path,
path: &Path,
content: &Rope,
remote_url: Option<String>,
) -> Result<Self> {
let output = run_git_blame(git_binary, working_directory, path, content).await?;
let mut entries = parse_git_blame(&output)?;
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
let mut unique_shas = HashSet::default();
for entry in entries.iter_mut() {
unique_shas.insert(entry.sha);
}
let shas = unique_shas.into_iter().collect::<Vec<_>>();
let messages = get_messages(working_directory, &shas)
.await
.context("failed to get commit messages")?;
Ok(Self {
entries,
messages,
remote_url,
})
}
}
// Fatal error strings `git blame` prints when the repository has no HEAD
// commit yet, and when the path is unknown to git. NOTE(review): matching
// against these presumably happens in `run_git_blame` — confirm there.
const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD";
const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
/// One blame annotation: a contiguous range of lines in the final file that
/// `git blame --incremental` attributes to a single commit.
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
pub struct BlameEntry {
    /// SHA of the commit these lines are attributed to (all-zero for
    /// uncommitted lines).
    pub sha: Oid,
    /// 0-indexed, end-exclusive line range in the *final* file, converted
    /// from git's 1-indexed `<resultline>`/`<num-lines>` header fields.
    pub range: Range<u32>,
    /// 1-indexed line number in the blamed commit's version of the file
    /// (git's `<sourceline>`).
    pub original_line_number: u32,
    // Signature fields below are populated only for committed (non-zero)
    // SHAs, straight from the corresponding `git blame` header keys.
    pub author: Option<String>,
    pub author_mail: Option<String>,
    /// Author timestamp as a Unix epoch value (`author-time`).
    pub author_time: Option<i64>,
    /// Author timezone as a "+HHMM"/"-HHMM" offset string (`author-tz`).
    pub author_tz: Option<String>,
    pub committer_name: Option<String>,
    pub committer_email: Option<String>,
    pub committer_time: Option<i64>,
    pub committer_tz: Option<String>,
    /// Value of the `summary` header (the commit's summary line).
    pub summary: Option<String>,
    /// Raw value of the `previous` header: "<sha> <filename>".
    pub previous: Option<String>,
    /// Path of the file as recorded in the entry's `filename` terminator.
    pub filename: String,
}
impl BlameEntry {
    /// Parses the first line of a `git blame --incremental` entry. The line
    /// MUST have this format:
    ///
    ///   <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
    ///
    /// Only `sha`, `range` (0-indexed, end-exclusive, in final-file
    /// coordinates) and `original_line_number` are populated; every other
    /// field is defaulted and filled in later from the entry's header lines.
    ///
    /// # Errors
    /// Returns an error if any of the four fields is missing or unparsable.
    fn new_from_blame_line(line: &str) -> Result<BlameEntry> {
        let mut parts = line.split_whitespace();

        let sha = parts
            .next()
            .and_then(|line| line.parse::<Oid>().ok())
            .ok_or_else(|| anyhow!("failed to parse sha"))?;

        let original_line_number = parts
            .next()
            .and_then(|line| line.parse::<u32>().ok())
            .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
        let final_line_number = parts
            .next()
            .and_then(|line| line.parse::<u32>().ok())
            .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
        let line_count = parts
            .next()
            .and_then(|line| line.parse::<u32>().ok())
            // Fixed: this error previously repeated "Failed to parse final
            // line number", misreporting which field was malformed.
            .ok_or_else(|| anyhow!("Failed to parse line count"))?;

        // Convert git's 1-indexed result line into a 0-indexed,
        // end-exclusive range over the final file.
        let start_line = final_line_number.saturating_sub(1);
        let end_line = start_line + line_count;
        let range = start_line..end_line;

        Ok(Self {
            sha,
            range,
            original_line_number,
            ..Default::default()
        })
    }

    /// Returns the author timestamp as an `OffsetDateTime` expressed in the
    /// author's own timezone.
    ///
    /// Falls back to the current UTC time when either the author time or
    /// the author timezone is missing.
    ///
    /// # Errors
    /// Returns an error if the timezone string or timestamp cannot be
    /// converted.
    pub fn author_offset_date_time(&self) -> Result<time::OffsetDateTime> {
        if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) {
            // Timezones arrive as "+HHMM"/"-HHMM" offset strings.
            let format = format_description!("[offset_hour][offset_minute]");
            let offset = UtcOffset::parse(author_tz, &format)?;
            let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?;
            Ok(date_time_utc.to_offset(offset))
        } else {
            // Directly return current time in UTC if there's no author time
            // or timezone. (Comment fixed: the code reads the *author*
            // fields, not the committer's.)
            Ok(time::OffsetDateTime::now_utc())
        }
    }
}
// parse_git_blame parses the output of `git blame --incremental`, which returns
// all the blame-entries for a given path incrementally, as it finds them.
//
// Each entry *always* starts with:
//
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
//
// Each entry *always* ends with:
//
// filename <whitespace-quoted-filename-goes-here>
//
// Line numbers are 1-indexed.
//
// A `git blame --incremental` entry looks like this:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1
// author Joe Schmoe
// author-mail <joe.schmoe@example.com>
// author-time 1709741400
// author-tz +0100
// committer Joe Schmoe
// committer-mail <joe.schmoe@example.com>
// committer-time 1709741400
// committer-tz +0100
// summary Joe's cool commit
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// If the entry has the same SHA as an entry that was already printed then no
// signature information is printed:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html
/// Parses the full output of `git blame --incremental` into a list of
/// committed `BlameEntry` values. Uncommitted (zero-SHA) entries are parsed
/// but discarded. See the format description in the comment block above.
fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
    // Completed entries, in the order their "filename" terminator was seen.
    let mut entries: Vec<BlameEntry> = Vec::new();
    // Maps a SHA to the slot in `entries` holding its signature fields, so
    // repeated-SHA entries (which omit the signature block) can copy them.
    // NOTE(review): the index is recorded *before* the conditional push
    // below, and zero-SHA entries are never pushed, so a zero SHA can map to
    // an unrelated slot. Harmless today because zero-SHA entries are
    // discarded anyway — but confirm before relying on `index`.
    let mut index: HashMap<Oid, usize> = HashMap::default();
    // The entry currently being accumulated, between its header line and
    // its "filename" terminator.
    let mut current_entry: Option<BlameEntry> = None;
    for line in output.lines() {
        let mut done = false;
        match &mut current_entry {
            None => {
                // Start of a new entry:
                // "<sha> <sourceline> <resultline> <num-lines>".
                let mut new_entry = BlameEntry::new_from_blame_line(line)?;
                // If this SHA was seen before, git omitted the signature
                // headers; copy them from the previously parsed entry.
                if let Some(existing_entry) = index
                    .get(&new_entry.sha)
                    .and_then(|slot| entries.get(*slot))
                {
                    new_entry.author.clone_from(&existing_entry.author);
                    new_entry
                        .author_mail
                        .clone_from(&existing_entry.author_mail);
                    new_entry.author_time = existing_entry.author_time;
                    new_entry.author_tz.clone_from(&existing_entry.author_tz);
                    new_entry
                        .committer_name
                        .clone_from(&existing_entry.committer_name);
                    new_entry
                        .committer_email
                        .clone_from(&existing_entry.committer_email);
                    new_entry.committer_time = existing_entry.committer_time;
                    new_entry
                        .committer_tz
                        .clone_from(&existing_entry.committer_tz);
                    new_entry.summary.clone_from(&existing_entry.summary);
                }
                current_entry.replace(new_entry);
            }
            Some(entry) => {
                // Header lines are "<key> <value>" pairs; skip anything
                // without a space.
                let Some((key, value)) = line.split_once(' ') else {
                    continue;
                };
                // A zero SHA marks uncommitted lines; their signature
                // headers are ignored.
                let is_committed = !entry.sha.is_zero();
                match key {
                    "filename" => {
                        // "filename" always terminates the entry.
                        entry.filename = value.into();
                        done = true;
                    }
                    "previous" => entry.previous = Some(value.into()),
                    "summary" if is_committed => entry.summary = Some(value.into()),
                    "author" if is_committed => entry.author = Some(value.into()),
                    "author-mail" if is_committed => entry.author_mail = Some(value.into()),
                    "author-time" if is_committed => {
                        entry.author_time = Some(value.parse::<i64>()?)
                    }
                    "author-tz" if is_committed => entry.author_tz = Some(value.into()),
                    "committer" if is_committed => entry.committer_name = Some(value.into()),
                    "committer-mail" if is_committed => entry.committer_email = Some(value.into()),
                    "committer-time" if is_committed => {
                        entry.committer_time = Some(value.parse::<i64>()?)
                    }
                    "committer-tz" if is_committed => entry.committer_tz = Some(value.into()),
                    // Unknown keys are tolerated and skipped.
                    _ => {}
                }
            }
        };
        if done {
            if let Some(entry) = current_entry.take() {
                index.insert(entry.sha, entries.len());
                // We only want annotations that have a commit.
                if !entry.sha.is_zero() {
                    entries.push(entry);
                }
            }
        }
    }
    Ok(entries)
}
#[cfg(test)]
mod tests {
    use std::path::PathBuf;
    use super::BlameEntry;
    use super::parse_git_blame;
    /// Reads a fixture file from `test_data/`, panicking with a hint if it is
    /// missing (fixtures are generated, not hand-written).
    fn read_test_data(filename: &str) -> String {
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("test_data");
        path.push(filename);
        std::fs::read_to_string(&path)
            .unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path))
    }
    /// Compares `entries` against the golden file
    /// `test_data/golden/<golden_filename>.json`.
    ///
    /// Run with `UPDATE_GOLDEN=true` to (re)generate the golden file instead
    /// of asserting against it.
    fn assert_eq_golden(entries: &[BlameEntry], golden_filename: &str) {
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("test_data");
        path.push("golden");
        path.push(format!("{}.json", golden_filename));
        let mut have_json =
            serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON");
        // We always want to save with a trailing newline.
        have_json.push('\n');
        let update = std::env::var("UPDATE_GOLDEN")
            .map(|val| val.eq_ignore_ascii_case("true"))
            .unwrap_or(false);
        if update {
            std::fs::create_dir_all(path.parent().unwrap())
                .expect("could not create golden test data directory");
            std::fs::write(&path, have_json).expect("could not write out golden data");
        } else {
            // Normalize line endings so the comparison is stable on Windows checkouts.
            let want_json =
                std::fs::read_to_string(&path).unwrap_or_else(|_| {
                    panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path);
                }).replace("\r\n", "\n");
            pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries");
        }
    }
    #[test]
    fn test_parse_git_blame_not_committed() {
        let output = read_test_data("blame_incremental_not_committed");
        let entries = parse_git_blame(&output).unwrap();
        assert_eq_golden(&entries, "blame_incremental_not_committed");
    }
    #[test]
    fn test_parse_git_blame_simple() {
        let output = read_test_data("blame_incremental_simple");
        let entries = parse_git_blame(&output).unwrap();
        assert_eq_golden(&entries, "blame_incremental_simple");
    }
    #[test]
    fn test_parse_git_blame_complex() {
        let output = read_test_data("blame_incremental_complex");
        let entries = parse_git_blame(&output).unwrap();
        assert_eq_golden(&entries, "blame_incremental_complex");
    }
}

View File

@@ -1,374 +0,0 @@
use crate::commit::get_messages;
use crate::{GitRemote, Oid};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet};
use futures::AsyncWriteExt;
use gpui::SharedString;
use serde::{Deserialize, Serialize};
use std::process::Stdio;
use std::{ops::Range, path::Path};
use text::Rope;
use time::OffsetDateTime;
use time::UtcOffset;
use time::macros::format_description;
pub use git2 as libgit;
/// The complete result of blaming a file: per-line blame entries plus the
/// commit messages and remote URL needed to present them.
#[derive(Debug, Clone, Default)]
pub struct Blame {
    /// Blame entries as produced by `Blame::for_path` (sorted by start row).
    pub entries: Vec<BlameEntry>,
    /// Full commit message for each commit appearing in `entries`.
    pub messages: HashMap<Oid, String>,
    /// The repository's remote URL, if one was supplied by the caller.
    pub remote_url: Option<String>,
}
/// A commit message together with optional links derived from its hosting
/// provider.
#[derive(Clone, Debug, Default)]
pub struct ParsedCommitMessage {
    /// The commit message text.
    pub message: SharedString,
    /// Permalink to the commit on its hosting provider, if one could be built.
    pub permalink: Option<url::Url>,
    /// The pull request associated with this commit, if one was detected.
    pub pull_request: Option<crate::hosting_provider::PullRequest>,
    /// The git remote this commit was resolved against, if known.
    pub remote: Option<GitRemote>,
}
impl Blame {
pub async fn for_path(
git_binary: &Path,
working_directory: &Path,
path: &Path,
content: &Rope,
remote_url: Option<String>,
) -> Result<Self> {
let output = run_git_blame(git_binary, working_directory, path, content).await?;
let mut entries = parse_git_blame(&output)?;
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
let mut unique_shas = HashSet::default();
for entry in entries.iter_mut() {
unique_shas.insert(entry.sha);
}
let shas = unique_shas.into_iter().collect::<Vec<_>>();
let messages = get_messages(working_directory, &shas)
.await
.context("failed to get commit messages")?;
Ok(Self {
entries,
messages,
remote_url,
})
}
}
/// Exact stderr produced by `git blame` in a repository with no commits yet.
const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD";
/// Stderr fragment produced when the blamed path is unknown to git.
const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
/// Spawns `git blame --incremental --contents - <path>`, streaming `contents`
/// over stdin, and returns the raw incremental output.
///
/// Returns an empty string (rather than an error) when the repository has no
/// commits or the path is unknown to git, since both are expected states.
async fn run_git_blame(
    git_binary: &Path,
    working_directory: &Path,
    path: &Path,
    contents: &Rope,
) -> Result<String> {
    let mut child = util::command::new_smol_command(git_binary)
        .current_dir(working_directory)
        .arg("blame")
        .arg("--incremental")
        .arg("--contents")
        .arg("-")
        .arg(path.as_os_str())
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        // `.context` keeps the underlying io::Error as the source instead of
        // flattening it into the message string.
        .context("Failed to start git blame process")?;
    let stdin = child
        .stdin
        .as_mut()
        .context("failed to get pipe to stdin of git blame command")?;
    // Stream the rope chunk by chunk to avoid materializing the whole buffer
    // as one contiguous string.
    for chunk in contents.chunks() {
        stdin.write_all(chunk.as_bytes()).await?;
    }
    stdin.flush().await?;
    let output = child
        .output()
        .await
        .context("Failed to read git blame output")?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        let trimmed = stderr.trim();
        // Treat "no commits yet" and "path not in git" as empty blames.
        if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
            return Ok(String::new());
        }
        return Err(anyhow!("git blame process failed: {}", stderr));
    }
    Ok(String::from_utf8(output.stdout)?)
}
/// A single entry parsed from `git blame --incremental` output, describing a
/// contiguous run of lines attributed to one commit.
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
pub struct BlameEntry {
    /// SHA of the commit that introduced these lines (all-zero when the lines
    /// are not yet committed).
    pub sha: Oid,
    /// Zero-indexed, half-open range of rows in the final file covered by
    /// this entry.
    pub range: Range<u32>,
    /// One-indexed line number where this run started in the original commit.
    pub original_line_number: u32,
    // The author/committer fields below stay `None` for uncommitted
    // (zero-SHA) entries.
    pub author: Option<String>,
    pub author_mail: Option<String>,
    /// Author time as a unix timestamp (seconds).
    pub author_time: Option<i64>,
    /// Author timezone in `+HHMM` / `-HHMM` form.
    pub author_tz: Option<String>,
    pub committer_name: Option<String>,
    pub committer_email: Option<String>,
    /// Committer time as a unix timestamp (seconds).
    pub committer_time: Option<i64>,
    /// Committer timezone in `+HHMM` / `-HHMM` form.
    pub committer_tz: Option<String>,
    /// The commit's `summary` header (the first line of its message).
    pub summary: Option<String>,
    /// Raw value of the `previous` header (prior SHA and filename), if any.
    pub previous: Option<String>,
    /// Filename reported by git for this entry.
    pub filename: String,
}
impl BlameEntry {
    /// Returns a `BlameEntry` by parsing the first line of a
    /// `git blame --incremental` entry. The line MUST have this format:
    ///
    /// `<40-byte-hex-sha1> <sourceline> <resultline> <num-lines>`
    fn new_from_blame_line(line: &str) -> Result<BlameEntry> {
        let mut parts = line.split_whitespace();
        let sha = parts
            .next()
            .and_then(|part| part.parse::<Oid>().ok())
            .ok_or_else(|| anyhow!("failed to parse sha"))?;
        let original_line_number = parts
            .next()
            .and_then(|part| part.parse::<u32>().ok())
            .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
        let final_line_number = parts
            .next()
            .and_then(|part| part.parse::<u32>().ok())
            .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
        let line_count = parts
            .next()
            .and_then(|part| part.parse::<u32>().ok())
            // Was a copy-pasted "final line number" message; name the field
            // that actually failed to parse.
            .ok_or_else(|| anyhow!("Failed to parse line count"))?;
        // Convert the 1-indexed final line number into a 0-indexed, half-open
        // range spanning `line_count` rows.
        let start_line = final_line_number.saturating_sub(1);
        let end_line = start_line + line_count;
        let range = start_line..end_line;
        Ok(Self {
            sha,
            range,
            original_line_number,
            ..Default::default()
        })
    }
    /// Returns the author timestamp as an `OffsetDateTime` expressed in the
    /// author's original timezone, falling back to the current time in UTC
    /// when the entry has no author time or timezone (e.g. for uncommitted
    /// changes).
    pub fn author_offset_date_time(&self) -> Result<time::OffsetDateTime> {
        if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) {
            // `author_tz` looks like "+0100": offset hours then minutes.
            let format = format_description!("[offset_hour][offset_minute]");
            let offset = UtcOffset::parse(author_tz, &format)?;
            let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?;
            Ok(date_time_utc.to_offset(offset))
        } else {
            // No author time or timezone: fall back to "now" in UTC.
            Ok(time::OffsetDateTime::now_utc())
        }
    }
}
// parse_git_blame parses the output of `git blame --incremental`, which returns
// all the blame-entries for a given path incrementally, as it finds them.
//
// Each entry *always* starts with:
//
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
//
// Each entry *always* ends with:
//
// filename <whitespace-quoted-filename-goes-here>
//
// Line numbers are 1-indexed.
//
// A `git blame --incremental` entry looks like this:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1
// author Joe Schmoe
// author-mail <joe.schmoe@example.com>
// author-time 1709741400
// author-tz +0100
// committer Joe Schmoe
// committer-mail <joe.schmoe@example.com>
// committer-time 1709741400
// committer-tz +0100
// summary Joe's cool commit
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// If the entry has the same SHA as an entry that was already printed then no
// signature information is printed:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html
fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
    // Committed entries, in the order git emitted them.
    let mut entries: Vec<BlameEntry> = Vec::new();
    // Maps a commit SHA to the index (into `entries`) of an earlier entry for
    // that commit, so later entries can copy its metadata — git omits the
    // signature lines for SHAs it has already printed.
    let mut index: HashMap<Oid, usize> = HashMap::default();
    // The entry currently being accumulated; `None` means the next line must
    // be an entry header (`<sha> <sourceline> <resultline> <num-lines>`).
    let mut current_entry: Option<BlameEntry> = None;
    for line in output.lines() {
        let mut done = false;
        match &mut current_entry {
            None => {
                // Header line: starts a new entry.
                let mut new_entry = BlameEntry::new_from_blame_line(line)?;
                // If this SHA was seen before, copy over the metadata that
                // git only prints the first time a commit appears.
                if let Some(existing_entry) = index
                    .get(&new_entry.sha)
                    .and_then(|slot| entries.get(*slot))
                {
                    new_entry.author.clone_from(&existing_entry.author);
                    new_entry
                        .author_mail
                        .clone_from(&existing_entry.author_mail);
                    new_entry.author_time = existing_entry.author_time;
                    new_entry.author_tz.clone_from(&existing_entry.author_tz);
                    new_entry
                        .committer_name
                        .clone_from(&existing_entry.committer_name);
                    new_entry
                        .committer_email
                        .clone_from(&existing_entry.committer_email);
                    new_entry.committer_time = existing_entry.committer_time;
                    new_entry
                        .committer_tz
                        .clone_from(&existing_entry.committer_tz);
                    new_entry.summary.clone_from(&existing_entry.summary);
                }
                current_entry.replace(new_entry);
            }
            Some(entry) => {
                // Metadata line: `<key> <value>`. Lines without a space are skipped.
                let Some((key, value)) = line.split_once(' ') else {
                    continue;
                };
                // An all-zero SHA marks uncommitted lines; skip author/committer
                // metadata for those.
                let is_committed = !entry.sha.is_zero();
                match key {
                    "filename" => {
                        // `filename` always terminates an entry.
                        entry.filename = value.into();
                        done = true;
                    }
                    "previous" => entry.previous = Some(value.into()),
                    "summary" if is_committed => entry.summary = Some(value.into()),
                    "author" if is_committed => entry.author = Some(value.into()),
                    "author-mail" if is_committed => entry.author_mail = Some(value.into()),
                    "author-time" if is_committed => {
                        entry.author_time = Some(value.parse::<i64>()?)
                    }
                    "author-tz" if is_committed => entry.author_tz = Some(value.into()),
                    "committer" if is_committed => entry.committer_name = Some(value.into()),
                    "committer-mail" if is_committed => entry.committer_email = Some(value.into()),
                    "committer-time" if is_committed => {
                        entry.committer_time = Some(value.parse::<i64>()?)
                    }
                    "committer-tz" if is_committed => entry.committer_tz = Some(value.into()),
                    // Unrecognized headers are deliberately ignored.
                    _ => {}
                }
            }
        };
        if done {
            // Entry is complete: record where this SHA's metadata lives and
            // keep the entry if it belongs to a real commit.
            if let Some(entry) = current_entry.take() {
                index.insert(entry.sha, entries.len());
                // We only want annotations that have a commit.
                if !entry.sha.is_zero() {
                    entries.push(entry);
                }
            }
        }
    }
    Ok(entries)
}
#[cfg(test)]
mod tests {
    use std::path::PathBuf;
    use super::BlameEntry;
    use super::parse_git_blame;
    /// Reads a fixture file from `test_data/`, panicking with a hint if it is
    /// missing (fixtures are generated, not hand-written).
    fn read_test_data(filename: &str) -> String {
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("test_data");
        path.push(filename);
        std::fs::read_to_string(&path)
            .unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path))
    }
    /// Compares `entries` against the golden file
    /// `test_data/golden/<golden_filename>.json`.
    ///
    /// Run with `UPDATE_GOLDEN=true` to (re)generate the golden file instead
    /// of asserting against it.
    fn assert_eq_golden(entries: &[BlameEntry], golden_filename: &str) {
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("test_data");
        path.push("golden");
        path.push(format!("{}.json", golden_filename));
        let mut have_json =
            serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON");
        // We always want to save with a trailing newline.
        have_json.push('\n');
        let update = std::env::var("UPDATE_GOLDEN")
            .map(|val| val.eq_ignore_ascii_case("true"))
            .unwrap_or(false);
        if update {
            std::fs::create_dir_all(path.parent().unwrap())
                .expect("could not create golden test data directory");
            std::fs::write(&path, have_json).expect("could not write out golden data");
        } else {
            // Normalize line endings so the comparison is stable on Windows checkouts.
            let want_json =
                std::fs::read_to_string(&path).unwrap_or_else(|_| {
                    panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path);
                }).replace("\r\n", "\n");
            pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries");
        }
    }
    #[test]
    fn test_parse_git_blame_not_committed() {
        let output = read_test_data("blame_incremental_not_committed");
        let entries = parse_git_blame(&output).unwrap();
        assert_eq_golden(&entries, "blame_incremental_not_committed");
    }
    #[test]
    fn test_parse_git_blame_simple() {
        let output = read_test_data("blame_incremental_simple");
        let entries = parse_git_blame(&output).unwrap();
        assert_eq_golden(&entries, "blame_incremental_simple");
    }
    #[test]
    fn test_parse_git_blame_complex() {
        let output = read_test_data("blame_incremental_complex");
        let entries = parse_git_blame(&output).unwrap();
        assert_eq_golden(&entries, "blame_incremental_complex");
    }
}

View File

@@ -314,7 +314,7 @@ impl Tool for GrepTool {
mod tests {
use super::*;
use assistant_tool::Tool;
use gpui::{TestAppContext, UpdateGlobal};
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use language::{Language, LanguageConfig, LanguageMatcher};
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project, WorktreeSettings};

View File

@@ -226,7 +226,7 @@ impl Tool for ListDirectoryTool {
mod tests {
use super::*;
use assistant_tool::Tool;
use gpui::{TestAppContext, UpdateGlobal};
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use indoc::indoc;
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project, WorktreeSettings};

View File

@@ -289,7 +289,7 @@ impl Tool for ReadFileTool {
#[cfg(test)]
mod test {
use super::*;
use gpui::{TestAppContext, UpdateGlobal};
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use language::{Language, LanguageConfig, LanguageMatcher};
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project, WorktreeSettings};

View File

@@ -1,9 +1,8 @@
use anyhow::Result;
use language_model::LanguageModelToolSchemaFormat;
use schemars::{
JsonSchema, Schema,
generate::SchemaSettings,
transform::{Transform, transform_subschemas},
JsonSchema,
schema::{RootSchema, Schema, SchemaObject},
};
pub fn json_schema_for<T: JsonSchema>(
@@ -14,7 +13,7 @@ pub fn json_schema_for<T: JsonSchema>(
}
fn schema_to_json(
schema: &Schema,
schema: &RootSchema,
format: LanguageModelToolSchemaFormat,
) -> Result<serde_json::Value> {
let mut value = serde_json::to_value(schema)?;
@@ -22,42 +21,58 @@ fn schema_to_json(
Ok(value)
}
pub fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> RootSchema {
let mut generator = match format {
LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
// TODO: Gemini docs mention using a subset of OpenAPI 3, so this may benefit from using
// `SchemaSettings::openapi3()`.
LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::draft07()
.with(|settings| {
settings.meta_schema = None;
settings.inline_subschemas = true;
})
.with_transform(ToJsonSchemaSubsetTransform)
.into_generator(),
LanguageModelToolSchemaFormat::JsonSchema => schemars::SchemaGenerator::default(),
LanguageModelToolSchemaFormat::JsonSchemaSubset => {
schemars::r#gen::SchemaSettings::default()
.with(|settings| {
settings.meta_schema = None;
settings.inline_subschemas = true;
settings
.visitors
.push(Box::new(TransformToJsonSchemaSubsetVisitor));
})
.into_generator()
}
};
generator.root_schema_for::<T>()
}
#[derive(Debug, Clone)]
struct ToJsonSchemaSubsetTransform;
struct TransformToJsonSchemaSubsetVisitor;
impl Transform for ToJsonSchemaSubsetTransform {
fn transform(&mut self, schema: &mut Schema) {
impl schemars::visit::Visitor for TransformToJsonSchemaSubsetVisitor {
fn visit_root_schema(&mut self, root: &mut RootSchema) {
schemars::visit::visit_root_schema(self, root)
}
fn visit_schema(&mut self, schema: &mut Schema) {
schemars::visit::visit_schema(self, schema)
}
fn visit_schema_object(&mut self, schema: &mut SchemaObject) {
// Ensure that the type field is not an array, this happens when we use
// Option<T>, the type will be [T, "null"].
if let Some(type_field) = schema.get_mut("type") {
if let Some(types) = type_field.as_array() {
if let Some(first_type) = types.first() {
*type_field = first_type.clone();
if let Some(instance_type) = schema.instance_type.take() {
schema.instance_type = match instance_type {
schemars::schema::SingleOrVec::Single(t) => {
Some(schemars::schema::SingleOrVec::Single(t))
}
schemars::schema::SingleOrVec::Vec(items) => items
.into_iter()
.next()
.map(schemars::schema::SingleOrVec::from),
};
}
// One of is not supported, use anyOf instead.
if let Some(subschema) = schema.subschemas.as_mut() {
if let Some(one_of) = subschema.one_of.take() {
subschema.any_of = Some(one_of);
}
}
// oneOf is not supported, use anyOf instead
if let Some(one_of) = schema.remove("oneOf") {
schema.insert("anyOf".to_string(), one_of);
}
transform_subschemas(self, schema);
schemars::visit::visit_schema_object(self, schema)
}
}

View File

@@ -1,47 +0,0 @@
You are an expert text editor. Taking the following file as an input:
```{{path}}
{{file_content}}
```
Produce a series of edits following the given user instructions:
<user_instructions>
{{instructions}}
</user_instructions>
Your response must be a series of edits in the following format:
<edits>
<old_text>
OLD TEXT 1 HERE
</old_text>
<new_text>
NEW TEXT 1 HERE
</new_text>
<old_text>
OLD TEXT 2 HERE
</old_text>
<new_text>
NEW TEXT 2 HERE
</new_text>
<old_text>
OLD TEXT 3 HERE
</old_text>
<new_text>
NEW TEXT 3 HERE
</new_text>
</edits>
Rules for editing:
- `old_text` represents full lines (including indentation) in the input file that will be replaced with `new_text`
- It is crucial that `old_text` is unique and unambiguous.
- Always include enough context around the lines you want to replace in `old_text` such that it's impossible to mistake them for other lines.
- If you want to replace all occurrences, repeat the same `old_text`/`new_text` pair multiple times and I will apply them sequentially, one occurrence at a time.
- Don't explain why you made a change, just report the edits.
- Make sure you follow the instructions carefully and thoroughly, avoid doing *less* or *more* than instructed.
<edits>

View File

@@ -25,4 +25,5 @@ serde.workspace = true
serde_json.workspace = true
strum.workspace = true
thiserror.workspace = true
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
workspace-hack.workspace = true

View File

@@ -1,6 +1,9 @@
mod models;
use anyhow::{Context, Error, Result, anyhow};
use std::collections::HashMap;
use std::pin::Pin;
use anyhow::{Context as _, Error, Result, anyhow};
use aws_sdk_bedrockruntime as bedrock;
pub use aws_sdk_bedrockruntime as bedrock_client;
pub use aws_sdk_bedrockruntime::types::{
@@ -21,10 +24,9 @@ pub use bedrock::types::{
ToolResultContentBlock as BedrockToolResultContentBlock,
ToolResultStatus as BedrockToolResultStatus, ToolUseBlock as BedrockToolUseBlock,
};
use futures::stream::{self, BoxStream};
use futures::stream::{self, BoxStream, Stream};
use serde::{Deserialize, Serialize};
use serde_json::{Number, Value};
use std::collections::HashMap;
use thiserror::Error;
pub use crate::models::*;
@@ -32,59 +34,70 @@ pub use crate::models::*;
pub async fn stream_completion(
client: bedrock::Client,
request: Request,
handle: tokio::runtime::Handle,
) -> Result<BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>, Error> {
let mut response = bedrock::Client::converse_stream(&client)
.model_id(request.model.clone())
.set_messages(request.messages.into());
handle
.spawn(async move {
let mut response = bedrock::Client::converse_stream(&client)
.model_id(request.model.clone())
.set_messages(request.messages.into());
if let Some(Thinking::Enabled {
budget_tokens: Some(budget_tokens),
}) = request.thinking
{
let thinking_config = HashMap::from([
("type".to_string(), Document::String("enabled".to_string())),
(
"budget_tokens".to_string(),
Document::Number(AwsNumber::PosInt(budget_tokens)),
),
]);
response = response.additional_model_request_fields(Document::Object(HashMap::from([(
"thinking".to_string(),
Document::from(thinking_config),
)])));
}
if let Some(Thinking::Enabled {
budget_tokens: Some(budget_tokens),
}) = request.thinking
{
response =
response.additional_model_request_fields(Document::Object(HashMap::from([(
"thinking".to_string(),
Document::from(HashMap::from([
("type".to_string(), Document::String("enabled".to_string())),
(
"budget_tokens".to_string(),
Document::Number(AwsNumber::PosInt(budget_tokens)),
),
])),
)])));
}
if request
.tools
.as_ref()
.map_or(false, |t| !t.tools.is_empty())
{
response = response.set_tool_config(request.tools);
}
if request.tools.is_some() && !request.tools.as_ref().unwrap().tools.is_empty() {
response = response.set_tool_config(request.tools);
}
let output = response
.send()
.await
.context("Failed to send API request to Bedrock");
let response = response.send().await;
let stream = Box::pin(stream::unfold(
output?.stream,
move |mut stream| async move {
match stream.recv().await {
Ok(Some(output)) => Some((Ok(output), stream)),
Ok(None) => None,
Err(err) => Some((
Err(BedrockError::ClientError(anyhow!(
"{:?}",
aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
))),
stream,
match response {
Ok(output) => {
let stream: Pin<
Box<
dyn Stream<Item = Result<BedrockStreamingResponse, BedrockError>>
+ Send,
>,
> = Box::pin(stream::unfold(output.stream, |mut stream| async move {
match stream.recv().await {
Ok(Some(output)) => Some(({ Ok(output) }, stream)),
Ok(None) => None,
Err(err) => {
Some((
// TODO: Figure out how we can capture Throttling Exceptions
Err(BedrockError::ClientError(anyhow!(
"{:?}",
aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
))),
stream,
))
}
}
}));
Ok(stream)
}
Err(err) => Err(anyhow!(
"{:?}",
aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
)),
}
},
));
Ok(stream)
})
.await
.context("spawning a task")?
}
pub fn aws_document_to_value(document: &Document) -> Value {

View File

@@ -12,6 +12,7 @@ pub struct CallSettings {
/// Configuration of voice calls in Zed.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct CallSettingsContent {
/// Whether the microphone should be muted when joining a channel or a call.
///

View File

@@ -22,7 +22,9 @@ use gpui::{
use language::{
Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig,
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
language_settings::{AllLanguageSettings, Formatter, PrettierSettings, SelectedFormatter},
language_settings::{
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
},
tree_sitter_rust, tree_sitter_typescript,
};
use lsp::{LanguageServerId, OneOf};
@@ -4589,13 +4591,15 @@ async fn test_formatting_buffer(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter =
Some(SelectedFormatter::List(vec![Formatter::External {
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
vec![Formatter::External {
command: "awk".into(),
arguments: Some(
vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
),
}]));
}]
.into(),
)));
});
});
});
@@ -4695,10 +4699,9 @@ async fn test_prettier_formatting_buffer(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter =
Some(SelectedFormatter::List(vec![Formatter::LanguageServer {
name: None,
}]));
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
vec![Formatter::LanguageServer { name: None }].into(),
)));
file.defaults.prettier = Some(PrettierSettings {
allowed: true,
..PrettierSettings::default()

View File

@@ -6,12 +6,16 @@ use debugger_ui::debugger_panel::DebugPanel;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::StreamExt as _;
use gpui::{BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _, VisualContext};
use gpui::{
AppContext as _, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _,
VisualContext,
};
use http_client::BlockedHttpClient;
use language::{
FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageRegistry,
language_settings::{
AllLanguageSettings, Formatter, PrettierSettings, SelectedFormatter, language_settings,
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
language_settings,
},
tree_sitter_typescript,
};
@@ -501,10 +505,9 @@ async fn test_ssh_collaboration_formatting_with_prettier(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter =
Some(SelectedFormatter::List(vec![Formatter::LanguageServer {
name: None,
}]));
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
vec![Formatter::LanguageServer { name: None }].into(),
)));
file.defaults.prettier = Some(PrettierSettings {
allowed: true,
..PrettierSettings::default()

View File

@@ -28,6 +28,7 @@ pub struct ChatPanelSettings {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct ChatPanelSettingsContent {
/// When to show the panel button in the status bar.
///
@@ -51,6 +52,7 @@ pub struct NotificationPanelSettings {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct PanelSettingsContent {
/// Whether to show the panel button in the status bar.
///
@@ -67,6 +69,7 @@ pub struct PanelSettingsContent {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct MessageEditorSettings {
/// Whether to automatically replace emoji shortcodes with emoji characters.
/// For example: typing `:wave:` gets replaced with `👋`.

View File

@@ -41,7 +41,7 @@ pub struct CommandPalette {
/// Removes subsequent whitespace characters and double colons from the query.
///
/// This improves the likelihood of a match by either humanized name or keymap-style name.
pub fn normalize_action_query(input: &str) -> String {
fn normalize_query(input: &str) -> String {
let mut result = String::with_capacity(input.len());
let mut last_char = None;
@@ -297,7 +297,7 @@ impl PickerDelegate for CommandPaletteDelegate {
let mut commands = self.all_commands.clone();
let hit_counts = self.hit_counts();
let executor = cx.background_executor().clone();
let query = normalize_action_query(query.as_str());
let query = normalize_query(query.as_str());
async move {
commands.sort_by_key(|action| {
(
@@ -311,17 +311,29 @@ impl PickerDelegate for CommandPaletteDelegate {
.enumerate()
.map(|(ix, command)| StringMatchCandidate::new(ix, &command.name))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
&query,
true,
true,
10000,
&Default::default(),
executor,
)
.await;
let matches = if query.is_empty() {
candidates
.into_iter()
.enumerate()
.map(|(index, candidate)| StringMatch {
candidate_id: index,
string: candidate.string,
positions: Vec::new(),
score: 0.0,
})
.collect()
} else {
fuzzy::match_strings(
&candidates,
&query,
true,
true,
10000,
&Default::default(),
executor,
)
.await
};
tx.send((commands, matches)).await.log_err();
}
@@ -410,8 +422,8 @@ impl PickerDelegate for CommandPaletteDelegate {
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
let matching_command = self.matches.get(ix)?;
let command = self.commands.get(matching_command.candidate_id)?;
let r#match = self.matches.get(ix)?;
let command = self.commands.get(r#match.candidate_id)?;
Some(
ListItem::new(ix)
.inset(true)
@@ -424,7 +436,7 @@ impl PickerDelegate for CommandPaletteDelegate {
.justify_between()
.child(HighlightedLabel::new(
command.name.clone(),
matching_command.positions.clone(),
r#match.positions.clone(),
))
.children(KeyBinding::for_action_in(
&*command.action,
@@ -500,28 +512,19 @@ mod tests {
#[test]
fn test_normalize_query() {
assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
assert_eq!(
normalize_action_query("editor: backspace"),
"editor: backspace"
);
assert_eq!(
normalize_action_query("editor: backspace"),
"editor: backspace"
);
assert_eq!(
normalize_action_query("editor: backspace"),
"editor: backspace"
);
assert_eq!(
normalize_action_query("editor::GoToDefinition"),
normalize_query("editor::GoToDefinition"),
"editor:GoToDefinition"
);
assert_eq!(
normalize_action_query("editor::::GoToDefinition"),
normalize_query("editor::::GoToDefinition"),
"editor:GoToDefinition"
);
assert_eq!(
normalize_action_query("editor: :GoToDefinition"),
normalize_query("editor: :GoToDefinition"),
"editor: :GoToDefinition"
);
}

View File

@@ -29,7 +29,6 @@ impl Display for ContextServerId {
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema)]
pub struct ContextServerCommand {
#[serde(rename = "command")]
pub path: String,
pub args: Vec<String>,
pub env: Option<HashMap<String, String>>,

View File

@@ -10,7 +10,6 @@ use gpui::{AsyncApp, SharedString};
pub use http_client::{HttpClient, github::latest_github_release};
use language::{LanguageName, LanguageToolchainStore};
use node_runtime::NodeRuntime;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::WorktreeId;
use smol::fs::File;
@@ -48,10 +47,7 @@ pub trait DapDelegate: Send + Sync + 'static {
async fn shell_env(&self) -> collections::HashMap<String, String>;
}
#[derive(
Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, JsonSchema,
)]
#[serde(transparent)]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct DebugAdapterName(pub SharedString);
impl Deref for DebugAdapterName {

View File

@@ -22,16 +22,17 @@ impl CodeLldbDebugAdapter {
async fn request_args(
&self,
delegate: &Arc<dyn DapDelegate>,
mut configuration: Value,
label: &str,
task_definition: &DebugTaskDefinition,
) -> Result<dap::StartDebuggingRequestArguments> {
// CodeLLDB uses `name` for a terminal label.
let mut configuration = task_definition.config.clone();
let obj = configuration
.as_object_mut()
.context("CodeLLDB is not a valid json object")?;
// CodeLLDB uses `name` for a terminal label.
obj.entry("name")
.or_insert(Value::String(String::from(label)));
.or_insert(Value::String(String::from(task_definition.label.as_ref())));
obj.entry("cwd")
.or_insert(delegate.worktree_root_path().to_string_lossy().into());
@@ -360,31 +361,17 @@ impl DebugAdapter for CodeLldbDebugAdapter {
self.path_to_codelldb.set(path.clone()).ok();
command = Some(path);
};
let mut json_config = config.config.clone();
Ok(DebugAdapterBinary {
command: Some(command.unwrap()),
cwd: Some(delegate.worktree_root_path().to_path_buf()),
arguments: user_args.unwrap_or_else(|| {
if let Some(config) = json_config.as_object_mut()
&& let Some(source_languages) = config.get("sourceLanguages").filter(|value| {
value
.as_array()
.map_or(false, |array| array.iter().all(Value::is_string))
})
{
let ret = vec![
"--settings".into(),
json!({"sourceLanguages": source_languages}).to_string(),
];
config.remove("sourceLanguages");
ret
} else {
vec![]
}
vec![
"--settings".into(),
json!({"sourceLanguages": ["cpp", "rust"]}).to_string(),
]
}),
request_args: self
.request_args(delegate, json_config, &config.label)
.await?,
request_args: self.request_args(delegate, &config).await?,
envs: HashMap::default(),
connection: None,
})

View File

@@ -4,6 +4,7 @@ mod go;
mod javascript;
mod php;
mod python;
mod ruby;
use std::sync::Arc;
@@ -24,6 +25,7 @@ use gpui::{App, BorrowAppContext};
use javascript::JsDebugAdapter;
use php::PhpDebugAdapter;
use python::PythonDebugAdapter;
use ruby::RubyDebugAdapter;
use serde_json::json;
use task::{DebugScenario, ZedDebugConfig};
@@ -33,6 +35,7 @@ pub fn init(cx: &mut App) {
registry.add_adapter(Arc::from(PythonDebugAdapter::default()));
registry.add_adapter(Arc::from(PhpDebugAdapter::default()));
registry.add_adapter(Arc::from(JsDebugAdapter::default()));
registry.add_adapter(Arc::from(RubyDebugAdapter));
registry.add_adapter(Arc::from(GoDebugAdapter::default()));
registry.add_adapter(Arc::from(GdbDebugAdapter));

View File

@@ -282,10 +282,6 @@ impl DebugAdapter for JsDebugAdapter {
"description": "Automatically stop program after launch",
"default": false
},
"attachSimplePort": {
"type": "number",
"description": "If set, attaches to the process via the given port. This is generally no longer necessary for Node.js programs and loses the ability to debug child processes, but can be useful in more esoteric scenarios such as with Deno and Docker launches. If set to 0, a random port will be chosen and --inspect-brk added to the launch arguments automatically."
},
"runtimeExecutable": {
"type": ["string", "null"],
"description": "Runtime to use, an absolute path or the name of a runtime available on PATH",

View File

@@ -0,0 +1,208 @@
use anyhow::{Result, bail};
use async_trait::async_trait;
use collections::FxHashMap;
use dap::{
DebugRequest, StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
adapters::{
DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition,
},
};
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::path::PathBuf;
use std::{ffi::OsStr, sync::Arc};
use task::{DebugScenario, ZedDebugConfig};
use util::command::new_smol_command;
/// Debug adapter for Ruby, backed by the `rdbg` executable from the
/// `debug` gem (installed on demand in `get_binary` when not on `$PATH`).
#[derive(Default)]
pub(crate) struct RubyDebugAdapter;
impl RubyDebugAdapter {
    /// Name under which this adapter is registered and shown to users.
    const ADAPTER_NAME: &'static str = "Ruby";
}
/// User-supplied debug configuration for the Ruby adapter.
///
/// One of `script`, `command`, or `script_or_command` is expected;
/// `get_binary` bails if all three are absent. `script_or_command` is
/// disambiguated at launch time by probing `$PATH` via `DapDelegate::which`.
#[derive(Serialize, Deserialize)]
struct RubyDebugConfig {
    /// Program whose kind (file vs. command) is not yet known; resolved in
    /// `get_binary`.
    script_or_command: Option<String>,
    /// Absolute path to a Ruby file to debug.
    script: Option<String>,
    /// Command to run under the debugger (ruby, rake, bin/rails, ...).
    command: Option<String>,
    /// Command-line arguments appended after the script/command.
    #[serde(default)]
    args: Vec<String>,
    /// Extra environment variables for the debugged process.
    #[serde(default)]
    env: FxHashMap<String, String>,
    /// Working directory; `get_binary` falls back to the worktree root.
    cwd: Option<PathBuf>,
}
#[async_trait(?Send)]
impl DebugAdapter for RubyDebugAdapter {
fn name(&self) -> DebugAdapterName {
DebugAdapterName(Self::ADAPTER_NAME.into())
}
fn adapter_language_name(&self) -> Option<LanguageName> {
Some(SharedString::new_static("Ruby").into())
}
async fn request_kind(
&self,
_: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}
fn dap_schema(&self) -> serde_json::Value {
json!({
"type": "object",
"properties": {
"command": {
"type": "string",
"description": "Command name (ruby, rake, bin/rails, bundle exec ruby, etc)",
},
"script": {
"type": "string",
"description": "Absolute path to a Ruby file."
},
"cwd": {
"type": "string",
"description": "Directory to execute the program in",
"default": "${ZED_WORKTREE_ROOT}"
},
"args": {
"type": "array",
"description": "Command line arguments passed to the program",
"items": {
"type": "string"
},
"default": []
},
"env": {
"type": "object",
"description": "Additional environment variables to pass to the debugging (and debugged) process",
"default": {}
},
}
})
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
match zed_scenario.request {
DebugRequest::Launch(launch) => {
let config = RubyDebugConfig {
script_or_command: Some(launch.program),
script: None,
command: None,
args: launch.args,
env: launch.env,
cwd: launch.cwd.clone(),
};
let config = serde_json::to_value(config)?;
Ok(DebugScenario {
adapter: zed_scenario.adapter,
label: zed_scenario.label,
config,
tcp_connection: None,
build: None,
})
}
DebugRequest::Attach(_) => {
anyhow::bail!("Attach requests are unsupported");
}
}
}
async fn get_binary(
&self,
delegate: &Arc<dyn DapDelegate>,
definition: &DebugTaskDefinition,
_user_installed_path: Option<PathBuf>,
_user_args: Option<Vec<String>>,
_cx: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
let mut rdbg_path = adapter_path.join("rdbg");
if !delegate.fs().is_file(&rdbg_path).await {
match delegate.which("rdbg".as_ref()).await {
Some(path) => rdbg_path = path,
None => {
delegate.output_to_console(
"rdbg not found on path, trying `gem install debug`".to_string(),
);
let output = new_smol_command("gem")
.arg("install")
.arg("--no-document")
.arg("--bindir")
.arg(adapter_path)
.arg("debug")
.output()
.await?;
anyhow::ensure!(
output.status.success(),
"Failed to install rdbg:\n{}",
String::from_utf8_lossy(&output.stderr).to_string()
);
}
}
}
let tcp_connection = definition.tcp_connection.clone().unwrap_or_default();
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
let ruby_config = serde_json::from_value::<RubyDebugConfig>(definition.config.clone())?;
let mut arguments = vec![
"--open".to_string(),
format!("--port={}", port),
format!("--host={}", host),
];
if let Some(script) = &ruby_config.script {
arguments.push(script.clone());
} else if let Some(command) = &ruby_config.command {
arguments.push("--command".to_string());
arguments.push(command.clone());
} else if let Some(command_or_script) = &ruby_config.script_or_command {
if delegate
.which(OsStr::new(&command_or_script))
.await
.is_some()
{
arguments.push("--command".to_string());
}
arguments.push(command_or_script.clone());
} else {
bail!("Ruby debug config must have 'script' or 'command' args");
}
arguments.extend(ruby_config.args);
let mut configuration = definition.config.clone();
if let Some(configuration) = configuration.as_object_mut() {
configuration
.entry("cwd")
.or_insert_with(|| delegate.worktree_root_path().to_string_lossy().into());
}
Ok(DebugAdapterBinary {
command: Some(rdbg_path.to_string_lossy().to_string()),
arguments,
connection: Some(dap::adapters::TcpArguments {
host,
port,
timeout,
}),
cwd: Some(
ruby_config
.cwd
.unwrap_or(delegate.worktree_root_path().to_owned()),
),
envs: ruby_config.env.into_iter().collect(),
request_args: StartDebuggingRequestArguments {
request: self.request_kind(&definition.config).await?,
configuration,
},
})
}
}

View File

@@ -21,7 +21,7 @@ use project::{
use settings::Settings as _;
use std::{
borrow::Cow,
collections::{BTreeMap, HashMap, VecDeque},
collections::{HashMap, VecDeque},
sync::Arc,
};
use util::maybe;
@@ -32,6 +32,13 @@ use workspace::{
ui::{Button, Clickable, ContextMenu, Label, LabelCommon, PopoverMenu, h_flex},
};
// TODO:
// - [x] stop sorting by session ID
// - [x] pick the most recent session by default (logs if available, RPC messages otherwise)
// - [ ] dump the launch/attach request somewhere (logs?)
const MAX_SESSIONS: usize = 10;
struct DapLogView {
editor: Entity<Editor>,
focus_handle: FocusHandle,
@@ -42,34 +49,14 @@ struct DapLogView {
_subscriptions: Vec<Subscription>,
}
struct LogStoreEntryIdentifier<'a> {
session_id: SessionId,
project: Cow<'a, WeakEntity<Project>>,
}
impl LogStoreEntryIdentifier<'_> {
fn to_owned(&self) -> LogStoreEntryIdentifier<'static> {
LogStoreEntryIdentifier {
session_id: self.session_id,
project: Cow::Owned(self.project.as_ref().clone()),
}
}
}
struct LogStoreMessage {
id: LogStoreEntryIdentifier<'static>,
kind: IoKind,
command: Option<SharedString>,
message: SharedString,
}
pub struct LogStore {
projects: HashMap<WeakEntity<Project>, ProjectState>,
rpc_tx: UnboundedSender<LogStoreMessage>,
adapter_log_tx: UnboundedSender<LogStoreMessage>,
debug_sessions: VecDeque<DebugAdapterState>,
rpc_tx: UnboundedSender<(SessionId, IoKind, Option<SharedString>, SharedString)>,
adapter_log_tx: UnboundedSender<(SessionId, IoKind, Option<SharedString>, SharedString)>,
}
struct ProjectState {
debug_sessions: BTreeMap<SessionId, DebugAdapterState>,
_subscriptions: [gpui::Subscription; 2],
}
@@ -135,12 +122,13 @@ impl DebugAdapterState {
impl LogStore {
pub fn new(cx: &Context<Self>) -> Self {
let (rpc_tx, mut rpc_rx) = unbounded::<LogStoreMessage>();
let (rpc_tx, mut rpc_rx) =
unbounded::<(SessionId, IoKind, Option<SharedString>, SharedString)>();
cx.spawn(async move |this, cx| {
while let Some(message) = rpc_rx.next().await {
while let Some((session_id, io_kind, command, message)) = rpc_rx.next().await {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
this.add_debug_adapter_message(message, cx);
this.add_debug_adapter_message(session_id, io_kind, command, message, cx);
})?;
}
@@ -150,12 +138,13 @@ impl LogStore {
})
.detach_and_log_err(cx);
let (adapter_log_tx, mut adapter_log_rx) = unbounded::<LogStoreMessage>();
let (adapter_log_tx, mut adapter_log_rx) =
unbounded::<(SessionId, IoKind, Option<SharedString>, SharedString)>();
cx.spawn(async move |this, cx| {
while let Some(message) = adapter_log_rx.next().await {
while let Some((session_id, io_kind, _, message)) = adapter_log_rx.next().await {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
this.add_debug_adapter_log(message, cx);
this.add_debug_adapter_log(session_id, io_kind, message, cx);
})?;
}
@@ -168,76 +157,57 @@ impl LogStore {
rpc_tx,
adapter_log_tx,
projects: HashMap::new(),
debug_sessions: Default::default(),
}
}
pub fn add_project(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
let weak_project = project.downgrade();
self.projects.insert(
project.downgrade(),
ProjectState {
_subscriptions: [
cx.observe_release(project, {
let weak_project = project.downgrade();
move |this, _, _| {
this.projects.remove(&weak_project);
}
cx.observe_release(project, move |this, _, _| {
this.projects.remove(&weak_project);
}),
cx.subscribe(&project.read(cx).dap_store(), {
let weak_project = project.downgrade();
move |this, dap_store, event, cx| match event {
cx.subscribe(
&project.read(cx).dap_store(),
|this, dap_store, event, cx| match event {
dap_store::DapStoreEvent::DebugClientStarted(session_id) => {
let session = dap_store.read(cx).session_by_id(session_id);
if let Some(session) = session {
this.add_debug_session(
LogStoreEntryIdentifier {
project: Cow::Owned(weak_project.clone()),
session_id: *session_id,
},
session,
cx,
);
this.add_debug_session(*session_id, session, cx);
}
}
dap_store::DapStoreEvent::DebugClientShutdown(session_id) => {
let id = LogStoreEntryIdentifier {
project: Cow::Borrowed(&weak_project),
session_id: *session_id,
};
if let Some(state) = this.get_debug_adapter_state(&id) {
state.is_terminated = true;
}
this.get_debug_adapter_state(*session_id)
.iter_mut()
.for_each(|state| state.is_terminated = true);
this.clean_sessions(cx);
}
_ => {}
}
}),
},
),
],
debug_sessions: Default::default(),
},
);
}
fn get_debug_adapter_state(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
) -> Option<&mut DebugAdapterState> {
self.projects
.get_mut(&id.project)
.and_then(|state| state.debug_sessions.get_mut(&id.session_id))
fn get_debug_adapter_state(&mut self, id: SessionId) -> Option<&mut DebugAdapterState> {
self.debug_sessions
.iter_mut()
.find(|adapter_state| adapter_state.id == id)
}
fn add_debug_adapter_message(
&mut self,
LogStoreMessage {
id,
kind: io_kind,
command,
message,
}: LogStoreMessage,
id: SessionId,
io_kind: IoKind,
command: Option<SharedString>,
message: SharedString,
cx: &mut Context<Self>,
) {
let Some(debug_client_state) = self.get_debug_adapter_state(&id) else {
let Some(debug_client_state) = self.get_debug_adapter_state(id) else {
return;
};
@@ -259,7 +229,7 @@ impl LogStore {
if rpc_messages.last_message_kind != Some(kind) {
Self::get_debug_adapter_entry(
&mut rpc_messages.messages,
id.to_owned(),
id,
kind.label().into(),
LogKind::Rpc,
cx,
@@ -269,7 +239,7 @@ impl LogStore {
let entry = Self::get_debug_adapter_entry(
&mut rpc_messages.messages,
id.to_owned(),
id,
message,
LogKind::Rpc,
cx,
@@ -290,15 +260,12 @@ impl LogStore {
fn add_debug_adapter_log(
&mut self,
LogStoreMessage {
id,
kind: io_kind,
message,
..
}: LogStoreMessage,
id: SessionId,
io_kind: IoKind,
message: SharedString,
cx: &mut Context<Self>,
) {
let Some(debug_adapter_state) = self.get_debug_adapter_state(&id) else {
let Some(debug_adapter_state) = self.get_debug_adapter_state(id) else {
return;
};
@@ -309,7 +276,7 @@ impl LogStore {
Self::get_debug_adapter_entry(
&mut debug_adapter_state.log_messages,
id.to_owned(),
id,
message,
LogKind::Adapter,
cx,
@@ -319,17 +286,13 @@ impl LogStore {
fn get_debug_adapter_entry(
log_lines: &mut VecDeque<SharedString>,
id: LogStoreEntryIdentifier<'static>,
id: SessionId,
message: SharedString,
kind: LogKind,
cx: &mut Context<Self>,
) -> SharedString {
if let Some(excess) = log_lines
.len()
.checked_sub(RpcMessages::MESSAGE_QUEUE_LIMIT)
&& excess > 0
{
log_lines.drain(..excess);
while log_lines.len() >= RpcMessages::MESSAGE_QUEUE_LIMIT {
log_lines.pop_front();
}
let format_messages = DebuggerSettings::get_global(cx).format_dap_log_messages;
@@ -359,116 +322,118 @@ impl LogStore {
fn add_debug_session(
&mut self,
id: LogStoreEntryIdentifier<'static>,
session_id: SessionId,
session: Entity<Session>,
cx: &mut Context<Self>,
) {
maybe!({
let project_entry = self.projects.get_mut(&id.project)?;
let std::collections::btree_map::Entry::Vacant(state) =
project_entry.debug_sessions.entry(id.session_id)
else {
return None;
};
if self
.debug_sessions
.iter_mut()
.any(|adapter_state| adapter_state.id == session_id)
{
return;
}
let (adapter_name, has_adapter_logs) = session.read_with(cx, |session, _| {
(
session.adapter(),
session
.adapter_client()
.map_or(false, |client| client.has_adapter_logs()),
)
});
state.insert(DebugAdapterState::new(
id.session_id,
adapter_name,
has_adapter_logs,
));
self.clean_sessions(cx);
let io_tx = self.rpc_tx.clone();
let client = session.read(cx).adapter_client()?;
let project = id.project.clone();
let session_id = id.session_id;
client.add_log_handler(
move |kind, command, message| {
io_tx
.unbounded_send(LogStoreMessage {
id: LogStoreEntryIdentifier {
session_id,
project: project.clone(),
},
kind,
command: command.map(|command| command.to_owned().into()),
message: message.to_owned().into(),
})
.ok();
},
LogKind::Rpc,
);
let log_io_tx = self.adapter_log_tx.clone();
let project = id.project;
client.add_log_handler(
move |kind, command, message| {
log_io_tx
.unbounded_send(LogStoreMessage {
id: LogStoreEntryIdentifier {
session_id,
project: project.clone(),
},
kind,
command: command.map(|command| command.to_owned().into()),
message: message.to_owned().into(),
})
.ok();
},
LogKind::Adapter,
);
Some(())
let (adapter_name, has_adapter_logs) = session.read_with(cx, |session, _| {
(
session.adapter(),
session
.adapter_client()
.map(|client| client.has_adapter_logs())
.unwrap_or(false),
)
});
self.debug_sessions.push_back(DebugAdapterState::new(
session_id,
adapter_name,
has_adapter_logs,
));
self.clean_sessions(cx);
let io_tx = self.rpc_tx.clone();
let Some(client) = session.read(cx).adapter_client() else {
return;
};
client.add_log_handler(
move |io_kind, command, message| {
io_tx
.unbounded_send((
session_id,
io_kind,
command.map(|command| command.to_owned().into()),
message.to_owned().into(),
))
.ok();
},
LogKind::Rpc,
);
let log_io_tx = self.adapter_log_tx.clone();
client.add_log_handler(
move |io_kind, command, message| {
log_io_tx
.unbounded_send((
session_id,
io_kind,
command.map(|command| command.to_owned().into()),
message.to_owned().into(),
))
.ok();
},
LogKind::Adapter,
);
}
fn clean_sessions(&mut self, cx: &mut Context<Self>) {
self.projects.values_mut().for_each(|project| {
let mut allowed_terminated_sessions = 10u32;
project.debug_sessions.retain(|_, session| {
if !session.is_terminated {
return true;
}
allowed_terminated_sessions = allowed_terminated_sessions.saturating_sub(1);
allowed_terminated_sessions > 0
});
let mut to_remove = self.debug_sessions.len().saturating_sub(MAX_SESSIONS);
self.debug_sessions.retain(|session| {
if to_remove > 0 && session.is_terminated {
to_remove -= 1;
return false;
}
true
});
cx.notify();
}
fn log_messages_for_session(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
session_id: SessionId,
) -> Option<&mut VecDeque<SharedString>> {
self.get_debug_adapter_state(id)
self.debug_sessions
.iter_mut()
.find(|session| session.id == session_id)
.map(|state| &mut state.log_messages)
}
fn rpc_messages_for_session(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
session_id: SessionId,
) -> Option<&mut VecDeque<SharedString>> {
self.get_debug_adapter_state(id)
.map(|state| &mut state.rpc_messages.messages)
self.debug_sessions.iter_mut().find_map(|state| {
if state.id == session_id {
Some(&mut state.rpc_messages.messages)
} else {
None
}
})
}
fn initialization_sequence_for_session(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
) -> Option<&Vec<SharedString>> {
self.get_debug_adapter_state(&id)
.map(|state| &state.rpc_messages.initialization_sequence)
session_id: SessionId,
) -> Option<&mut Vec<SharedString>> {
self.debug_sessions.iter_mut().find_map(|state| {
if state.id == session_id {
Some(&mut state.rpc_messages.initialization_sequence)
} else {
None
}
})
}
}
@@ -488,11 +453,10 @@ impl Render for DapLogToolbarItemView {
return Empty.into_any_element();
};
let (menu_rows, current_session_id, project) = log_view.update(cx, |log_view, cx| {
let (menu_rows, current_session_id) = log_view.update(cx, |log_view, cx| {
(
log_view.menu_items(cx),
log_view.current_view.map(|(session_id, _)| session_id),
log_view.project.downgrade(),
)
});
@@ -520,7 +484,6 @@ impl Render for DapLogToolbarItemView {
.menu(move |mut window, cx| {
let log_view = log_view.clone();
let menu_rows = menu_rows.clone();
let project = project.clone();
ContextMenu::build(&mut window, cx, move |mut menu, window, _cx| {
for row in menu_rows.into_iter() {
menu = menu.custom_row(move |_window, _cx| {
@@ -546,15 +509,8 @@ impl Render for DapLogToolbarItemView {
.child(Label::new(ADAPTER_LOGS))
.into_any_element()
},
window.handler_for(&log_view, {
let project = project.clone();
let id = LogStoreEntryIdentifier {
project: Cow::Owned(project),
session_id: row.session_id,
};
move |view, window, cx| {
view.show_log_messages_for_adapter(&id, window, cx);
}
window.handler_for(&log_view, move |view, window, cx| {
view.show_log_messages_for_adapter(row.session_id, window, cx);
}),
);
}
@@ -568,15 +524,8 @@ impl Render for DapLogToolbarItemView {
.child(Label::new(RPC_MESSAGES))
.into_any_element()
},
window.handler_for(&log_view, {
let project = project.clone();
let id = LogStoreEntryIdentifier {
project: Cow::Owned(project),
session_id: row.session_id,
};
move |view, window, cx| {
view.show_rpc_trace_for_server(&id, window, cx);
}
window.handler_for(&log_view, move |view, window, cx| {
view.show_rpc_trace_for_server(row.session_id, window, cx);
}),
)
.custom_entry(
@@ -587,17 +536,12 @@ impl Render for DapLogToolbarItemView {
.child(Label::new(INITIALIZATION_SEQUENCE))
.into_any_element()
},
window.handler_for(&log_view, {
let project = project.clone();
let id = LogStoreEntryIdentifier {
project: Cow::Owned(project),
session_id: row.session_id,
};
move |view, window, cx| {
view.show_initialization_sequence_for_server(
&id, window, cx,
);
}
window.handler_for(&log_view, move |view, window, cx| {
view.show_initialization_sequence_for_server(
row.session_id,
window,
cx,
);
}),
);
}
@@ -669,9 +613,7 @@ impl DapLogView {
let events_subscriptions = cx.subscribe(&log_store, |log_view, _, event, cx| match event {
Event::NewLogEntry { id, entry, kind } => {
if log_view.current_view == Some((id.session_id, *kind))
&& log_view.project == *id.project
{
if log_view.current_view == Some((*id, *kind)) {
log_view.editor.update(cx, |editor, cx| {
editor.set_read_only(false);
let last_point = editor.buffer().read(cx).len(cx);
@@ -687,18 +629,12 @@ impl DapLogView {
}
}
});
let weak_project = project.downgrade();
let state_info = log_store
.read(cx)
.projects
.get(&weak_project)
.and_then(|project| {
project
.debug_sessions
.values()
.next_back()
.map(|session| (session.id, session.has_adapter_logs))
});
.debug_sessions
.back()
.map(|session| (session.id, session.has_adapter_logs));
let mut this = Self {
editor,
@@ -711,14 +647,10 @@ impl DapLogView {
};
if let Some((session_id, have_adapter_logs)) = state_info {
let id = LogStoreEntryIdentifier {
session_id,
project: Cow::Owned(weak_project),
};
if have_adapter_logs {
this.show_log_messages_for_adapter(&id, window, cx);
this.show_log_messages_for_adapter(session_id, window, cx);
} else {
this.show_rpc_trace_for_server(&id, window, cx);
this.show_rpc_trace_for_server(session_id, window, cx);
}
}
@@ -758,38 +690,31 @@ impl DapLogView {
fn menu_items(&self, cx: &App) -> Vec<DapMenuItem> {
self.log_store
.read(cx)
.projects
.get(&self.project.downgrade())
.map_or_else(Vec::new, |state| {
state
.debug_sessions
.values()
.rev()
.map(|state| DapMenuItem {
session_id: state.id,
adapter_name: state.adapter_name.clone(),
has_adapter_logs: state.has_adapter_logs,
selected_entry: self
.current_view
.map_or(LogKind::Adapter, |(_, kind)| kind),
})
.collect::<Vec<_>>()
.debug_sessions
.iter()
.rev()
.map(|state| DapMenuItem {
session_id: state.id,
adapter_name: state.adapter_name.clone(),
has_adapter_logs: state.has_adapter_logs,
selected_entry: self.current_view.map_or(LogKind::Adapter, |(_, kind)| kind),
})
.collect::<Vec<_>>()
}
fn show_rpc_trace_for_server(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
session_id: SessionId,
window: &mut Window,
cx: &mut Context<Self>,
) {
let rpc_log = self.log_store.update(cx, |log_store, _| {
log_store
.rpc_messages_for_session(id)
.rpc_messages_for_session(session_id)
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
self.current_view = Some((id.session_id, LogKind::Rpc));
self.current_view = Some((session_id, LogKind::Rpc));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -800,7 +725,8 @@ impl DapLogView {
.expect("log buffer should be a singleton")
.update(cx, |_, cx| {
cx.spawn({
async move |buffer, cx| {
let buffer = cx.entity();
async move |_, cx| {
let language = language.await.ok();
buffer.update(cx, |buffer, cx| {
buffer.set_language(language, cx);
@@ -820,17 +746,17 @@ impl DapLogView {
fn show_log_messages_for_adapter(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
session_id: SessionId,
window: &mut Window,
cx: &mut Context<Self>,
) {
let message_log = self.log_store.update(cx, |log_store, _| {
log_store
.log_messages_for_session(id)
.log_messages_for_session(session_id)
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(message_log) = message_log {
self.current_view = Some((id.session_id, LogKind::Adapter));
self.current_view = Some((session_id, LogKind::Adapter));
let (editor, editor_subscriptions) = Self::editor_for_logs(message_log, window, cx);
editor
.read(cx)
@@ -849,17 +775,17 @@ impl DapLogView {
fn show_initialization_sequence_for_server(
&mut self,
id: &LogStoreEntryIdentifier<'_>,
session_id: SessionId,
window: &mut Window,
cx: &mut Context<Self>,
) {
let rpc_log = self.log_store.update(cx, |log_store, _| {
log_store
.initialization_sequence_for_session(id)
.initialization_sequence_for_session(session_id)
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
self.current_view = Some((id.session_id, LogKind::Rpc));
self.current_view = Some((session_id, LogKind::Rpc));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -1067,9 +993,9 @@ impl Focusable for DapLogView {
}
}
enum Event {
pub enum Event {
NewLogEntry {
id: LogStoreEntryIdentifier<'static>,
id: SessionId,
entry: SharedString,
kind: LogKind,
},
@@ -1082,30 +1008,31 @@ impl EventEmitter<SearchEvent> for DapLogView {}
#[cfg(any(test, feature = "test-support"))]
impl LogStore {
pub fn has_projects(&self) -> bool {
!self.projects.is_empty()
pub fn contained_session_ids(&self) -> Vec<SessionId> {
self.debug_sessions
.iter()
.map(|session| session.id)
.collect()
}
pub fn contained_session_ids(&self, project: &WeakEntity<Project>) -> Vec<SessionId> {
self.projects.get(project).map_or(vec![], |state| {
state.debug_sessions.keys().copied().collect()
})
pub fn rpc_messages_for_session_id(&self, session_id: SessionId) -> Vec<SharedString> {
self.debug_sessions
.iter()
.find(|adapter_state| adapter_state.id == session_id)
.expect("This session should exist if a test is calling")
.rpc_messages
.messages
.clone()
.into()
}
pub fn rpc_messages_for_session_id(
&self,
project: &WeakEntity<Project>,
session_id: SessionId,
) -> Vec<SharedString> {
self.projects.get(&project).map_or(vec![], |state| {
state
.debug_sessions
.get(&session_id)
.expect("This session should exist if a test is calling")
.rpc_messages
.messages
.clone()
.into()
})
pub fn log_messages_for_session_id(&self, session_id: SessionId) -> Vec<SharedString> {
self.debug_sessions
.iter()
.find(|adapter_state| adapter_state.id == session_id)
.expect("This session should exist if a test is calling")
.log_messages
.clone()
.into()
}
}

View File

@@ -868,7 +868,7 @@ impl DebugPanel {
let threads =
running_state.update(cx, |running_state, cx| {
let session = running_state.session();
session.read(cx).is_started().then(|| {
session.read(cx).is_running().then(|| {
session.update(cx, |session, cx| {
session.threads(cx)
})
@@ -1298,11 +1298,6 @@ impl Render for DebugPanel {
}
v_flex()
.when_else(
self.position(window, cx) == DockPosition::Bottom,
|this| this.max_h(self.size),
|this| this.max_w(self.size),
)
.size_full()
.key_context("DebugPanel")
.child(h_flex().children(self.top_controls_strip(window, cx)))
@@ -1473,94 +1468,6 @@ impl Render for DebugPanel {
if has_sessions {
this.children(self.active_session.clone())
} else {
let docked_to_bottom = self.position(window, cx) == DockPosition::Bottom;
let welcome_experience = v_flex()
.when_else(
docked_to_bottom,
|this| this.w_2_3().h_full().pr_8(),
|this| this.w_full().h_1_3(),
)
.items_center()
.justify_center()
.gap_2()
.child(
Button::new("spawn-new-session-empty-state", "New Session")
.icon(IconName::Plus)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(crate::Start.boxed_clone(), cx);
}),
)
.child(
Button::new("edit-debug-settings", "Edit debug.json")
.icon(IconName::Code)
.icon_size(IconSize::XSmall)
.color(Color::Muted)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(
zed_actions::OpenProjectDebugTasks.boxed_clone(),
cx,
);
}),
)
.child(
Button::new("open-debugger-docs", "Debugger Docs")
.icon(IconName::Book)
.color(Color::Muted)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, _, cx| cx.open_url("https://zed.dev/docs/debugger")),
)
.child(
Button::new(
"spawn-new-session-install-extensions",
"Debugger Extensions",
)
.icon(IconName::Blocks)
.color(Color::Muted)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(
zed_actions::Extensions {
category_filter: Some(
zed_actions::ExtensionCategoryFilter::DebugAdapters,
),
}
.boxed_clone(),
cx,
);
}),
);
let breakpoint_list =
v_flex()
.group("base-breakpoint-list")
.items_start()
.when_else(
docked_to_bottom,
|this| this.min_w_1_3().h_full(),
|this| this.w_full().h_2_3(),
)
.p_1()
.child(
h_flex()
.pl_1()
.w_full()
.justify_between()
.child(Label::new("Breakpoints").size(LabelSize::Small))
.child(h_flex().visible_on_hover("base-breakpoint-list").child(
self.breakpoint_list.read(cx).render_control_strip(),
))
.track_focus(&self.breakpoint_list.focus_handle(cx)),
)
.child(Divider::horizontal())
.child(self.breakpoint_list.clone());
this.child(
v_flex()
.h_full()
@@ -1568,23 +1475,65 @@ impl Render for DebugPanel {
.items_center()
.justify_center()
.child(
div()
.when_else(docked_to_bottom, Div::h_flex, Div::v_flex)
.size_full()
.map(|this| {
if docked_to_bottom {
this.items_start()
.child(breakpoint_list)
.child(Divider::vertical())
.child(welcome_experience)
} else {
this.items_end()
.child(welcome_experience)
.child(Divider::horizontal())
.child(breakpoint_list)
}
}),
),
h_flex().size_full()
.items_start()
.child(v_flex().group("base-breakpoint-list").items_start().min_w_1_3().h_full().p_1()
.child(h_flex().pl_1().w_full().justify_between()
.child(Label::new("Breakpoints").size(LabelSize::Small))
.child(h_flex().visible_on_hover("base-breakpoint-list").child(self.breakpoint_list.read(cx).render_control_strip())))
.child(Divider::horizontal())
.child(self.breakpoint_list.clone()))
.child(Divider::vertical())
.child(
v_flex().w_2_3().h_full().items_center().justify_center()
.gap_2()
.pr_8()
.child(
Button::new("spawn-new-session-empty-state", "New Session")
.icon(IconName::Plus)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(crate::Start.boxed_clone(), cx);
})
)
.child(
Button::new("edit-debug-settings", "Edit debug.json")
.icon(IconName::Code)
.icon_size(IconSize::XSmall)
.color(Color::Muted)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(zed_actions::OpenProjectDebugTasks.boxed_clone(), cx);
})
)
.child(
Button::new("open-debugger-docs", "Debugger Docs")
.icon(IconName::Book)
.color(Color::Muted)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, _, cx| {
cx.open_url("https://zed.dev/docs/debugger")
})
)
.child(
Button::new("spawn-new-session-install-extensions", "Debugger Extensions")
.icon(IconName::Blocks)
.color(Color::Muted)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(zed_actions::Extensions { category_filter: Some(zed_actions::ExtensionCategoryFilter::DebugAdapters)}.boxed_clone(), cx);
})
)
)
)
)
}
})

View File

@@ -900,7 +900,7 @@ impl RunningState {
let config_is_valid = request_type.is_ok();
let mut extra_config = Value::Null;
let build_output = if let Some(build) = build {
let (task_template, locator_name) = match build {
BuildTaskDefinition::Template {
@@ -930,7 +930,6 @@ impl RunningState {
};
let locator_name = if let Some(locator_name) = locator_name {
extra_config = config.clone();
debug_assert!(!config_is_valid);
Some(locator_name)
} else if !config_is_valid {
@@ -946,7 +945,6 @@ impl RunningState {
});
if let Ok(t) = task {
t.await.and_then(|scenario| {
extra_config = scenario.config;
match scenario.build {
Some(BuildTaskDefinition::Template {
locator_name, ..
@@ -1010,13 +1008,13 @@ impl RunningState {
if !exit_status.success() {
anyhow::bail!("Build failed");
}
Some((task.resolved.clone(), locator_name, extra_config))
Some((task.resolved.clone(), locator_name))
} else {
None
};
if config_is_valid {
} else if let Some((task, locator_name, extra_config)) = build_output {
} else if let Some((task, locator_name)) = build_output {
let locator_name =
locator_name.with_context(|| {
format!("Could not find a valid locator for a build task and configure is invalid with error: {}", request_type.err()
@@ -1041,8 +1039,6 @@ impl RunningState {
.with_context(|| anyhow!("{}: is not a valid adapter name", &adapter))?.config_from_zed_format(zed_config)
.await?;
config = scenario.config;
util::merge_non_null_json_value_into(extra_config, &mut config);
Self::substitute_variables_in_config(&mut config, &task_context);
} else {
let Err(e) = request_type else {

View File

@@ -878,30 +878,19 @@ impl LineBreakpoint {
.cursor_pointer()
.child(
h_flex()
.gap_0p5()
.gap_1()
.child(
Label::new(format!("{}:{}", self.name, self.line))
.size(LabelSize::Small)
.line_height_style(ui::LineHeightStyle::UiLabel),
)
.children(self.dir.as_ref().and_then(|dir| {
let path_without_root = Path::new(dir.as_ref())
.components()
.skip(1)
.collect::<PathBuf>();
path_without_root.components().next()?;
Some(
Label::new(path_without_root.to_string_lossy().into_owned())
.color(Color::Muted)
.size(LabelSize::Small)
.line_height_style(ui::LineHeightStyle::UiLabel)
.truncate(),
)
.children(self.dir.clone().map(|dir| {
Label::new(dir)
.color(Color::Muted)
.size(LabelSize::Small)
.line_height_style(ui::LineHeightStyle::UiLabel)
})),
)
.when_some(self.dir.as_ref(), |this, parent_dir| {
this.tooltip(Tooltip::text(format!("Worktree parent path: {parent_dir}")))
})
.child(BreakpointOptionsStrip {
props,
breakpoint: BreakpointEntry {
@@ -1245,15 +1234,14 @@ impl RenderOnce for BreakpointOptionsStrip {
};
h_flex()
.gap_1()
.gap_2()
.child(
div().map(self.add_border(ActiveBreakpointStripMode::Log, supports_logs, window, cx))
div() .map(self.add_border(ActiveBreakpointStripMode::Log, supports_logs, window, cx))
.child(
IconButton::new(
SharedString::from(format!("{id}-log-toggle")),
IconName::ScrollText,
)
.icon_size(IconSize::XSmall)
.style(style_for_toggle(ActiveBreakpointStripMode::Log, has_logs))
.icon_color(color_for_toggle(has_logs))
.disabled(!supports_logs)
@@ -1273,7 +1261,6 @@ impl RenderOnce for BreakpointOptionsStrip {
SharedString::from(format!("{id}-condition-toggle")),
IconName::SplitAlt,
)
.icon_size(IconSize::XSmall)
.style(style_for_toggle(
ActiveBreakpointStripMode::Condition,
has_condition
@@ -1287,7 +1274,7 @@ impl RenderOnce for BreakpointOptionsStrip {
.when(!has_condition && !self.is_selected, |this| this.invisible()),
)
.child(
div().map(self.add_border(
div() .map(self.add_border(
ActiveBreakpointStripMode::HitCondition,
supports_hit_condition,window, cx
))
@@ -1296,7 +1283,6 @@ impl RenderOnce for BreakpointOptionsStrip {
SharedString::from(format!("{id}-hit-condition-toggle")),
IconName::ArrowDown10,
)
.icon_size(IconSize::XSmall)
.style(style_for_toggle(
ActiveBreakpointStripMode::HitCondition,
has_hit_condition,

View File

@@ -114,7 +114,7 @@ impl Console {
}
fn is_running(&self, cx: &Context<Self>) -> bool {
self.session.read(cx).is_started()
self.session.read(cx).is_running()
}
fn handle_stack_frame_list_events(

View File

@@ -37,23 +37,15 @@ async fn test_dap_logger_captures_all_session_rpc_messages(
.await;
assert!(
log_store.read_with(cx, |log_store, _| !log_store.has_projects()),
"log_store shouldn't contain any projects before any projects were created"
log_store.read_with(cx, |log_store, _| log_store
.contained_session_ids()
.is_empty()),
"log_store shouldn't contain any session IDs before any sessions were created"
);
let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
let workspace = init_test_workspace(&project, cx).await;
assert!(
log_store.read_with(cx, |log_store, _| log_store.has_projects()),
"log_store shouldn't contain any projects before any projects were created"
);
assert!(
log_store.read_with(cx, |log_store, _| log_store
.contained_session_ids(&project.downgrade())
.is_empty()),
"log_store shouldn't contain any projects before any projects were created"
);
let cx = &mut VisualTestContext::from_window(*workspace, cx);
// Start a debug session
@@ -62,22 +54,20 @@ async fn test_dap_logger_captures_all_session_rpc_messages(
let client = session.update(cx, |session, _| session.adapter_client().unwrap());
assert_eq!(
log_store.read_with(cx, |log_store, _| log_store
.contained_session_ids(&project.downgrade())
.len()),
log_store.read_with(cx, |log_store, _| log_store.contained_session_ids().len()),
1,
);
assert!(
log_store.read_with(cx, |log_store, _| log_store
.contained_session_ids(&project.downgrade())
.contained_session_ids()
.contains(&session_id)),
"log_store should contain the session IDs of the started session"
);
assert!(
!log_store.read_with(cx, |log_store, _| log_store
.rpc_messages_for_session_id(&project.downgrade(), session_id)
.rpc_messages_for_session_id(session_id)
.is_empty()),
"We should have the initialization sequence in the log store"
);

View File

@@ -267,6 +267,7 @@ async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppConte
"Debugpy",
"PHP",
"JavaScript",
"Ruby",
"Delve",
"GDB",
"fake-adapter",

View File

@@ -61,7 +61,6 @@ parking_lot.workspace = true
pretty_assertions.workspace = true
project.workspace = true
rand.workspace = true
regex.workspace = true
rpc.workspace = true
schemars.workspace = true
serde.workspace = true

View File

@@ -37,9 +37,7 @@ pub use block_map::{
use block_map::{BlockRow, BlockSnapshot};
use collections::{HashMap, HashSet};
pub use crease_map::*;
pub use fold_map::{
ChunkRenderer, ChunkRendererContext, ChunkRendererId, Fold, FoldId, FoldPlaceholder, FoldPoint,
};
pub use fold_map::{ChunkRenderer, ChunkRendererContext, Fold, FoldId, FoldPlaceholder, FoldPoint};
use fold_map::{FoldMap, FoldSnapshot};
use gpui::{App, Context, Entity, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle};
pub use inlay_map::Inlay;
@@ -540,7 +538,7 @@ impl DisplayMap {
pub fn update_fold_widths(
&mut self,
widths: impl IntoIterator<Item = (ChunkRendererId, Pixels)>,
widths: impl IntoIterator<Item = (FoldId, Pixels)>,
cx: &mut Context<Self>,
) -> bool {
let snapshot = self.buffer.read(cx).snapshot(cx);

View File

@@ -1,5 +1,3 @@
use crate::{InlayId, display_map::inlay_map::InlayChunk};
use super::{
Highlights,
inlay_map::{InlayBufferRows, InlayChunks, InlayEdit, InlayOffset, InlayPoint, InlaySnapshot},
@@ -277,16 +275,13 @@ impl FoldMapWriter<'_> {
pub(crate) fn update_fold_widths(
&mut self,
new_widths: impl IntoIterator<Item = (ChunkRendererId, Pixels)>,
new_widths: impl IntoIterator<Item = (FoldId, Pixels)>,
) -> (FoldSnapshot, Vec<FoldEdit>) {
let mut edits = Vec::new();
let inlay_snapshot = self.0.snapshot.inlay_snapshot.clone();
let buffer = &inlay_snapshot.buffer;
for (id, new_width) in new_widths {
let ChunkRendererId::Fold(id) = id else {
continue;
};
if let Some(metadata) = self.0.snapshot.fold_metadata_by_id.get(&id).cloned() {
if Some(new_width) != metadata.width {
let buffer_start = metadata.range.start.to_offset(buffer);
@@ -532,7 +527,7 @@ impl FoldMap {
placeholder: Some(TransformPlaceholder {
text: ELLIPSIS,
renderer: ChunkRenderer {
id: ChunkRendererId::Fold(fold.id),
id: fold.id,
render: Arc::new(move |cx| {
(fold.placeholder.render)(
fold_id,
@@ -1065,7 +1060,7 @@ impl sum_tree::Summary for TransformSummary {
}
#[derive(Copy, Clone, Eq, PartialEq, Debug, Default, Ord, PartialOrd, Hash)]
pub struct FoldId(pub(super) usize);
pub struct FoldId(usize);
impl From<FoldId> for ElementId {
fn from(val: FoldId) -> Self {
@@ -1270,17 +1265,11 @@ pub struct Chunk<'a> {
pub renderer: Option<ChunkRenderer>,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum ChunkRendererId {
Fold(FoldId),
Inlay(InlayId),
}
/// A recipe for how the chunk should be presented.
#[derive(Clone)]
pub struct ChunkRenderer {
/// The id of the renderer associated with this chunk.
pub id: ChunkRendererId,
/// The id of the fold associated with this chunk.
pub id: FoldId,
/// Creates a custom element to represent this chunk.
pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
/// If true, the element is constrained to the shaped width of the text.
@@ -1322,7 +1311,7 @@ impl DerefMut for ChunkRendererContext<'_, '_> {
pub struct FoldChunks<'a> {
transform_cursor: Cursor<'a, Transform, (FoldOffset, InlayOffset)>,
inlay_chunks: InlayChunks<'a>,
inlay_chunk: Option<(InlayOffset, InlayChunk<'a>)>,
inlay_chunk: Option<(InlayOffset, language::Chunk<'a>)>,
inlay_offset: InlayOffset,
output_offset: FoldOffset,
max_output_offset: FoldOffset,
@@ -1414,8 +1403,7 @@ impl<'a> Iterator for FoldChunks<'a> {
}
// Otherwise, take a chunk from the buffer's text.
if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() {
let chunk = &mut inlay_chunk.chunk;
if let Some((buffer_chunk_start, mut chunk)) = self.inlay_chunk.clone() {
let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len());
let transform_end = self.transform_cursor.end(&()).1;
let chunk_end = buffer_chunk_end.min(transform_end);
@@ -1440,7 +1428,7 @@ impl<'a> Iterator for FoldChunks<'a> {
is_tab: chunk.is_tab,
is_inlay: chunk.is_inlay,
underline: chunk.underline,
renderer: inlay_chunk.renderer,
renderer: None,
});
}

View File

@@ -1,4 +1,4 @@
use crate::{ChunkRenderer, HighlightStyles, InlayId};
use crate::{HighlightStyles, InlayId};
use collections::BTreeSet;
use gpui::{Hsla, Rgba};
use language::{Chunk, Edit, Point, TextSummary};
@@ -8,13 +8,11 @@ use multi_buffer::{
use std::{
cmp,
ops::{Add, AddAssign, Range, Sub, SubAssign},
sync::Arc,
};
use sum_tree::{Bias, Cursor, SumTree};
use text::{Patch, Rope};
use ui::{ActiveTheme, IntoElement as _, ParentElement as _, Styled as _, div};
use super::{Highlights, custom_highlights::CustomHighlightsChunks, fold_map::ChunkRendererId};
use super::{Highlights, custom_highlights::CustomHighlightsChunks};
/// Decides where the [`Inlay`]s should be displayed.
///
@@ -254,13 +252,6 @@ pub struct InlayChunks<'a> {
snapshot: &'a InlaySnapshot,
}
#[derive(Clone)]
pub struct InlayChunk<'a> {
pub chunk: Chunk<'a>,
/// Whether the inlay should be customly rendered.
pub renderer: Option<ChunkRenderer>,
}
impl InlayChunks<'_> {
pub fn seek(&mut self, new_range: Range<InlayOffset>) {
self.transforms.seek(&new_range.start, Bias::Right, &());
@@ -280,7 +271,7 @@ impl InlayChunks<'_> {
}
impl<'a> Iterator for InlayChunks<'a> {
type Item = InlayChunk<'a>;
type Item = Chunk<'a>;
fn next(&mut self) -> Option<Self::Item> {
if self.output_offset == self.max_output_offset {
@@ -305,12 +296,9 @@ impl<'a> Iterator for InlayChunks<'a> {
chunk.text = suffix;
self.output_offset.0 += prefix.len();
InlayChunk {
chunk: Chunk {
text: prefix,
..chunk.clone()
},
renderer: None,
Chunk {
text: prefix,
..chunk.clone()
}
}
Transform::Inlay(inlay) => {
@@ -325,7 +313,6 @@ impl<'a> Iterator for InlayChunks<'a> {
}
}
let mut renderer = None;
let mut highlight_style = match inlay.id {
InlayId::InlineCompletion(_) => {
self.highlight_styles.inline_completion.map(|s| {
@@ -338,31 +325,14 @@ impl<'a> Iterator for InlayChunks<'a> {
}
InlayId::Hint(_) => self.highlight_styles.inlay_hint,
InlayId::DebuggerValue(_) => self.highlight_styles.inlay_hint,
InlayId::Color(_) => {
if let Some(color) = inlay.color {
renderer = Some(ChunkRenderer {
id: ChunkRendererId::Inlay(inlay.id),
render: Arc::new(move |cx| {
div()
.relative()
.size_3p5()
.child(
div()
.absolute()
.right_1()
.size_3()
.border_1()
.border_color(cx.theme().colors().border)
.bg(color),
)
.into_any_element()
}),
constrain_width: false,
measured_width: None,
});
InlayId::Color(_) => match inlay.color {
Some(color) => {
let mut style = self.highlight_styles.inlay_hint.unwrap_or_default();
style.color = Some(color);
Some(style)
}
self.highlight_styles.inlay_hint
}
None => self.highlight_styles.inlay_hint,
},
};
let next_inlay_highlight_endpoint;
let offset_in_inlay = self.output_offset - self.transforms.start().0;
@@ -400,14 +370,11 @@ impl<'a> Iterator for InlayChunks<'a> {
self.output_offset.0 += chunk.len();
InlayChunk {
chunk: Chunk {
text: chunk,
highlight_style,
is_inlay: true,
..Chunk::default()
},
renderer,
Chunk {
text: chunk,
highlight_style,
is_inlay: true,
..Default::default()
}
}
};
@@ -1099,7 +1066,7 @@ impl InlaySnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(Default::default()..self.len(), false, Highlights::default())
.map(|chunk| chunk.chunk.text)
.map(|chunk| chunk.text)
.collect()
}
@@ -1737,7 +1704,7 @@ mod tests {
..Highlights::default()
},
)
.map(|chunk| chunk.chunk.text)
.map(|chunk| chunk.text)
.collect::<String>();
assert_eq!(
actual_text,

View File

@@ -547,7 +547,6 @@ pub enum SoftWrap {
#[derive(Clone)]
pub struct EditorStyle {
pub background: Hsla,
pub border: Hsla,
pub local_player: PlayerColor,
pub text: TextStyle,
pub scrollbar_width: Pixels,
@@ -563,7 +562,6 @@ impl Default for EditorStyle {
fn default() -> Self {
Self {
background: Hsla::default(),
border: Hsla::default(),
local_player: PlayerColor::default(),
text: TextStyle::default(),
scrollbar_width: Pixels::default(),
@@ -11541,90 +11539,66 @@ impl Editor {
let language_settings = buffer.language_settings_at(selection.head(), cx);
let language_scope = buffer.language_scope_at(selection.head());
let indent_and_prefix_for_row =
|row: u32| -> (IndentSize, Option<String>, Option<String>) {
let indent = buffer.indent_size_for_line(MultiBufferRow(row));
let (comment_prefix, rewrap_prefix) =
if let Some(language_scope) = &language_scope {
let indent_end = Point::new(row, indent.len);
let comment_prefix = language_scope
.line_comment_prefixes()
.iter()
.find(|prefix| buffer.contains_str_at(indent_end, prefix))
.map(|prefix| prefix.to_string());
let line_end = Point::new(row, buffer.line_len(MultiBufferRow(row)));
let line_text_after_indent = buffer
.text_for_range(indent_end..line_end)
.collect::<String>();
let rewrap_prefix = language_scope
.rewrap_prefixes()
.iter()
.find_map(|prefix_regex| {
prefix_regex.find(&line_text_after_indent).map(|mat| {
if mat.start() == 0 {
Some(mat.as_str().to_string())
} else {
None
}
})
})
.flatten();
(comment_prefix, rewrap_prefix)
} else {
(None, None)
};
(indent, comment_prefix, rewrap_prefix)
};
let mut ranges = Vec::new();
let mut current_range_start = first_row;
let from_empty_selection = selection.is_empty();
let mut current_range_start = first_row;
let mut prev_row = first_row;
let (
mut current_range_indent,
mut current_range_comment_prefix,
mut current_range_rewrap_prefix,
) = indent_and_prefix_for_row(first_row);
let mut prev_indent = buffer.indent_size_for_line(MultiBufferRow(first_row));
let mut prev_comment_prefix = if let Some(language_scope) = &language_scope {
let indent = buffer.indent_size_for_line(MultiBufferRow(first_row));
let indent_end = Point::new(first_row, indent.len);
language_scope
.line_comment_prefixes()
.iter()
.find(|prefix| buffer.contains_str_at(indent_end, prefix))
.cloned()
} else {
None
};
for row in non_blank_rows_iter.skip(1) {
let has_paragraph_break = row > prev_row + 1;
let (row_indent, row_comment_prefix, row_rewrap_prefix) =
indent_and_prefix_for_row(row);
let row_indent = buffer.indent_size_for_line(MultiBufferRow(row));
let row_comment_prefix = if let Some(language_scope) = &language_scope {
let indent = buffer.indent_size_for_line(MultiBufferRow(row));
let indent_end = Point::new(row, indent.len);
language_scope
.line_comment_prefixes()
.iter()
.find(|prefix| buffer.contains_str_at(indent_end, prefix))
.cloned()
} else {
None
};
let has_indent_change = row_indent != current_range_indent;
let has_comment_change = row_comment_prefix != current_range_comment_prefix;
let has_boundary_change = has_comment_change
|| row_rewrap_prefix.is_some()
|| (has_indent_change && current_range_comment_prefix.is_some());
let has_boundary_change =
row_indent != prev_indent || row_comment_prefix != prev_comment_prefix;
if has_paragraph_break || has_boundary_change {
ranges.push((
language_settings.clone(),
Point::new(current_range_start, 0)
..Point::new(prev_row, buffer.line_len(MultiBufferRow(prev_row))),
current_range_indent,
current_range_comment_prefix.clone(),
current_range_rewrap_prefix.clone(),
prev_indent,
prev_comment_prefix.clone(),
from_empty_selection,
));
current_range_start = row;
current_range_indent = row_indent;
current_range_comment_prefix = row_comment_prefix;
current_range_rewrap_prefix = row_rewrap_prefix;
}
prev_row = row;
prev_indent = row_indent;
prev_comment_prefix = row_comment_prefix;
}
ranges.push((
language_settings.clone(),
Point::new(current_range_start, 0)
..Point::new(prev_row, buffer.line_len(MultiBufferRow(prev_row))),
current_range_indent,
current_range_comment_prefix,
current_range_rewrap_prefix,
prev_indent,
prev_comment_prefix,
from_empty_selection,
));
@@ -11634,14 +11608,8 @@ impl Editor {
let mut edits = Vec::new();
let mut rewrapped_row_ranges = Vec::<RangeInclusive<u32>>::new();
for (
language_settings,
wrap_range,
indent_size,
comment_prefix,
rewrap_prefix,
from_empty_selection,
) in wrap_ranges
for (language_settings, wrap_range, indent_size, comment_prefix, from_empty_selection) in
wrap_ranges
{
let mut start_row = wrap_range.start.row;
let mut end_row = wrap_range.end.row;
@@ -11657,16 +11625,12 @@ impl Editor {
let tab_size = language_settings.tab_size;
let indent_prefix = indent_size.chars().collect::<String>();
let mut line_prefix = indent_prefix.clone();
let mut line_prefix = indent_size.chars().collect::<String>();
let mut inside_comment = false;
if let Some(prefix) = &comment_prefix {
line_prefix.push_str(prefix);
inside_comment = true;
}
if let Some(prefix) = &rewrap_prefix {
line_prefix.push_str(prefix);
}
let allow_rewrap_based_on_language = match language_settings.allow_rewrap {
RewrapBehavior::InComments => inside_comment,
@@ -11713,18 +11677,12 @@ impl Editor {
let selection_text = buffer.text_for_range(start..end).collect::<String>();
let Some(lines_without_prefixes) = selection_text
.lines()
.enumerate()
.map(|(ix, line)| {
let line_trimmed = line.trim_start();
if rewrap_prefix.is_some() && ix > 0 {
Ok(line_trimmed)
} else {
line_trimmed
.strip_prefix(&line_prefix.trim_start())
.with_context(|| {
format!("line did not start with prefix {line_prefix:?}: {line:?}")
})
}
.map(|line| {
line.strip_prefix(&line_prefix)
.or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
.with_context(|| {
format!("line did not start with prefix {line_prefix:?}: {line:?}")
})
})
.collect::<Result<Vec<_>, _>>()
.log_err()
@@ -11737,16 +11695,8 @@ impl Editor {
.language_settings_at(Point::new(start_row, 0), cx)
.preferred_line_length as usize
});
let subsequent_lines_prefix = if let Some(rewrap_prefix_str) = &rewrap_prefix {
format!("{}{}", indent_prefix, " ".repeat(rewrap_prefix_str.len()))
} else {
line_prefix.clone()
};
let wrapped_text = wrap_with_prefix(
line_prefix,
subsequent_lines_prefix,
lines_without_prefixes.join("\n"),
wrap_column,
tab_size,
@@ -17381,9 +17331,9 @@ impl Editor {
self.active_indent_guides_state.dirty = true;
}
pub fn update_renderer_widths(
pub fn update_fold_widths(
&mut self,
widths: impl IntoIterator<Item = (ChunkRendererId, Pixels)>,
widths: impl IntoIterator<Item = (FoldId, Pixels)>,
cx: &mut Context<Self>,
) -> bool {
self.display_map
@@ -21248,22 +21198,18 @@ fn test_word_breaking_tokenizer() {
}
fn wrap_with_prefix(
first_line_prefix: String,
subsequent_lines_prefix: String,
line_prefix: String,
unwrapped_text: String,
wrap_column: usize,
tab_size: NonZeroU32,
preserve_existing_whitespace: bool,
) -> String {
let first_line_prefix_len = char_len_with_expanded_tabs(0, &first_line_prefix, tab_size);
let subsequent_lines_prefix_len =
char_len_with_expanded_tabs(0, &subsequent_lines_prefix, tab_size);
let line_prefix_len = char_len_with_expanded_tabs(0, &line_prefix, tab_size);
let mut wrapped_text = String::new();
let mut current_line = first_line_prefix.clone();
let mut is_first_line = true;
let mut current_line = line_prefix.clone();
let tokenizer = WordBreakingTokenizer::new(&unwrapped_text);
let mut current_line_len = first_line_prefix_len;
let mut current_line_len = line_prefix_len;
let mut in_whitespace = false;
for token in tokenizer {
let have_preceding_whitespace = in_whitespace;
@@ -21273,19 +21219,13 @@ fn wrap_with_prefix(
grapheme_len,
} => {
in_whitespace = false;
let current_prefix_len = if is_first_line {
first_line_prefix_len
} else {
subsequent_lines_prefix_len
};
if current_line_len + grapheme_len > wrap_column
&& current_line_len != current_prefix_len
&& current_line_len != line_prefix_len
{
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
is_first_line = false;
current_line = subsequent_lines_prefix.clone();
current_line_len = subsequent_lines_prefix_len;
current_line.truncate(line_prefix.len());
current_line_len = line_prefix_len;
}
current_line.push_str(token);
current_line_len += grapheme_len;
@@ -21302,46 +21242,32 @@ fn wrap_with_prefix(
token = " ";
grapheme_len = 1;
}
let current_prefix_len = if is_first_line {
first_line_prefix_len
} else {
subsequent_lines_prefix_len
};
if current_line_len + grapheme_len > wrap_column {
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
is_first_line = false;
current_line = subsequent_lines_prefix.clone();
current_line_len = subsequent_lines_prefix_len;
} else if current_line_len != current_prefix_len || preserve_existing_whitespace {
current_line.truncate(line_prefix.len());
current_line_len = line_prefix_len;
} else if current_line_len != line_prefix_len || preserve_existing_whitespace {
current_line.push_str(token);
current_line_len += grapheme_len;
}
}
WordBreakToken::Newline => {
in_whitespace = true;
let current_prefix_len = if is_first_line {
first_line_prefix_len
} else {
subsequent_lines_prefix_len
};
if preserve_existing_whitespace {
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
is_first_line = false;
current_line = subsequent_lines_prefix.clone();
current_line_len = subsequent_lines_prefix_len;
current_line.truncate(line_prefix.len());
current_line_len = line_prefix_len;
} else if have_preceding_whitespace {
continue;
} else if current_line_len + 1 > wrap_column
&& current_line_len != current_prefix_len
} else if current_line_len + 1 > wrap_column && current_line_len != line_prefix_len
{
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
is_first_line = false;
current_line = subsequent_lines_prefix.clone();
current_line_len = subsequent_lines_prefix_len;
} else if current_line_len != current_prefix_len {
current_line.truncate(line_prefix.len());
current_line_len = line_prefix_len;
} else if current_line_len != line_prefix_len {
current_line.push(' ');
current_line_len += 1;
}
@@ -21359,7 +21285,6 @@ fn wrap_with_prefix(
fn test_wrap_with_prefix() {
assert_eq!(
wrap_with_prefix(
"# ".to_string(),
"# ".to_string(),
"abcdefg".to_string(),
4,
@@ -21370,7 +21295,6 @@ fn test_wrap_with_prefix() {
);
assert_eq!(
wrap_with_prefix(
"".to_string(),
"".to_string(),
"\thello world".to_string(),
8,
@@ -21381,7 +21305,6 @@ fn test_wrap_with_prefix() {
);
assert_eq!(
wrap_with_prefix(
"// ".to_string(),
"// ".to_string(),
"xx \nyy zz aa bb cc".to_string(),
12,
@@ -21392,7 +21315,6 @@ fn test_wrap_with_prefix() {
);
assert_eq!(
wrap_with_prefix(
String::new(),
String::new(),
"这是什么 \n 钢笔".to_string(),
3,
@@ -22483,7 +22405,6 @@ impl Render for Editor {
&cx.entity(),
EditorStyle {
background,
border: cx.theme().colors().border,
local_player: cx.theme().players().local(),
text: text_style,
scrollbar_width: EditorElement::SCROLLBAR_WIDTH,

View File

@@ -378,6 +378,7 @@ pub enum SnippetSortOrder {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct EditorSettingsContent {
/// Whether the cursor blinks in the editor.
///

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
use gpui::{App, FontFeatures, FontWeight};
use project::project_settings::{InlineBlameSettings, ProjectSettings};
use settings::{EditableSettingControl, Settings};
use theme::{FontFamilyCache, FontFamilyName, ThemeSettings};
use theme::{FontFamilyCache, ThemeSettings};
use ui::{
CheckboxWithLabel, ContextMenu, DropdownMenu, NumericStepper, SettingsContainer, SettingsGroup,
prelude::*,
@@ -75,7 +75,7 @@ impl EditableSettingControl for BufferFontFamilyControl {
value: Self::Value,
_cx: &App,
) {
settings.buffer_font_family = Some(FontFamilyName(value.into()));
settings.buffer_font_family = Some(value.to_string());
}
}

View File

@@ -30,7 +30,7 @@ use language::{
},
tree_sitter_python,
};
use language_settings::{Formatter, IndentGuideSettings};
use language_settings::{Formatter, FormatterList, IndentGuideSettings};
use lsp::CompletionParams;
use multi_buffer::{IndentGuide, PathKey};
use parking_lot::Mutex;
@@ -3567,7 +3567,7 @@ async fn test_indent_outdent_with_hard_tabs(cx: &mut TestAppContext) {
#[gpui::test]
fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.languages.0.extend([
settings.languages.extend([
(
"TOML".into(),
LanguageSettingsContent {
@@ -5145,7 +5145,7 @@ fn test_transpose(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_rewrap(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.languages.0.extend([
settings.languages.extend([
(
"Markdown".into(),
LanguageSettingsContent {
@@ -5210,10 +5210,6 @@ async fn test_rewrap(cx: &mut TestAppContext) {
let markdown_language = Arc::new(Language::new(
LanguageConfig {
name: "Markdown".into(),
rewrap_prefixes: vec![
regex::Regex::new("\\d+\\.\\s+").unwrap(),
regex::Regex::new("[-*+]\\s+").unwrap(),
],
..LanguageConfig::default()
},
None,
@@ -5376,82 +5372,7 @@ async fn test_rewrap(cx: &mut TestAppContext) {
A long long long line of markdown text
to wrap.ˇ
"},
markdown_language.clone(),
&mut cx,
);
// Test that rewrapping boundary works and preserves relative indent for Markdown documents
assert_rewrap(
indoc! {"
«1. This is a numbered list item that is very long and needs to be wrapped properly.
2. This is a numbered list item that is very long and needs to be wrapped properly.
- This is an unordered list item that is also very long and should not merge with the numbered item.ˇ»
"},
indoc! {"
«1. This is a numbered list item that is
very long and needs to be wrapped
properly.
2. This is a numbered list item that is
very long and needs to be wrapped
properly.
- This is an unordered list item that is
also very long and should not merge
with the numbered item.ˇ»
"},
markdown_language.clone(),
&mut cx,
);
// Test that rewrapping add indents for rewrapping boundary if not exists already.
assert_rewrap(
indoc! {"
«1. This is a numbered list item that is
very long and needs to be wrapped
properly.
2. This is a numbered list item that is
very long and needs to be wrapped
properly.
- This is an unordered list item that is
also very long and should not merge with
the numbered item.ˇ»
"},
indoc! {"
«1. This is a numbered list item that is
very long and needs to be wrapped
properly.
2. This is a numbered list item that is
very long and needs to be wrapped
properly.
- This is an unordered list item that is
also very long and should not merge
with the numbered item.ˇ»
"},
markdown_language.clone(),
&mut cx,
);
// Test that rewrapping maintain indents even when they already exists.
assert_rewrap(
indoc! {"
«1. This is a numbered list
item that is very long and needs to be wrapped properly.
2. This is a numbered list
item that is very long and needs to be wrapped properly.
- This is an unordered list item that is also very long and
should not merge with the numbered item.ˇ»
"},
indoc! {"
«1. This is a numbered list item that is
very long and needs to be wrapped
properly.
2. This is a numbered list item that is
very long and needs to be wrapped
properly.
- This is an unordered list item that is
also very long and should not merge
with the numbered item.ˇ»
"},
markdown_language.clone(),
markdown_language,
&mut cx,
);
@@ -9405,7 +9326,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
// Set rust language override and assert overridden tabsize is sent to language server
update_test_language_settings(cx, |settings| {
settings.languages.0.insert(
settings.languages.insert(
"Rust".into(),
LanguageSettingsContent {
tab_size: NonZeroU32::new(8),
@@ -9969,7 +9890,7 @@ async fn test_range_format_during_save(cx: &mut TestAppContext) {
// Set Rust language override and assert overridden tabsize is sent to language server
update_test_language_settings(cx, |settings| {
settings.languages.0.insert(
settings.languages.insert(
"Rust".into(),
LanguageSettingsContent {
tab_size: NonZeroU32::new(8),
@@ -10012,9 +9933,9 @@ async fn test_range_format_during_save(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(vec![
Formatter::LanguageServer { name: None },
]))
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
FormatterList(vec![Formatter::LanguageServer { name: None }].into()),
))
});
let fs = FakeFs::new(cx.executor());
@@ -10141,17 +10062,21 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
async fn test_multiple_formatters(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.remove_trailing_whitespace_on_save = Some(true);
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(vec![
Formatter::LanguageServer { name: None },
Formatter::CodeActions(
[
("code-action-1".into(), true),
("code-action-2".into(), true),
settings.defaults.formatter =
Some(language_settings::SelectedFormatter::List(FormatterList(
vec![
Formatter::LanguageServer { name: None },
Formatter::CodeActions(
[
("code-action-1".into(), true),
("code-action-2".into(), true),
]
.into_iter()
.collect(),
),
]
.into_iter()
.collect(),
),
]))
.into(),
)))
});
let fs = FakeFs::new(cx.executor());
@@ -10403,9 +10328,9 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(vec![
Formatter::LanguageServer { name: None },
]))
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
FormatterList(vec![Formatter::LanguageServer { name: None }].into()),
))
});
let fs = FakeFs::new(cx.executor());
@@ -14980,7 +14905,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
.unwrap();
let _fake_server = fake_servers.next().await.unwrap();
update_test_language_settings(cx, |language_settings| {
language_settings.languages.0.insert(
language_settings.languages.insert(
language_name.clone(),
LanguageSettingsContent {
tab_size: NonZeroU32::new(8),
@@ -15878,9 +15803,9 @@ fn completion_menu_entries(menu: &CompletionsMenu) -> Vec<String> {
#[gpui::test]
async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(vec![
Formatter::Prettier,
]))
settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
FormatterList(vec![Formatter::Prettier].into()),
))
});
let fs = FakeFs::new(cx.executor());

View File

@@ -12,8 +12,8 @@ use crate::{
ToggleFold,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
display_map::{
Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins,
HighlightKey, HighlightedChunk, ToDisplayPoint,
Block, BlockContext, BlockStyle, DisplaySnapshot, EditorMargins, FoldId, HighlightKey,
HighlightedChunk, ToDisplayPoint,
},
editor_settings::{
CurrentLineHighlight, DocumentColorsRenderMode, DoubleClickInMultibuffer, Minimap,
@@ -7119,7 +7119,7 @@ pub(crate) struct LineWithInvisibles {
enum LineFragment {
Text(ShapedLine),
Element {
id: ChunkRendererId,
id: FoldId,
element: Option<AnyElement>,
size: Size<Pixels>,
len: usize,
@@ -8297,7 +8297,7 @@ impl Element for EditorElement {
window,
cx,
);
let new_renrerer_widths = line_layouts
let new_fold_widths = line_layouts
.iter()
.flat_map(|layout| &layout.fragments)
.filter_map(|fragment| {
@@ -8308,7 +8308,7 @@ impl Element for EditorElement {
}
});
if self.editor.update(cx, |editor, cx| {
editor.update_renderer_widths(new_renrerer_widths, cx)
editor.update_fold_widths(new_fold_widths, cx)
}) {
// If the fold widths have changed, we need to prepaint
// the element again to account for any changes in

View File

@@ -1307,7 +1307,7 @@ pub mod tests {
use crate::scroll::ScrollAmount;
use crate::{ExcerptRange, scroll::Autoscroll, test::editor_lsp_test_context::rust_lang};
use futures::StreamExt;
use gpui::{Context, SemanticVersion, TestAppContext, WindowHandle};
use gpui::{AppContext as _, Context, SemanticVersion, TestAppContext, WindowHandle};
use itertools::Itertools as _;
use language::{Capability, FakeLspAdapter, language_settings::AllLanguageSettingsContent};
use language::{Language, LanguageConfig, LanguageMatcher};

View File

@@ -626,7 +626,7 @@ mod jsx_tag_autoclose_tests {
};
use super::*;
use gpui::TestAppContext;
use gpui::{AppContext as _, TestAppContext};
use language::language_settings::JsxTagAutoCloseSettings;
use languages::language;
use multi_buffer::ExcerptRange;

View File

@@ -19,21 +19,18 @@ use crate::{
#[derive(Debug)]
pub(super) struct LspColorData {
buffer_colors: HashMap<BufferId, BufferColors>,
render_mode: DocumentColorsRenderMode,
}
#[derive(Debug, Default)]
struct BufferColors {
cache_version_used: usize,
colors: Vec<(Range<Anchor>, DocumentColor, InlayId)>,
inlay_colors: HashMap<InlayId, usize>,
cache_version_used: usize,
render_mode: DocumentColorsRenderMode,
}
impl LspColorData {
pub fn new(cx: &App) -> Self {
Self {
buffer_colors: HashMap::default(),
cache_version_used: 0,
colors: Vec::new(),
inlay_colors: HashMap::default(),
render_mode: EditorSettings::get_global(cx).lsp_document_colors,
}
}
@@ -50,9 +47,8 @@ impl LspColorData {
DocumentColorsRenderMode::Inlay => Some(InlaySplice {
to_remove: Vec::new(),
to_insert: self
.buffer_colors
.colors
.iter()
.flat_map(|(_, buffer_colors)| buffer_colors.colors.iter())
.map(|(range, color, id)| {
Inlay::color(
id.id(),
@@ -67,49 +63,33 @@ impl LspColorData {
})
.collect(),
}),
DocumentColorsRenderMode::None => Some(InlaySplice {
to_remove: self
.buffer_colors
.drain()
.flat_map(|(_, buffer_colors)| buffer_colors.inlay_colors)
.map(|(id, _)| id)
.collect(),
to_insert: Vec::new(),
}),
DocumentColorsRenderMode::None => {
self.colors.clear();
Some(InlaySplice {
to_remove: self.inlay_colors.drain().map(|(id, _)| id).collect(),
to_insert: Vec::new(),
})
}
DocumentColorsRenderMode::Border | DocumentColorsRenderMode::Background => {
Some(InlaySplice {
to_remove: self
.buffer_colors
.iter_mut()
.flat_map(|(_, buffer_colors)| buffer_colors.inlay_colors.drain())
.map(|(id, _)| id)
.collect(),
to_remove: self.inlay_colors.drain().map(|(id, _)| id).collect(),
to_insert: Vec::new(),
})
}
}
}
fn set_colors(
&mut self,
buffer_id: BufferId,
colors: Vec<(Range<Anchor>, DocumentColor, InlayId)>,
cache_version: Option<usize>,
) -> bool {
let buffer_colors = self.buffer_colors.entry(buffer_id).or_default();
if let Some(cache_version) = cache_version {
buffer_colors.cache_version_used = cache_version;
}
if buffer_colors.colors == colors {
fn set_colors(&mut self, colors: Vec<(Range<Anchor>, DocumentColor, InlayId)>) -> bool {
if self.colors == colors {
return false;
}
buffer_colors.inlay_colors = colors
self.inlay_colors = colors
.iter()
.enumerate()
.map(|(i, (_, _, id))| (*id, i))
.collect();
buffer_colors.colors = colors;
self.colors = colors;
true
}
@@ -123,9 +103,8 @@ impl LspColorData {
{
Vec::new()
} else {
self.buffer_colors
self.colors
.iter()
.flat_map(|(_, buffer_colors)| &buffer_colors.colors)
.map(|(range, color, _)| {
let display_range = range.clone().to_display_points(snapshot);
let color = Hsla::from(Rgba {
@@ -183,9 +162,10 @@ impl Editor {
ColorFetchStrategy::IgnoreCache
} else {
ColorFetchStrategy::UseCache {
known_cache_version: self.colors.as_ref().and_then(|colors| {
Some(colors.buffer_colors.get(&buffer_id)?.cache_version_used)
}),
known_cache_version: self
.colors
.as_ref()
.map(|colors| colors.cache_version_used),
}
};
let colors_task = lsp_store.document_colors(fetch_strategy, buffer, cx)?;
@@ -221,13 +201,15 @@ impl Editor {
return;
};
let mut new_editor_colors = HashMap::default();
let mut cache_version = None;
let mut new_editor_colors = Vec::<(Range<Anchor>, DocumentColor)>::new();
for (buffer_id, colors) in all_colors {
let Some(excerpts) = editor_excerpts.get(&buffer_id) else {
continue;
};
match colors {
Ok(colors) => {
cache_version = colors.cache_version;
for color in colors.colors {
let color_start = point_from_lsp(color.lsp_range.start);
let color_end = point_from_lsp(color.lsp_range.end);
@@ -261,15 +243,8 @@ impl Editor {
continue;
};
let new_entry =
new_editor_colors.entry(buffer_id).or_insert_with(|| {
(Vec::<(Range<Anchor>, DocumentColor)>::new(), None)
});
new_entry.1 = colors.cache_version;
let new_buffer_colors = &mut new_entry.0;
let (Ok(i) | Err(i)) =
new_buffer_colors.binary_search_by(|(probe, _)| {
new_editor_colors.binary_search_by(|(probe, _)| {
probe
.start
.cmp(&color_start_anchor, &multi_buffer_snapshot)
@@ -279,7 +254,7 @@ impl Editor {
.cmp(&color_end_anchor, &multi_buffer_snapshot)
})
});
new_buffer_colors
new_editor_colors
.insert(i, (color_start_anchor..color_end_anchor, color));
break;
}
@@ -292,70 +267,45 @@ impl Editor {
editor
.update(cx, |editor, cx| {
let mut colors_splice = InlaySplice::default();
let mut new_color_inlays = Vec::with_capacity(new_editor_colors.len());
let Some(colors) = &mut editor.colors else {
return;
};
let mut updated = false;
for (buffer_id, (new_buffer_colors, new_cache_version)) in new_editor_colors {
let mut new_buffer_color_inlays =
Vec::with_capacity(new_buffer_colors.len());
let mut existing_buffer_colors = colors
.buffer_colors
.entry(buffer_id)
.or_default()
.colors
.iter()
.peekable();
for (new_range, new_color) in new_buffer_colors {
let rgba_color = Rgba {
r: new_color.color.red,
g: new_color.color.green,
b: new_color.color.blue,
a: new_color.color.alpha,
};
let mut existing_colors = colors.colors.iter().peekable();
for (new_range, new_color) in new_editor_colors {
let rgba_color = Rgba {
r: new_color.color.red,
g: new_color.color.green,
b: new_color.color.blue,
a: new_color.color.alpha,
};
loop {
match existing_buffer_colors.peek() {
Some((existing_range, existing_color, existing_inlay_id)) => {
match existing_range
.start
.cmp(&new_range.start, &multi_buffer_snapshot)
.then_with(|| {
existing_range
.end
.cmp(&new_range.end, &multi_buffer_snapshot)
}) {
cmp::Ordering::Less => {
loop {
match existing_colors.peek() {
Some((existing_range, existing_color, existing_inlay_id)) => {
match existing_range
.start
.cmp(&new_range.start, &multi_buffer_snapshot)
.then_with(|| {
existing_range
.end
.cmp(&new_range.end, &multi_buffer_snapshot)
}) {
cmp::Ordering::Less => {
colors_splice.to_remove.push(*existing_inlay_id);
existing_colors.next();
continue;
}
cmp::Ordering::Equal => {
if existing_color == &new_color {
new_color_inlays.push((
new_range,
new_color,
*existing_inlay_id,
));
} else {
colors_splice.to_remove.push(*existing_inlay_id);
existing_buffer_colors.next();
continue;
}
cmp::Ordering::Equal => {
if existing_color == &new_color {
new_buffer_color_inlays.push((
new_range,
new_color,
*existing_inlay_id,
));
} else {
colors_splice
.to_remove
.push(*existing_inlay_id);
let inlay = Inlay::color(
post_inc(&mut editor.next_color_inlay_id),
new_range.start,
rgba_color,
);
let inlay_id = inlay.id;
colors_splice.to_insert.push(inlay);
new_buffer_color_inlays
.push((new_range, new_color, inlay_id));
}
existing_buffer_colors.next();
break;
}
cmp::Ordering::Greater => {
let inlay = Inlay::color(
post_inc(&mut editor.next_color_inlay_id),
new_range.start,
@@ -363,40 +313,49 @@ impl Editor {
);
let inlay_id = inlay.id;
colors_splice.to_insert.push(inlay);
new_buffer_color_inlays
new_color_inlays
.push((new_range, new_color, inlay_id));
break;
}
existing_colors.next();
break;
}
cmp::Ordering::Greater => {
let inlay = Inlay::color(
post_inc(&mut editor.next_color_inlay_id),
new_range.start,
rgba_color,
);
let inlay_id = inlay.id;
colors_splice.to_insert.push(inlay);
new_color_inlays.push((new_range, new_color, inlay_id));
break;
}
}
None => {
let inlay = Inlay::color(
post_inc(&mut editor.next_color_inlay_id),
new_range.start,
rgba_color,
);
let inlay_id = inlay.id;
colors_splice.to_insert.push(inlay);
new_buffer_color_inlays
.push((new_range, new_color, inlay_id));
break;
}
}
None => {
let inlay = Inlay::color(
post_inc(&mut editor.next_color_inlay_id),
new_range.start,
rgba_color,
);
let inlay_id = inlay.id;
colors_splice.to_insert.push(inlay);
new_color_inlays.push((new_range, new_color, inlay_id));
break;
}
}
}
if existing_buffer_colors.peek().is_some() {
colors_splice
.to_remove
.extend(existing_buffer_colors.map(|(_, _, id)| *id));
}
updated |= colors.set_colors(
buffer_id,
new_buffer_color_inlays,
new_cache_version,
);
}
if existing_colors.peek().is_some() {
colors_splice
.to_remove
.extend(existing_colors.map(|(_, _, id)| *id));
}
let mut updated = colors.set_colors(new_color_inlays);
if let Some(cache_version) = cache_version {
colors.cache_version_used = cache_version;
}
if colors.render_mode == DocumentColorsRenderMode::Inlay
&& (!colors_splice.to_insert.is_empty()
|| !colors_splice.to_remove.is_empty())

View File

@@ -1,3 +0,0 @@
pub mod basic;
pub use basic::*;

View File

@@ -1,104 +0,0 @@
use std::{collections::HashSet, path::Path, sync::Arc};
use anyhow::Result;
use assistant_tools::{CreateFileToolInput, EditFileToolInput, ReadFileToolInput};
use async_trait::async_trait;
use buffer_diff::DiffHunkStatus;
use collections::HashMap;
use crate::example::{
Example, ExampleContext, ExampleMetadata, FileEditHunk, FileEdits, JudgeAssertion,
LanguageServer,
};
pub struct EditBasic;
#[async_trait(?Send)]
impl Example for EditBasic {
fn meta(&self) -> ExampleMetadata {
ExampleMetadata {
name: "edit_basic".to_string(),
url: "https://github.com/zed-industries/zed.git".to_string(),
revision: "58604fba86ebbffaa01f7c6834253e33bcd38c0f".to_string(),
language_server: None,
max_assertions: None,
}
}
async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> {
cx.push_user_message(format!(
r#"
Read the `crates/git/src/blame.rs` file and delete `run_git_blame`. Just that
one function, not its usages.
IMPORTANT: You are only allowed to use the `read_file` and `edit_file` tools!
"#
));
let response = cx.run_to_end().await?;
// let expected_edits = HashMap::from_iter([(
// Arc::from(Path::new("crates/git/src/blame.rs")),
// FileEdits {
// hunks: vec![
// FileEditHunk {
// base_text: " unique_shas.insert(entry.sha);\n".into(),
// text: " unique_shas.insert(entry.git_sha);\n".into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " pub sha: Oid,\n".into(),
// text: " pub git_sha: Oid,\n".into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " let sha = parts\n".into(),
// text: " let git_sha = parts\n".into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text:
// " .ok_or_else(|| anyhow!(\"failed to parse sha\"))?;\n"
// .into(),
// text:
// " .ok_or_else(|| anyhow!(\"failed to parse git_sha\"))?;\n"
// .into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " sha,\n".into(),
// text: " git_sha,\n".into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " .get(&new_entry.sha)\n".into(),
// text: " .get(&new_entry.git_sha)\n".into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " let is_committed = !entry.sha.is_zero();\n"
// .into(),
// text: " let is_committed = !entry.git_sha.is_zero();\n"
// .into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " index.insert(entry.sha, entries.len());\n"
// .into(),
// text: " index.insert(entry.git_sha, entries.len());\n"
// .into(),
// status: DiffHunkStatus::modified_none(),
// },
// FileEditHunk {
// base_text: " if !entry.sha.is_zero() {\n".into(),
// text: " if !entry.git_sha.is_zero() {\n".into(),
// status: DiffHunkStatus::modified_none(),
// },
// ],
// },
// )]);
// let actual_edits = cx.edits();
// cx.assert_eq(&actual_edits, &expected_edits, "edits don't match")?;
Ok(())
}
}

View File

@@ -1054,15 +1054,6 @@ pub fn response_events_to_markdown(
| LanguageModelCompletionEvent::StartMessage { .. }
| LanguageModelCompletionEvent::StatusUpdate { .. },
) => {}
Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
json_parse_error, ..
}) => {
flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer);
response.push_str(&format!(
"**Error**: parse error in tool use JSON: {}\n\n",
json_parse_error
));
}
Err(error) => {
flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer);
response.push_str(&format!("**Error**: {}\n\n", error));
@@ -1141,17 +1132,6 @@ impl ThreadDialog {
| Ok(LanguageModelCompletionEvent::StartMessage { .. })
| Ok(LanguageModelCompletionEvent::Stop(_)) => {}
Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
json_parse_error,
..
}) => {
flush_text(&mut current_text, &mut content);
content.push(MessageContent::Text(format!(
"ERROR: parse error in tool use JSON: {}",
json_parse_error
)));
}
Err(error) => {
flush_text(&mut current_text, &mut content);
content.push(MessageContent::Text(format!("ERROR: {}", error)));

View File

@@ -8,7 +8,7 @@ use collections::{BTreeMap, HashSet};
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs, RealFs};
use futures::{AsyncReadExt, StreamExt, io::BufReader};
use gpui::{SemanticVersion, TestAppContext};
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
use http_client::{FakeHttpClient, Response};
use language::{BinaryStatus, LanguageMatcher, LanguageRegistry};
use lsp::LanguageServerName;

View File

@@ -65,7 +65,6 @@ actions!(
#[derive(Clone, Debug, Default, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = git, deprecated_aliases = ["editor::RevertFile"])]
#[serde(deny_unknown_fields)]
pub struct RestoreFile {
#[serde(default)]
pub skip_prompt: bool,

View File

@@ -388,7 +388,6 @@ pub(crate) fn commit_message_editor(
commit_editor.set_collaboration_hub(Box::new(project));
commit_editor.set_use_autoclose(false);
commit_editor.set_show_gutter(false, cx);
commit_editor.set_use_modal_editing(true);
commit_editor.set_show_wrap_guides(false, cx);
commit_editor.set_show_indent_guides(false, cx);
let placeholder = placeholder.unwrap_or("Enter commit message".into());

View File

@@ -12,7 +12,7 @@ license = "Apache-2.0"
workspace = true
[features]
default = ["http_client", "font-kit", "wayland", "x11", "windows-manifest"]
default = ["http_client", "font-kit", "wayland", "x11"]
test-support = [
"leak-detection",
"collections/test-support",
@@ -69,7 +69,7 @@ x11 = [
"open",
"scap",
]
windows-manifest = []
[lib]
path = "src/gpui.rs"

View File

@@ -17,7 +17,7 @@ fn main() {
#[cfg(target_os = "macos")]
macos::build();
}
#[cfg(all(target_os = "windows", feature = "windows-manifest"))]
#[cfg(target_os = "windows")]
Ok("windows") => {
let manifest = std::path::Path::new("resources/windows/gpui.manifest.xml");
let rc_file = std::path::Path::new("resources/windows/gpui.rc");

View File

@@ -125,7 +125,9 @@ pub trait Action: Any + Send {
Self: Sized;
/// Optional JSON schema for the action's input data.
fn action_json_schema(_: &mut schemars::SchemaGenerator) -> Option<schemars::Schema>
fn action_json_schema(
_: &mut schemars::r#gen::SchemaGenerator,
) -> Option<schemars::schema::Schema>
where
Self: Sized,
{
@@ -236,7 +238,7 @@ impl Default for ActionRegistry {
struct ActionData {
pub build: ActionBuilder,
pub json_schema: fn(&mut schemars::SchemaGenerator) -> Option<schemars::Schema>,
pub json_schema: fn(&mut schemars::r#gen::SchemaGenerator) -> Option<schemars::schema::Schema>,
}
/// This type must be public so that our macros can build it in other crates.
@@ -251,7 +253,7 @@ pub struct MacroActionData {
pub name: &'static str,
pub type_id: TypeId,
pub build: ActionBuilder,
pub json_schema: fn(&mut schemars::SchemaGenerator) -> Option<schemars::Schema>,
pub json_schema: fn(&mut schemars::r#gen::SchemaGenerator) -> Option<schemars::schema::Schema>,
pub deprecated_aliases: &'static [&'static str],
pub deprecation_message: Option<&'static str>,
}
@@ -355,8 +357,8 @@ impl ActionRegistry {
pub fn action_schemas(
&self,
generator: &mut schemars::SchemaGenerator,
) -> Vec<(&'static str, Option<schemars::Schema>)> {
generator: &mut schemars::r#gen::SchemaGenerator,
) -> Vec<(&'static str, Option<schemars::schema::Schema>)> {
// Use the order from all_names so that the resulting schema has sensible order.
self.all_names
.iter()

View File

@@ -1069,7 +1069,7 @@ impl App {
}
}
/// Obtains a reference to the background executor, which can be used to spawn futures.
/// Obtains a reference to the executor, which can be used to spawn futures.
pub fn background_executor(&self) -> &BackgroundExecutor {
&self.background_executor
}
@@ -1334,11 +1334,6 @@ impl App {
self.pending_effects.push_back(Effect::RefreshWindows);
}
/// Get all key bindings in the app.
pub fn key_bindings(&self) -> Rc<RefCell<Keymap>> {
self.keymap.clone()
}
/// Register a global listener for actions invoked via the keyboard.
pub fn on_action<A: Action>(&mut self, listener: impl Fn(&A, &mut Self) + 'static) {
self.global_action_listeners
@@ -1393,8 +1388,8 @@ impl App {
/// Get all non-internal actions that have been registered, along with their schemas.
pub fn action_schemas(
&self,
generator: &mut schemars::SchemaGenerator,
) -> Vec<(&'static str, Option<schemars::Schema>)> {
generator: &mut schemars::r#gen::SchemaGenerator,
) -> Vec<(&'static str, Option<schemars::schema::Schema>)> {
self.actions.action_schemas(generator)
}

View File

@@ -178,14 +178,7 @@ impl TestAppContext {
&self.foreground_executor
}
/// Builds an entity that is owned by the application.
///
/// The given function will be invoked with a [`Context`] and must return an object representing the entity. An
/// [`Entity`] handle will be returned, which can be used to access the entity in a context.
pub fn new<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut Context<T>) -> T,
) -> Entity<T> {
fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T> {
let mut cx = self.app.borrow_mut();
cx.new(build_entity)
}

View File

@@ -1,10 +1,9 @@
use anyhow::{Context as _, bail};
use schemars::{JsonSchema, json_schema};
use schemars::{JsonSchema, SchemaGenerator, schema::Schema};
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{self, Visitor},
};
use std::borrow::Cow;
use std::{
fmt::{self, Display, Formatter},
hash::{Hash, Hasher},
@@ -100,14 +99,22 @@ impl Visitor<'_> for RgbaVisitor {
}
impl JsonSchema for Rgba {
fn schema_name() -> Cow<'static, str> {
"Rgba".into()
fn schema_name() -> String {
"Rgba".to_string()
}
fn json_schema(_generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
json_schema!({
"type": "string",
"pattern": "^#([0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$"
fn json_schema(_generator: &mut SchemaGenerator) -> Schema {
use schemars::schema::{InstanceType, SchemaObject, StringValidation};
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::String.into()),
string: Some(Box::new(StringValidation {
pattern: Some(
r"^#([0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$".to_string(),
),
..Default::default()
})),
..Default::default()
})
}
}
@@ -622,11 +629,11 @@ impl From<Rgba> for Hsla {
}
impl JsonSchema for Hsla {
fn schema_name() -> Cow<'static, str> {
fn schema_name() -> String {
Rgba::schema_name()
}
fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
fn json_schema(generator: &mut SchemaGenerator) -> Schema {
Rgba::json_schema(generator)
}
}

View File

@@ -613,10 +613,10 @@ pub trait InteractiveElement: Sized {
/// Track the focus state of the given focus handle on this element.
/// If the focus handle is focused by the application, this element will
/// apply its focused styles.
fn track_focus(mut self, focus_handle: &FocusHandle) -> Self {
fn track_focus(mut self, focus_handle: &FocusHandle) -> FocusableWrapper<Self> {
self.interactivity().focusable = true;
self.interactivity().tracked_focus_handle = Some(focus_handle.clone());
self
FocusableWrapper { element: self }
}
/// Set the keymap context for this element. This will be used to determine
@@ -980,35 +980,15 @@ pub trait InteractiveElement: Sized {
self.interactivity().block_mouse_except_scroll();
self
}
/// Set the given styles to be applied when this element, specifically, is focused.
/// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`].
fn focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
where
Self: Sized,
{
self.interactivity().focus_style = Some(Box::new(f(StyleRefinement::default())));
self
}
/// Set the given styles to be applied when this element is inside another element that is focused.
/// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`].
fn in_focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
where
Self: Sized,
{
self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default())));
self
}
}
/// A trait for elements that want to use the standard GPUI interactivity features
/// that require state.
pub trait StatefulInteractiveElement: InteractiveElement {
/// Set this element to focusable.
fn focusable(mut self) -> Self {
fn focusable(mut self) -> FocusableWrapper<Self> {
self.interactivity().focusable = true;
self
FocusableWrapper { element: self }
}
/// Set the overflow x and y to scroll.
@@ -1138,6 +1118,27 @@ pub trait StatefulInteractiveElement: InteractiveElement {
}
}
/// A trait for providing focus related APIs to interactive elements
pub trait FocusableElement: InteractiveElement {
/// Set the given styles to be applied when this element, specifically, is focused.
fn focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
where
Self: Sized,
{
self.interactivity().focus_style = Some(Box::new(f(StyleRefinement::default())));
self
}
/// Set the given styles to be applied when this element is inside another element that is focused.
fn in_focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
where
Self: Sized,
{
self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default())));
self
}
}
pub(crate) type MouseDownListener =
Box<dyn Fn(&MouseDownEvent, DispatchPhase, &Hitbox, &mut Window, &mut App) + 'static>;
pub(crate) type MouseUpListener =
@@ -2776,6 +2777,126 @@ impl GroupHitboxes {
}
}
/// A wrapper around an element that can be focused.
pub struct FocusableWrapper<E> {
/// The element that is focusable
pub element: E,
}
impl<E: InteractiveElement> FocusableElement for FocusableWrapper<E> {}
impl<E> InteractiveElement for FocusableWrapper<E>
where
E: InteractiveElement,
{
fn interactivity(&mut self) -> &mut Interactivity {
self.element.interactivity()
}
}
impl<E: StatefulInteractiveElement> StatefulInteractiveElement for FocusableWrapper<E> {}
impl<E> Styled for FocusableWrapper<E>
where
E: Styled,
{
fn style(&mut self) -> &mut StyleRefinement {
self.element.style()
}
}
impl FocusableWrapper<Div> {
/// Add a listener to be called when the children of this `Div` are prepainted.
/// This allows you to store the [`Bounds`] of the children for later use.
pub fn on_children_prepainted(
mut self,
listener: impl Fn(Vec<Bounds<Pixels>>, &mut Window, &mut App) + 'static,
) -> Self {
self.element = self.element.on_children_prepainted(listener);
self
}
}
impl<E> Element for FocusableWrapper<E>
where
E: Element,
{
type RequestLayoutState = E::RequestLayoutState;
type PrepaintState = E::PrepaintState;
fn id(&self) -> Option<ElementId> {
self.element.id()
}
fn source_location(&self) -> Option<&'static core::panic::Location<'static>> {
self.element.source_location()
}
fn request_layout(
&mut self,
id: Option<&GlobalElementId>,
inspector_id: Option<&InspectorElementId>,
window: &mut Window,
cx: &mut App,
) -> (LayoutId, Self::RequestLayoutState) {
self.element.request_layout(id, inspector_id, window, cx)
}
fn prepaint(
&mut self,
id: Option<&GlobalElementId>,
inspector_id: Option<&InspectorElementId>,
bounds: Bounds<Pixels>,
state: &mut Self::RequestLayoutState,
window: &mut Window,
cx: &mut App,
) -> E::PrepaintState {
self.element
.prepaint(id, inspector_id, bounds, state, window, cx)
}
fn paint(
&mut self,
id: Option<&GlobalElementId>,
inspector_id: Option<&InspectorElementId>,
bounds: Bounds<Pixels>,
request_layout: &mut Self::RequestLayoutState,
prepaint: &mut Self::PrepaintState,
window: &mut Window,
cx: &mut App,
) {
self.element.paint(
id,
inspector_id,
bounds,
request_layout,
prepaint,
window,
cx,
)
}
}
impl<E> IntoElement for FocusableWrapper<E>
where
E: IntoElement,
{
type Element = E::Element;
fn into_element(self) -> Self::Element {
self.element.into_element()
}
}
impl<E> ParentElement for FocusableWrapper<E>
where
E: ParentElement,
{
fn extend(&mut self, elements: impl IntoIterator<Item = AnyElement>) {
self.element.extend(elements)
}
}
/// A wrapper around an element that can store state, produced after assigning an ElementId.
pub struct Stateful<E> {
pub(crate) element: E,
@@ -2806,6 +2927,8 @@ where
}
}
impl<E: FocusableElement> FocusableElement for Stateful<E> {}
impl<E> Element for Stateful<E>
where
E: Element,

View File

@@ -25,7 +25,7 @@ use std::{
use thiserror::Error;
use util::ResultExt;
use super::{Stateful, StatefulInteractiveElement};
use super::{FocusableElement, Stateful, StatefulInteractiveElement};
/// The delay before showing the loading state.
pub const LOADING_DELAY: Duration = Duration::from_millis(200);
@@ -509,6 +509,8 @@ impl IntoElement for Img {
}
}
impl FocusableElement for Img {}
impl StatefulInteractiveElement for Img {}
impl ImageSource {

View File

@@ -10,8 +10,8 @@
use crate::{
AnyElement, App, AvailableSpace, Bounds, ContentMask, DispatchPhase, Edges, Element, EntityId,
FocusHandle, GlobalElementId, Hitbox, HitboxBehavior, InspectorElementId, IntoElement,
Overflow, Pixels, Point, ScrollDelta, ScrollWheelEvent, Size, Style, StyleRefinement, Styled,
Window, point, px, size,
Overflow, Pixels, Point, ScrollWheelEvent, Size, Style, StyleRefinement, Styled, Window, point,
px, size,
};
use collections::VecDeque;
use refineable::Refineable as _;
@@ -962,15 +962,12 @@ impl Element for List {
let height = bounds.size.height;
let scroll_top = prepaint.layout.scroll_top;
let hitbox_id = prepaint.hitbox.id;
let mut accumulated_scroll_delta = ScrollDelta::default();
window.on_mouse_event(move |event: &ScrollWheelEvent, phase, window, cx| {
if phase == DispatchPhase::Bubble && hitbox_id.should_handle_scroll(window) {
accumulated_scroll_delta = accumulated_scroll_delta.coalesce(event.delta);
let pixel_delta = accumulated_scroll_delta.pixel_delta(px(20.));
list_state.0.borrow_mut().scroll(
&scroll_top,
height,
pixel_delta,
event.delta.pixel_delta(px(20.)),
current_view,
window,
cx,

View File

@@ -95,13 +95,6 @@ where
.spawn(self.log_tracked_err(*location))
.detach();
}
/// Convert a Task<Result<T, E>> to a Task<()> that logs all errors.
pub fn log_err_in_task(self, cx: &App) -> Task<Option<T>> {
let location = core::panic::Location::caller();
cx.foreground_executor()
.spawn(async move { self.log_tracked_err(*location).await })
}
}
impl<T> Future for Task<T> {

Some files were not shown because too many files have changed in this diff Show More