Compare commits

32 Commits

Author SHA1 Message Date
Nathan Sobo
c240f876b1 Restructure agent2 modules 2025-07-01 09:15:34 -06:00
Nathan Sobo
b076ff99ef Get tests passing again after merging 2025-07-01 07:45:42 -06:00
Nathan Sobo
36c173e3e2 Merge remote-tracking branch 'origin/main' into test-driven-agent 2025-07-01 06:42:02 -06:00
Nathan Sobo
8573b3a84b WIP 2025-04-30 11:08:53 -06:00
Nathan Sobo
5e70235794 Checkpoint: Start on glob tool with a dynamic tool description
Specify the project roots in the tool. Still very much need to test
this.
2025-04-27 00:24:18 -06:00
Nathan Sobo
9e9192f6a3 Merge branch 'streaming-edits' into test-driven-agent 2025-04-26 22:22:01 -06:00
Antonio Scandurra
936972d9b0 Run evals in parallel 2025-04-26 13:35:54 +02:00
Antonio Scandurra
e9533423db Checkpoint 2025-04-26 12:49:28 +02:00
Antonio Scandurra
ba480295c1 WIP: working test_remove_function in a loop 2025-04-25 21:39:55 +02:00
Antonio Scandurra
9106f4495b Introduce a new EditParser struct 2025-04-25 21:18:01 +02:00
Antonio Scandurra
1feb1296fe WIP 2025-04-25 11:32:12 +02:00
Antonio Scandurra
582a247922 Merge remote-tracking branch 'origin/main' into streaming-edits 2025-04-25 09:41:40 +02:00
Antonio Scandurra
c2881a4537 Add a new eval for edits 2025-04-25 09:41:33 +02:00
Nathan Sobo
b4744750da 💄 2025-04-23 22:00:41 -06:00
Nathan Sobo
6edc255158 WIP: Tests passing decently reliably 2025-04-23 21:49:05 -06:00
Nathan Sobo
a96a1b1339 Test streaming tool use 2025-04-23 21:42:34 -06:00
Nathan Sobo
73cee468ed Start an assistant message if needed to avoid potential panics
If the model is well-behaved it won't happen, but I don't want us to
panic if it messes up.
2025-04-23 20:50:06 -06:00
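A minimal sketch of the guard this commit describes, using the `AgentMessage`/`Role` shapes that appear in `thread.rs` later in this diff (not the verbatim change):

```rust
// Sketch: guarantee a trailing assistant message exists before appending
// streamed text, so a misbehaving model can't cause a panic by emitting
// text before any assistant message has started.
fn last_assistant_message(messages: &mut Vec<AgentMessage>) -> &mut AgentMessage {
    if messages.last().map_or(true, |m| m.role != Role::Assistant) {
        messages.push(AgentMessage {
            role: Role::Assistant,
            content: Vec::new(),
        });
    }
    messages.last_mut().unwrap()
}
```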
Nathan Sobo
1f06615da2 Introduce LanguageModelToolUse::raw_input to enable alternative
streaming solutions at the app layer
2025-04-23 20:08:09 -06:00
Nathan Sobo
c1773f7281 WIP 2025-04-23 19:39:27 -06:00
Nathan Sobo
6c6b1ba3bc Merge remote-tracking branch 'origin/main' into test-driven-agent 2025-04-23 19:10:32 -06:00
Nathan Sobo
a23d9328ce WIP 2025-04-23 08:41:28 -06:00
Nathan Sobo
5796a2663b Streamline tool implementation
Auto-implement an object-safe AnyTool trait for any T that implements
Tool.
2025-04-23 00:41:16 -06:00
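A minimal sketch of the pattern this commit describes, with simplified `Tool`/`AnyTool` shapes assumed for illustration (the agent2 code later in this diff applies the same idea via `AgentTool::erase` and `AgentToolErased`):

```rust
use std::sync::Arc;

// A typed tool trait; the associated Input type and Sized bound make it
// non-object-safe as written.
trait Tool: 'static + Sized {
    type Input: serde::de::DeserializeOwned;
    fn name(&self) -> String;
    fn run(self: Arc<Self>, input: Self::Input) -> anyhow::Result<String>;
}

// An object-safe counterpart that erases Input behind serde_json::Value.
trait AnyTool {
    fn name(&self) -> String;
    fn run_json(self: Arc<Self>, input: serde_json::Value) -> anyhow::Result<String>;
}

// Auto-implement AnyTool for any T that implements Tool, so callers can
// hold Arc<dyn AnyTool> without knowing each tool's concrete Input type.
impl<T: Tool> AnyTool for T {
    fn name(&self) -> String {
        Tool::name(self)
    }

    fn run_json(self: Arc<Self>, input: serde_json::Value) -> anyhow::Result<String> {
        let input: T::Input = serde_json::from_value(input)?;
        Tool::run(self, input)
    }
}
```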
Nathan Sobo
447eb8e1c9 Checkpoint 2025-04-21 07:08:03 -06:00
Nathan Sobo
e434117018 Checkpoint: Still a failing test for concurrent tool calls.
Seems like I'm surfacing a bug in Anthropic.
2025-04-20 20:50:20 -06:00
Nathan Sobo
36271b79b3 Failing test proving we need to batch tools per message 2025-04-20 19:04:37 -06:00
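The fix this test motivates shows up in `thread.rs` later in this diff: every tool result from one assistant turn is awaited and sent back in a single user message. Condensed from that code (shapes assumed from the diff):

```rust
// Sketch: await all tool results from this turn, then batch them into
// one user message instead of one message per tool result.
let tool_results = future::join_all(tool_uses).await;
thread
    .update(cx, |thread, _cx| {
        thread.messages.push(AgentMessage {
            role: Role::User,
            content: tool_results.into_iter().map(Into::into).collect(),
        });
    })
    .ok();
```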
Nathan Sobo
41644a53cc Checkpoint 2025-04-20 17:56:42 -06:00
Nathan Sobo
08a9c4af09 Checkpoint 2025-04-20 17:54:33 -06:00
Nathan Sobo
3187f28405 Checkpoint 2025-04-20 17:28:44 -06:00
Nathan Sobo
101f3b100f Get a basic request/reply tested in AgentThread 2025-04-20 00:41:03 -06:00
Nathan Sobo
39c8b7bf5f Add agent_thread crate
Experimental for now. I want to really integration-test it against the
real APIs in a more "eval" style, meaning we embrace its stochastic
nature.
2025-04-20 00:17:38 -06:00
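"Eval style" here means assertions that tolerate legitimate run-to-run variance in model behavior. The concurrent-tool-call test later in this diff does exactly that; a condensed sketch (shapes assumed from the diff):

```rust
// Sketch: the model may report one batched ToolUse stop or two separate
// ones; accept either, but always require a final EndTurn.
let stop_reasons = stop_events(events);
match stop_reasons.as_slice() {
    [StopReason::ToolUse, StopReason::EndTurn] => {}
    [StopReason::ToolUse, StopReason::ToolUse, StopReason::EndTurn] => {}
    other => panic!("unexpected stop sequence: {:?}", other),
}
```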
Nathan Sobo
08b41252f6 Include role in start message 2025-04-20 00:16:49 -06:00
Nathan Sobo
152bbca238 Add gpui helpers 2025-04-20 00:16:08 -06:00
335 changed files with 6346 additions and 8499 deletions

View File

@@ -30,7 +30,6 @@ jobs:
run_tests: ${{ steps.filter.outputs.run_tests }}
run_license: ${{ steps.filter.outputs.run_license }}
run_docs: ${{ steps.filter.outputs.run_docs }}
run_nix: ${{ steps.filter.outputs.run_nix }}
runs-on:
- ubuntu-latest
steps:
@@ -70,12 +69,6 @@ jobs:
else
echo "run_license=false" >> $GITHUB_OUTPUT
fi
NIX_REGEX='^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)'
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep "$NIX_REGEX") ]]; then
echo "run_nix=true" >> $GITHUB_OUTPUT
else
echo "run_nix=false" >> $GITHUB_OUTPUT
fi
migration_checks:
name: Check Postgres and Protobuf migrations, mergeability
@@ -753,10 +746,7 @@ jobs:
nix-build:
name: Build with Nix
uses: ./.github/workflows/nix.yml
needs: [job_spec]
if: github.repository_owner == 'zed-industries' &&
(contains(github.event.pull_request.labels.*.name, 'run-nix') ||
needs.job_spec.outputs.run_nix == 'true')
if: github.repository_owner == 'zed-industries' && contains(github.event.pull_request.labels.*.name, 'run-nix')
secrets: inherit
with:
flake-output: debug

58
Cargo.lock generated
View File

@@ -107,6 +107,39 @@ dependencies = [
"zstd",
]
[[package]]
name = "agent2"
version = "0.1.0"
dependencies = [
"anyhow",
"assistant_tool",
"assistant_tools",
"chrono",
"client",
"collections",
"ctor",
"env_logger 0.11.8",
"fs",
"futures 0.3.31",
"gpui",
"gpui_tokio",
"handlebars 4.5.0",
"language_model",
"language_models",
"parking_lot",
"project",
"reqwest_client",
"rust-embed",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"thiserror 2.0.12",
"util",
"worktree",
]
[[package]]
name = "agent_settings"
version = "0.1.0"
@@ -2076,7 +2109,7 @@ dependencies = [
[[package]]
name = "blade-graphics"
version = "0.6.0"
source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
dependencies = [
"ash",
"ash-window",
@@ -2109,7 +2142,7 @@ dependencies = [
[[package]]
name = "blade-macros"
version = "0.3.0"
source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
dependencies = [
"proc-macro2",
"quote",
@@ -2119,7 +2152,7 @@ dependencies = [
[[package]]
name = "blade-util"
version = "0.2.0"
source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
dependencies = [
"blade-graphics",
"bytemuck",
@@ -4147,8 +4180,6 @@ dependencies = [
"async-trait",
"collections",
"dap",
"dotenvy",
"fs",
"futures 0.3.31",
"gpui",
"json_dotpath",
@@ -4677,6 +4708,12 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "dotenv"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
[[package]]
name = "dotenvy"
version = "0.15.7"
@@ -4830,7 +4867,6 @@ dependencies = [
"tree-sitter-python",
"tree-sitter-rust",
"tree-sitter-typescript",
"tree-sitter-yaml",
"ui",
"unicode-script",
"unicode-segmentation",
@@ -5111,7 +5147,7 @@ dependencies = [
"collections",
"debug_adapter_extension",
"dirs 4.0.0",
"dotenvy",
"dotenv",
"env_logger 0.11.8",
"extension",
"fs",
@@ -12259,7 +12295,6 @@ dependencies = [
"language",
"log",
"lsp",
"markdown",
"node_runtime",
"parking_lot",
"pathdiff",
@@ -14569,7 +14604,6 @@ dependencies = [
name = "settings_ui"
version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"command_palette",
"command_palette_hooks",
@@ -14580,7 +14614,6 @@ dependencies = [
"fs",
"fuzzy",
"gpui",
"language",
"log",
"menu",
"paths",
@@ -14590,8 +14623,6 @@ dependencies = [
"serde",
"settings",
"theme",
"tree-sitter-json",
"tree-sitter-rust",
"ui",
"util",
"workspace",
@@ -17348,7 +17379,6 @@ dependencies = [
"rand 0.8.5",
"regex",
"rust-embed",
"schemars",
"serde",
"serde_json",
"serde_json_lenient",
@@ -19946,7 +19976,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.195.0"
version = "0.194.0"
dependencies = [
"activity_indicator",
"agent",

View File

@@ -4,6 +4,7 @@ members = [
"crates/activity_indicator",
"crates/agent_ui",
"crates/agent",
"crates/agent2",
"crates/agent_settings",
"crates/anthropic",
"crates/askpass",
@@ -425,9 +426,9 @@ aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-util = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
blade-util = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -449,7 +450,7 @@ dashmap = "6.0"
derive_more = "0.99.17"
dirs = "4.0"
documented = "0.9.1"
dotenvy = "0.15.0"
dotenv = "0.15.0"
ec4rs = "1.1"
emojis = "0.6.1"
env_logger = "0.11"
@@ -480,7 +481,7 @@ json_dotpath = "1.1"
jsonschema = "0.30.0"
jsonwebtoken = "9.3"
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
@@ -491,7 +492,7 @@ metal = "0.29"
moka = { version = "0.12.10", features = ["sync"] }
naga = { version = "25.0", features = ["wgsl-in"] }
nanoid = "0.4"
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nix = "0.29"
num-format = "0.4.4"
objc = "0.2"
@@ -531,7 +532,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "951c77
"stream",
] }
rsa = "0.9.6"
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
"async-dispatcher-runtime",
] }
rust-embed = { version = "8.4", features = ["include-exclude"] }
@@ -625,7 +626,7 @@ wasmtime = { version = "29", default-features = false, features = [
wasmtime-wasi = "29"
which = "6.0.0"
workspace-hack = "0.1.0"
zed_llm_client = "= 0.8.5"
zed_llm_client = "0.8.5"
zstd = "0.11"
[workspace.dependencies.async-stripe]

View File

@@ -34,7 +34,7 @@
"ctrl-q": "zed::Quit",
"f4": "debugger::Start",
"shift-f5": "debugger::Stop",
"ctrl-shift-f5": "debugger::RerunSession",
"ctrl-shift-f5": "debugger::Restart",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"ctrl-f11": "debugger::StepInto",
@@ -557,13 +557,6 @@
"ctrl-b": "workspace::ToggleLeftDock",
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
"ctrl-alt-0": "workspace::ResetActiveDockSize",
// For 0px parameter, uses UI font size value.
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
"ctrl-alt-=": ["workspace::IncreaseActiveDockSize", { "px": 0 }],
"ctrl-alt-)": "workspace::ResetOpenDocksSize",
"ctrl-alt-_": ["workspace::DecreaseOpenDocksSize", { "px": 0 }],
"ctrl-alt-+": ["workspace::IncreaseOpenDocksSize", { "px": 0 }],
"shift-find": "pane::DeploySearch",
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
@@ -605,9 +598,7 @@
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
// or by tag:
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
"f5": "debugger::Rerun",
"ctrl-f4": "workspace::CloseActiveDock",
"ctrl-w": "workspace::CloseActiveDock"
"f5": "debugger::RerunLastSession"
}
},
{
@@ -710,13 +701,6 @@
"pagedown": "editor::ContextMenuLast"
}
},
{
"context": "Editor && showing_signature_help && !showing_completions",
"bindings": {
"up": "editor::SignatureHelpPrevious",
"down": "editor::SignatureHelpNext"
}
},
// Custom bindings
{
"bindings": {
@@ -1084,13 +1068,6 @@
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
},
{
"context": "MarkdownPreview",
"bindings": {
"pageup": "markdown::MovePageUp",
"pagedown": "markdown::MovePageDown"
}
},
{
"context": "KeymapEditor",
"use_key_equivalents": true,

View File

@@ -5,10 +5,10 @@
"bindings": {
"f4": "debugger::Start",
"shift-f5": "debugger::Stop",
"shift-cmd-f5": "debugger::RerunSession",
"shift-cmd-f5": "debugger::Restart",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"ctrl-f11": "debugger::StepInto",
"f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"home": "menu::SelectFirst",
"shift-pageup": "menu::SelectFirst",
@@ -624,13 +624,6 @@
"cmd-r": "workspace::ToggleRightDock",
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
// For 0px parameter, uses UI font size value.
"ctrl-alt-0": "workspace::ResetActiveDockSize",
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
"ctrl-alt-=": ["workspace::IncreaseActiveDockSize", { "px": 0 }],
"ctrl-alt-)": "workspace::ResetOpenDocksSize",
"ctrl-alt-_": ["workspace::DecreaseOpenDocksSize", { "px": 0 }],
"ctrl-alt-+": ["workspace::IncreaseOpenDocksSize", { "px": 0 }],
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"cmd-shift-t": "pane::ReopenClosedItem",
@@ -659,8 +652,7 @@
"cmd-k shift-up": "workspace::SwapPaneUp",
"cmd-k shift-down": "workspace::SwapPaneDown",
"cmd-shift-x": "zed::Extensions",
"f5": "debugger::Rerun",
"cmd-w": "workspace::CloseActiveDock"
"f5": "debugger::RerunLastSession"
}
},
{
@@ -774,13 +766,6 @@
"pagedown": "editor::ContextMenuLast"
}
},
{
"context": "Editor && showing_signature_help && !showing_completions",
"bindings": {
"up": "editor::SignatureHelpPrevious",
"down": "editor::SignatureHelpNext"
}
},
// Custom bindings
{
"use_key_equivalents": true,
@@ -1183,13 +1168,6 @@
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
},
{
"context": "MarkdownPreview",
"bindings": {
"pageup": "markdown::MovePageUp",
"pagedown": "markdown::MovePageDown"
}
},
{
"context": "KeymapEditor",
"use_key_equivalents": true,

View File

@@ -98,13 +98,6 @@
"ctrl-n": "editor::ContextMenuNext"
}
},
{
"context": "Editor && showing_signature_help && !showing_completions",
"bindings": {
"ctrl-p": "editor::SignatureHelpPrevious",
"ctrl-n": "editor::SignatureHelpNext"
}
},
{
"context": "Workspace",
"bindings": {

View File

@@ -98,13 +98,6 @@
"ctrl-n": "editor::ContextMenuNext"
}
},
{
"context": "Editor && showing_signature_help && !showing_completions",
"bindings": {
"ctrl-p": "editor::SignatureHelpPrevious",
"ctrl-n": "editor::SignatureHelpNext"
}
},
{
"context": "Workspace",
"bindings": {

View File

@@ -477,13 +477,6 @@
"ctrl-n": "editor::ShowWordCompletions"
}
},
{
"context": "vim_mode == insert && showing_signature_help && !showing_completions",
"bindings": {
"ctrl-p": "editor::SignatureHelpPrevious",
"ctrl-n": "editor::SignatureHelpNext"
}
},
{
"context": "vim_mode == replace",
"bindings": {

View File

@@ -746,6 +746,8 @@
"default_width": 380
},
"agent": {
// Version of this setting.
"version": "2",
// Whether the agent is enabled.
"enabled": true,
// What completion mode to start new threads in, if available. Can be 'normal' or 'burn'.
@@ -1290,8 +1292,6 @@
// Whether or not selecting text in the terminal will automatically
// copy to the system clipboard.
"copy_on_select": false,
// Whether to keep the text selection after copying it to the clipboard
"keep_selection_on_copy": false,
// Whether to show the terminal button in the status bar
"button": true,
// Any key-value pairs added to this list will be added to the terminal's
@@ -1656,6 +1656,7 @@
// Different settings for specific language models.
"language_models": {
"anthropic": {
"version": "1",
"api_url": "https://api.anthropic.com"
},
"google": {
@@ -1665,6 +1666,7 @@
"api_url": "http://localhost:11434"
},
"openai": {
"version": "1",
"api_url": "https://api.openai.com/v1"
},
"open_router": {
@@ -1782,8 +1784,7 @@
// `socks5h`. `http` will be used when no scheme is specified.
//
// By default no proxy will be used, or Zed will try to get proxy settings from
// environment variables. If certain hosts should not be proxied,
// set the `no_proxy` environment variable and provide a comma-separated list.
// environment variables.
//
// Examples:
// - "proxy": "socks5h://localhost:10808"

View File

@@ -31,13 +31,7 @@ use workspace::{StatusItemView, Workspace, item::ItemHandle};
const GIT_OPERATION_DELAY: Duration = Duration::from_millis(0);
actions!(
activity_indicator,
[
/// Displays error messages from language servers in the status bar.
ShowErrorMessage
]
);
actions!(activity_indicator, [ShowErrorMessage]);
pub enum Event {
ShowStatus {

View File

@@ -1,7 +1,7 @@
use std::sync::Arc;
use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings};
use assistant_tool::{AnyTool, ToolSource, ToolWorkingSet, UniqueToolName};
use assistant_tool::{Tool, ToolSource, ToolWorkingSet};
use collections::IndexMap;
use convert_case::{Case, Casing};
use fs::Fs;
@@ -72,7 +72,7 @@ impl AgentProfile {
&self.id
}
pub fn enabled_tools(&self, cx: &App) -> Vec<(UniqueToolName, AnyTool)> {
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
return Vec::new();
};
@@ -81,7 +81,7 @@ impl AgentProfile {
.read(cx)
.tools(cx)
.into_iter()
.filter(|(_, tool)| Self::is_enabled(settings, tool.source(), tool.name()))
.filter(|tool| Self::is_enabled(settings, tool.source(), tool.name()))
.collect()
}
@@ -108,10 +108,10 @@ impl AgentProfile {
#[cfg(test)]
mod tests {
use agent_settings::ContextServerPreset;
use assistant_tool::{Tool, ToolRegistry};
use assistant_tool::ToolRegistry;
use collections::IndexMap;
use gpui::SharedString;
use gpui::{AppContext, TestAppContext};
use gpui::TestAppContext;
use http_client::FakeHttpClient;
use project::Project;
use settings::{Settings, SettingsStore};
@@ -137,7 +137,7 @@ mod tests {
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
.map(|(_, tool)| tool.name())
.map(|tool| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
@@ -174,7 +174,7 @@ mod tests {
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
.map(|(_, tool)| tool.name())
.map(|tool| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
@@ -207,7 +207,7 @@ mod tests {
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
.map(|(_, tool)| tool.name())
.map(|tool| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
@@ -267,16 +267,10 @@ mod tests {
}
fn default_tool_set(cx: &mut TestAppContext) -> Entity<ToolWorkingSet> {
cx.new(|cx| {
cx.new(|_| {
let mut tool_set = ToolWorkingSet::default();
tool_set.insert(
Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")).into(),
cx,
);
tool_set.insert(
Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")).into(),
cx,
);
tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")));
tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")));
tool_set
})
}
@@ -296,8 +290,6 @@ mod tests {
}
impl Tool for FakeTool {
type Input = ();
fn name(&self) -> String {
self.name.clone()
}
@@ -316,17 +308,17 @@ mod tests {
unimplemented!()
}
fn needs_confirmation(&self, _input: &Self::Input, _cx: &App) -> bool {
fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
unimplemented!()
}
fn ui_text(&self, _input: &Self::Input) -> String {
fn ui_text(&self, _input: &serde_json::Value) -> String {
unimplemented!()
}
fn run(
self: Arc<Self>,
_input: Self::Input,
_input: serde_json::Value,
_request: Arc<language_model::LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<assistant_tool::ActionLog>,

View File

@@ -29,8 +29,6 @@ impl ContextServerTool {
}
impl Tool for ContextServerTool {
type Input = serde_json::Value;
fn name(&self) -> String {
self.tool.name.clone()
}
@@ -49,7 +47,7 @@ impl Tool for ContextServerTool {
}
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
true
}
@@ -71,13 +69,13 @@ impl Tool for ContextServerTool {
})
}
fn ui_text(&self, _input: &Self::Input) -> String {
fn ui_text(&self, _input: &serde_json::Value) -> String {
format!("Run MCP tool `{}`", self.tool.name)
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,

View File

@@ -10,10 +10,10 @@ use crate::{
};
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, AnyTool, AnyToolCard, ToolWorkingSet};
use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet};
use chrono::{DateTime, Utc};
use client::{ModelRequestUsage, RequestUsage};
use collections::HashMap;
use collections::{HashMap, HashSet};
use feature_flags::{self, FeatureFlagAppExt};
use futures::{FutureExt, StreamExt as _, future::Shared};
use git::repository::DiffType;
@@ -960,14 +960,13 @@ impl Thread {
model: Arc<dyn LanguageModel>,
) -> Vec<LanguageModelRequestTool> {
if model.supports_tools() {
self.profile
.enabled_tools(cx)
resolve_tool_name_conflicts(self.profile.enabled_tools(cx).as_slice())
.into_iter()
.filter_map(|(name, tool)| {
// Skip tools that cannot be supported
let input_schema = tool.input_schema(model.tool_input_format()).ok()?;
Some(LanguageModelRequestTool {
name: name.into(),
name,
description: tool.description(),
input_schema,
})
@@ -2387,7 +2386,7 @@ impl Thread {
let tool_list = available_tools
.iter()
.map(|(name, tool)| format!("- {}: {}", name, tool.description()))
.map(|tool| format!("- {}: {}", tool.name(), tool.description()))
.collect::<Vec<_>>()
.join("\n");
@@ -2452,7 +2451,7 @@ impl Thread {
ui_text: impl Into<SharedString>,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
tool: AnyTool,
tool: Arc<dyn Tool>,
model: Arc<dyn LanguageModel>,
window: Option<AnyWindowHandle>,
cx: &mut Context<Thread>,
@@ -2468,7 +2467,7 @@ impl Thread {
tool_use_id: LanguageModelToolUseId,
request: Arc<LanguageModelRequest>,
input: serde_json::Value,
tool: AnyTool,
tool: Arc<dyn Tool>,
model: Arc<dyn LanguageModel>,
window: Option<AnyWindowHandle>,
cx: &mut Context<Thread>,
@@ -2607,7 +2606,7 @@ impl Thread {
.profile
.enabled_tools(cx)
.iter()
.map(|(name, _)| name.clone().into())
.map(|tool| tool.name())
.collect();
self.message_feedback.insert(message_id, feedback);
@@ -3145,6 +3144,85 @@ struct PendingCompletion {
_task: Task<()>,
}
/// Resolves tool name conflicts by ensuring all tool names are unique.
///
/// When multiple tools have the same name, this function applies the following rules:
/// 1. Native tools always keep their original name
/// 2. Context server tools get prefixed with their server ID and an underscore
/// 3. All tool names are truncated to MAX_TOOL_NAME_LENGTH (64 characters)
/// 4. If conflicts still exist after prefixing, the conflicting tools are filtered out
///
/// Note: This function assumes that built-in tools occur before MCP tools in the tools list.
fn resolve_tool_name_conflicts(tools: &[Arc<dyn Tool>]) -> Vec<(String, Arc<dyn Tool>)> {
fn resolve_tool_name(tool: &Arc<dyn Tool>) -> String {
let mut tool_name = tool.name();
tool_name.truncate(MAX_TOOL_NAME_LENGTH);
tool_name
}
const MAX_TOOL_NAME_LENGTH: usize = 64;
let mut duplicated_tool_names = HashSet::default();
let mut seen_tool_names = HashSet::default();
for tool in tools {
let tool_name = resolve_tool_name(tool);
if seen_tool_names.contains(&tool_name) {
debug_assert!(
tool.source() != assistant_tool::ToolSource::Native,
"There are two built-in tools with the same name: {}",
tool_name
);
duplicated_tool_names.insert(tool_name);
} else {
seen_tool_names.insert(tool_name);
}
}
if duplicated_tool_names.is_empty() {
return tools
.into_iter()
.map(|tool| (resolve_tool_name(tool), tool.clone()))
.collect();
}
tools
.into_iter()
.filter_map(|tool| {
let mut tool_name = resolve_tool_name(tool);
if !duplicated_tool_names.contains(&tool_name) {
return Some((tool_name, tool.clone()));
}
match tool.source() {
assistant_tool::ToolSource::Native => {
// Built-in tools always keep their original name
Some((tool_name, tool.clone()))
}
assistant_tool::ToolSource::ContextServer { id } => {
// Context server tools are prefixed with the context server ID, and truncated if necessary
tool_name.insert(0, '_');
if tool_name.len() + id.len() > MAX_TOOL_NAME_LENGTH {
let len = MAX_TOOL_NAME_LENGTH - tool_name.len();
let mut id = id.to_string();
id.truncate(len);
tool_name.insert_str(0, &id);
} else {
tool_name.insert_str(0, &id);
}
tool_name.truncate(MAX_TOOL_NAME_LENGTH);
if seen_tool_names.contains(&tool_name) {
log::error!("Cannot resolve tool name conflict for tool {}", tool.name());
None
} else {
Some((tool_name, tool.clone()))
}
}
}
})
.collect()
}
#[cfg(test)]
mod tests {
use super::*;
@@ -3160,6 +3238,7 @@ mod tests {
use futures::future::BoxFuture;
use futures::stream::BoxStream;
use gpui::TestAppContext;
use icons::IconName;
use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider};
use language_model::{
LanguageModelCompletionError, LanguageModelName, LanguageModelProviderId,
@@ -3804,6 +3883,148 @@ fn main() {{
});
}
#[gpui::test]
fn test_resolve_tool_name_conflicts() {
use assistant_tool::{Tool, ToolSource};
assert_resolve_tool_name_conflicts(
vec![
TestTool::new("tool1", ToolSource::Native),
TestTool::new("tool2", ToolSource::Native),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
],
vec!["tool1", "tool2", "tool3"],
);
assert_resolve_tool_name_conflicts(
vec![
TestTool::new("tool1", ToolSource::Native),
TestTool::new("tool2", ToolSource::Native),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
],
vec!["tool1", "tool2", "mcp-1_tool3", "mcp-2_tool3"],
);
assert_resolve_tool_name_conflicts(
vec![
TestTool::new("tool1", ToolSource::Native),
TestTool::new("tool2", ToolSource::Native),
TestTool::new("tool3", ToolSource::Native),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
],
vec!["tool1", "tool2", "tool3", "mcp-1_tool3", "mcp-2_tool3"],
);
// Test that a tool with a very long name is always truncated
assert_resolve_tool_name_conflicts(
vec![TestTool::new(
"tool-with-more-then-64-characters-blah-blah-blah-blah-blah-blah-blah-blah",
ToolSource::Native,
)],
vec!["tool-with-more-then-64-characters-blah-blah-blah-blah-blah-blah-"],
);
// Test deduplication of tools with very long names; in this case the MCP server name should be truncated
assert_resolve_tool_name_conflicts(
vec![
TestTool::new("tool-with-very-very-very-long-name", ToolSource::Native),
TestTool::new(
"tool-with-very-very-very-long-name",
ToolSource::ContextServer {
id: "mcp-with-very-very-very-long-name".into(),
},
),
],
vec![
"tool-with-very-very-very-long-name",
"mcp-with-very-very-very-long-_tool-with-very-very-very-long-name",
],
);
fn assert_resolve_tool_name_conflicts(
tools: Vec<TestTool>,
expected: Vec<impl Into<String>>,
) {
let tools: Vec<Arc<dyn Tool>> = tools
.into_iter()
.map(|t| Arc::new(t) as Arc<dyn Tool>)
.collect();
let tools = resolve_tool_name_conflicts(&tools);
assert_eq!(tools.len(), expected.len());
for (i, expected_name) in expected.into_iter().enumerate() {
let expected_name = expected_name.into();
let actual_name = &tools[i].0;
assert_eq!(
actual_name, &expected_name,
"Expected '{}' got '{}' at index {}",
expected_name, actual_name, i
);
}
}
struct TestTool {
name: String,
source: ToolSource,
}
impl TestTool {
fn new(name: impl Into<String>, source: ToolSource) -> Self {
Self {
name: name.into(),
source,
}
}
}
impl Tool for TestTool {
fn name(&self) -> String {
self.name.clone()
}
fn icon(&self) -> IconName {
IconName::Ai
}
fn may_perform_edits(&self) -> bool {
false
}
fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
true
}
fn source(&self) -> ToolSource {
self.source.clone()
}
fn description(&self) -> String {
"Test tool".to_string()
}
fn ui_text(&self, _input: &serde_json::Value) -> String {
"Test tool".to_string()
}
fn run(
self: Arc<Self>,
_input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
_model: Arc<dyn LanguageModel>,
_window: Option<AnyWindowHandle>,
_cx: &mut App,
) -> assistant_tool::ToolResult {
assistant_tool::ToolResult {
output: Task::ready(Err(anyhow::anyhow!("No content"))),
card: None,
}
}
}
}
// Helper to create a model that returns errors
enum TestError {
Overloaded,

View File

@@ -537,8 +537,8 @@ impl ThreadStore {
}
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
tool_working_set.update(cx, |tool_working_set, cx| {
tool_working_set.remove(&tool_ids, cx);
tool_working_set.update(cx, |tool_working_set, _| {
tool_working_set.remove(&tool_ids);
});
}
}
@@ -569,18 +569,19 @@ impl ThreadStore {
.log_err()
{
let tool_ids = tool_working_set
.update(cx, |tool_working_set, cx| {
tool_working_set.extend(
response.tools.into_iter().map(|tool| {
Arc::new(ContextServerTool::new(
.update(cx, |tool_working_set, _| {
response
.tools
.into_iter()
.map(|tool| {
log::info!("registering context server tool: {:?}", tool.name);
tool_working_set.insert(Arc::new(ContextServerTool::new(
context_server_store.clone(),
server.id(),
tool,
))
.into()
}),
cx,
)
)))
})
.collect::<Vec<_>>()
})
.log_err();

View File

@@ -4,7 +4,7 @@ use crate::{
};
use anyhow::Result;
use assistant_tool::{
AnyTool, AnyToolCard, ToolResultContent, ToolResultOutput, ToolUseStatus, ToolWorkingSet,
AnyToolCard, Tool, ToolResultContent, ToolResultOutput, ToolUseStatus, ToolWorkingSet,
};
use collections::HashMap;
use futures::{FutureExt as _, future::Shared};
@@ -378,7 +378,7 @@ impl ToolUseState {
ui_text: impl Into<Arc<str>>,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
tool: AnyTool,
tool: Arc<dyn Tool>,
) {
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
let ui_text = ui_text.into();
@@ -533,7 +533,7 @@ pub struct Confirmation {
pub input: serde_json::Value,
pub ui_text: Arc<str>,
pub request: Arc<LanguageModelRequest>,
pub tool: AnyTool,
pub tool: Arc<dyn Tool>,
}
#[derive(Debug, Clone)]

49
crates/agent2/Cargo.toml Normal file
View File

@@ -0,0 +1,49 @@
[package]
name = "agent2"
version = "0.1.0"
edition = "2021"
license = "GPL-3.0-or-later"
publish = false
[lib]
path = "src/agent2.rs"
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
chrono.workspace = true
collections.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
handlebars = { workspace = true, features = ["rust-embed"] }
language_model.workspace = true
language_models.workspace = true
parking_lot.workspace = true
project.workspace = true
rust-embed.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
thiserror.workspace = true
util.workspace = true
worktree.workspace = true
[dev-dependencies]
ctor.workspace = true
client = { workspace = true, "features" = ["test-support"] }
env_logger.workspace = true
fs = { workspace = true, "features" = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] }
gpui_tokio.workspace = true
language_model = { workspace = true, "features" = ["test-support"] }
project = { workspace = true, "features" = ["test-support"] }
reqwest_client.workspace = true
settings = { workspace = true, "features" = ["test-support"] }
worktree = { workspace = true, "features" = ["test-support"] }

1
crates/agent2/LICENSE-GPL Symbolic link
View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -0,0 +1,6 @@
mod prompts;
mod templates;
mod thread;
mod tools;
pub use thread::*;

View File

@@ -0,0 +1,29 @@
use crate::{
templates::{BaseTemplate, Template, Templates, WorktreeData},
thread::Prompt,
};
use anyhow::Result;
use gpui::{App, Entity};
use project::Project;
struct BasePrompt {
project: Entity<Project>,
}
impl Prompt for BasePrompt {
fn render(&self, templates: &Templates, cx: &App) -> Result<String> {
BaseTemplate {
os: std::env::consts::OS.to_string(),
shell: util::get_system_shell(),
worktrees: self
.project
.read(cx)
.worktrees(cx)
.map(|worktree| WorktreeData {
root_name: worktree.read(cx).root_name().to_string(),
})
.collect(),
}
.render(templates)
}
}

View File

@@ -0,0 +1,57 @@
use std::sync::Arc;
use anyhow::Result;
use handlebars::Handlebars;
use rust_embed::RustEmbed;
use serde::Serialize;
#[derive(RustEmbed)]
#[folder = "src/templates"]
#[include = "*.hbs"]
struct Assets;
pub struct Templates(Handlebars<'static>);
impl Templates {
pub fn new() -> Arc<Self> {
let mut handlebars = Handlebars::new();
handlebars.register_embed_templates::<Assets>().unwrap();
Arc::new(Self(handlebars))
}
}
pub trait Template: Sized {
const TEMPLATE_NAME: &'static str;
fn render(&self, templates: &Templates) -> Result<String>
where
Self: Serialize + Sized,
{
Ok(templates.0.render(Self::TEMPLATE_NAME, self)?)
}
}
#[derive(Serialize)]
pub struct BaseTemplate {
pub os: String,
pub shell: String,
pub worktrees: Vec<WorktreeData>,
}
impl Template for BaseTemplate {
const TEMPLATE_NAME: &'static str = "base.hbs";
}
#[derive(Serialize)]
pub struct WorktreeData {
pub root_name: String,
}
#[derive(Serialize)]
pub struct GlobTemplate {
pub project_roots: String,
}
impl Template for GlobTemplate {
const TEMPLATE_NAME: &'static str = "glob.hbs";
}

View File

@@ -0,0 +1,56 @@
You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.
## Communication
1. Be conversational but professional.
2. Refer to the USER in the second person and yourself in the first person.
3. Format your responses in markdown. Use backticks to format file, directory, function, and class names.
4. NEVER lie or make things up.
5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing.
## Tool Use
1. Make sure to adhere to the tools schema.
2. Provide every required argument.
3. DO NOT use tools to access items that are already available in the context section.
4. Use only the tools that are currently available.
5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
## Searching and Reading
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
If appropriate, use tool calls to explore the current project, which contains the following root directories:
{{#each worktrees}}
- `{{root_name}}`
{{/each}}
- When providing paths to tools, the path should always start with one of the project root directories listed above.
- When looking for symbols in the project, prefer the `grep` tool.
- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
- Bias towards not asking the user for help if you can find the answer yourself.
## Fixing Diagnostics
1. Make 1-2 attempts at fixing diagnostics, then defer to the user.
2. Never simplify code you've written just to solve diagnostics. Complete, mostly correct code is more valuable than perfect code that doesn't solve the problem.
## Debugging
When debugging, only make code changes if you are certain that you can solve the problem.
Otherwise, follow debugging best practices:
1. Address the root cause instead of the symptoms.
2. Add descriptive logging statements and error messages to track variable and code state.
3. Add test functions and statements to isolate the problem.
## Calling External APIs
1. Unless explicitly requested by the user, use the best suited external APIs and packages to solve the task. There is no need to ask the user for permission.
2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file. If no such file exists or if the package is not present, use the latest version that is in your training data.
3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed)
## System Information
Operating System: {{os}}
Default Shell: {{shell}}
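For reference, this template is rendered through the `Templates`/`Template` machinery shown earlier in this diff; a minimal sketch, with a placeholder worktree name:

```rust
// Sketch: render the base system prompt. The real caller (BasePrompt in
// prompts.rs) pulls these values from the project and environment.
let templates = Templates::new();
let prompt = BaseTemplate {
    os: std::env::consts::OS.to_string(),
    shell: util::get_system_shell(),
    worktrees: vec![WorktreeData {
        root_name: "zed".to_string(), // placeholder root name
    }],
}
.render(&templates)?;
```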

View File

@@ -0,0 +1,8 @@
Find paths on disk with glob patterns.
Assume that all glob patterns are matched in a project directory with the following entries.
{{project_roots}}
When searching with patterns that begin with literal path components, e.g. `foo/bar/**/*.rs`, be
sure to anchor them with one of the directories listed above.

420
crates/agent2/src/thread.rs Normal file
View File

@@ -0,0 +1,420 @@
use crate::templates::Templates;
use anyhow::{anyhow, Result};
use futures::{channel::mpsc, future};
use gpui::{App, Context, SharedString, Task};
use language_model::{
CompletionIntent, CompletionMode, LanguageModel, LanguageModelCompletionError,
LanguageModelCompletionEvent, LanguageModelRequest, LanguageModelRequestMessage,
LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolResultContent,
LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, Role, StopReason,
};
use schemars::{JsonSchema, Schema};
use serde::Deserialize;
use smol::stream::StreamExt;
use std::{collections::BTreeMap, sync::Arc};
use util::ResultExt;
#[derive(Debug)]
pub struct AgentMessage {
pub role: Role,
pub content: Vec<MessageContent>,
}
pub type AgentResponseEvent = LanguageModelCompletionEvent;
pub trait Prompt {
fn render(&self, prompts: &Templates, cx: &App) -> Result<String>;
}
pub struct Thread {
messages: Vec<AgentMessage>,
completion_mode: CompletionMode,
/// Holds the task that handles agent interaction until the end of the turn.
/// Survives across multiple requests as the model performs tool calls and
/// we run tools and report their results.
running_turn: Option<Task<()>>,
system_prompts: Vec<Arc<dyn Prompt>>,
tools: BTreeMap<SharedString, Arc<dyn AgentToolErased>>,
templates: Arc<Templates>,
// project: Entity<Project>,
// action_log: Entity<ActionLog>,
}
impl Thread {
pub fn new(templates: Arc<Templates>) -> Self {
Self {
messages: Vec::new(),
completion_mode: CompletionMode::Normal,
system_prompts: Vec::new(),
running_turn: None,
tools: BTreeMap::default(),
templates,
}
}
pub fn set_mode(&mut self, mode: CompletionMode) {
self.completion_mode = mode;
}
pub fn messages(&self) -> &[AgentMessage] {
&self.messages
}
pub fn add_tool(&mut self, tool: impl AgentTool) {
self.tools.insert(tool.name(), tool.erase());
}
pub fn remove_tool(&mut self, name: &str) -> bool {
self.tools.remove(name).is_some()
}
/// Sending a message results in the model streaming a response, which could include tool calls.
/// After calling tools, the model stops and waits for any outstanding tool calls to be completed and their results to be sent.
/// The returned channel will report all the occurrences in which the model stops before erroring or ending its turn.
pub fn send(
&mut self,
model: Arc<dyn LanguageModel>,
content: impl Into<MessageContent>,
cx: &mut Context<Self>,
) -> mpsc::UnboundedReceiver<Result<AgentResponseEvent, LanguageModelCompletionError>> {
cx.notify();
let (events_tx, events_rx) =
mpsc::unbounded::<Result<AgentResponseEvent, LanguageModelCompletionError>>();
let system_message = self.build_system_message(cx);
self.messages.extend(system_message);
self.messages.push(AgentMessage {
role: Role::User,
content: vec![content.into()],
});
self.running_turn = Some(cx.spawn(async move |thread, cx| {
let turn_result = async {
// Perform one request, then keep looping if the model makes tool calls.
let mut completion_intent = CompletionIntent::UserPrompt;
loop {
let request = thread.update(cx, |thread, cx| {
thread.build_completion_request(completion_intent, cx)
})?;
// println!(
// "request: {}",
// serde_json::to_string_pretty(&request).unwrap()
// );
// Stream events, appending to messages and collecting up tool uses.
let mut events = model.stream_completion(request, cx).await?;
let mut tool_uses = Vec::new();
while let Some(event) = events.next().await {
match event {
Ok(event) => {
thread
.update(cx, |thread, cx| {
tool_uses.extend(thread.handle_streamed_completion_event(
event,
events_tx.clone(),
cx,
));
})
.ok();
}
Err(error) => {
events_tx.unbounded_send(Err(error)).ok();
break;
}
}
}
// If there are no tool uses, the turn is done.
if tool_uses.is_empty() {
break;
}
// If there are tool uses, wait for their results to be
// computed, then send them together in a single message on
// the next loop iteration.
let tool_results = future::join_all(tool_uses).await;
thread
.update(cx, |thread, _cx| {
thread.messages.push(AgentMessage {
role: Role::User,
content: tool_results.into_iter().map(Into::into).collect(),
});
})
.ok();
completion_intent = CompletionIntent::ToolResults;
}
Ok(())
}
.await;
if let Err(error) = turn_result {
events_tx.unbounded_send(Err(error)).ok();
}
}));
events_rx
}
pub fn build_system_message(&mut self, cx: &App) -> Option<AgentMessage> {
let mut system_message = AgentMessage {
role: Role::System,
content: Vec::new(),
};
for prompt in &self.system_prompts {
if let Some(rendered_prompt) = prompt.render(&self.templates, cx).log_err() {
system_message
.content
.push(MessageContent::Text(rendered_prompt));
}
}
(!system_message.content.is_empty()).then_some(system_message)
}
/// A helper method that's called on every streamed completion event.
/// Returns an optional tool result task, which the main agentic loop in
/// send will send back to the model when it resolves.
fn handle_streamed_completion_event(
&mut self,
event: LanguageModelCompletionEvent,
events_tx: mpsc::UnboundedSender<Result<AgentResponseEvent, LanguageModelCompletionError>>,
cx: &mut Context<Self>,
) -> Option<Task<LanguageModelToolResult>> {
use LanguageModelCompletionEvent::*;
events_tx.unbounded_send(Ok(event.clone())).ok();
match event {
Text(new_text) => self.handle_text_event(new_text, cx),
Thinking { text, signature } => {
todo!()
}
ToolUse(tool_use) => {
return self.handle_tool_use_event(tool_use, cx);
}
StartMessage { role, .. } => {
self.messages.push(AgentMessage {
role,
content: Vec::new(),
});
}
UsageUpdate(_) => {}
Stop(stop_reason) => self.handle_stop_event(stop_reason),
StatusUpdate(_completion_request_status) => {}
RedactedThinking { data } => todo!(),
ToolUseJsonParseError {
id,
tool_name,
raw_input,
json_parse_error,
} => todo!(),
}
None
}
fn handle_stop_event(&mut self, stop_reason: StopReason) {
match stop_reason {
StopReason::EndTurn | StopReason::ToolUse => {}
StopReason::MaxTokens => todo!(),
StopReason::Refusal => todo!(),
}
}
fn handle_text_event(&mut self, new_text: String, cx: &mut Context<Self>) {
let last_message = self.last_assistant_message();
if let Some(MessageContent::Text(text)) = last_message.content.last_mut() {
text.push_str(&new_text);
} else {
last_message.content.push(MessageContent::Text(new_text));
}
cx.notify();
}
fn handle_tool_use_event(
&mut self,
tool_use: LanguageModelToolUse,
cx: &mut Context<Self>,
) -> Option<Task<LanguageModelToolResult>> {
cx.notify();
let last_message = self.last_assistant_message();
// Ensure the last message ends in the current tool use
let push_new_tool_use = last_message.content.last_mut().map_or(true, |content| {
if let MessageContent::ToolUse(last_tool_use) = content {
if last_tool_use.id == tool_use.id {
*last_tool_use = tool_use.clone();
false
} else {
true
}
} else {
true
}
});
if push_new_tool_use {
last_message.content.push(tool_use.clone().into());
}
if !tool_use.is_input_complete {
return None;
}
if let Some(tool) = self.tools.get(tool_use.name.as_ref()) {
let pending_tool_result = tool.clone().run(tool_use.input, cx);
Some(cx.foreground_executor().spawn(async move {
match pending_tool_result.await {
Ok(tool_output) => LanguageModelToolResult {
tool_use_id: tool_use.id,
tool_name: tool_use.name,
is_error: false,
content: LanguageModelToolResultContent::Text(Arc::from(tool_output)),
output: None,
},
Err(error) => LanguageModelToolResult {
tool_use_id: tool_use.id,
tool_name: tool_use.name,
is_error: true,
content: LanguageModelToolResultContent::Text(Arc::from(error.to_string())),
output: None,
},
}
}))
} else {
Some(Task::ready(LanguageModelToolResult {
content: LanguageModelToolResultContent::Text(Arc::from(format!(
"No tool named {} exists",
tool_use.name
))),
tool_use_id: tool_use.id,
tool_name: tool_use.name,
is_error: true,
output: None,
}))
}
}
/// Guarantees the last message is from the assistant and returns a mutable reference.
fn last_assistant_message(&mut self) -> &mut AgentMessage {
if self
.messages
.last()
.map_or(true, |m| m.role != Role::Assistant)
{
self.messages.push(AgentMessage {
role: Role::Assistant,
content: Vec::new(),
});
}
self.messages.last_mut().unwrap()
}
fn build_completion_request(
&self,
completion_intent: CompletionIntent,
cx: &mut App,
) -> LanguageModelRequest {
LanguageModelRequest {
thread_id: None,
prompt_id: None,
intent: Some(completion_intent),
mode: Some(self.completion_mode),
messages: self.build_request_messages(),
tools: self
.tools
.values()
.filter_map(|tool| {
Some(LanguageModelRequestTool {
name: tool.name().to_string(),
description: tool.description(cx).to_string(),
input_schema: tool
.input_schema(LanguageModelToolSchemaFormat::JsonSchema)
.log_err()?,
})
})
.collect(),
tool_choice: None,
stop: Vec::new(),
temperature: None,
}
}
fn build_request_messages(&self) -> Vec<LanguageModelRequestMessage> {
self.messages
.iter()
.map(|message| LanguageModelRequestMessage {
role: message.role,
content: message.content.clone(),
cache: false,
})
.collect()
}
}
pub trait AgentTool
where
Self: 'static + Sized,
{
type Input: for<'de> Deserialize<'de> + JsonSchema;
fn name(&self) -> SharedString;
fn description(&self, _cx: &mut App) -> SharedString {
let schema = schemars::schema_for!(Self::Input);
SharedString::new(
schema
.get("description")
.and_then(|description| description.as_str())
.unwrap_or_default(),
)
}
/// Returns the JSON schema that describes the tool's input.
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Schema {
assistant_tools::root_schema_for::<Self::Input>(format)
}
/// Runs the tool with the provided input.
fn run(self: Arc<Self>, input: Self::Input, cx: &mut App) -> Task<Result<String>>;
fn erase(self) -> Arc<dyn AgentToolErased> {
Arc::new(Erased(Arc::new(self)))
}
}
pub struct Erased<T>(T);
pub trait AgentToolErased {
fn name(&self) -> SharedString;
fn description(&self, cx: &mut App) -> SharedString;
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value>;
fn run(self: Arc<Self>, input: serde_json::Value, cx: &mut App) -> Task<Result<String>>;
}
impl<T> AgentToolErased for Erased<Arc<T>>
where
T: AgentTool,
{
fn name(&self) -> SharedString {
self.0.name()
}
fn description(&self, cx: &mut App) -> SharedString {
self.0.description(cx)
}
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
Ok(serde_json::to_value(self.0.input_schema(format))?)
}
fn run(self: Arc<Self>, input: serde_json::Value, cx: &mut App) -> Task<Result<String>> {
let parsed_input: Result<T::Input> = serde_json::from_value(input).map_err(Into::into);
match parsed_input {
Ok(input) => self.0.clone().run(input, cx),
Err(error) => Task::ready(Err(anyhow!(error))),
}
}
}

View File

@@ -0,0 +1,254 @@
use super::*;
use client::{proto::language_server_prompt_request, Client, UserStore};
use fs::FakeFs;
use gpui::{AppContext, Entity, TestAppContext};
use language_model::{
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelRegistry, MessageContent, StopReason,
};
use reqwest_client::ReqwestClient;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use smol::stream::StreamExt;
use std::{sync::Arc, time::Duration};
mod test_tools;
use test_tools::*;
#[gpui::test]
async fn test_echo(cx: &mut TestAppContext) {
let AgentTest { model, agent, .. } = setup(cx).await;
let events = agent
.update(cx, |agent, cx| {
agent.send(model.clone(), "Testing: Reply with 'Hello'", cx)
})
.collect()
.await;
agent.update(cx, |agent, _cx| {
assert_eq!(
agent.messages.last().unwrap().content,
vec![MessageContent::Text("Hello".to_string())]
);
});
assert_eq!(stop_events(events), vec![StopReason::EndTurn]);
}
#[gpui::test]
async fn test_basic_tool_calls(cx: &mut TestAppContext) {
let AgentTest { model, agent, .. } = setup(cx).await;
// Test a tool call that's likely to complete *before* streaming stops.
let events = agent
.update(cx, |agent, cx| {
agent.add_tool(EchoTool);
agent.send(
model.clone(),
"Now test the echo tool with 'Hello'. Does it work? Say 'Yes' or 'No'.",
cx,
)
})
.collect()
.await;
assert_eq!(
stop_events(events),
vec![StopReason::ToolUse, StopReason::EndTurn]
);
// Test a tool call that's likely to complete *after* streaming stops.
let events = agent
.update(cx, |agent, cx| {
agent.remove_tool(&AgentTool::name(&EchoTool));
agent.add_tool(DelayTool);
agent.send(
model.clone(),
"Now call the delay tool with 200ms. When the timer goes off, then you echo the output of the tool.",
cx,
)
})
.collect()
.await;
assert_eq!(
stop_events(events),
vec![StopReason::ToolUse, StopReason::EndTurn]
);
agent.update(cx, |agent, _cx| {
assert!(agent
.messages
.last()
.unwrap()
.content
.iter()
.any(|content| {
if let MessageContent::Text(text) = content {
text.contains("Ding")
} else {
false
}
}));
});
}
#[gpui::test]
async fn test_streaming_tool_calls(cx: &mut TestAppContext) {
let AgentTest { model, agent, .. } = setup(cx).await;
// Send a prompt that triggers a tool call whose input streams in over multiple events.
let mut events = agent.update(cx, |agent, cx| {
agent.add_tool(WordListTool);
agent.send(model.clone(), "Test the word_list tool.", cx)
});
let mut saw_partial_tool_use = false;
while let Some(event) = events.next().await {
if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use_event)) = event {
agent.update(cx, |agent, _cx| {
// Look for a tool use in the agent's last message
let last_content = agent.messages().last().unwrap().content.last().unwrap();
if let MessageContent::ToolUse(last_tool_use) = last_content {
assert_eq!(last_tool_use.name.as_ref(), "word_list");
if tool_use_event.is_input_complete {
last_tool_use
.input
.get("a")
.expect("'a' has streamed because input is now complete");
last_tool_use
.input
.get("g")
.expect("'g' has streamed because input is now complete");
} else {
if !last_tool_use.is_input_complete
&& last_tool_use.input.get("g").is_none()
{
saw_partial_tool_use = true;
}
}
} else {
panic!("last content should be a tool use");
}
});
}
}
assert!(
saw_partial_tool_use,
"should see at least one partially streamed tool use in the history"
);
}
#[gpui::test]
async fn test_concurrent_tool_calls(cx: &mut TestAppContext) {
let AgentTest { model, agent, .. } = setup(cx).await;
// Test concurrent tool calls with different delay times
let events = agent
.update(cx, |agent, cx| {
agent.add_tool(DelayTool);
agent.send(
model.clone(),
"Call the delay tool twice in the same message. Once with 100ms. Once with 300ms. When both timers are complete, describe the outputs.",
cx,
)
})
.collect()
.await;
let stop_reasons = stop_events(events);
if stop_reasons.len() == 2 {
assert_eq!(stop_reasons, vec![StopReason::ToolUse, StopReason::EndTurn]);
} else if stop_reasons.len() == 3 {
assert_eq!(
stop_reasons,
vec![
StopReason::ToolUse,
StopReason::ToolUse,
StopReason::EndTurn
]
);
} else {
panic!("Expected either 1 or 2 tool uses followed by end turn");
}
agent.update(cx, |agent, _cx| {
let last_message = agent.messages.last().unwrap();
let text = last_message
.content
.iter()
.filter_map(|content| {
if let MessageContent::Text(text) = content {
Some(text.as_str())
} else {
None
}
})
.collect::<String>();
assert!(text.contains("Ding"));
});
}
/// Filters the events down to just the stop reasons, for asserting against in tests
fn stop_events(
result_events: Vec<Result<AgentResponseEvent, LanguageModelCompletionError>>,
) -> Vec<StopReason> {
result_events
.into_iter()
.filter_map(|event| match event.unwrap() {
LanguageModelCompletionEvent::Stop(stop_reason) => Some(stop_reason),
_ => None,
})
.collect()
}
struct AgentTest {
model: Arc<dyn LanguageModel>,
agent: Entity<Thread>,
}
async fn setup(cx: &mut TestAppContext) -> AgentTest {
cx.executor().allow_parking();
cx.update(settings::init);
let fs = FakeFs::new(cx.executor().clone());
// let project = Project::test(fs.clone(), [], cx).await;
// let action_log = cx.new(|_| ActionLog::new(project.clone()));
let templates = Templates::new();
let agent = cx.new(|_| Thread::new(templates));
let model = cx
.update(|cx| {
gpui_tokio::init(cx);
let http_client = ReqwestClient::user_agent("agent tests").unwrap();
cx.set_http_client(Arc::new(http_client));
client::init_settings(cx);
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
language_model::init(client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
let models = LanguageModelRegistry::read_global(cx);
let model = models
.available_models(cx)
.find(|model| model.id().0 == "claude-3-7-sonnet-latest")
.unwrap();
let provider = models.provider(&model.provider_id()).unwrap();
let authenticated = provider.authenticate(cx);
cx.spawn(async move |cx| {
authenticated.await.unwrap();
model
})
})
.await;
AgentTest { model, agent }
}
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
if std::env::var("RUST_LOG").is_ok() {
env_logger::init();
}
}

View File

@@ -0,0 +1,83 @@
use super::*;
/// A tool that echoes its input
#[derive(JsonSchema, Serialize, Deserialize)]
pub struct EchoToolInput {
/// The text to echo.
text: String,
}
pub struct EchoTool;
impl AgentTool for EchoTool {
type Input = EchoToolInput;
fn name(&self) -> SharedString {
"echo".into()
}
fn run(self: Arc<Self>, input: Self::Input, _cx: &mut App) -> Task<Result<String>> {
Task::ready(Ok(input.text))
}
}
/// A tool that waits for a specified delay
#[derive(JsonSchema, Serialize, Deserialize)]
pub struct DelayToolInput {
/// The delay in milliseconds.
ms: u64,
}
pub struct DelayTool;
impl AgentTool for DelayTool {
type Input = DelayToolInput;
fn name(&self) -> SharedString {
"delay".into()
}
fn run(self: Arc<Self>, input: Self::Input, cx: &mut App) -> Task<Result<String>>
where
Self: Sized,
{
cx.foreground_executor().spawn(async move {
smol::Timer::after(Duration::from_millis(input.ms)).await;
Ok("Ding".to_string())
})
}
}
/// A tool that takes an object with a map from letters to random words starting with that letter.
/// All fields are required! Pass a word for every letter!
#[derive(JsonSchema, Serialize, Deserialize)]
pub struct WordListInput {
/// Provide a random word that starts with A.
a: Option<String>,
/// Provide a random word that starts with B.
b: Option<String>,
/// Provide a random word that starts with C.
c: Option<String>,
/// Provide a random word that starts with D.
d: Option<String>,
/// Provide a random word that starts with E.
e: Option<String>,
/// Provide a random word that starts with F.
f: Option<String>,
/// Provide a random word that starts with G.
g: Option<String>,
}
pub struct WordListTool;
impl AgentTool for WordListTool {
type Input = WordListInput;
fn name(&self) -> SharedString {
"word_list".into()
}
fn run(self: Arc<Self>, _input: Self::Input, _cx: &mut App) -> Task<Result<String>> {
Task::ready(Ok("ok".to_string()))
}
}

View File

@@ -0,0 +1 @@
mod glob;

View File

@@ -0,0 +1,76 @@
use anyhow::{anyhow, Result};
use gpui::{App, AppContext, Entity, SharedString, Task};
use project::Project;
use schemars::JsonSchema;
use serde::Deserialize;
use std::{path::PathBuf, sync::Arc};
use util::paths::PathMatcher;
use worktree::Snapshot as WorktreeSnapshot;
use crate::{
templates::{GlobTemplate, Template, Templates},
thread::AgentTool,
};
// Description is dynamic, see `fn description` below
#[derive(Deserialize, JsonSchema)]
struct GlobInput {
/// A POSIX glob pattern
glob: SharedString,
}
struct GlobTool {
project: Entity<Project>,
templates: Arc<Templates>,
}
impl AgentTool for GlobTool {
type Input = GlobInput;
fn name(&self) -> SharedString {
"glob".into()
}
fn description(&self, cx: &mut App) -> SharedString {
let project_roots = self
.project
.read(cx)
.worktrees(cx)
.map(|worktree| worktree.read(cx).root_name().into())
.collect::<Vec<String>>()
.join("\n");
GlobTemplate { project_roots }
.render(&self.templates)
.expect("template failed to render")
.into()
}
fn run(self: Arc<Self>, input: Self::Input, cx: &mut App) -> Task<Result<String>> {
let path_matcher = match PathMatcher::new([&input.glob]) {
Ok(matcher) => matcher,
Err(error) => return Task::ready(Err(anyhow!(error))),
};
let snapshots: Vec<WorktreeSnapshot> = self
.project
.read(cx)
.worktrees(cx)
.map(|worktree| worktree.read(cx).snapshot())
.collect();
cx.background_spawn(async move {
let paths = snapshots.iter().flat_map(|snapshot| {
let root_name = PathBuf::from(snapshot.root_name());
snapshot
.entries(false, 0)
.map(move |entry| root_name.join(&entry.path))
.filter(|path| path_matcher.is_match(&path))
});
let output = paths
.map(|path| format!("{}\n", path.display()))
.collect::<String>();
Ok(output)
})
}
}
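// A hypothetical illustration of the matching above, assuming `PathMatcher`
// accepts string globs the way `run` uses it and `std::path::Path` is in
// scope:
//
// let matcher = PathMatcher::new(["zed/crates/**/*.rs"]).unwrap();
// assert!(matcher.is_match(Path::new("zed/crates/editor/src/editor.rs")));
// assert!(!matcher.is_match(Path::new("zed/README.md")));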

View File

@@ -1,7 +1,9 @@
use crate::context_picker::{ContextPicker, MentionLink};
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
use crate::message_editor::{extract_message_creases, insert_message_creases};
use crate::ui::{AddedContext, AgentNotification, AgentNotificationEvent, ContextPill};
use crate::ui::{
AddedContext, AgentNotification, AgentNotificationEvent, AnimatedLabel, ContextPill,
};
use crate::{AgentPanel, ModelUsageContext};
use agent::{
ContextStore, LastRestoreCheckpoint, MessageCrease, MessageId, MessageSegment, TextThreadStore,
@@ -1024,7 +1026,6 @@ impl ActiveThread {
}
}
ThreadEvent::MessageAdded(message_id) => {
self.clear_last_error();
if let Some(rendered_message) = self.thread.update(cx, |thread, cx| {
thread.message(*message_id).map(|message| {
RenderedMessage::from_segments(
@@ -1041,7 +1042,6 @@ impl ActiveThread {
cx.notify();
}
ThreadEvent::MessageEdited(message_id) => {
self.clear_last_error();
if let Some(index) = self.messages.iter().position(|id| id == message_id) {
if let Some(rendered_message) = self.thread.update(cx, |thread, cx| {
thread.message(*message_id).map(|message| {
@@ -1818,7 +1818,7 @@ impl ActiveThread {
.my_3()
.mx_5()
.when(is_generating_stale || message.is_hidden, |this| {
this.child(LoadingLabel::new("").size(LabelSize::Small))
this.child(AnimatedLabel::new("").size(LabelSize::Small))
})
});
@@ -2584,7 +2584,7 @@ impl ActiveThread {
.size(IconSize::XSmall)
.color(Color::Muted),
)
.child(LoadingLabel::new("Thinking").size(LabelSize::Small)),
.child(AnimatedLabel::new("Thinking").size(LabelSize::Small)),
)
.child(
h_flex()
@@ -3153,7 +3153,7 @@ impl ActiveThread {
.border_color(self.tool_card_border_color(cx))
.rounded_b_lg()
.child(
LoadingLabel::new("Waiting for Confirmation").size(LabelSize::Small)
AnimatedLabel::new("Waiting for Confirmation").size(LabelSize::Small)
)
.child(
h_flex()

View File

@@ -26,8 +26,8 @@ use project::{
};
use settings::{Settings, update_settings_file};
use ui::{
ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*,
ContextMenu, Disclosure, ElevationIndex, Indicator, PopoverMenu, Scrollbar, ScrollbarState,
Switch, SwitchColor, Tooltip, prelude::*,
};
use util::ResultExt as _;
use workspace::Workspace;
@@ -172,29 +172,19 @@ impl AgentConfiguration {
.unwrap_or(false);
v_flex()
.when(is_expanded, |this| this.mb_2())
.child(
div()
.opacity(0.6)
.px_2()
.child(Divider::horizontal().color(DividerColor::Border)),
)
.py_2()
.gap_1p5()
.border_t_1()
.border_color(cx.theme().colors().border.opacity(0.6))
.child(
h_flex()
.map(|this| {
if is_expanded {
this.mt_2().mb_1()
} else {
this.my_2()
}
})
.w_full()
.gap_1()
.justify_between()
.child(
h_flex()
.id(provider_id_string.clone())
.cursor_pointer()
.px_2()
.py_0p5()
.w_full()
.justify_between()
@@ -257,16 +247,12 @@ impl AgentConfiguration {
)
}),
)
.child(
div()
.px_2()
.when(is_expanded, |parent| match configuration_view {
Some(configuration_view) => parent.child(configuration_view),
None => parent.child(Label::new(format!(
"No configuration view for {provider_name}",
))),
}),
)
.when(is_expanded, |parent| match configuration_view {
Some(configuration_view) => parent.child(configuration_view),
None => parent.child(Label::new(format!(
"No configuration view for {provider_name}",
))),
})
}
fn render_provider_configuration_section(
@@ -276,11 +262,12 @@ impl AgentConfiguration {
let providers = LanguageModelRegistry::read_global(cx).providers();
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.border_b_1()
.border_color(cx.theme().colors().border)
.child(
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.pb_0()
.mb_2p5()
.gap_0p5()
.child(Headline::new("LLM Providers"))
@@ -289,15 +276,10 @@ impl AgentConfiguration {
.color(Color::Muted),
),
)
.child(
div()
.pl(DynamicSpacing::Base08.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.children(
providers.into_iter().map(|provider| {
self.render_provider_configuration_block(&provider, cx)
}),
),
.children(
providers
.into_iter()
.map(|provider| self.render_provider_configuration_block(&provider, cx)),
)
}

View File

@@ -379,14 +379,6 @@ impl ConfigureContextServerModal {
};
self.state = State::Waiting;
let existing_server = self.context_server_store.read(cx).get_running_server(&id);
if existing_server.is_some() {
self.context_server_store.update(cx, |store, cx| {
store.stop_server(&id, cx).log_err();
});
}
let wait_for_context_server_task =
wait_for_context_server(&self.context_server_store, id.clone(), cx);
cx.spawn({
@@ -407,21 +399,13 @@ impl ConfigureContextServerModal {
})
.detach();
let settings_changed =
ProjectSettings::get_global(cx).context_servers.get(&id.0) != Some(&settings);
if settings_changed {
// When we write the settings to the file, the context server will be restarted.
workspace.update(cx, |workspace, cx| {
let fs = workspace.app_state().fs.clone();
update_settings_file::<ProjectSettings>(fs.clone(), cx, |project_settings, _| {
project_settings.context_servers.insert(id.0, settings);
});
// When we write the settings to the file, the context server will be restarted.
workspace.update(cx, |workspace, cx| {
let fs = workspace.app_state().fs.clone();
update_settings_file::<ProjectSettings>(fs.clone(), cx, |project_settings, _| {
project_settings.context_servers.insert(id.0, settings);
});
} else if let Some(existing_server) = existing_server {
self.context_server_store
.update(cx, |store, cx| store.start_server(existing_server, cx));
}
});
}
fn cancel(&mut self, _: &menu::Cancel, cx: &mut Context<Self>) {

View File

@@ -54,76 +54,42 @@ pub use ui::preview::{all_agent_previews, get_agent_preview};
actions!(
agent,
[
/// Creates a new text-based conversation thread.
NewTextThread,
/// Toggles the context picker interface for adding files, symbols, or other context.
ToggleContextPicker,
/// Toggles the navigation menu for switching between threads and views.
ToggleNavigationMenu,
/// Toggles the options menu for agent settings and preferences.
ToggleOptionsMenu,
/// Deletes the recently opened thread from history.
DeleteRecentlyOpenThread,
/// Toggles the profile selector for switching between agent profiles.
ToggleProfileSelector,
/// Removes all added context from the current conversation.
RemoveAllContext,
/// Expands the message editor to full size.
ExpandMessageEditor,
/// Opens the conversation history view.
OpenHistory,
/// Adds a context server to the configuration.
AddContextServer,
/// Removes the currently selected thread.
RemoveSelectedThread,
/// Starts a chat conversation with the agent.
Chat,
/// Starts a chat conversation with follow-up enabled.
ChatWithFollow,
/// Cycles to the next inline assist suggestion.
CycleNextInlineAssist,
/// Cycles to the previous inline assist suggestion.
CyclePreviousInlineAssist,
/// Moves focus up in the interface.
FocusUp,
/// Moves focus down in the interface.
FocusDown,
/// Moves focus left in the interface.
FocusLeft,
/// Moves focus right in the interface.
FocusRight,
/// Removes the currently focused context item.
RemoveFocusedContext,
/// Accepts the suggested context item.
AcceptSuggestedContext,
/// Opens the active thread as a markdown file.
OpenActiveThreadAsMarkdown,
/// Opens the agent diff view to review changes.
OpenAgentDiff,
/// Keeps the current suggestion or change.
Keep,
/// Rejects the current suggestion or change.
Reject,
/// Rejects all suggestions or changes.
RejectAll,
/// Keeps all suggestions or changes.
KeepAll,
/// Follows the agent's suggestions.
Follow,
/// Resets the trial upsell notification.
ResetTrialUpsell,
/// Resets the trial end upsell notification.
ResetTrialEndUpsell,
/// Continues the current thread.
ContinueThread,
/// Continues the thread with burn mode enabled.
ContinueWithBurnMode,
/// Toggles burn mode for faster responses.
ToggleBurnMode,
]
);
/// Creates a new conversation thread, optionally based on an existing thread.
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
#[serde(deny_unknown_fields)]
@@ -132,7 +98,6 @@ pub struct NewThread {
from_thread_id: Option<ThreadId>,
}
/// Opens the profile management interface for configuring agent tools and settings.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
#[serde(deny_unknown_fields)]

View File

@@ -18,7 +18,6 @@ use ui::{ListItem, ListItemSpacing, prelude::*};
actions!(
agent,
[
/// Toggles the language model selector dropdown.
#[action(deprecated_aliases = ["assistant::ToggleModelSelector", "assistant2::ToggleModelSelector"])]
ToggleModelSelector
]

View File

@@ -85,24 +85,16 @@ use assistant_context::{
actions!(
assistant,
[
/// Sends the current message to the assistant.
Assist,
/// Confirms and executes the entered slash command.
ConfirmCommand,
/// Copies code from the assistant's response to the clipboard.
CopyCode,
/// Cycles between user and assistant message roles.
CycleMessageRole,
/// Inserts the selected text into the active editor.
InsertIntoEditor,
/// Quotes the current selection in the assistant conversation.
QuoteSelection,
/// Splits the conversation at the current cursor position.
Split,
]
);
/// Inserts files that were dragged and dropped into the assistant conversation.
#[derive(PartialEq, Clone, Action)]
#[action(namespace = assistant, no_json, no_register)]
pub enum InsertDraggedFiles {

View File

@@ -1,5 +1,5 @@
use agent::{Thread, ThreadEvent};
use assistant_tool::{AnyTool, ToolSource};
use assistant_tool::{Tool, ToolSource};
use collections::HashMap;
use gpui::{App, Context, Entity, IntoElement, Render, Subscription, Window};
use language_model::{LanguageModel, LanguageModelToolSchemaFormat};
@@ -7,7 +7,7 @@ use std::sync::Arc;
use ui::prelude::*;
pub struct IncompatibleToolsState {
cache: HashMap<LanguageModelToolSchemaFormat, Vec<AnyTool>>,
cache: HashMap<LanguageModelToolSchemaFormat, Vec<Arc<dyn Tool>>>,
thread: Entity<Thread>,
_thread_subscription: Subscription,
}
@@ -29,7 +29,11 @@ impl IncompatibleToolsState {
}
}
pub fn incompatible_tools(&mut self, model: &Arc<dyn LanguageModel>, cx: &App) -> &[AnyTool] {
pub fn incompatible_tools(
&mut self,
model: &Arc<dyn LanguageModel>,
cx: &App,
) -> &[Arc<dyn Tool>] {
self.cache
.entry(model.tool_input_format())
.or_insert_with(|| {
@@ -38,15 +42,15 @@ impl IncompatibleToolsState {
.profile()
.enabled_tools(cx)
.iter()
.filter(|(_, tool)| tool.input_schema(model.tool_input_format()).is_err())
.map(|(_, tool)| tool.clone())
.filter(|tool| tool.input_schema(model.tool_input_format()).is_err())
.cloned()
.collect()
})
}
}
pub struct IncompatibleToolsTooltip {
pub incompatible_tools: Vec<AnyTool>,
pub incompatible_tools: Vec<Arc<dyn Tool>>,
}
impl Render for IncompatibleToolsTooltip {

View File

@@ -1,4 +1,5 @@
mod agent_notification;
mod animated_label;
mod burn_mode_tooltip;
mod context_pill;
mod onboarding_modal;
@@ -6,6 +7,7 @@ pub mod preview;
mod upsell;
pub use agent_notification::*;
pub use animated_label::*;
pub use burn_mode_tooltip::*;
pub use context_pill::*;
pub use onboarding_modal::*;

View File

@@ -1,24 +1,24 @@
use crate::prelude::*;
use gpui::{Animation, AnimationExt, FontWeight, pulsating_between};
use std::time::Duration;
use ui::prelude::*;
#[derive(IntoElement)]
pub struct LoadingLabel {
pub struct AnimatedLabel {
base: Label,
text: SharedString,
}
impl LoadingLabel {
impl AnimatedLabel {
pub fn new(text: impl Into<SharedString>) -> Self {
let text = text.into();
LoadingLabel {
AnimatedLabel {
base: Label::new(text.clone()),
text,
}
}
}
impl LabelCommon for LoadingLabel {
impl LabelCommon for AnimatedLabel {
fn size(mut self, size: LabelSize) -> Self {
self.base = self.base.size(size);
self
@@ -80,14 +80,14 @@ impl LabelCommon for LoadingLabel {
}
}
impl RenderOnce for LoadingLabel {
impl RenderOnce for AnimatedLabel {
fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
let text = self.text.clone();
self.base
.color(Color::Muted)
.with_animations(
"loading_label",
"animated-label",
vec![
Animation::new(Duration::from_secs(1)),
Animation::new(Duration::from_secs(1)).repeat(),

View File

@@ -22,7 +22,6 @@ gpui.workspace = true
icons.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
parking_lot.workspace = true
project.workspace = true
regex.workspace = true

View File

@@ -4,19 +4,25 @@ mod tool_registry;
mod tool_schema;
mod tool_working_set;
use std::{fmt, fmt::Debug, fmt::Formatter, ops::Deref, sync::Arc};
use std::fmt;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
use anyhow::Result;
use gpui::{
AnyElement, AnyWindowHandle, App, Context, Entity, IntoElement, SharedString, Task, WeakEntity,
Window,
};
use gpui::AnyElement;
use gpui::AnyWindowHandle;
use gpui::Context;
use gpui::IntoElement;
use gpui::Window;
use gpui::{App, Entity, SharedString, Task, WeakEntity};
use icons::IconName;
use language_model::{
LanguageModel, LanguageModelImage, LanguageModelRequest, LanguageModelToolSchemaFormat,
};
use language_model::LanguageModel;
use language_model::LanguageModelImage;
use language_model::LanguageModelRequest;
use language_model::LanguageModelToolSchemaFormat;
use project::Project;
use serde::de::DeserializeOwned;
use workspace::Workspace;
pub use crate::action_log::*;
@@ -193,10 +199,7 @@ pub enum ToolSource {
}
/// A tool that can be used by a language model.
pub trait Tool: Send + Sync + 'static {
/// The input type that is accepted by the tool.
type Input: DeserializeOwned;
pub trait Tool: 'static + Send + Sync {
/// Returns the name of the tool.
fn name(&self) -> String;
@@ -213,7 +216,7 @@ pub trait Tool: Send + Sync + 'static {
/// Returns true if the tool needs the user's confirmation
/// before having permission to run.
fn needs_confirmation(&self, input: &Self::Input, cx: &App) -> bool;
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;
/// Returns true if the tool may perform edits.
fn may_perform_edits(&self) -> bool;
@@ -224,18 +227,18 @@ pub trait Tool: Send + Sync + 'static {
}
/// Returns markdown to be displayed in the UI for this tool.
fn ui_text(&self, input: &Self::Input) -> String;
fn ui_text(&self, input: &serde_json::Value) -> String;
/// Returns markdown to be displayed in the UI for this tool, while the input JSON is still streaming
/// (so information may be missing).
fn still_streaming_ui_text(&self, input: &Self::Input) -> String {
fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String {
self.ui_text(input)
}
/// Runs the tool with the provided input.
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
@@ -255,199 +258,7 @@ pub trait Tool: Send + Sync + 'static {
}
}
#[derive(Clone)]
pub struct AnyTool {
inner: Arc<dyn ErasedTool>,
}
/// Copy of `Tool` where the Input type is erased.
trait ErasedTool: Send + Sync {
fn name(&self) -> String;
fn description(&self) -> String;
fn icon(&self) -> IconName;
fn source(&self) -> ToolSource;
fn may_perform_edits(&self) -> bool;
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value>;
fn ui_text(&self, input: &serde_json::Value) -> String;
fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String;
fn run(
&self,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
model: Arc<dyn LanguageModel>,
window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult;
fn deserialize_card(
&self,
output: serde_json::Value,
project: Entity<Project>,
window: &mut Window,
cx: &mut App,
) -> Option<AnyToolCard>;
}
struct ErasedToolWrapper<T: Tool> {
tool: Arc<T>,
}
impl<T: Tool> ErasedTool for ErasedToolWrapper<T> {
fn name(&self) -> String {
self.tool.name()
}
fn description(&self) -> String {
self.tool.description()
}
fn icon(&self) -> IconName {
self.tool.icon()
}
fn source(&self) -> ToolSource {
self.tool.source()
}
fn may_perform_edits(&self) -> bool {
self.tool.may_perform_edits()
}
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool {
match serde_json::from_value::<T::Input>(input.clone()) {
Ok(parsed_input) => self.tool.needs_confirmation(&parsed_input, cx),
Err(_) => true,
}
}
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
self.tool.input_schema(format)
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<T::Input>(input.clone()) {
Ok(parsed_input) => self.tool.ui_text(&parsed_input),
Err(_) => "Invalid input".to_string(),
}
}
fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<T::Input>(input.clone()) {
Ok(parsed_input) => self.tool.still_streaming_ui_text(&parsed_input),
Err(_) => "Invalid input".to_string(),
}
}
fn run(
&self,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
model: Arc<dyn LanguageModel>,
window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
match serde_json::from_value::<T::Input>(input) {
Ok(parsed_input) => self.tool.clone().run(
parsed_input,
request,
project,
action_log,
model,
window,
cx,
),
Err(err) => ToolResult::from(Task::ready(Err(err.into()))),
}
}
fn deserialize_card(
&self,
output: serde_json::Value,
project: Entity<Project>,
window: &mut Window,
cx: &mut App,
) -> Option<AnyToolCard> {
self.tool
.clone()
.deserialize_card(output, project, window, cx)
}
}
impl<T: Tool> From<Arc<T>> for AnyTool {
fn from(tool: Arc<T>) -> Self {
Self {
inner: Arc::new(ErasedToolWrapper { tool }),
}
}
}
impl AnyTool {
pub fn name(&self) -> String {
self.inner.name()
}
pub fn description(&self) -> String {
self.inner.description()
}
pub fn icon(&self) -> IconName {
self.inner.icon()
}
pub fn source(&self) -> ToolSource {
self.inner.source()
}
pub fn may_perform_edits(&self) -> bool {
self.inner.may_perform_edits()
}
pub fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool {
self.inner.needs_confirmation(input, cx)
}
pub fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
self.inner.input_schema(format)
}
pub fn ui_text(&self, input: &serde_json::Value) -> String {
self.inner.ui_text(input)
}
pub fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String {
self.inner.still_streaming_ui_text(input)
}
pub fn run(
&self,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
model: Arc<dyn LanguageModel>,
window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
self.inner
.run(input, request, project, action_log, model, window, cx)
}
pub fn deserialize_card(
&self,
output: serde_json::Value,
project: Entity<Project>,
window: &mut Window,
cx: &mut App,
) -> Option<AnyToolCard> {
self.inner.deserialize_card(output, project, window, cx)
}
}
impl Debug for AnyTool {
impl Debug for dyn Tool {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("Tool").field("name", &self.name()).finish()
}

View File

@@ -6,7 +6,7 @@ use gpui::Global;
use gpui::{App, ReadGlobal};
use parking_lot::RwLock;
use crate::{AnyTool, Tool};
use crate::Tool;
#[derive(Default, Deref, DerefMut)]
struct GlobalToolRegistry(Arc<ToolRegistry>);
@@ -15,7 +15,7 @@ impl Global for GlobalToolRegistry {}
#[derive(Default)]
struct ToolRegistryState {
tools: HashMap<Arc<str>, AnyTool>,
tools: HashMap<Arc<str>, Arc<dyn Tool>>,
}
#[derive(Default)]
@@ -48,7 +48,7 @@ impl ToolRegistry {
pub fn register_tool(&self, tool: impl Tool) {
let mut state = self.state.write();
let tool_name: Arc<str> = tool.name().into();
state.tools.insert(tool_name, Arc::new(tool).into());
state.tools.insert(tool_name, Arc::new(tool));
}
/// Unregisters the provided [`Tool`].
@@ -63,12 +63,12 @@ impl ToolRegistry {
}
/// Returns the list of tools in the registry.
pub fn tools(&self) -> Vec<AnyTool> {
pub fn tools(&self) -> Vec<Arc<dyn Tool>> {
self.state.read().tools.values().cloned().collect()
}
/// Returns the [`Tool`] with the given name.
pub fn tool(&self, name: &str) -> Option<AnyTool> {
pub fn tool(&self, name: &str) -> Option<Arc<dyn Tool>> {
self.state.read().tools.get(name).cloned()
}
}

View File

@@ -1,77 +1,39 @@
use std::borrow::Borrow;
use std::sync::Arc;
use crate::{AnyTool, ToolRegistry, ToolSource};
use collections::{HashMap, HashSet, IndexMap};
use gpui::{App, SharedString};
use util::debug_panic;
use collections::{HashMap, IndexMap};
use gpui::App;
use crate::{Tool, ToolRegistry, ToolSource};
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)]
pub struct ToolId(usize);
/// A unique identifier for a tool within a working set.
#[derive(Clone, PartialEq, Eq, Hash, Default)]
pub struct UniqueToolName(SharedString);
impl Borrow<str> for UniqueToolName {
fn borrow(&self) -> &str {
&self.0
}
}
impl From<String> for UniqueToolName {
fn from(value: String) -> Self {
UniqueToolName(SharedString::new(value))
}
}
impl Into<String> for UniqueToolName {
fn into(self) -> String {
self.0.into()
}
}
impl std::fmt::Debug for UniqueToolName {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl std::fmt::Display for UniqueToolName {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0.as_ref())
}
}
/// A working set of tools for use in one instance of the Assistant Panel.
#[derive(Default)]
pub struct ToolWorkingSet {
context_server_tools_by_id: HashMap<ToolId, AnyTool>,
context_server_tools_by_name: HashMap<UniqueToolName, AnyTool>,
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
next_tool_id: ToolId,
}
impl ToolWorkingSet {
pub fn tool(&self, name: &str, cx: &App) -> Option<AnyTool> {
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
self.context_server_tools_by_name
.get(name)
.cloned()
.or_else(|| ToolRegistry::global(cx).tool(name))
}
pub fn tools(&self, cx: &App) -> Vec<(UniqueToolName, AnyTool)> {
let mut tools = ToolRegistry::global(cx)
.tools()
.into_iter()
.map(|tool| (UniqueToolName(tool.name().into()), tool))
.collect::<Vec<_>>();
tools.extend(self.context_server_tools_by_name.clone());
pub fn tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
let mut tools = ToolRegistry::global(cx).tools();
tools.extend(self.context_server_tools_by_id.values().cloned());
tools
}
pub fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<AnyTool>> {
pub fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
let mut tools_by_source = IndexMap::default();
for (_, tool) in self.tools(cx) {
for tool in self.tools(cx) {
tools_by_source
.entry(tool.source())
.or_insert_with(Vec::new)
@@ -87,330 +49,27 @@ impl ToolWorkingSet {
tools_by_source
}
pub fn insert(&mut self, tool: AnyTool, cx: &App) -> ToolId {
let tool_id = self.register_tool(tool);
self.tools_changed(cx);
tool_id
}
pub fn extend(&mut self, tools: impl Iterator<Item = AnyTool>, cx: &App) -> Vec<ToolId> {
let ids = tools.map(|tool| self.register_tool(tool)).collect();
self.tools_changed(cx);
ids
}
pub fn remove(&mut self, tool_ids_to_remove: &[ToolId], cx: &App) {
self.context_server_tools_by_id
.retain(|id, _| !tool_ids_to_remove.contains(id));
self.tools_changed(cx);
}
fn register_tool(&mut self, tool: AnyTool) -> ToolId {
pub fn insert(&mut self, tool: Arc<dyn Tool>) -> ToolId {
let tool_id = self.next_tool_id;
self.next_tool_id.0 += 1;
self.context_server_tools_by_id
.insert(tool_id, tool.clone());
self.tools_changed();
tool_id
}
fn tools_changed(&mut self, cx: &App) {
self.context_server_tools_by_name = resolve_context_server_tool_name_conflicts(
&self
.context_server_tools_by_id
pub fn remove(&mut self, tool_ids_to_remove: &[ToolId]) {
self.context_server_tools_by_id
.retain(|id, _| !tool_ids_to_remove.contains(id));
self.tools_changed();
}
fn tools_changed(&mut self) {
self.context_server_tools_by_name.clear();
self.context_server_tools_by_name.extend(
self.context_server_tools_by_id
.values()
.cloned()
.collect::<Vec<_>>(),
&ToolRegistry::global(cx).tools(),
.map(|tool| (tool.name(), tool.clone())),
);
}
}
fn resolve_context_server_tool_name_conflicts(
context_server_tools: &[AnyTool],
native_tools: &[AnyTool],
) -> HashMap<UniqueToolName, AnyTool> {
fn resolve_tool_name(tool: &AnyTool) -> String {
let mut tool_name = tool.name();
tool_name.truncate(MAX_TOOL_NAME_LENGTH);
tool_name
}
const MAX_TOOL_NAME_LENGTH: usize = 64;
let mut duplicated_tool_names = HashSet::default();
let mut seen_tool_names = HashSet::default();
seen_tool_names.extend(native_tools.iter().map(|tool| tool.name()));
for tool in context_server_tools {
let tool_name = resolve_tool_name(tool);
if seen_tool_names.contains(&tool_name) {
debug_assert!(
tool.source() != ToolSource::Native,
"Expected MCP tool but got a native tool: {}",
tool_name
);
duplicated_tool_names.insert(tool_name);
} else {
seen_tool_names.insert(tool_name);
}
}
if duplicated_tool_names.is_empty() {
return context_server_tools
.into_iter()
.map(|tool| (resolve_tool_name(tool).into(), tool.clone()))
.collect();
}
context_server_tools
.into_iter()
.filter_map(|tool| {
let mut tool_name = resolve_tool_name(tool);
if !duplicated_tool_names.contains(&tool_name) {
return Some((tool_name.into(), tool.clone()));
}
match tool.source() {
ToolSource::Native => {
debug_panic!("Expected MCP tool but got a native tool: {}", tool_name);
// Built-in tools always keep their original name
Some((tool_name.into(), tool.clone()))
}
ToolSource::ContextServer { id } => {
// Context server tools are prefixed with the context server ID, and truncated if necessary
tool_name.insert(0, '_');
if tool_name.len() + id.len() > MAX_TOOL_NAME_LENGTH {
let len = MAX_TOOL_NAME_LENGTH - tool_name.len();
let mut id = id.to_string();
id.truncate(len);
tool_name.insert_str(0, &id);
} else {
tool_name.insert_str(0, &id);
}
tool_name.truncate(MAX_TOOL_NAME_LENGTH);
if seen_tool_names.contains(&tool_name) {
log::error!("Cannot resolve tool name conflict for tool {}", tool.name());
None
} else {
Some((tool_name.into(), tool.clone()))
}
}
}
})
.collect()
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use gpui::{AnyWindowHandle, Entity, Task, TestAppContext};
use language_model::{LanguageModel, LanguageModelRequest};
use project::Project;
use crate::{ActionLog, Tool, ToolResult};
use super::*;
#[gpui::test]
fn test_unique_tool_names(cx: &mut TestAppContext) {
fn assert_tool(
tool_working_set: &ToolWorkingSet,
unique_name: &str,
expected_name: &str,
expected_source: ToolSource,
cx: &App,
) {
let tool = tool_working_set.tool(unique_name, cx).unwrap();
assert_eq!(tool.name(), expected_name);
assert_eq!(tool.source(), expected_source);
}
let tool_registry = cx.update(ToolRegistry::default_global);
tool_registry.register_tool(TestTool::new("tool1", ToolSource::Native));
tool_registry.register_tool(TestTool::new("tool2", ToolSource::Native));
let mut tool_working_set = ToolWorkingSet::default();
cx.update(|cx| {
tool_working_set.extend(
vec![
Arc::new(TestTool::new(
"tool2",
ToolSource::ContextServer { id: "mcp-1".into() },
))
.into(),
Arc::new(TestTool::new(
"tool2",
ToolSource::ContextServer { id: "mcp-2".into() },
))
.into(),
]
.into_iter(),
cx,
);
});
cx.update(|cx| {
assert_tool(&tool_working_set, "tool1", "tool1", ToolSource::Native, cx);
assert_tool(&tool_working_set, "tool2", "tool2", ToolSource::Native, cx);
assert_tool(
&tool_working_set,
"mcp-1_tool2",
"tool2",
ToolSource::ContextServer { id: "mcp-1".into() },
cx,
);
assert_tool(
&tool_working_set,
"mcp-2_tool2",
"tool2",
ToolSource::ContextServer { id: "mcp-2".into() },
cx,
);
})
}
#[gpui::test]
fn test_resolve_context_server_tool_name_conflicts() {
assert_resolve_context_server_tool_name_conflicts(
vec![
TestTool::new("tool1", ToolSource::Native),
TestTool::new("tool2", ToolSource::Native),
],
vec![TestTool::new(
"tool3",
ToolSource::ContextServer { id: "mcp-1".into() },
)],
vec!["tool3"],
);
assert_resolve_context_server_tool_name_conflicts(
vec![
TestTool::new("tool1", ToolSource::Native),
TestTool::new("tool2", ToolSource::Native),
],
vec![
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
],
vec!["mcp-1_tool3", "mcp-2_tool3"],
);
assert_resolve_context_server_tool_name_conflicts(
vec![
TestTool::new("tool1", ToolSource::Native),
TestTool::new("tool2", ToolSource::Native),
TestTool::new("tool3", ToolSource::Native),
],
vec![
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
],
vec!["mcp-1_tool3", "mcp-2_tool3"],
);
// Test deduplication of tools with very long names; in this case the MCP server name should be truncated
assert_resolve_context_server_tool_name_conflicts(
vec![TestTool::new(
"tool-with-very-very-very-long-name",
ToolSource::Native,
)],
vec![TestTool::new(
"tool-with-very-very-very-long-name",
ToolSource::ContextServer {
id: "mcp-with-very-very-very-long-name".into(),
},
)],
vec!["mcp-with-very-very-very-long-_tool-with-very-very-very-long-name"],
);
fn assert_resolve_context_server_tool_name_conflicts(
builtin_tools: Vec<TestTool>,
context_server_tools: Vec<TestTool>,
expected: Vec<&'static str>,
) {
let context_server_tools: Vec<AnyTool> = context_server_tools
.into_iter()
.map(|t| Arc::new(t).into())
.collect();
let builtin_tools: Vec<AnyTool> = builtin_tools
.into_iter()
.map(|t| Arc::new(t).into())
.collect();
let tools =
resolve_context_server_tool_name_conflicts(&context_server_tools, &builtin_tools);
assert_eq!(tools.len(), expected.len());
for (i, (name, _)) in tools.into_iter().enumerate() {
assert_eq!(
name.0.as_ref(),
expected[i],
"Expected '{}' got '{}' at index {}",
expected[i],
name,
i
);
}
}
}
struct TestTool {
name: String,
source: ToolSource,
}
impl TestTool {
fn new(name: impl Into<String>, source: ToolSource) -> Self {
Self {
name: name.into(),
source,
}
}
}
impl Tool for TestTool {
type Input = ();
fn name(&self) -> String {
self.name.clone()
}
fn icon(&self) -> icons::IconName {
icons::IconName::Ai
}
fn may_perform_edits(&self) -> bool {
false
}
fn needs_confirmation(&self, _input: &Self::Input, _cx: &App) -> bool {
true
}
fn source(&self) -> ToolSource {
self.source.clone()
}
fn description(&self) -> String {
"Test tool".to_string()
}
fn ui_text(&self, _input: &Self::Input) -> String {
"Test tool".to_string()
}
fn run(
self: Arc<Self>,
_input: Self::Input,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
_model: Arc<dyn LanguageModel>,
_window: Option<AnyWindowHandle>,
_cx: &mut App,
) -> ToolResult {
ToolResult {
output: Task::ready(Err(anyhow::anyhow!("No content"))),
card: None,
}
}
}
}

View File

@@ -46,6 +46,7 @@ pub use find_path_tool::FindPathToolInput;
pub use grep_tool::{GrepTool, GrepToolInput};
pub use open_tool::OpenTool;
pub use read_file_tool::{ReadFileTool, ReadFileToolInput};
pub use schema::root_schema_for;
pub use terminal_tool::TerminalTool;
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {

View File

@@ -40,13 +40,11 @@ pub struct CopyPathToolInput {
pub struct CopyPathTool;
impl Tool for CopyPathTool {
type Input = CopyPathToolInput;
fn name(&self) -> String {
"copy_path".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -66,15 +64,20 @@ impl Tool for CopyPathTool {
json_schema_for::<CopyPathToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
let src = MarkdownInlineCode(&input.source_path);
let dest = MarkdownInlineCode(&input.destination_path);
format!("Copy {src} to {dest}")
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<CopyPathToolInput>(input.clone()) {
Ok(input) => {
let src = MarkdownInlineCode(&input.source_path);
let dest = MarkdownInlineCode(&input.destination_path);
format!("Copy {src} to {dest}")
}
Err(_) => "Copy path".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -82,6 +85,10 @@ impl Tool for CopyPathTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<CopyPathToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let copy_task = project.update(cx, |project, cx| {
match project
.find_project_path(&input.source_path, cx)

View File

@@ -29,8 +29,6 @@ pub struct CreateDirectoryToolInput {
pub struct CreateDirectoryTool;
impl Tool for CreateDirectoryTool {
type Input = CreateDirectoryToolInput;
fn name(&self) -> String {
"create_directory".into()
}
@@ -39,7 +37,7 @@ impl Tool for CreateDirectoryTool {
include_str!("./create_directory_tool/description.md").into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -55,13 +53,18 @@ impl Tool for CreateDirectoryTool {
json_schema_for::<CreateDirectoryToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
format!("Create directory {}", MarkdownInlineCode(&input.path))
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<CreateDirectoryToolInput>(input.clone()) {
Ok(input) => {
format!("Create directory {}", MarkdownInlineCode(&input.path))
}
Err(_) => "Create directory".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -69,6 +72,10 @@ impl Tool for CreateDirectoryTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<CreateDirectoryToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let project_path = match project.read(cx).find_project_path(&input.path, cx) {
Some(project_path) => project_path,
None => {

View File

@@ -29,13 +29,11 @@ pub struct DeletePathToolInput {
pub struct DeletePathTool;
impl Tool for DeletePathTool {
type Input = DeletePathToolInput;
fn name(&self) -> String {
"delete_path".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -55,13 +53,16 @@ impl Tool for DeletePathTool {
json_schema_for::<DeletePathToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
format!("Delete “`{}`”", input.path)
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<DeletePathToolInput>(input.clone()) {
Ok(input) => format!("Delete “`{}`”", input.path),
Err(_) => "Delete path".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
@@ -69,7 +70,10 @@ impl Tool for DeletePathTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let path_str = input.path;
let path_str = match serde_json::from_value::<DeletePathToolInput>(input) {
Ok(input) => input.path,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let Some(project_path) = project.read(cx).find_project_path(&path_str, cx) else {
return Task::ready(Err(anyhow!(
"Couldn't delete {path_str} because that path isn't in this project."

View File

@@ -42,13 +42,11 @@ where
pub struct DiagnosticsTool;
impl Tool for DiagnosticsTool {
type Input = DiagnosticsToolInput;
fn name(&self) -> String {
"diagnostics".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -68,9 +66,15 @@ impl Tool for DiagnosticsTool {
json_schema_for::<DiagnosticsToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
if let Some(path) = input.path.as_ref().filter(|p| !p.is_empty()) {
format!("Check diagnostics for {}", MarkdownInlineCode(path))
fn ui_text(&self, input: &serde_json::Value) -> String {
if let Some(path) = serde_json::from_value::<DiagnosticsToolInput>(input.clone())
.ok()
.and_then(|input| match input.path {
Some(path) if !path.is_empty() => Some(path),
_ => None,
})
{
format!("Check diagnostics for {}", MarkdownInlineCode(&path))
} else {
"Check project diagnostics".to_string()
}
@@ -78,7 +82,7 @@ impl Tool for DiagnosticsTool {
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
@@ -86,7 +90,10 @@ impl Tool for DiagnosticsTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
match input.path {
match serde_json::from_value::<DiagnosticsToolInput>(input)
.ok()
.and_then(|input| input.path)
{
Some(path) if !path.is_empty() => {
let Some(project_path) = project.read(cx).find_project_path(&path, cx) else {
return Task::ready(Err(anyhow!("Could not find path {path} in project",)))

View File

@@ -9132,7 +9132,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_lines(window, cx, |lines| lines.sort())
self.manipulate_immutable_lines(window, cx, |lines| lines.sort())
}
pub fn sort_lines_case_insensitive(
@@ -9141,7 +9141,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_lines(window, cx, |lines| {
self.manipulate_immutable_lines(window, cx, |lines| {
lines.sort_by_key(|line| line.to_lowercase())
})
}
@@ -9152,7 +9152,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_lines(window, cx, |lines| {
self.manipulate_immutable_lines(window, cx, |lines| {
let mut seen = HashSet::default();
lines.retain(|line| seen.insert(line.to_lowercase()));
})
@@ -9164,7 +9164,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_lines(window, cx, |lines| {
self.manipulate_immutable_lines(window, cx, |lines| {
let mut seen = HashSet::default();
lines.retain(|line| seen.insert(*line));
})
@@ -9606,20 +9606,20 @@ impl Editor {
}
pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context<Self>) {
self.manipulate_lines(window, cx, |lines| lines.reverse())
self.manipulate_immutable_lines(window, cx, |lines| lines.reverse())
}
pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context<Self>) {
self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
}
fn manipulate_lines<Fn>(
fn manipulate_lines<M>(
&mut self,
window: &mut Window,
cx: &mut Context<Self>,
mut callback: Fn,
mut manipulate: M,
) where
Fn: FnMut(&mut Vec<&str>),
M: FnMut(&str) -> LineManipulationResult,
{
self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
@@ -9652,18 +9652,14 @@ impl Editor {
.text_for_range(start_point..end_point)
.collect::<String>();
let mut lines = text.split('\n').collect_vec();
let LineManipulationResult { new_text, line_count_before, line_count_after } = manipulate(&text);
let lines_before = lines.len();
callback(&mut lines);
let lines_after = lines.len();
edits.push((start_point..end_point, lines.join("\n")));
edits.push((start_point..end_point, new_text));
// Selections must change based on added and removed line count
let start_row =
MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32);
let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32);
let end_row = MultiBufferRow(start_row.0 + line_count_after.saturating_sub(1) as u32);
new_selections.push(Selection {
id: selection.id,
start: start_row,
@@ -9672,10 +9668,10 @@ impl Editor {
reversed: selection.reversed,
});
if lines_after > lines_before {
added_lines += lines_after - lines_before;
} else if lines_before > lines_after {
removed_lines += lines_before - lines_after;
if line_count_after > line_count_before {
added_lines += line_count_after - line_count_before;
} else if line_count_before > line_count_after {
removed_lines += line_count_before - line_count_after;
}
}
@@ -9720,6 +9716,171 @@ impl Editor {
})
}
fn manipulate_immutable_lines<Fn>(
&mut self,
window: &mut Window,
cx: &mut Context<Self>,
mut callback: Fn,
) where
Fn: FnMut(&mut Vec<&str>),
{
self.manipulate_lines(window, cx, |text| {
let mut lines: Vec<&str> = text.split('\n').collect();
let line_count_before = lines.len();
callback(&mut lines);
LineManipulationResult {
new_text: lines.join("\n"),
line_count_before,
line_count_after: lines.len(),
}
});
}
fn manipulate_mutable_lines<Fn>(
&mut self,
window: &mut Window,
cx: &mut Context<Self>,
mut callback: Fn,
) where
Fn: FnMut(&mut Vec<Cow<'_, str>>),
{
self.manipulate_lines(window, cx, |text| {
let mut lines: Vec<Cow<str>> = text.split('\n').map(Cow::from).collect();
let line_count_before = lines.len();
callback(&mut lines);
LineManipulationResult {
new_text: lines.join("\n"),
line_count_before,
line_count_after: lines.len(),
}
});
}
pub fn convert_indentation_to_spaces(
&mut self,
_: &ConvertIndentationToSpaces,
window: &mut Window,
cx: &mut Context<Self>,
) {
let settings = self.buffer.read(cx).language_settings(cx);
let tab_size = settings.tab_size.get() as usize;
self.manipulate_mutable_lines(window, cx, |lines| {
// Allocates a reasonably sized scratch buffer once for the whole loop
let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
// Avoids recomputing spaces that could be inserted many times
let space_cache: Vec<Vec<char>> = (1..=tab_size)
.map(|n| IndentSize::spaces(n as u32).chars().collect())
.collect();
for line in lines.iter_mut().filter(|line| !line.is_empty()) {
let mut chars = line.as_ref().chars();
let mut col = 0;
let mut changed = false;
while let Some(ch) = chars.next() {
match ch {
' ' => {
reindented_line.push(' ');
col += 1;
}
'\t' => {
// Tabs are converted to spaces depending on the current column
let spaces_len = tab_size - (col % tab_size);
reindented_line.extend(&space_cache[spaces_len - 1]);
col += spaces_len;
changed = true;
}
_ => {
// If we don't append it before breaking, the consumed character is lost
reindented_line.push(ch);
break;
}
}
}
if !changed {
reindented_line.clear();
continue;
}
// Append the rest of the line and replace the old reference with the new one
reindented_line.extend(chars);
*line = Cow::Owned(reindented_line.clone());
reindented_line.clear();
}
});
}
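// Worked example of the column arithmetic above (illustrative, not part of
// this change): with tab_size = 4, a line starting "  \tx" reaches the tab at
// col = 2, so spaces_len = 4 - (2 % 4) = 2 and the result is "    x", i.e.
// four columns of indentation either way.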
pub fn convert_indentation_to_tabs(
&mut self,
_: &ConvertIndentationToTabs,
window: &mut Window,
cx: &mut Context<Self>,
) {
let settings = self.buffer.read(cx).language_settings(cx);
let tab_size = settings.tab_size.get() as usize;
self.manipulate_mutable_lines(window, cx, |lines| {
// Allocates a reasonably sized buffer once for the whole loop
let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
// Avoids recomputing spaces that could be inserted many times
let space_cache: Vec<Vec<char>> = (1..=tab_size)
.map(|n| IndentSize::spaces(n as u32).chars().collect())
.collect();
for line in lines.iter_mut().filter(|line| !line.is_empty()) {
let mut chars = line.chars();
let mut spaces_count = 0;
let mut first_non_indent_char = None;
let mut changed = false;
while let Some(ch) = chars.next() {
match ch {
' ' => {
// Keep track of spaces. Append \t when we reach tab_size
spaces_count += 1;
changed = true;
if spaces_count == tab_size {
reindented_line.push('\t');
spaces_count = 0;
}
}
'\t' => {
reindented_line.push('\t');
spaces_count = 0;
}
_ => {
// Don't append it yet; we might have remaining spaces
first_non_indent_char = Some(ch);
break;
}
}
}
if !changed {
reindented_line.clear();
continue;
}
// Remaining spaces that didn't make a full tab stop
if spaces_count > 0 {
reindented_line.extend(&space_cache[spaces_count - 1]);
}
// If we consumed an extra character that was not indentation, add it back
if let Some(extra_char) = first_non_indent_char {
reindented_line.push(extra_char);
}
// Append the rest of the line and replace the old reference with the new one
reindented_line.extend(chars);
*line = Cow::Owned(reindented_line.clone());
reindented_line.clear();
}
});
}
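// Worked example (illustrative only): with tab_size = 4, a line starting with
// five spaces before "x" becomes "\t x": four spaces collapse into one tab,
// and the single leftover space is re-emitted from `space_cache`.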
pub fn convert_to_upper_case(
&mut self,
_: &ConvertToUpperCase,
@@ -21157,6 +21318,13 @@ pub struct LineHighlight {
pub type_id: Option<TypeId>,
}
struct LineManipulationResult {
pub new_text: String,
pub line_count_before: usize,
pub line_count_after: usize,
}
fn render_diff_hunk_controls(
row: u32,
status: &DiffHunkStatus,

View File

@@ -121,13 +121,11 @@ struct PartialInput {
const DEFAULT_UI_TEXT: &str = "Editing file";
impl Tool for EditFileTool {
type Input = EditFileToolInput;
fn name(&self) -> String {
"edit_file".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -147,20 +145,24 @@ impl Tool for EditFileTool {
json_schema_for::<EditFileToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
input.display_description.clone()
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<EditFileToolInput>(input.clone()) {
Ok(input) => input.display_description,
Err(_) => "Editing file".to_string(),
}
}
fn still_streaming_ui_text(&self, input: &Self::Input) -> String {
let description = input.display_description.trim();
if !description.is_empty() {
return description.to_string();
}
fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String {
if let Some(input) = serde_json::from_value::<PartialInput>(input.clone()).ok() {
let description = input.display_description.trim();
if !description.is_empty() {
return description.to_string();
}
let path = input.path.to_string_lossy();
let path = path.trim();
if !path.is_empty() {
return path.to_string();
let path = input.path.trim();
if !path.is_empty() {
return path.to_string();
}
}
DEFAULT_UI_TEXT.to_string()
@@ -168,7 +170,7 @@ impl Tool for EditFileTool {
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
@@ -176,6 +178,11 @@ impl Tool for EditFileTool {
window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<EditFileToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let project_path = match resolve_path(&input, project.clone(), cx) {
Ok(path) => path,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
@@ -1162,11 +1169,12 @@ mod tests {
let model = Arc::new(FakeLanguageModel::default());
let result = cx
.update(|cx| {
let input = EditFileToolInput {
let input = serde_json::to_value(EditFileToolInput {
display_description: "Some edit".into(),
path: "root/nonexistent_file.txt".into(),
mode: EditFileMode::Edit,
};
})
.unwrap();
Arc::new(EditFileTool)
.run(
input,
@@ -1280,22 +1288,24 @@ mod tests {
#[test]
fn still_streaming_ui_text_with_path() {
let input = EditFileToolInput {
path: "src/main.rs".into(),
display_description: "".into(),
mode: EditFileMode::Edit,
};
let input = json!({
"path": "src/main.rs",
"display_description": "",
"old_string": "old code",
"new_string": "new code"
});
assert_eq!(EditFileTool.still_streaming_ui_text(&input), "src/main.rs");
}
#[test]
fn still_streaming_ui_text_with_description() {
let input = EditFileToolInput {
path: "".into(),
display_description: "Fix error handling".into(),
mode: EditFileMode::Edit,
};
let input = json!({
"path": "",
"display_description": "Fix error handling",
"old_string": "old code",
"new_string": "new code"
});
assert_eq!(
EditFileTool.still_streaming_ui_text(&input),
@@ -1305,11 +1315,12 @@ mod tests {
#[test]
fn still_streaming_ui_text_with_path_and_description() {
let input = EditFileToolInput {
path: "src/main.rs".into(),
display_description: "Fix error handling".into(),
mode: EditFileMode::Edit,
};
let input = json!({
"path": "src/main.rs",
"display_description": "Fix error handling",
"old_string": "old code",
"new_string": "new code"
});
assert_eq!(
EditFileTool.still_streaming_ui_text(&input),
@@ -1319,11 +1330,12 @@ mod tests {
#[test]
fn still_streaming_ui_text_no_path_or_description() {
let input = EditFileToolInput {
path: "".into(),
display_description: "".into(),
mode: EditFileMode::Edit,
};
let input = json!({
"path": "",
"display_description": "",
"old_string": "old code",
"new_string": "new code"
});
assert_eq!(
EditFileTool.still_streaming_ui_text(&input),
@@ -1333,11 +1345,7 @@ mod tests {
#[test]
fn still_streaming_ui_text_with_null() {
let input = EditFileToolInput {
path: "".into(),
display_description: "".into(),
mode: EditFileMode::Edit,
};
let input = serde_json::Value::Null;
assert_eq!(
EditFileTool.still_streaming_ui_text(&input),
@@ -1449,11 +1457,12 @@ mod tests {
// Have the model stream unformatted content
let edit_result = {
let edit_task = cx.update(|cx| {
let input = EditFileToolInput {
let input = serde_json::to_value(EditFileToolInput {
display_description: "Create main function".into(),
path: "root/src/main.rs".into(),
mode: EditFileMode::Overwrite,
};
})
.unwrap();
Arc::new(EditFileTool)
.run(
input,
@@ -1512,11 +1521,12 @@ mod tests {
// Stream unformatted edits again
let edit_result = {
let edit_task = cx.update(|cx| {
let input = EditFileToolInput {
let input = serde_json::to_value(EditFileToolInput {
display_description: "Update main function".into(),
path: "root/src/main.rs".into(),
mode: EditFileMode::Overwrite,
};
})
.unwrap();
Arc::new(EditFileTool)
.run(
input,
@@ -1590,11 +1600,12 @@ mod tests {
// Have the model stream content that contains trailing whitespace
let edit_result = {
let edit_task = cx.update(|cx| {
let input = EditFileToolInput {
let input = serde_json::to_value(EditFileToolInput {
display_description: "Create main function".into(),
path: "root/src/main.rs".into(),
mode: EditFileMode::Overwrite,
};
})
.unwrap();
Arc::new(EditFileTool)
.run(
input,
@@ -1646,11 +1657,12 @@ mod tests {
// Stream edits again with trailing whitespace
let edit_result = {
let edit_task = cx.update(|cx| {
let input = EditFileToolInput {
let input = serde_json::to_value(EditFileToolInput {
display_description: "Update main function".into(),
path: "root/src/main.rs".into(),
mode: EditFileMode::Overwrite,
};
})
.unwrap();
Arc::new(EditFileTool)
.run(
input,

View File

@@ -3,10 +3,10 @@ use std::sync::Arc;
use std::{borrow::Cow, cell::RefCell};
use crate::schema::json_schema_for;
use anyhow::{Context as _, Result, bail};
use anyhow::{Context as _, Result, anyhow, bail};
use assistant_tool::{ActionLog, Tool, ToolResult};
use futures::AsyncReadExt as _;
use gpui::{AnyWindowHandle, App, AppContext as _, Entity};
use gpui::{AnyWindowHandle, App, AppContext as _, Entity, Task};
use html_to_markdown::{TagHandler, convert_html_to_markdown, markdown};
use http_client::{AsyncBody, HttpClientWithUrl};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
@@ -113,13 +113,11 @@ impl FetchTool {
}
impl Tool for FetchTool {
type Input = FetchToolInput;
fn name(&self) -> String {
"fetch".to_string()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -139,13 +137,16 @@ impl Tool for FetchTool {
json_schema_for::<FetchToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
format!("Fetch {}", MarkdownEscaped(&input.url))
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<FetchToolInput>(input.clone()) {
Ok(input) => format!("Fetch {}", MarkdownEscaped(&input.url)),
Err(_) => "Fetch URL".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -153,6 +154,11 @@ impl Tool for FetchTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<FetchToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let text = cx.background_spawn({
let http_client = self.http_client.clone();
async move { Self::build_message(http_client, &input.url).await }

View File

@@ -51,13 +51,11 @@ const RESULTS_PER_PAGE: usize = 50;
pub struct FindPathTool;
impl Tool for FindPathTool {
type Input = FindPathToolInput;
fn name(&self) -> String {
"find_path".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -77,13 +75,16 @@ impl Tool for FindPathTool {
json_schema_for::<FindPathToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
format!("Find paths matching \"`{}`\"", input.glob)
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<FindPathToolInput>(input.clone()) {
Ok(input) => format!("Find paths matching “`{}`”", input.glob),
Err(_) => "Search paths".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -91,7 +92,10 @@ impl Tool for FindPathTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let (offset, glob) = (input.offset, input.glob);
let (offset, glob) = match serde_json::from_value::<FindPathToolInput>(input) {
Ok(input) => (input.offset, input.glob),
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let (sender, receiver) = oneshot::channel();

View File

@@ -0,0 +1,374 @@
use crate::commit::get_messages;
use crate::{GitRemote, Oid};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet};
use futures::AsyncWriteExt;
use gpui::SharedString;
use serde::{Deserialize, Serialize};
use std::process::Stdio;
use std::{ops::Range, path::Path};
use text::Rope;
use time::OffsetDateTime;
use time::UtcOffset;
use time::macros::format_description;
pub use git2 as libgit;
#[derive(Debug, Clone, Default)]
pub struct Blame {
pub entries: Vec<BlameEntry>,
pub messages: HashMap<Oid, String>,
pub remote_url: Option<String>,
}
#[derive(Clone, Debug, Default)]
pub struct ParsedCommitMessage {
pub message: SharedString,
pub permalink: Option<url::Url>,
pub pull_request: Option<crate::hosting_provider::PullRequest>,
pub remote: Option<GitRemote>,
}
impl Blame {
pub async fn for_path(
git_binary: &Path,
working_directory: &Path,
path: &Path,
content: &Rope,
remote_url: Option<String>,
) -> Result<Self> {
let output = run_git_blame(git_binary, working_directory, path, content).await?;
let mut entries = parse_git_blame(&output)?;
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
let mut unique_shas = HashSet::default();
for entry in entries.iter_mut() {
unique_shas.insert(entry.sha);
}
let shas = unique_shas.into_iter().collect::<Vec<_>>();
let messages = get_messages(working_directory, &shas)
.await
.context("failed to get commit messages")?;
Ok(Self {
entries,
messages,
remote_url,
})
}
}
const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD";
const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
async fn run_git_blame(
git_binary: &Path,
working_directory: &Path,
path: &Path,
contents: &Rope,
) -> Result<String> {
let mut child = util::command::new_smol_command(git_binary)
.current_dir(working_directory)
.arg("blame")
.arg("--incremental")
.arg("--contents")
.arg("-")
.arg(path.as_os_str())
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
let stdin = child
.stdin
.as_mut()
.context("failed to get pipe to stdin of git blame command")?;
for chunk in contents.chunks() {
stdin.write_all(chunk.as_bytes()).await?;
}
stdin.flush().await?;
let output = child
.output()
.await
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
let trimmed = stderr.trim();
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
return Ok(String::new());
}
return Err(anyhow!("git blame process failed: {}", stderr));
}
Ok(String::from_utf8(output.stdout)?)
}
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
pub struct BlameEntry {
pub sha: Oid,
pub range: Range<u32>,
pub original_line_number: u32,
pub author: Option<String>,
pub author_mail: Option<String>,
pub author_time: Option<i64>,
pub author_tz: Option<String>,
pub committer_name: Option<String>,
pub committer_email: Option<String>,
pub committer_time: Option<i64>,
pub committer_tz: Option<String>,
pub summary: Option<String>,
pub previous: Option<String>,
pub filename: String,
}
impl BlameEntry {
// Returns a BlameEntry by parsing the first line of a `git blame --incremental`
// entry. The line MUST have this format:
//
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
fn new_from_blame_line(line: &str) -> Result<BlameEntry> {
let mut parts = line.split_whitespace();
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
.ok_or_else(|| anyhow!("failed to parse sha"))?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
let range = start_line..end_line;
Ok(Self {
sha,
range,
original_line_number,
..Default::default()
})
}
pub fn author_offset_date_time(&self) -> Result<time::OffsetDateTime> {
if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) {
let format = format_description!("[offset_hour][offset_minute]");
let offset = UtcOffset::parse(author_tz, &format)?;
let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?;
Ok(date_time_utc.to_offset(offset))
} else {
// Directly return current time in UTC if there's no committer time or timezone
Ok(time::OffsetDateTime::now_utc())
}
}
}
// parse_git_blame parses the output of `git blame --incremental`, which returns
// all the blame-entries for a given path incrementally, as it finds them.
//
// Each entry *always* starts with:
//
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
//
// Each entry *always* ends with:
//
// filename <whitespace-quoted-filename-goes-here>
//
// Line numbers are 1-indexed.
//
// A `git blame --incremental` entry looks like this:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1
// author Joe Schmoe
// author-mail <joe.schmoe@example.com>
// author-time 1709741400
// author-tz +0100
// committer Joe Schmoe
// committer-mail <joe.schmoe@example.com>
// committer-time 1709741400
// committer-tz +0100
// summary Joe's cool commit
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// If the entry has the same SHA as an entry that was already printed then no
// signature information is printed:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html
fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
let mut entries: Vec<BlameEntry> = Vec::new();
let mut index: HashMap<Oid, usize> = HashMap::default();
let mut current_entry: Option<BlameEntry> = None;
for line in output.lines() {
let mut done = false;
match &mut current_entry {
None => {
let mut new_entry = BlameEntry::new_from_blame_line(line)?;
if let Some(existing_entry) = index
.get(&new_entry.sha)
.and_then(|slot| entries.get(*slot))
{
new_entry.author.clone_from(&existing_entry.author);
new_entry
.author_mail
.clone_from(&existing_entry.author_mail);
new_entry.author_time = existing_entry.author_time;
new_entry.author_tz.clone_from(&existing_entry.author_tz);
new_entry
.committer_name
.clone_from(&existing_entry.committer_name);
new_entry
.committer_email
.clone_from(&existing_entry.committer_email);
new_entry.committer_time = existing_entry.committer_time;
new_entry
.committer_tz
.clone_from(&existing_entry.committer_tz);
new_entry.summary.clone_from(&existing_entry.summary);
}
current_entry.replace(new_entry);
}
Some(entry) => {
let Some((key, value)) = line.split_once(' ') else {
continue;
};
let is_committed = !entry.sha.is_zero();
match key {
"filename" => {
entry.filename = value.into();
done = true;
}
"previous" => entry.previous = Some(value.into()),
"summary" if is_committed => entry.summary = Some(value.into()),
"author" if is_committed => entry.author = Some(value.into()),
"author-mail" if is_committed => entry.author_mail = Some(value.into()),
"author-time" if is_committed => {
entry.author_time = Some(value.parse::<i64>()?)
}
"author-tz" if is_committed => entry.author_tz = Some(value.into()),
"committer" if is_committed => entry.committer_name = Some(value.into()),
"committer-mail" if is_committed => entry.committer_email = Some(value.into()),
"committer-time" if is_committed => {
entry.committer_time = Some(value.parse::<i64>()?)
}
"committer-tz" if is_committed => entry.committer_tz = Some(value.into()),
_ => {}
}
}
};
if done {
if let Some(entry) = current_entry.take() {
index.insert(entry.sha, entries.len());
// We only want annotations that have a commit.
if !entry.sha.is_zero() {
entries.push(entry);
}
}
}
}
Ok(entries)
}
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use super::BlameEntry;
use super::parse_git_blame;
fn read_test_data(filename: &str) -> String {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push("test_data");
path.push(filename);
std::fs::read_to_string(&path)
.unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path))
}
fn assert_eq_golden(entries: &Vec<BlameEntry>, golden_filename: &str) {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push("test_data");
path.push("golden");
path.push(format!("{}.json", golden_filename));
let mut have_json =
serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON");
// We always want to save with a trailing newline.
have_json.push('\n');
let update = std::env::var("UPDATE_GOLDEN")
.map(|val| val.eq_ignore_ascii_case("true"))
.unwrap_or(false);
if update {
std::fs::create_dir_all(path.parent().unwrap())
.expect("could not create golden test data directory");
std::fs::write(&path, have_json).expect("could not write out golden data");
} else {
let want_json =
std::fs::read_to_string(&path).unwrap_or_else(|_| {
panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path);
}).replace("\r\n", "\n");
pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries");
}
}
#[test]
fn test_parse_git_blame_not_committed() {
let output = read_test_data("blame_incremental_not_committed");
let entries = parse_git_blame(&output).unwrap();
assert_eq_golden(&entries, "blame_incremental_not_committed");
}
#[test]
fn test_parse_git_blame_simple() {
let output = read_test_data("blame_incremental_simple");
let entries = parse_git_blame(&output).unwrap();
assert_eq_golden(&entries, "blame_incremental_simple");
}
#[test]
fn test_parse_git_blame_complex() {
let output = read_test_data("blame_incremental_complex");
let entries = parse_git_blame(&output).unwrap();
assert_eq_golden(&entries, "blame_incremental_complex");
}
}
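
Given `new_from_blame_line` and `parse_git_blame` above, here is a worked example feeding in the sample `--incremental` entry from the doc comment. This is an illustrative sketch, not a test from the diff; note the golden tests regenerate their fixtures when run with UPDATE_GOLDEN=true.

// Illustrative only: the sample entry from the comment above, parsed.
#[test]
fn test_parse_single_incremental_entry() {
    let output = "\
6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1
author Joe Schmoe
author-mail <joe.schmoe@example.com>
author-time 1709741400
author-tz +0100
committer Joe Schmoe
committer-mail <joe.schmoe@example.com>
committer-time 1709741400
committer-tz +0100
summary Joe's cool commit
previous 486c2409237a2c627230589e567024a96751d475 index.js
filename index.js
";
    let entries = parse_git_blame(output).unwrap();
    assert_eq!(entries.len(), 1);
    // Line numbers are 1-indexed in the output but stored as a 0-based range:
    // final line 2 with a count of 1 becomes the half-open range 1..2.
    assert_eq!(entries[0].range, 1..2);
    assert_eq!(entries[0].original_line_number, 2);
    assert_eq!(entries[0].author.as_deref(), Some("Joe Schmoe"));
    assert_eq!(entries[0].filename, "index.js");
}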

View File

@@ -53,13 +53,11 @@ const RESULTS_PER_PAGE: u32 = 20;
pub struct GrepTool;
impl Tool for GrepTool {
type Input = GrepToolInput;
fn name(&self) -> String {
"grep".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -79,25 +77,30 @@ impl Tool for GrepTool {
json_schema_for::<GrepToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
let page = input.page();
let regex_str = MarkdownInlineCode(&input.regex);
let case_info = if input.case_sensitive {
" (case-sensitive)"
} else {
""
};
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<GrepToolInput>(input.clone()) {
Ok(input) => {
let page = input.page();
let regex_str = MarkdownInlineCode(&input.regex);
let case_info = if input.case_sensitive {
" (case-sensitive)"
} else {
""
};
if page > 1 {
format!("Get page {page} of search results for regex {regex_str}{case_info}")
} else {
format!("Search files for regex {regex_str}{case_info}")
if page > 1 {
format!("Get page {page} of search results for regex {regex_str}{case_info}")
} else {
format!("Search files for regex {regex_str}{case_info}")
}
}
Err(_) => "Search with regex".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -108,6 +111,13 @@ impl Tool for GrepTool {
const CONTEXT_LINES: u32 = 2;
const MAX_ANCESTOR_LINES: u32 = 10;
let input = match serde_json::from_value::<GrepToolInput>(input) {
Ok(input) => input,
Err(error) => {
return Task::ready(Err(anyhow!("Failed to parse input: {error}"))).into();
}
};
let include_matcher = match PathMatcher::new(
input
.include_pattern
@@ -304,7 +314,7 @@ impl Tool for GrepTool {
mod tests {
use super::*;
use assistant_tool::Tool;
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use gpui::{TestAppContext, UpdateGlobal};
use language::{Language, LanguageConfig, LanguageMatcher};
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project, WorktreeSettings};
@@ -338,12 +348,13 @@ mod tests {
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
// Test with include pattern for Rust files inside the root of the project
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "println".to_string(),
include_pattern: Some("root/**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
assert!(result.contains("main.rs"), "Should find matches in main.rs");
@@ -357,12 +368,13 @@ mod tests {
);
// Test with include pattern for src directory only
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "fn".to_string(),
include_pattern: Some("root/**/src/**".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
assert!(
@@ -379,12 +391,13 @@ mod tests {
);
// Test with empty include pattern (should default to all files)
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "fn".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
assert!(result.contains("main.rs"), "Should find matches in main.rs");
@@ -415,12 +428,13 @@ mod tests {
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
// Test case-insensitive search (default)
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "uppercase".to_string(),
include_pattern: Some("**/*.txt".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
assert!(
@@ -429,12 +443,13 @@ mod tests {
);
// Test case-sensitive search
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "uppercase".to_string(),
include_pattern: Some("**/*.txt".to_string()),
offset: 0,
case_sensitive: true,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
assert!(
@@ -443,12 +458,13 @@ mod tests {
);
// Test case-sensitive search
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "LOWERCASE".to_string(),
include_pattern: Some("**/*.txt".to_string()),
offset: 0,
case_sensitive: true,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
@@ -458,12 +474,13 @@ mod tests {
);
// Test case-sensitive search for lowercase pattern
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "lowercase".to_string(),
include_pattern: Some("**/*.txt".to_string()),
offset: 0,
case_sensitive: true,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
assert!(
@@ -559,12 +576,13 @@ mod tests {
let project = setup_syntax_test(cx).await;
// Test: Line at the top level of the file
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "This is at the top level".to_string(),
include_pattern: Some("**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
let expected = r#"
@@ -588,12 +606,13 @@ mod tests {
let project = setup_syntax_test(cx).await;
// Test: Line inside a function body
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "Function in nested module".to_string(),
include_pattern: Some("**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
let expected = r#"
@@ -619,12 +638,13 @@ mod tests {
let project = setup_syntax_test(cx).await;
// Test: Line with a function argument
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "second_arg".to_string(),
include_pattern: Some("**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
let expected = r#"
@@ -654,12 +674,13 @@ mod tests {
let project = setup_syntax_test(cx).await;
// Test: Line inside an if block
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "Inside if block".to_string(),
include_pattern: Some("**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
let expected = r#"
@@ -684,12 +705,13 @@ mod tests {
let project = setup_syntax_test(cx).await;
// Test: Line in the middle of a long function - should show message about remaining lines
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "Line 5".to_string(),
include_pattern: Some("**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
let expected = r#"
@@ -724,12 +746,13 @@ mod tests {
let project = setup_syntax_test(cx).await;
// Test: Line in the long function
let input = GrepToolInput {
let input = serde_json::to_value(GrepToolInput {
regex: "Line 12".to_string(),
include_pattern: Some("**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
})
.unwrap();
let result = run_grep_tool(input, project.clone(), cx).await;
let expected = r#"
@@ -751,7 +774,7 @@ mod tests {
}
async fn run_grep_tool(
input: GrepToolInput,
input: serde_json::Value,
project: Entity<Project>,
cx: &mut TestAppContext,
) -> String {
@@ -853,12 +876,9 @@ mod tests {
// Searching for files outside the project worktree should return no results
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "outside_function".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "outside_function"
});
Arc::new(GrepTool)
.run(
input,
@@ -882,12 +902,9 @@ mod tests {
// Searching within the project should succeed
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "main".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "main"
});
Arc::new(GrepTool)
.run(
input,
@@ -911,12 +928,9 @@ mod tests {
// Searching files that match file_scan_exclusions should return no results
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "special_configuration".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "special_configuration"
});
Arc::new(GrepTool)
.run(
input,
@@ -939,12 +953,9 @@ mod tests {
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "custom_metadata".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "custom_metadata"
});
Arc::new(GrepTool)
.run(
input,
@@ -968,12 +979,9 @@ mod tests {
// Searching private files should return no results
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "SECRET_KEY".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "SECRET_KEY"
});
Arc::new(GrepTool)
.run(
input,
@@ -996,12 +1004,9 @@ mod tests {
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "private_key_content".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "private_key_content"
});
Arc::new(GrepTool)
.run(
input,
@@ -1024,12 +1029,9 @@ mod tests {
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "sensitive_data".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "sensitive_data"
});
Arc::new(GrepTool)
.run(
input,
@@ -1053,12 +1055,9 @@ mod tests {
// Searching a normal file should still work, even with private_files configured
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "normal_file_content".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "normal_file_content"
});
Arc::new(GrepTool)
.run(
input,
@@ -1082,12 +1081,10 @@ mod tests {
// Path traversal attempts with .. in include_pattern should not escape project
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "outside_function".to_string(),
include_pattern: Some("../outside_project/**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "outside_function",
"include_pattern": "../outside_project/**/*.rs"
});
Arc::new(GrepTool)
.run(
input,
@@ -1188,12 +1185,10 @@ mod tests {
// Search for "secret" - should exclude files based on worktree-specific settings
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "secret".to_string(),
include_pattern: None,
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "secret",
"case_sensitive": false
});
Arc::new(GrepTool)
.run(
input,
@@ -1255,12 +1250,10 @@ mod tests {
// Test with `include_pattern` specific to one worktree
let result = cx
.update(|cx| {
let input = GrepToolInput {
regex: "secret".to_string(),
include_pattern: Some("worktree1/**/*.rs".to_string()),
offset: 0,
case_sensitive: false,
};
let input = json!({
"regex": "secret",
"include_pattern": "worktree1/**/*.rs"
});
Arc::new(GrepTool)
.run(
input,

View File

@@ -41,13 +41,11 @@ pub struct ListDirectoryToolInput {
pub struct ListDirectoryTool;
impl Tool for ListDirectoryTool {
type Input = ListDirectoryToolInput;
fn name(&self) -> String {
"list_directory".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -67,14 +65,19 @@ impl Tool for ListDirectoryTool {
json_schema_for::<ListDirectoryToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
let path = MarkdownInlineCode(&input.path);
format!("List the {path} directory's contents")
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<ListDirectoryToolInput>(input.clone()) {
Ok(input) => {
let path = MarkdownInlineCode(&input.path);
format!("List the {path} directory's contents")
}
Err(_) => "List directory".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -82,6 +85,11 @@ impl Tool for ListDirectoryTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<ListDirectoryToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
// Sometimes models will return these even though we tell it to give a path and not a glob.
// When this happens, just list the root worktree directories.
if matches!(input.path.as_str(), "." | "" | "./" | "*") {
@@ -218,7 +226,7 @@ impl Tool for ListDirectoryTool {
mod tests {
use super::*;
use assistant_tool::Tool;
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use gpui::{TestAppContext, UpdateGlobal};
use indoc::indoc;
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project, WorktreeSettings};
@@ -277,9 +285,9 @@ mod tests {
let tool = Arc::new(ListDirectoryTool);
// Test listing root directory
let input = ListDirectoryToolInput {
path: "project".to_string(),
};
let input = json!({
"path": "project"
});
let result = cx
.update(|cx| {
@@ -312,9 +320,9 @@ mod tests {
);
// Test listing src directory
let input = ListDirectoryToolInput {
path: "project/src".to_string(),
};
let input = json!({
"path": "project/src"
});
let result = cx
.update(|cx| {
@@ -347,9 +355,9 @@ mod tests {
);
// Test listing directory with only files
let input = ListDirectoryToolInput {
path: "project/tests".to_string(),
};
let input = json!({
"path": "project/tests"
});
let result = cx
.update(|cx| {
@@ -391,9 +399,9 @@ mod tests {
let model = Arc::new(FakeLanguageModel::default());
let tool = Arc::new(ListDirectoryTool);
let input = ListDirectoryToolInput {
path: "project/empty_dir".to_string(),
};
let input = json!({
"path": "project/empty_dir"
});
let result = cx
.update(|cx| tool.run(input, Arc::default(), project, action_log, model, None, cx))
@@ -424,9 +432,9 @@ mod tests {
let tool = Arc::new(ListDirectoryTool);
// Test non-existent path
let input = ListDirectoryToolInput {
path: "project/nonexistent".to_string(),
};
let input = json!({
"path": "project/nonexistent"
});
let result = cx
.update(|cx| {
@@ -447,9 +455,9 @@ mod tests {
assert!(result.unwrap_err().to_string().contains("Path not found"));
// Test trying to list a file instead of directory
let input = ListDirectoryToolInput {
path: "project/file.txt".to_string(),
};
let input = json!({
"path": "project/file.txt"
});
let result = cx
.update(|cx| tool.run(input, Arc::default(), project, action_log, model, None, cx))
@@ -519,9 +527,9 @@ mod tests {
let tool = Arc::new(ListDirectoryTool);
// Listing root directory should exclude private and excluded files
let input = ListDirectoryToolInput {
path: "project".to_string(),
};
let input = json!({
"path": "project"
});
let result = cx
.update(|cx| {
@@ -560,9 +568,9 @@ mod tests {
);
// Trying to list an excluded directory should fail
let input = ListDirectoryToolInput {
path: "project/.secretdir".to_string(),
};
let input = json!({
"path": "project/.secretdir"
});
let result = cx
.update(|cx| {
@@ -592,9 +600,9 @@ mod tests {
);
// Listing a directory should exclude private files within it
let input = ListDirectoryToolInput {
path: "project/visible_dir".to_string(),
};
let input = json!({
"path": "project/visible_dir"
});
let result = cx
.update(|cx| {
@@ -712,9 +720,9 @@ mod tests {
let tool = Arc::new(ListDirectoryTool);
// Test listing worktree1/src - should exclude secret.rs and config.toml based on local settings
let input = ListDirectoryToolInput {
path: "worktree1/src".to_string(),
};
let input = json!({
"path": "worktree1/src"
});
let result = cx
.update(|cx| {
@@ -744,9 +752,9 @@ mod tests {
);
// Test listing worktree1/tests - should exclude fixture.sql based on local settings
let input = ListDirectoryToolInput {
path: "worktree1/tests".to_string(),
};
let input = json!({
"path": "worktree1/tests"
});
let result = cx
.update(|cx| {
@@ -772,9 +780,9 @@ mod tests {
);
// Test listing worktree2/lib - should exclude private.js and data.json based on local settings
let input = ListDirectoryToolInput {
path: "worktree2/lib".to_string(),
};
let input = json!({
"path": "worktree2/lib"
});
let result = cx
.update(|cx| {
@@ -804,9 +812,9 @@ mod tests {
);
// Test listing worktree2/docs - should exclude internal.md based on local settings
let input = ListDirectoryToolInput {
path: "worktree2/docs".to_string(),
};
let input = json!({
"path": "worktree2/docs"
});
let result = cx
.update(|cx| {
@@ -832,9 +840,9 @@ mod tests {
);
// Test trying to list an excluded directory directly
let input = ListDirectoryToolInput {
path: "worktree1/src/secret.rs".to_string(),
};
let input = json!({
"path": "worktree1/src/secret.rs"
});
let result = cx
.update(|cx| {

View File

@@ -38,13 +38,11 @@ pub struct MovePathToolInput {
pub struct MovePathTool;
impl Tool for MovePathTool {
type Input = MovePathToolInput;
fn name(&self) -> String {
"move_path".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -64,29 +62,34 @@ impl Tool for MovePathTool {
json_schema_for::<MovePathToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
let src = MarkdownInlineCode(&input.source_path);
let dest = MarkdownInlineCode(&input.destination_path);
let src_path = Path::new(&input.source_path);
let dest_path = Path::new(&input.destination_path);
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<MovePathToolInput>(input.clone()) {
Ok(input) => {
let src = MarkdownInlineCode(&input.source_path);
let dest = MarkdownInlineCode(&input.destination_path);
let src_path = Path::new(&input.source_path);
let dest_path = Path::new(&input.destination_path);
match dest_path
.file_name()
.and_then(|os_str| os_str.to_os_string().into_string().ok())
{
Some(filename) if src_path.parent() == dest_path.parent() => {
let filename = MarkdownInlineCode(&filename);
format!("Rename {src} to {filename}")
}
_ => {
format!("Move {src} to {dest}")
match dest_path
.file_name()
.and_then(|os_str| os_str.to_os_string().into_string().ok())
{
Some(filename) if src_path.parent() == dest_path.parent() => {
let filename = MarkdownInlineCode(&filename);
format!("Rename {src} to {filename}")
}
_ => {
format!("Move {src} to {dest}")
}
}
}
Err(_) => "Move path".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -94,6 +97,10 @@ impl Tool for MovePathTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<MovePathToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let rename_task = project.update(cx, |project, cx| {
match project
.find_project_path(&input.source_path, cx)

View File
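The `ui_text` branch above reports a rename when source and destination share a parent directory, and a move otherwise. A sketch of the two outcomes, with hypothetical paths and assuming `MarkdownInlineCode` wraps its argument in backticks:

#[test]
fn ui_text_rename_vs_move() {
    let tool = std::sync::Arc::new(MovePathTool);

    // Same parent directory, so this renders as a rename.
    let rename = serde_json::json!({
        "source_path": "src/old_name.rs",
        "destination_path": "src/new_name.rs"
    });
    assert_eq!(tool.ui_text(&rename), "Rename `src/old_name.rs` to `new_name.rs`");

    // Different parent directories, so this renders as a move.
    let relocate = serde_json::json!({
        "source_path": "src/util.rs",
        "destination_path": "crates/core/util.rs"
    });
    assert_eq!(tool.ui_text(&relocate), "Move `src/util.rs` to `crates/core/util.rs`");
}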

@@ -1,7 +1,7 @@
use std::sync::Arc;
use crate::schema::json_schema_for;
use anyhow::Result;
use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use chrono::{Local, Utc};
use gpui::{AnyWindowHandle, App, Entity, Task};
@@ -29,13 +29,11 @@ pub struct NowToolInput {
pub struct NowTool;
impl Tool for NowTool {
type Input = NowToolInput;
fn name(&self) -> String {
"now".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -55,13 +53,13 @@ impl Tool for NowTool {
json_schema_for::<NowToolInput>(format)
}
fn ui_text(&self, _input: &Self::Input) -> String {
fn ui_text(&self, _input: &serde_json::Value) -> String {
"Get current time".to_string()
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -69,6 +67,11 @@ impl Tool for NowTool {
_window: Option<AnyWindowHandle>,
_cx: &mut App,
) -> ToolResult {
let input: NowToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let now = match input.timezone {
Timezone::Utc => Utc::now().to_rfc3339(),
Timezone::Local => Local::now().to_rfc3339(),

View File

@@ -1,7 +1,7 @@
use crate::schema::json_schema_for;
use anyhow::{Context as _, Result};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::{AnyWindowHandle, App, AppContext, Entity};
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use project::Project;
use schemars::JsonSchema;
@@ -19,13 +19,11 @@ pub struct OpenToolInput {
pub struct OpenTool;
impl Tool for OpenTool {
type Input = OpenToolInput;
fn name(&self) -> String {
"open".to_string()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
true
}
fn may_perform_edits(&self) -> bool {
@@ -43,13 +41,16 @@ impl Tool for OpenTool {
json_schema_for::<OpenToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
format!("Open `{}`", MarkdownEscaped(&input.path_or_url))
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<OpenToolInput>(input.clone()) {
Ok(input) => format!("Open `{}`", MarkdownEscaped(&input.path_or_url)),
Err(_) => "Open file or URL".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -57,6 +58,11 @@ impl Tool for OpenTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input: OpenToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
// If path_or_url turns out to be a path in the project, make it absolute.
let abs_path = to_absolute_path(&input.path_or_url, project, cx);

View File

@@ -51,13 +51,11 @@ pub struct ReadFileToolInput {
pub struct ReadFileTool;
impl Tool for ReadFileTool {
type Input = ReadFileToolInput;
fn name(&self) -> String {
"read_file".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -77,18 +75,23 @@ impl Tool for ReadFileTool {
json_schema_for::<ReadFileToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
let path = MarkdownInlineCode(&input.path);
match (input.start_line, input.end_line) {
(Some(start), None) => format!("Read file {path} (from line {start})"),
(Some(start), Some(end)) => format!("Read file {path} (lines {start}-{end})"),
_ => format!("Read file {path}"),
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<ReadFileToolInput>(input.clone()) {
Ok(input) => {
let path = MarkdownInlineCode(&input.path);
match (input.start_line, input.end_line) {
(Some(start), None) => format!("Read file {path} (from line {start})"),
(Some(start), Some(end)) => format!("Read file {path} (lines {start}-{end})"),
_ => format!("Read file {path}"),
}
}
Err(_) => "Read file".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
@@ -96,6 +99,11 @@ impl Tool for ReadFileTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<ReadFileToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
return Task::ready(Err(anyhow!("Path {} not found in project", &input.path))).into();
};
@@ -281,7 +289,7 @@ impl Tool for ReadFileTool {
#[cfg(test)]
mod test {
use super::*;
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use gpui::{TestAppContext, UpdateGlobal};
use language::{Language, LanguageConfig, LanguageMatcher};
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project, WorktreeSettings};
@@ -300,12 +308,9 @@ mod test {
let model = Arc::new(FakeLanguageModel::default());
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/nonexistent_file.txt".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "root/nonexistent_file.txt"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -342,11 +347,9 @@ mod test {
let model = Arc::new(FakeLanguageModel::default());
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/small_file.txt".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "root/small_file.txt"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -386,11 +389,9 @@ mod test {
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/large_file.rs".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "root/large_file.rs"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -420,11 +421,10 @@ mod test {
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/large_file.rs".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "root/large_file.rs",
"offset": 1
});
Arc::new(ReadFileTool)
.run(
input,
@@ -477,11 +477,11 @@ mod test {
let model = Arc::new(FakeLanguageModel::default());
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/multiline.txt".to_string(),
start_line: Some(2),
end_line: Some(4),
};
let input = json!({
"path": "root/multiline.txt",
"start_line": 2,
"end_line": 4
});
Arc::new(ReadFileTool)
.run(
input,
@@ -520,11 +520,11 @@ mod test {
// start_line of 0 should be treated as 1
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/multiline.txt".to_string(),
start_line: Some(0),
end_line: Some(2),
};
let input = json!({
"path": "root/multiline.txt",
"start_line": 0,
"end_line": 2
});
Arc::new(ReadFileTool)
.run(
input,
@@ -543,11 +543,11 @@ mod test {
// end_line of 0 should result in at least 1 line
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/multiline.txt".to_string(),
start_line: Some(1),
end_line: Some(0),
};
let input = json!({
"path": "root/multiline.txt",
"start_line": 1,
"end_line": 0
});
Arc::new(ReadFileTool)
.run(
input,
@@ -566,11 +566,11 @@ mod test {
// when start_line > end_line, should still return at least 1 line
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "root/multiline.txt".to_string(),
start_line: Some(3),
end_line: Some(2),
};
let input = json!({
"path": "root/multiline.txt",
"start_line": 3,
"end_line": 2
});
Arc::new(ReadFileTool)
.run(
input,
@@ -694,11 +694,9 @@ mod test {
// Reading a file outside the project worktree should fail
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "/outside_project/sensitive_file.txt".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "/outside_project/sensitive_file.txt"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -720,11 +718,9 @@ mod test {
// Reading a file within the project should succeed
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/allowed_file.txt".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/allowed_file.txt"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -746,11 +742,9 @@ mod test {
// Reading files that match file_scan_exclusions should fail
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/.secretdir/config".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/.secretdir/config"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -771,11 +765,9 @@ mod test {
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/.mymetadata".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/.mymetadata"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -797,11 +789,9 @@ mod test {
// Reading private files should fail
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/secrets/.mysecrets".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/.mysecrets"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -822,11 +812,9 @@ mod test {
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/subdir/special.privatekey".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/subdir/special.privatekey"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -847,11 +835,9 @@ mod test {
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/subdir/data.mysensitive".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/subdir/data.mysensitive"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -873,11 +859,9 @@ mod test {
// Reading a normal file should still work, even with private_files configured
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/subdir/normal_file.txt".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/subdir/normal_file.txt"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -900,11 +884,9 @@ mod test {
// Path traversal attempts with .. should fail
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
path: "project_root/../outside_project/sensitive_file.txt".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "project_root/../outside_project/sensitive_file.txt"
});
Arc::new(ReadFileTool)
.run(
input,
@@ -999,11 +981,9 @@ mod test {
let tool = Arc::new(ReadFileTool);
// Test reading allowed files in worktree1
let input = ReadFileToolInput {
path: "worktree1/src/main.rs".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree1/src/main.rs"
});
let result = cx
.update(|cx| {
@@ -1027,11 +1007,9 @@ mod test {
);
// Test reading private file in worktree1 should fail
let input = ReadFileToolInput {
path: "worktree1/src/secret.rs".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree1/src/secret.rs"
});
let result = cx
.update(|cx| {
@@ -1058,11 +1036,9 @@ mod test {
);
// Test reading excluded file in worktree1 should fail
let input = ReadFileToolInput {
path: "worktree1/tests/fixture.sql".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree1/tests/fixture.sql"
});
let result = cx
.update(|cx| {
@@ -1089,11 +1065,9 @@ mod test {
);
// Test reading allowed files in worktree2
let input = ReadFileToolInput {
path: "worktree2/lib/public.js".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree2/lib/public.js"
});
let result = cx
.update(|cx| {
@@ -1117,11 +1091,9 @@ mod test {
);
// Test reading private file in worktree2 should fail
let input = ReadFileToolInput {
path: "worktree2/lib/private.js".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree2/lib/private.js"
});
let result = cx
.update(|cx| {
@@ -1148,11 +1120,9 @@ mod test {
);
// Test reading excluded file in worktree2 should fail
let input = ReadFileToolInput {
path: "worktree2/docs/internal.md".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree2/docs/internal.md"
});
let result = cx
.update(|cx| {
@@ -1180,11 +1150,9 @@ mod test {
// Test that files allowed in one worktree but not in another are handled correctly
// (e.g., config.toml is private in worktree1 but doesn't exist in worktree2)
let input = ReadFileToolInput {
path: "worktree1/src/config.toml".to_string(),
start_line: None,
end_line: None,
};
let input = json!({
"path": "worktree1/src/config.toml"
});
let result = cx
.update(|cx| {

View File

@@ -22,10 +22,12 @@ fn schema_to_json(
Ok(value)
}
fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
pub fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
let mut generator = match format {
LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
// TODO: Gemini docs mention using a subset of OpenAPI 3, so this may benefit from using
// `SchemaSettings::openapi3()`.
LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::draft07()
.with(|settings| {
settings.meta_schema = None;
settings.inline_subschemas = true;

View File
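Since the hunk above makes `root_schema_for` public, callers outside the module can now generate a schema directly. A sketch under the assumption that `Schema` and `LanguageModelToolSchemaFormat` are imported as in the surrounding module; `MyToolInput` is hypothetical:

// Sketch: generating a JSON schema for a hypothetical tool input type.
#[derive(serde::Deserialize, schemars::JsonSchema)]
struct MyToolInput {
    path: String,
}

fn my_tool_schema(format: LanguageModelToolSchemaFormat) -> Schema {
    // Both variants now use draft07 settings; JsonSchemaSubset additionally
    // drops the meta-schema and inlines subschemas, per the hunk above.
    root_schema_for::<MyToolInput>(format)
}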

@@ -0,0 +1,47 @@
You are an expert text editor. Given the following file as input:
```{{path}}
{{file_content}}
```
Produce a series of edits following the given user instructions:
<user_instructions>
{{instructions}}
</user_instructions>
Your response must be a series of edits in the following format:
<edits>
<old_text>
OLD TEXT 1 HERE
</old_text>
<new_text>
NEW TEXT 1 HERE
</new_text>
<old_text>
OLD TEXT 2 HERE
</old_text>
<new_text>
NEW TEXT 2 HERE
</new_text>
<old_text>
OLD TEXT 3 HERE
</old_text>
<new_text>
NEW TEXT 3 HERE
</new_text>
</edits>
Rules for editing:
- `old_text` represents full lines (including indentation) in the input file that will be replaced with `new_text`.
- It is crucial that `old_text` is unique and unambiguous.
- Always include enough context around the lines you want to replace in `old_text` such that it's impossible to mistake them for other lines.
- If you want to replace all occurrences, repeat the same `old_text`/`new_text` pair multiple times and I will apply them sequentially, one occurrence at a time.
- Don't explain why you made a change, just report the edits.
- Make sure you follow the instructions carefully and thoroughly, avoid doing *less* or *more* than instructed.
<edits>

View File

@@ -2,7 +2,7 @@ use crate::{
schema::json_schema_for,
ui::{COLLAPSED_LINES, ToolOutputPreview},
};
use anyhow::{Context as _, Result};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus};
use futures::{FutureExt as _, future::Shared};
use gpui::{
@@ -72,13 +72,11 @@ impl TerminalTool {
}
impl Tool for TerminalTool {
type Input = TerminalToolInput;
fn name(&self) -> String {
Self::NAME.to_string()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
true
}
@@ -98,24 +96,30 @@ impl Tool for TerminalTool {
json_schema_for::<TerminalToolInput>(format)
}
fn ui_text(&self, input: &Self::Input) -> String {
let mut lines = input.command.lines();
let first_line = lines.next().unwrap_or_default();
let remaining_line_count = lines.count();
match remaining_line_count {
0 => MarkdownInlineCode(&first_line).to_string(),
1 => MarkdownInlineCode(&format!(
"{} - {} more line",
first_line, remaining_line_count
))
.to_string(),
n => MarkdownInlineCode(&format!("{} - {} more lines", first_line, n)).to_string(),
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<TerminalToolInput>(input.clone()) {
Ok(input) => {
let mut lines = input.command.lines();
let first_line = lines.next().unwrap_or_default();
let remaining_line_count = lines.count();
match remaining_line_count {
0 => MarkdownInlineCode(&first_line).to_string(),
1 => MarkdownInlineCode(&format!(
"{} - {} more line",
first_line, remaining_line_count
))
.to_string(),
n => MarkdownInlineCode(&format!("{} - {} more lines", first_line, n))
.to_string(),
}
}
Err(_) => "Run terminal command".to_string(),
}
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -123,6 +127,11 @@ impl Tool for TerminalTool {
window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input: TerminalToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let working_dir = match working_dir(&input, &project, cx) {
Ok(dir) => dir,
Err(err) => return Task::ready(Err(err)).into(),
@@ -747,7 +756,7 @@ mod tests {
let result = cx.update(|cx| {
TerminalTool::run(
Arc::new(TerminalTool::new(cx)),
input,
serde_json::to_value(input).unwrap(),
Arc::default(),
project.clone(),
action_log.clone(),
@@ -782,7 +791,7 @@ mod tests {
let check = |input, expected, cx: &mut App| {
let headless_result = TerminalTool::run(
Arc::new(TerminalTool::new(cx)),
input,
serde_json::to_value(input).unwrap(),
Arc::default(),
project.clone(),
action_log.clone(),

View File

@@ -1,7 +1,7 @@
use std::sync::Arc;
use crate::schema::json_schema_for;
use anyhow::Result;
use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::{AnyWindowHandle, App, Entity, Task};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
@@ -20,13 +20,11 @@ pub struct ThinkingToolInput {
pub struct ThinkingTool;
impl Tool for ThinkingTool {
type Input = ThinkingToolInput;
fn name(&self) -> String {
"thinking".to_string()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -46,13 +44,13 @@ impl Tool for ThinkingTool {
json_schema_for::<ThinkingToolInput>(format)
}
fn ui_text(&self, _input: &Self::Input) -> String {
fn ui_text(&self, _input: &serde_json::Value) -> String {
"Thinking".to_string()
}
fn run(
self: Arc<Self>,
_input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -61,6 +59,10 @@ impl Tool for ThinkingTool {
_cx: &mut App,
) -> ToolResult {
// This tool just "thinks out loud" and doesn't perform any actions.
Task::ready(Ok("Finished thinking.".to_string().into())).into()
Task::ready(match serde_json::from_value::<ThinkingToolInput>(input) {
Ok(_input) => Ok("Finished thinking.".to_string().into()),
Err(err) => Err(anyhow!(err)),
})
.into()
}
}

View File

@@ -28,13 +28,11 @@ pub struct WebSearchToolInput {
pub struct WebSearchTool;
impl Tool for WebSearchTool {
type Input = WebSearchToolInput;
fn name(&self) -> String {
"web_search".into()
}
fn needs_confirmation(&self, _: &Self::Input, _: &App) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}
@@ -54,13 +52,13 @@ impl Tool for WebSearchTool {
json_schema_for::<WebSearchToolInput>(format)
}
fn ui_text(&self, _input: &Self::Input) -> String {
fn ui_text(&self, _input: &serde_json::Value) -> String {
"Searching the Web".to_string()
}
fn run(
self: Arc<Self>,
input: Self::Input,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
@@ -68,6 +66,10 @@ impl Tool for WebSearchTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let input = match serde_json::from_value::<WebSearchToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
};
let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else {
return Task::ready(Err(anyhow!("Web search is not available."))).into();
};

View File

@@ -28,17 +28,7 @@ use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);
actions!(
auto_update,
[
/// Checks for available updates.
Check,
/// Dismisses the update error message.
DismissErrorMessage,
/// Opens the release notes for the current version in a browser.
ViewReleaseNotes,
]
);
actions!(auto_update, [Check, DismissErrorMessage, ViewReleaseNotes,]);
#[derive(Serialize)]
struct UpdateRequestBody {

View File

@@ -12,13 +12,7 @@ use workspace::Workspace;
use workspace::notifications::simple_message_notification::MessageNotification;
use workspace::notifications::{NotificationId, show_app_notification};
actions!(
auto_update,
[
/// Opens the release notes for the current version in a new tab.
ViewReleaseNotesLocally
]
);
actions!(auto_update, [ViewReleaseNotesLocally]);
pub fn init(cx: &mut App) {
notify_if_app_was_updated(cx);

View File

@@ -29,7 +29,7 @@ client.workspace = true
collections.workspace = true
fs.workspace = true
futures.workspace = true
gpui = { workspace = true, features = ["screen-capture"] }
gpui.workspace = true
language.workspace = true
log.workspace = true
postage.workspace = true

View File

@@ -81,17 +81,7 @@ pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(500);
pub const MAX_RECONNECTION_DELAY: Duration = Duration::from_secs(10);
pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20);
actions!(
client,
[
/// Signs in to Zed account.
SignIn,
/// Signs out of Zed account.
SignOut,
/// Reconnects to the collaboration server.
Reconnect
]
);
actions!(client, [SignIn, SignOut, Reconnect]);
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ClientSettingsContent {

View File

@@ -35,7 +35,6 @@ dashmap.workspace = true
derive_more.workspace = true
envy = "0.4.2"
futures.workspace = true
gpui = { workspace = true, features = ["screen-capture"] }
hex.workspace = true
http_client.workspace = true
jsonwebtoken.workspace = true

View File

@@ -107,7 +107,7 @@ CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("proj
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
CREATE TABLE "project_repositories" (
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"project_id" INTEGER NOT NULL,
"abs_path" VARCHAR,
"id" INTEGER NOT NULL,
"entry_ids" VARCHAR,
@@ -124,7 +124,7 @@ CREATE TABLE "project_repositories" (
CREATE INDEX "index_project_repositories_on_project_id" ON "project_repositories" ("project_id");
CREATE TABLE "project_repository_statuses" (
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"project_id" INTEGER NOT NULL,
"repository_id" INTEGER NOT NULL,
"repo_path" VARCHAR NOT NULL,
"status" INT8 NOT NULL,

View File

@@ -1,25 +0,0 @@
DELETE FROM project_repositories
WHERE project_id NOT IN (SELECT id FROM projects);
ALTER TABLE project_repositories
ADD CONSTRAINT fk_project_repositories_project_id
FOREIGN KEY (project_id)
REFERENCES projects (id)
ON DELETE CASCADE
NOT VALID;
ALTER TABLE project_repositories
VALIDATE CONSTRAINT fk_project_repositories_project_id;
DELETE FROM project_repository_statuses
WHERE project_id NOT IN (SELECT id FROM projects);
ALTER TABLE project_repository_statuses
ADD CONSTRAINT fk_project_repository_statuses_project_id
FOREIGN KEY (project_id)
REFERENCES projects (id)
ON DELETE CASCADE
NOT VALID;
ALTER TABLE project_repository_statuses
VALIDATE CONSTRAINT fk_project_repository_statuses_project_id;

View File

@@ -1404,9 +1404,6 @@ async fn sync_model_request_usage_with_stripe(
llm_db: &Arc<LlmDatabase>,
stripe_billing: &Arc<StripeBilling>,
) -> anyhow::Result<()> {
log::info!("Stripe usage sync: Starting");
let started_at = Utc::now();
let staff_users = app.db.get_staff_users().await?;
let staff_user_ids = staff_users
.iter()
@@ -1451,10 +1448,6 @@ async fn sync_model_request_usage_with_stripe(
.find_price_by_lookup_key("claude-3-7-sonnet-requests-max")
.await?;
let usage_meter_count = usage_meters.len();
log::info!("Stripe usage sync: Syncing {usage_meter_count} usage meters");
for (usage_meter, usage) in usage_meters {
maybe!(async {
let Some((billing_customer, billing_subscription)) =
@@ -1511,10 +1504,5 @@ async fn sync_model_request_usage_with_stripe(
.log_err();
}
log::info!(
"Stripe usage sync: Synced {usage_meter_count} usage meters in {:?}",
Utc::now() - started_at
);
Ok(())
}

View File

@@ -4,19 +4,20 @@ mod tables;
#[cfg(test)]
pub mod tests;
use crate::{Error, Result};
use crate::{Error, Result, executor::Executor};
use anyhow::{Context as _, anyhow};
use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use dashmap::DashMap;
use futures::StreamExt;
use project_repository_statuses::StatusKind;
use rand::{Rng, SeedableRng, prelude::StdRng};
use rpc::ExtensionProvides;
use rpc::{
ConnectionId, ExtensionMetadata,
proto::{self},
};
use sea_orm::{
ActiveValue, Condition, ConnectionTrait, DatabaseConnection, DatabaseTransaction,
ActiveValue, Condition, ConnectionTrait, DatabaseConnection, DatabaseTransaction, DbErr,
FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
TransactionTrait,
entity::prelude::*,
@@ -32,6 +33,7 @@ use std::{
ops::{Deref, DerefMut},
rc::Rc,
sync::Arc,
time::Duration,
};
use time::PrimitiveDateTime;
use tokio::sync::{Mutex, OwnedMutexGuard};
@@ -56,7 +58,6 @@ pub use tables::*;
#[cfg(test)]
pub struct DatabaseTestOptions {
pub executor: gpui::BackgroundExecutor,
pub runtime: tokio::runtime::Runtime,
pub query_failure_probability: parking_lot::Mutex<f64>,
}
@@ -68,6 +69,8 @@ pub struct Database {
pool: DatabaseConnection,
rooms: DashMap<RoomId, Arc<Mutex<()>>>,
projects: DashMap<ProjectId, Arc<Mutex<()>>>,
rng: Mutex<StdRng>,
executor: Executor,
notification_kinds_by_id: HashMap<NotificationKindId, &'static str>,
notification_kinds_by_name: HashMap<String, NotificationKindId>,
#[cfg(test)]
@@ -78,15 +81,17 @@ pub struct Database {
// separate files in the `queries` folder.
impl Database {
/// Connects to the database with the given options
pub async fn new(options: ConnectOptions) -> Result<Self> {
pub async fn new(options: ConnectOptions, executor: Executor) -> Result<Self> {
sqlx::any::install_default_drivers();
Ok(Self {
options: options.clone(),
pool: sea_orm::Database::connect(options).await?,
rooms: DashMap::with_capacity(16384),
projects: DashMap::with_capacity(16384),
rng: Mutex::new(StdRng::seed_from_u64(0)),
notification_kinds_by_id: HashMap::default(),
notification_kinds_by_name: HashMap::default(),
executor,
#[cfg(test)]
test_options: None,
})
@@ -102,13 +107,48 @@ impl Database {
self.projects.clear();
}
/// Transaction runs things in a transaction. If you want to call other methods
/// and pass the transaction around you need to reborrow the transaction at each
/// call site with: `&*tx`.
pub async fn transaction<F, Fut, T>(&self, f: F) -> Result<T>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let body = async {
let (tx, result) = self.with_transaction(&f).await?;
let mut i = 0;
loop {
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(result) => match tx.commit().await.map_err(Into::into) {
Ok(()) => return Ok(result),
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
},
Err(error) => {
tx.rollback().await?;
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
i += 1;
}
};
self.run(body).await
}
pub async fn weak_transaction<F, Fut, T>(&self, f: F) -> Result<T>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let body = async {
let (tx, result) = self.with_weak_transaction(&f).await?;
match result {
Ok(result) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(result),
@@ -134,28 +174,44 @@ impl Database {
Fut: Send + Future<Output = Result<Option<(RoomId, T)>>>,
{
let body = async {
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(Some((room_id, data))) => {
let lock = self.rooms.entry(room_id).or_default().clone();
let _guard = lock.lock_owned().await;
match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(Some(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
})),
Err(error) => Err(error),
let mut i = 0;
loop {
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(Some((room_id, data))) => {
let lock = self.rooms.entry(room_id).or_default().clone();
let _guard = lock.lock_owned().await;
match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(Some(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
}));
}
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
}
Ok(None) => match tx.commit().await.map_err(Into::into) {
Ok(()) => return Ok(None),
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
},
Err(error) => {
tx.rollback().await?;
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
Ok(None) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(None),
Err(error) => Err(error),
},
Err(error) => {
tx.rollback().await?;
Err(error)
}
i += 1;
}
};
@@ -173,26 +229,38 @@ impl Database {
{
let room_id = Database::room_id_for_project(self, project_id).await?;
let body = async {
let lock = if let Some(room_id) = room_id {
self.rooms.entry(room_id).or_default().clone()
} else {
self.projects.entry(project_id).or_default().clone()
};
let _guard = lock.lock_owned().await;
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
}),
Err(error) => Err(error),
},
Err(error) => {
tx.rollback().await?;
Err(error)
let mut i = 0;
loop {
let lock = if let Some(room_id) = room_id {
self.rooms.entry(room_id).or_default().clone()
} else {
self.projects.entry(project_id).or_default().clone()
};
let _guard = lock.lock_owned().await;
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
});
}
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
},
Err(error) => {
tx.rollback().await?;
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
i += 1;
}
};
@@ -212,22 +280,34 @@ impl Database {
Fut: Send + Future<Output = Result<T>>,
{
let body = async {
let lock = self.rooms.entry(room_id).or_default().clone();
let _guard = lock.lock_owned().await;
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
}),
Err(error) => Err(error),
},
Err(error) => {
tx.rollback().await?;
Err(error)
let mut i = 0;
loop {
let lock = self.rooms.entry(room_id).or_default().clone();
let _guard = lock.lock_owned().await;
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
});
}
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
},
Err(error) => {
tx.rollback().await?;
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
i += 1;
}
};
@@ -235,6 +315,28 @@ impl Database {
}
async fn with_transaction<F, Fut, T>(&self, f: &F) -> Result<(DatabaseTransaction, Result<T>)>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let tx = self
.pool
.begin_with_config(Some(IsolationLevel::Serializable), None)
.await?;
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
let tx = Arc::get_mut(&mut tx)
.and_then(|tx| tx.take())
.context("couldn't complete transaction because it's still in use")?;
Ok((tx, result))
}
async fn with_weak_transaction<F, Fut, T>(
&self,
f: &F,
) -> Result<(DatabaseTransaction, Result<T>)>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
@@ -259,13 +361,13 @@ impl Database {
{
#[cfg(test)]
{
use rand::prelude::*;
let test_options = self.test_options.as_ref().unwrap();
test_options.executor.simulate_random_delay().await;
let fail_probability = *test_options.query_failure_probability.lock();
if test_options.executor.rng().gen_bool(fail_probability) {
return Err(anyhow!("simulated query failure"))?;
if let Executor::Deterministic(executor) = &self.executor {
executor.simulate_random_delay().await;
let fail_probability = *test_options.query_failure_probability.lock();
if executor.rng().gen_bool(fail_probability) {
return Err(anyhow!("simulated query failure"))?;
}
}
test_options.runtime.block_on(future)
@@ -276,6 +378,46 @@ impl Database {
future.await
}
}
async fn retry_on_serialization_error(&self, error: &Error, prev_attempt_count: usize) -> bool {
// If the error is due to a failure to serialize concurrent transactions, then retry
// this transaction after a delay. With each subsequent retry, double the delay duration.
// Also vary the delay randomly in order to ensure different database connections retry
// at different times.
const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.];
if is_serialization_error(error) && prev_attempt_count < SLEEPS.len() {
let base_delay = SLEEPS[prev_attempt_count];
let randomized_delay = base_delay * self.rng.lock().await.gen_range(0.5..=2.0);
log::warn!(
"retrying transaction after serialization error. delay: {} ms.",
randomized_delay
);
self.executor
.sleep(Duration::from_millis(randomized_delay as u64))
.await;
true
} else {
false
}
}
}
fn is_serialization_error(error: &Error) -> bool {
const SERIALIZATION_FAILURE_CODE: &str = "40001";
match error {
Error::Database(
DbErr::Exec(sea_orm::RuntimeErr::SqlxError(error))
| DbErr::Query(sea_orm::RuntimeErr::SqlxError(error)),
) if error
.as_database_error()
.and_then(|error| error.code())
.as_deref()
== Some(SERIALIZATION_FAILURE_CODE) =>
{
true
}
_ => false,
}
}
/// A handle to a [`DatabaseTransaction`].
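Every transaction wrapper above now shares one retry policy: when a commit or query fails with Postgres's serialization-failure code (SQLSTATE 40001, checked in `is_serialization_error`), the whole closure is re-run under serializable isolation after a doubling, jittered delay, so the closures must stay idempotent. A standalone sketch of just the delay schedule, assuming rand 0.8:

use rand::Rng;
use std::time::Duration;

// Doubling base delays in milliseconds, mirroring the SLEEPS table above.
const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.];

// Returns None once the schedule is exhausted, meaning: stop retrying.
fn retry_delay(prev_attempt_count: usize, rng: &mut impl Rng) -> Option<Duration> {
    let base_delay = *SLEEPS.get(prev_attempt_count)?;
    // Jitter by 0.5x..=2x so concurrent connections don't retry in lockstep.
    let randomized_delay = base_delay * rng.gen_range(0.5..=2.0);
    Some(Duration::from_millis(randomized_delay as u64))
}

Note also the `Arc::get_mut(&mut tx).and_then(|tx| tx.take())` step in `with_transaction`: reclaiming the transaction from the `Arc` proves no clone of the handle escaped the closure before the commit.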

View File

@@ -20,7 +20,7 @@ impl Database {
&self,
params: &CreateBillingCustomerParams,
) -> Result<billing_customer::Model> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let customer = billing_customer::Entity::insert(billing_customer::ActiveModel {
user_id: ActiveValue::set(params.user_id),
stripe_customer_id: ActiveValue::set(params.stripe_customer_id.clone()),
@@ -40,7 +40,7 @@ impl Database {
id: BillingCustomerId,
params: &UpdateBillingCustomerParams,
) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
billing_customer::Entity::update(billing_customer::ActiveModel {
id: ActiveValue::set(id),
user_id: params.user_id.clone(),
@@ -61,7 +61,7 @@ impl Database {
&self,
id: BillingCustomerId,
) -> Result<Option<billing_customer::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_customer::Entity::find()
.filter(billing_customer::Column::Id.eq(id))
.one(&*tx)
@@ -75,7 +75,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<billing_customer::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_customer::Entity::find()
.filter(billing_customer::Column::UserId.eq(user_id))
.one(&*tx)
@@ -89,7 +89,7 @@ impl Database {
&self,
stripe_customer_id: &str,
) -> Result<Option<billing_customer::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_customer::Entity::find()
.filter(billing_customer::Column::StripeCustomerId.eq(stripe_customer_id))
.one(&*tx)

View File

@@ -22,7 +22,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<billing_preference::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_preference::Entity::find()
.filter(billing_preference::Column::UserId.eq(user_id))
.one(&*tx)
@@ -37,7 +37,7 @@ impl Database {
user_id: UserId,
params: &CreateBillingPreferencesParams,
) -> Result<billing_preference::Model> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let preferences = billing_preference::Entity::insert(billing_preference::ActiveModel {
user_id: ActiveValue::set(user_id),
max_monthly_llm_usage_spending_in_cents: ActiveValue::set(
@@ -65,7 +65,7 @@ impl Database {
user_id: UserId,
params: &UpdateBillingPreferencesParams,
) -> Result<billing_preference::Model> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let preferences = billing_preference::Entity::update_many()
.set(billing_preference::ActiveModel {
max_monthly_llm_usage_spending_in_cents: params

View File

@@ -35,7 +35,7 @@ impl Database {
&self,
params: &CreateBillingSubscriptionParams,
) -> Result<billing_subscription::Model> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let id = billing_subscription::Entity::insert(billing_subscription::ActiveModel {
billing_customer_id: ActiveValue::set(params.billing_customer_id),
kind: ActiveValue::set(params.kind),
@@ -64,7 +64,7 @@ impl Database {
id: BillingSubscriptionId,
params: &UpdateBillingSubscriptionParams,
) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
billing_subscription::Entity::update(billing_subscription::ActiveModel {
id: ActiveValue::set(id),
billing_customer_id: params.billing_customer_id.clone(),
@@ -90,7 +90,7 @@ impl Database {
&self,
id: BillingSubscriptionId,
) -> Result<Option<billing_subscription::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_subscription::Entity::find_by_id(id)
.one(&*tx)
.await?)
@@ -103,7 +103,7 @@ impl Database {
&self,
stripe_subscription_id: &str,
) -> Result<Option<billing_subscription::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_subscription::Entity::find()
.filter(
billing_subscription::Column::StripeSubscriptionId.eq(stripe_subscription_id),
@@ -118,7 +118,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<billing_subscription::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(billing_customer::Column::UserId.eq(user_id))
@@ -152,7 +152,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Vec<billing_subscription::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let subscriptions = billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(billing_customer::Column::UserId.eq(user_id))
@@ -169,7 +169,7 @@ impl Database {
&self,
user_ids: HashSet<UserId>,
) -> Result<HashMap<UserId, (billing_customer::Model, billing_subscription::Model)>> {
self.transaction(|tx| {
self.weak_transaction(|tx| {
let user_ids = user_ids.clone();
async move {
let mut rows = billing_subscription::Entity::find()
@@ -201,7 +201,7 @@ impl Database {
&self,
user_ids: HashSet<UserId>,
) -> Result<HashMap<UserId, (billing_customer::Model, billing_subscription::Model)>> {
self.transaction(|tx| {
self.weak_transaction(|tx| {
let user_ids = user_ids.clone();
async move {
let mut rows = billing_subscription::Entity::find()
@@ -236,7 +236,7 @@ impl Database {
/// Returns the count of the active billing subscriptions for the user with the specified ID.
pub async fn count_active_billing_subscriptions(&self, user_id: UserId) -> Result<usize> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let count = billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(

View File

@@ -501,8 +501,10 @@ impl Database {
/// Returns all channels for the user with the given ID.
pub async fn get_channels_for_user(&self, user_id: UserId) -> Result<ChannelsForUser> {
self.transaction(|tx| async move { self.get_user_channels(user_id, None, true, &tx).await })
.await
self.weak_transaction(
|tx| async move { self.get_user_channels(user_id, None, true, &tx).await },
)
.await
}
/// Returns all channels for the user with the given ID that are descendants
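The lookups switched to `weak_transaction` in this file and the ones below are single queries with no cross-row invariants to protect, so they commit once and surface failures directly instead of entering the serialization-retry loop. A hedged sketch of a typical call site, modeled on the `is_user_busy` query in the next file:

use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};

// Sketch only: assumes the collab `Database`, its `Result` alias, and the
// `room_participant` entity from the surrounding code.
async fn is_user_busy(db: &Database, user_id: UserId) -> Result<bool> {
    db.weak_transaction(|tx| async move {
        let participant = room_participant::Entity::find()
            .filter(room_participant::Column::UserId.eq(user_id))
            .one(&*tx)
            .await?;
        Ok(participant.is_some())
    })
    .await
}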

View File

@@ -15,7 +15,7 @@ impl Database {
user_b_busy: bool,
}
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let user_a_participant = Alias::new("user_a_participant");
let user_b_participant = Alias::new("user_b_participant");
let mut db_contacts = contact::Entity::find()
@@ -91,7 +91,7 @@ impl Database {
/// Returns whether the given user is a busy (on a call).
pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let participant = room_participant::Entity::find()
.filter(room_participant::Column::UserId.eq(user_id))
.one(&*tx)

View File

@@ -9,7 +9,7 @@ pub enum ContributorSelector {
impl Database {
/// Retrieves the GitHub logins of all users who have signed the CLA.
pub async fn get_contributors(&self) -> Result<Vec<String>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryGithubLogin {
GithubLogin,
@@ -32,7 +32,7 @@ impl Database {
&self,
selector: &ContributorSelector,
) -> Result<Option<DateTime>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let condition = match selector {
ContributorSelector::GitHubUserId { github_user_id } => {
user::Column::GithubUserId.eq(*github_user_id)
@@ -69,7 +69,7 @@ impl Database {
github_user_created_at: DateTimeUtc,
initial_channel_id: Option<ChannelId>,
) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let user = self
.update_or_create_user_by_github_account_tx(
github_login,

View File

@@ -8,7 +8,7 @@ impl Database {
model: &str,
digests: &[Vec<u8>],
) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let embeddings = {
let mut db_embeddings = embedding::Entity::find()
.filter(
@@ -52,7 +52,7 @@ impl Database {
model: &str,
embeddings: &HashMap<Vec<u8>, Vec<f32>>,
) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
let now_offset_datetime = OffsetDateTime::now_utc();
let retrieved_at =
@@ -78,7 +78,7 @@ impl Database {
}
pub async fn purge_old_embeddings(&self) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
embedding::Entity::delete_many()
.filter(
embedding::Column::RetrievedAt

View File

@@ -15,7 +15,7 @@ impl Database {
max_schema_version: i32,
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let mut condition = Condition::all()
.add(
extension::Column::LatestVersion
@@ -43,7 +43,7 @@ impl Database {
ids: &[&str],
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let extensions = extension::Entity::find()
.filter(extension::Column::ExternalId.is_in(ids.iter().copied()))
.all(&*tx)
@@ -123,7 +123,7 @@ impl Database {
&self,
extension_id: &str,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let condition = extension::Column::ExternalId
.eq(extension_id)
.into_condition();
@@ -162,7 +162,7 @@ impl Database {
extension_id: &str,
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Option<ExtensionMetadata>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.one(&*tx)
@@ -187,7 +187,7 @@ impl Database {
extension_id: &str,
version: &str,
) -> Result<Option<ExtensionMetadata>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.filter(extension_version::Column::Version.eq(version))
@@ -204,7 +204,7 @@ impl Database {
}
pub async fn get_known_extension_versions(&self) -> Result<HashMap<String, Vec<String>>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let mut extension_external_ids_by_id = HashMap::default();
let mut rows = extension::Entity::find().stream(&*tx).await?;
@@ -242,7 +242,7 @@ impl Database {
&self,
versions_by_extension_id: &HashMap<&str, Vec<NewExtensionVersion>>,
) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
for (external_id, versions) in versions_by_extension_id {
if versions.is_empty() {
continue;
@@ -349,7 +349,7 @@ impl Database {
}
pub async fn record_extension_download(&self, extension: &str, version: &str) -> Result<bool> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryId {
Id,

View File

@@ -13,7 +13,7 @@ impl Database {
&self,
params: &CreateProcessedStripeEventParams,
) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
processed_stripe_event::Entity::insert(processed_stripe_event::ActiveModel {
stripe_event_id: ActiveValue::set(params.stripe_event_id.clone()),
stripe_event_type: ActiveValue::set(params.stripe_event_type.clone()),
@@ -35,7 +35,7 @@ impl Database {
&self,
event_id: &str,
) -> Result<Option<processed_stripe_event::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(processed_stripe_event::Entity::find_by_id(event_id)
.one(&*tx)
.await?)
@@ -48,7 +48,7 @@ impl Database {
&self,
event_ids: &[&str],
) -> Result<Vec<processed_stripe_event::Model>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
Ok(processed_stripe_event::Entity::find()
.filter(
processed_stripe_event::Column::StripeEventId.is_in(event_ids.iter().copied()),

View File

@@ -112,7 +112,7 @@ impl Database {
}
pub async fn delete_project(&self, project_id: ProjectId) -> Result<()> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
project::Entity::delete_by_id(project_id).exec(&*tx).await?;
Ok(())
})

View File

@@ -80,7 +80,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<proto::IncomingCall>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
let pending_participant = room_participant::Entity::find()
.filter(
room_participant::Column::UserId

View File

@@ -142,50 +142,6 @@ impl Database {
}
}
loop {
let delete_query = Query::delete()
.from_table(project_repository_statuses::Entity)
.and_where(
Expr::tuple([Expr::col((
project_repository_statuses::Entity,
project_repository_statuses::Column::ProjectId,
))
.into()])
.in_subquery(
Query::select()
.columns([(
project_repository_statuses::Entity,
project_repository_statuses::Column::ProjectId,
)])
.from(project_repository_statuses::Entity)
.inner_join(
project::Entity,
Expr::col((project::Entity, project::Column::Id)).equals((
project_repository_statuses::Entity,
project_repository_statuses::Column::ProjectId,
)),
)
.and_where(project::Column::HostConnectionServerId.ne(server_id))
.limit(10000)
.to_owned(),
),
)
.to_owned();
let statement = Statement::from_sql_and_values(
tx.get_database_backend(),
delete_query
.to_string(sea_orm::sea_query::PostgresQueryBuilder)
.as_str(),
vec![],
);
let result = tx.execute(statement).await?;
if result.rows_affected() == 0 {
break;
}
}
Ok(())
})
.await
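The loop deleted above implemented batched cleanup: remove at most 10,000 rows per DELETE (scoped by a subquery joining `projects` on the stale `host_connection_server_id`) and stop once a pass affects nothing, keeping each statement short. A hedged sketch of that loop's shape, with the sea_query builder replaced by an illustrative plain-SQL stand-in:

use sea_orm::{ConnectionTrait, DatabaseTransaction, DbErr, Statement};

// Sketch only: the real subquery joined `projects` and filtered on the
// stale server id; this SQL is a simplified stand-in.
async fn delete_in_batches(tx: &DatabaseTransaction) -> Result<(), DbErr> {
    loop {
        let statement = Statement::from_string(
            tx.get_database_backend(),
            "DELETE FROM project_repository_statuses WHERE project_id IN \
             (SELECT project_id FROM project_repository_statuses LIMIT 10000)"
                .to_string(),
        );
        // Stop as soon as a pass deletes nothing.
        if tx.execute(statement).await?.rows_affected() == 0 {
            break;
        }
    }
    Ok(())
}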

View File

@@ -382,7 +382,7 @@ impl Database {
/// Returns the active flags for the user.
pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
self.transaction(|tx| async move {
self.weak_transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryAs {
Flag,

View File

@@ -17,15 +17,11 @@ use crate::migrations::run_database_migrations;
use super::*;
use gpui::BackgroundExecutor;
use parking_lot::Mutex;
use rand::prelude::*;
use sea_orm::ConnectionTrait;
use sqlx::migrate::MigrateDatabase;
use std::{
sync::{
Arc,
atomic::{AtomicI32, AtomicU32, Ordering::SeqCst},
},
time::Duration,
use std::sync::{
Arc,
atomic::{AtomicI32, AtomicU32, Ordering::SeqCst},
};
pub struct TestDb {
@@ -45,7 +41,9 @@ impl TestDb {
let mut db = runtime.block_on(async {
let mut options = ConnectOptions::new(url);
options.max_connections(5);
let mut db = Database::new(options).await.unwrap();
let mut db = Database::new(options, Executor::Deterministic(executor.clone()))
.await
.unwrap();
let sql = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/migrations.sqlite/20221109000000_test_schema.sql"
@@ -62,7 +60,6 @@ impl TestDb {
});
db.test_options = Some(DatabaseTestOptions {
executor,
runtime,
query_failure_probability: parking_lot::Mutex::new(0.0),
});
@@ -96,7 +93,9 @@ impl TestDb {
options
.max_connections(5)
.idle_timeout(Duration::from_secs(0));
let mut db = Database::new(options).await.unwrap();
let mut db = Database::new(options, Executor::Deterministic(executor.clone()))
.await
.unwrap();
let migrations_path = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations");
run_database_migrations(db.options(), migrations_path)
.await
@@ -106,7 +105,6 @@ impl TestDb {
});
db.test_options = Some(DatabaseTestOptions {
executor,
runtime,
query_failure_probability: parking_lot::Mutex::new(0.0),
});

View File

@@ -49,7 +49,7 @@ async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
db.save_embeddings(model, &embeddings).await.unwrap();
// Reach into the DB and change the retrieved at to be > 60 days
db.transaction(|tx| {
db.weak_transaction(|tx| {
let digest = digest.clone();
async move {
let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61));

View File

@@ -285,7 +285,7 @@ impl AppState {
pub async fn new(config: Config, executor: Executor) -> Result<Arc<Self>> {
let mut db_options = db::ConnectOptions::new(config.database_url.clone());
db_options.max_connections(config.database_max_connections);
let mut db = Database::new(db_options).await?;
let mut db = Database::new(db_options, Executor::Production).await?;
db.initialize_notification_kinds().await?;
let llm_db = if let Some((llm_database_url, llm_database_max_connections)) = config

View File

@@ -59,7 +59,7 @@ async fn main() -> Result<()> {
let config = envy::from_env::<Config>().expect("error loading config");
let db_options = db::ConnectOptions::new(config.database_url.clone());
let mut db = Database::new(db_options).await?;
let mut db = Database::new(db_options, Executor::Production).await?;
db.initialize_notification_kinds().await?;
collab::seed::seed(&config, &db, false).await?;
@@ -253,7 +253,7 @@ async fn main() -> Result<()> {
async fn setup_app_database(config: &Config) -> Result<()> {
let db_options = db::ConnectOptions::new(config.database_url.clone());
let mut db = Database::new(db_options).await?;
let mut db = Database::new(db_options, Executor::Production).await?;
let migrations_path = config.migrations_path.as_deref().unwrap_or_else(|| {
#[cfg(feature = "sqlite")]

View File

@@ -22,9 +22,7 @@ use gpui::{
use language::{
Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig,
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
language_settings::{
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
},
language_settings::{AllLanguageSettings, Formatter, PrettierSettings, SelectedFormatter},
tree_sitter_rust, tree_sitter_typescript,
};
use lsp::{LanguageServerId, OneOf};
@@ -4591,14 +4589,13 @@ async fn test_formatting_buffer(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
Formatter::External {
file.defaults.formatter =
Some(SelectedFormatter::List(vec![Formatter::External {
command: "awk".into(),
arguments: Some(
vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
),
},
)));
}]));
});
});
});
@@ -4698,9 +4695,10 @@ async fn test_prettier_formatting_buffer(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
Formatter::LanguageServer { name: None },
)));
file.defaults.formatter =
Some(SelectedFormatter::List(vec![Formatter::LanguageServer {
name: None,
}]));
file.defaults.prettier = Some(PrettierSettings {
allowed: true,
..PrettierSettings::default()
@@ -4821,7 +4819,7 @@ async fn test_definition(
);
let definitions_1 = project_b
.update(cx_b, |p, cx| p.definitions(&buffer_b, 23, cx))
.update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx))
.await
.unwrap();
cx_b.read(|cx| {
@@ -4852,7 +4850,7 @@ async fn test_definition(
);
let definitions_2 = project_b
.update(cx_b, |p, cx| p.definitions(&buffer_b, 33, cx))
.update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx))
.await
.unwrap();
cx_b.read(|cx| {
@@ -4889,7 +4887,7 @@ async fn test_definition(
);
let type_definitions = project_b
.update(cx_b, |p, cx| p.type_definitions(&buffer_b, 7, cx))
.update(cx_b, |p, cx| p.type_definition(&buffer_b, 7, cx))
.await
.unwrap();
cx_b.read(|cx| {
@@ -5057,7 +5055,7 @@ async fn test_references(
lsp_response_tx
.unbounded_send(Err(anyhow!("can't find references")))
.unwrap();
assert_eq!(references.await.unwrap(), []);
references.await.unwrap_err();
// User is informed that the request is no longer pending.
executor.run_until_parked();
@@ -5641,7 +5639,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
let definitions;
let buffer_b2;
if rng.r#gen() {
definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx));
definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
(buffer_b2, _) = project_b
.update(cx_b, |p, cx| {
p.open_buffer_with_lsp((worktree_id, "b.rs"), cx)
@@ -5655,7 +5653,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
})
.await
.unwrap();
definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx));
definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
}
let definitions = definitions.await.unwrap();

View File

@@ -838,7 +838,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.map(|_| Ok(()))
.boxed(),
LspRequestKind::Definition => project
.definitions(&buffer, offset, cx)
.definition(&buffer, offset, cx)
.map_ok(|_| ())
.boxed(),
LspRequestKind::Highlights => project

View File

@@ -6,16 +6,12 @@ use debugger_ui::debugger_panel::DebugPanel;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::StreamExt as _;
use gpui::{
AppContext as _, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _,
VisualContext,
};
use gpui::{BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _, VisualContext};
use http_client::BlockedHttpClient;
use language::{
FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageRegistry,
language_settings::{
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
language_settings,
AllLanguageSettings, Formatter, PrettierSettings, SelectedFormatter, language_settings,
},
tree_sitter_typescript,
};
@@ -505,9 +501,10 @@ async fn test_ssh_collaboration_formatting_with_prettier(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
Formatter::LanguageServer { name: None },
)));
file.defaults.formatter =
Some(SelectedFormatter::List(vec![Formatter::LanguageServer {
name: None,
}]));
file.defaults.prettier = Some(PrettierSettings {
allowed: true,
..PrettierSettings::default()

View File

@@ -30,13 +30,7 @@ use workspace::{
};
use workspace::{item::Dedup, notifications::NotificationId};
actions!(
collab,
[
/// Copies a link to the current position in the channel buffer.
CopyLink
]
);
actions!(collab, [CopyLink]);
pub fn init(cx: &mut App) {
workspace::FollowableViewRegistry::register::<ChannelView>(cx)

View File

@@ -71,13 +71,7 @@ struct SerializedChatPanel {
width: Option<Pixels>,
}
actions!(
chat_panel,
[
/// Toggles focus on the chat panel.
ToggleFocus
]
);
actions!(chat_panel, [ToggleFocus]);
impl ChatPanel {
pub fn new(

View File

@@ -44,25 +44,15 @@ use workspace::{
actions!(
collab_panel,
[
/// Toggles focus on the collaboration panel.
ToggleFocus,
/// Removes the selected channel or contact.
Remove,
/// Opens the context menu for the selected item.
Secondary,
/// Collapses the selected channel in the tree view.
CollapseSelectedChannel,
/// Expands the selected channel in the tree view.
ExpandSelectedChannel,
/// Starts moving a channel to a new location.
StartMoveChannel,
/// Moves the selected item to the current location.
MoveSelected,
/// Inserts a space character in the filter input.
InsertSpace,
/// Moves the selected channel up in the list.
MoveChannelUp,
/// Moves the selected channel down in the list.
MoveChannelDown,
]
);

View File

@@ -17,13 +17,9 @@ use workspace::{ModalView, notifications::DetachAndPromptErr};
actions!(
channel_modal,
[
/// Selects the next control in the channel modal.
SelectNextControl,
/// Toggles between invite members and manage members mode.
ToggleMode,
/// Toggles admin status for the selected member.
ToggleMemberAdmin,
/// Removes the selected member from the channel.
RemoveMember
]
);

View File

@@ -74,13 +74,7 @@ pub struct NotificationPresenter {
pub can_navigate: bool,
}
actions!(
notification_panel,
[
/// Toggles focus on the notification panel.
ToggleFocus
]
);
actions!(notification_panel, [ToggleFocus]);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {

View File

@@ -61,7 +61,7 @@ impl RenderOnce for ComponentExample {
12.0,
12.0,
))
.shadow_xs()
.shadow_sm()
.child(self.element),
)
.into_any_element()

View File

@@ -46,17 +46,11 @@ pub use crate::sign_in::{CopilotCodeVerification, initiate_sign_in, reinstall_an
actions!(
copilot,
[
/// Requests a code completion suggestion from Copilot.
Suggest,
/// Cycles to the next Copilot suggestion.
NextSuggestion,
/// Cycles to the previous Copilot suggestion.
PreviousSuggestion,
/// Reinstalls the Copilot language server.
Reinstall,
/// Signs in to GitHub Copilot.
SignIn,
/// Signs out of GitHub Copilot.
SignOut
]
);

View File

@@ -25,9 +25,7 @@ anyhow.workspace = true
async-trait.workspace = true
collections.workspace = true
dap.workspace = true
dotenvy.workspace = true
futures.workspace = true
fs.workspace = true
gpui.workspace = true
json_dotpath.workspace = true
language.workspace = true

View File

@@ -7,22 +7,13 @@ use dap::{
latest_github_release,
},
};
use fs::Fs;
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
use log::warn;
use serde_json::{Map, Value};
use std::{env::consts, ffi::OsStr, path::PathBuf, sync::OnceLock};
use task::TcpArgumentsTemplate;
use util;
use std::{
env::consts,
ffi::OsStr,
path::{Path, PathBuf},
str::FromStr,
sync::OnceLock,
};
use crate::*;
#[derive(Default, Debug)]
@@ -446,34 +437,22 @@ impl DebugAdapter for GoDebugAdapter {
adapter_path.join("dlv").to_string_lossy().to_string()
};
let cwd = Some(
task_definition
.config
.get("cwd")
.and_then(|s| s.as_str())
.map(PathBuf::from)
.unwrap_or_else(|| delegate.worktree_root_path().to_path_buf()),
);
let cwd = task_definition
.config
.get("cwd")
.and_then(|s| s.as_str())
.map(PathBuf::from)
.unwrap_or_else(|| delegate.worktree_root_path().to_path_buf());
let arguments;
let command;
let connection;
let mut configuration = task_definition.config.clone();
let mut envs = HashMap::default();
if let Some(configuration) = configuration.as_object_mut() {
configuration
.entry("cwd")
.or_insert_with(|| delegate.worktree_root_path().to_string_lossy().into());
handle_envs(
configuration,
&mut envs,
cwd.as_deref(),
delegate.fs().clone(),
)
.await;
}
if let Some(connection_options) = &task_definition.tcp_connection {
@@ -515,8 +494,8 @@ impl DebugAdapter for GoDebugAdapter {
Ok(DebugAdapterBinary {
command,
arguments,
cwd,
envs,
cwd: Some(cwd),
envs: HashMap::default(),
connection,
request_args: StartDebuggingRequestArguments {
configuration,
@@ -525,44 +504,3 @@ impl DebugAdapter for GoDebugAdapter {
})
}
}
// delve doesn't do anything with the envFile setting, so we intercept it
async fn handle_envs(
config: &mut Map<String, Value>,
envs: &mut HashMap<String, String>,
cwd: Option<&Path>,
fs: Arc<dyn Fs>,
) -> Option<()> {
let env_files = match config.get("envFile")? {
Value::Array(arr) => arr.iter().map(|v| v.as_str()).collect::<Vec<_>>(),
Value::String(s) => vec![Some(s.as_str())],
_ => return None,
};
let rebase_path = |path: PathBuf| {
if path.is_absolute() {
Some(path)
} else {
cwd.map(|p| p.join(path))
}
};
for path in env_files {
let Some(path) = path
.and_then(|s| PathBuf::from_str(s).ok())
.and_then(rebase_path)
else {
continue;
};
if let Ok(file) = fs.open_sync(&path).await {
envs.extend(dotenvy::from_read_iter(file).filter_map(Result::ok))
} else {
warn!("While starting Go debug session: failed to read env file {path:?}");
};
}
// remove envFile now that it's been handled
config.remove("entry");
Some(())
}

Some files were not shown because too many files have changed in this diff.