Compare commits
142 Commits
gpui-butto ... fix-evals
| SHA1 |
|---|
| b6b85fbb70 |
| fc78408ee4 |
| 37f49ce304 |
| cc428330a9 |
| 1475ace6f1 |
| dd4e8b9e66 |
| b188e5d3aa |
| e3d3daec92 |
| ced8e4d88e |
| fa1abd8201 |
| ee4e43f1b6 |
| d61e1e24a7 |
| 3c03d53e3e |
| 8ab664a52c |
| 2044426634 |
| 02fa6f6fc2 |
| 80a00cd241 |
| 06f725d51b |
| baf6d82cd4 |
| 28ec7fbb81 |
| 0415e853d5 |
| 1c9b818342 |
| 0d7f4842f3 |
| ab017129d8 |
| 71fb17c507 |
| 97e437c632 |
| 66667d1eef |
| dce22a965e |
| 5f452dbca2 |
| b2a92097ee |
| eb35d25a7d |
| 8742d4ab90 |
| b829f72c17 |
| ffa8310d04 |
| 3fda539c46 |
| b444b326cb |
| f196288e2d |
| e30cc131b4 |
| 09c8a84935 |
| 6e5996a815 |
| c8f56e38b1 |
| cfd3b0ff7b |
| afe23cf85a |
| f915c24279 |
| bdd9e015ab |
| 6bbab4b55a |
| 7450b788f3 |
| 0c03519393 |
| 636eff2e9a |
| 6c8f4002d9 |
| 91bc5aefa4 |
| 2f3564b85f |
| d61a544400 |
| 8061bacee3 |
| 77dadfedfe |
| 0023b37bfc |
| 4ece4a635f |
| 77c2aecf93 |
| 3ee56c196c |
| 3b1f6eaab8 |
| 44fbe27d31 |
| a824119367 |
| 16366cf9f2 |
| 1e51a7ac44 |
| d547a86e31 |
| 4bb04cef9d |
| 89700c3682 |
| 7609402200 |
| a0ec9cf383 |
| eb318c1626 |
| 5e5a124ae1 |
| 65e751ca33 |
| 17cf04558b |
| 36ae564b61 |
| 110195cdae |
| b7d5e6480a |
| 0fa9f05313 |
| 051f49ce9a |
| e5670ba081 |
| e4262f97af |
| 944a0df436 |
| a1be61949d |
| a092e2dc03 |
| b1c7fa1dac |
| df66237428 |
| ca513f52bf |
| e9c9a8a269 |
| 315321bf8c |
| c747a57b7e |
| f73c8e5841 |
| f7a0834f54 |
| 83d513aef4 |
| b440e1a467 |
| 5c4f9e57d8 |
| 05f8001ee9 |
| b93c67438c |
| fdec966226 |
| 9041f734fd |
| 844c7ad22e |
| 926f377c6c |
| 26a8cac0d8 |
| c7aae6bd62 |
| 851121ffd4 |
| e48daa92c0 |
| d9f12879e2 |
| 42dd511fc2 |
| 571c5e7407 |
| c76295251b |
| b057b4697f |
| 57424e4743 |
| 2b6dab9197 |
| 70b0c4d63d |
| 875d1ef263 |
| e1a2e8a3aa |
| a829281841 |
| 592568ff87 |
| 83afe56a61 |
| e468f9d2da |
| 1ce2652a89 |
| 784d51c40f |
| 0079c99c2c |
| 230eb12f72 |
| dd3956eaf1 |
| 122d6c9e4d |
| 19e89a8b2d |
| 919ffe7655 |
| 841a4e35ea |
| 175ce05fd1 |
| e518941445 |
| 10b8174c1b |
| 21fd1c8b80 |
| c80bd698f8 |
| 03419da6f1 |
| f56960ab5b |
| 4d827924f0 |
| 25b4591539 |
| afbf527aa2 |
| eb9ea20313 |
| 3d2ab4e58c |
| ff0060aa36 |
| d791c6cdb1 |
| c7725e31d9 |
@@ -13,12 +13,6 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]

[target.aarch64-apple-darwin]
rustflags = ["-C", "link-args=-all_load"]

[target.x86_64-apple-darwin]
rustflags = ["-C", "link-args=-all_load"]

[target.'cfg(target_os = "windows")']
rustflags = [
"--cfg",
@@ -30,3 +30,7 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e
# 2024-07-05 Improved formatting of default keymaps (single line per bind)
# https://github.com/zed-industries/zed/pull/13887
813cc3f5e537372fc86720b5e71b6e1c815440ab

# 2024-07-24 docs: Format docs
# https://github.com/zed-industries/zed/pull/15352
3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c
20 .mailmap
@@ -19,6 +19,8 @@ amtoaer <amtoaer@gmail.com>
amtoaer <amtoaer@gmail.com> <amtoaer@outlook.com>
Andrei Zvonimir Crnković <andrei@0x7f.dev>
Andrei Zvonimir Crnković <andrei@0x7f.dev> <andreicek@0x7f.dev>
Angelk90 <angelo.k90@hotmail.it>
Angelk90 <angelo.k90@hotmail.it> <20476002+Angelk90@users.noreply.github.com>
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Ben Kunkle <ben@zed.dev>
@@ -38,6 +40,8 @@ Dairon Medina <dairon.medina@gmail.com>
Danilo Leal <danilo@zed.dev>
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
Edwin Aronsson <75266237+4teapo@users.noreply.github.com>
Elvis Pranskevichus <elvis@geldata.com>
Elvis Pranskevichus <elvis@geldata.com> <elvis@magic.io>
Evren Sen <nervenes@icloud.com>
Evren Sen <nervenes@icloud.com> <146845123+evrensen467@users.noreply.github.com>
Evren Sen <nervenes@icloud.com> <146845123+evrsen@users.noreply.github.com>
@@ -69,6 +73,8 @@ Lilith Iris <itslirissama@gmail.com> <83819417+Irilith@users.noreply.github.com>
LoganDark <contact@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <git@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <github@logandark.mozmail.com>
Marko Kungla <marko.kungla@gmail.com>
Marko Kungla <marko.kungla@gmail.com> <marko@mkungla.dev>
Marshall Bowers <git@maxdeviant.com>
Marshall Bowers <git@maxdeviant.com> <elliott.codes@gmail.com>
Marshall Bowers <git@maxdeviant.com> <marshall@zed.dev>
@@ -84,6 +90,7 @@ Michael Sloan <michael@zed.dev> <mgsloan@google.com>
Mikayla Maki <mikayla@zed.dev>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@gmail.com>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@icloud.com>
Morgan Krey <morgan@zed.dev>
Muhammad Talal Anwar <mail@talal.io>
Muhammad Talal Anwar <mail@talal.io> <talalanwar@outlook.com>
Nate Butler <iamnbutler@gmail.com>
@@ -116,11 +123,18 @@ Shish <webmaster@shishnet.org>
Shish <webmaster@shishnet.org> <shish@shishnet.org>
Smit Barmase <0xtimsb@gmail.com>
Smit Barmase <0xtimsb@gmail.com> <smit@zed.dev>
Thomas <github.thomaub@gmail.com>
Thomas <github.thomaub@gmail.com> <thomas.aubry94@gmail.com>
Thomas <github.thomaub@gmail.com> <thomas.aubry@paylead.fr>
Thomas Heartman <thomasheartman+github@gmail.com>
Thomas Heartman <thomasheartman+github@gmail.com> <thomas@getunleash.io>
Thomas Mickley-Doyle <tmickleydoyle@gmail.com>
Thomas Mickley-Doyle <tmickleydoyle@gmail.com> <thomas@zed.dev>
Thorben Kröger <dev@thorben.net>
Thorben Kröger <dev@thorben.net> <thorben.kroeger@hexagon.com>
Thorsten Ball <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev> <me@thorstenball.com>
Thorsten Ball <thorsten@zed.dev> <mrnugget@gmail.com>
Thorsten Ball <mrnugget@gmail.com>
Thorsten Ball <mrnugget@gmail.com> <me@thorstenball.com>
Thorsten Ball <mrnugget@gmail.com> <thorsten@zed.dev>
Tristan Hume <tris.hume@gmail.com>
Tristan Hume <tris.hume@gmail.com> <tristan@anthropic.com>
Uladzislau Kaminski <i@uladkaminski.com>
2 .rules
@@ -115,7 +115,7 @@ Other entities can then register a callback to handle these events by doing `cx.
GPUI has had some changes to its APIs. Always write code using the new APIs:

* `spawn` methods now take async closures (`AsyncFn`), and so should be called like `cx.spawn(async move |cx| ...)`.
* Use `Entity<T>`. This replaces `Model<T>` and `View<T>` which longer exists and should NEVER be used.
* Use `Entity<T>`. This replaces `Model<T>` and `View<T>` which no longer exist and should NEVER be used.
* Use `App` references. This replaces `AppContext` which no longer exists and should NEVER be used.
* Use `Context<T>` references. This replaces `ModelContext<T>` which no longer exists and should NEVER be used.
* `Window` is now passed around explicitly. The new interface adds a `Window` reference parameter to some methods, and adds some new "*_in" methods for plumbing `Window`. The old types `WindowContext` and `ViewContext<T>` should NEVER be used.
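The GPUI rules in the hunk above can be summed up with a short sketch. This is illustrative only: the `Counter` type is invented for the example, and exact GPUI signatures may differ from the current API.

```rust
use gpui::{App, Context, Entity};

// Hypothetical entity, used only to illustrate the rules above.
struct Counter {
    count: usize,
}

impl Counter {
    // `Context<Counter>` replaces the old `ModelContext<Counter>`;
    // a `&mut Window` parameter is added where window access is needed.
    fn increment(&mut self, cx: &mut Context<Self>) {
        self.count += 1;
        cx.notify();
    }
}

// `App` replaces the old `AppContext`; `Entity<T>` replaces `Model<T>` and `View<T>`.
fn bump(counter: Entity<Counter>, cx: &mut App) {
    counter.update(cx, |counter, cx| counter.increment(cx));

    // Per the first rule, `spawn` takes an async closure:
    // cx.spawn(async move |cx| { /* async work using `cx` */ }).detach();
}
```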
@@ -2,13 +2,13 @@
{
"label": "Debug Zed (CodeLLDB)",
"adapter": "CodeLLDB",
"program": "target/debug/zed",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch"
},
{
"label": "Debug Zed (GDB)",
"adapter": "GDB",
"program": "target/debug/zed",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"initialize_args": {
"stopAtBeginningOfMainSubprogram": true
1504 Cargo.lock (generated)
File diff suppressed because it is too large.
20 Cargo.toml
@@ -74,11 +74,12 @@ members = [
"crates/inline_completion",
"crates/inline_completion_button",
"crates/install_cli",
"crates/jj",
"crates/jj_ui",
"crates/journal",
"crates/language",
"crates/language_extension",
"crates/language_model",
"crates/language_model_selector",
"crates/language_models",
"crates/language_selector",
"crates/language_tools",
@@ -279,11 +280,12 @@ indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" }
inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
jj = { path = "crates/jj" }
jj_ui = { path = "crates/jj_ui" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_extension = { path = "crates/language_extension" }
language_model = { path = "crates/language_model" }
language_model_selector = { path = "crates/language_model_selector" }
language_models = { path = "crates/language_models" }
language_selector = { path = "crates/language_selector" }
language_tools = { path = "crates/language_tools" }
@@ -426,6 +428,7 @@ convert_case = "0.8.0"
core-foundation = "0.10.0"
core-foundation-sys = "0.8.6"
core-video = { version = "0.4.3", features = ["metal"] }
criterion = { version = "0.5", features = ["html_reports"] }
ctor = "0.4.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "be69a016ba710191b9fdded28c8b042af4b617f7" }
dashmap = "6.0"
@@ -458,6 +461,8 @@ indexmap = { version = "2.7.0", features = ["serde"] }
indoc = "2"
inventory = "0.3.19"
itertools = "0.14.0"
jj-lib = { git = "https://github.com/jj-vcs/jj", rev = "e18eb8e05efaa153fad5ef46576af145bba1807f" }
json_dotpath = "1.1"
jsonschema = "0.30.0"
jsonwebtoken = "9.3"
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
@@ -470,6 +475,7 @@ lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c9c189
markup5ever_rcdom = "0.3.0"
metal = "0.29"
mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
moka = { version = "0.12.10", features = ["sync"] }
naga = { version = "25.0", features = ["wgsl-in"] }
nanoid = "0.4"
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
@@ -548,9 +554,9 @@ syn = { version = "1.0.72", features = ["full", "extra-traits"] }
sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
tempfile = "3.9.0"
tempfile = "3.20.0"
thiserror = "2.0.12"
tiktoken-rs = "0.6.0"
tiktoken-rs = "0.7.0"
time = { version = "0.3", features = [
"macros",
"parsing",
@@ -593,7 +599,7 @@ unindent = "0.2.0"
url = "2.2"
urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
walkdir = "2.3"
walkdir = "2.5"
wasi-preview1-component-adapter-provider = "29"
wasm-encoder = "0.221"
wasmparser = "0.221"
@@ -603,12 +609,14 @@ wasmtime = { version = "29", default-features = false, features = [
"runtime",
"cranelift",
"component-model",
"incremental-cache",
"parallel-compilation",
] }
wasmtime-wasi = "29"
which = "6.0.0"
wit-component = "0.221"
workspace-hack = "0.1.0"
zed_llm_client = "0.8.1"
zed_llm_client = "0.8.2"
zstd = "0.11"

[workspace.dependencies.async-stripe]
@@ -8,10 +8,6 @@ Welcome to Zed, a high-performance, multiplayer code editor from the creators of

### Installation

<a href="https://repology.org/project/zed-editor/versions">
<img src="https://repology.org/badge/vertical-allrepos/zed-editor.svg?minversion=0.143.5" alt="Packaging status" align="right">
</a>

On macOS and Linux you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager).

Other platforms are not yet available:
@@ -33,6 +33,7 @@
"f4": "debugger::Start",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"ctrl-shift-f5": "debugger::Restart",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"cmd-f11": "debugger::StepInto",
@@ -512,6 +513,8 @@
"alt-ctrl-o": "projects::OpenRecent",
"alt-shift-open": "projects::OpenRemote",
"alt-ctrl-shift-o": "projects::OpenRemote",
// Change to open path modal for existing remote connection by setting the parameter
// "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
"alt-ctrl-shift-b": "branches::OpenRecent",
"alt-shift-enter": "toast::RunAction",
"ctrl-~": "workspace::NewTerminal",
@@ -556,6 +559,7 @@
"ctrl-shift-e": "project_panel::ToggleFocus",
"ctrl-shift-b": "outline_panel::ToggleFocus",
"ctrl-shift-g": "git_panel::ToggleFocus",
"ctrl-shift-d": "debug_panel::ToggleFocus",
"ctrl-?": "agent::ToggleFocus",
"alt-save": "workspace::SaveAll",
"ctrl-alt-s": "workspace::SaveAll",
@@ -593,7 +597,6 @@
{
"context": "Editor",
"bindings": {
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-shift-j": "editor::JoinLines",
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
@@ -860,6 +863,13 @@
"alt-l": "git::GenerateCommitMessage"
}
},
{
"context": "DebugPanel",
"bindings": {
"ctrl-t": "debugger::ToggleThreadPicker",
"ctrl-i": "debugger::ToggleSessionPicker"
}
},
{
"context": "CollabPanel && not_editing",
"bindings": {
@@ -928,6 +938,7 @@
"alt-b": ["terminal::SendText", "\u001bb"],
"alt-f": ["terminal::SendText", "\u001bf"],
"alt-.": ["terminal::SendText", "\u001b."],
"ctrl-delete": ["terminal::SendText", "\u001bd"],
// Overrides for conflicting keybindings
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
@@ -1,15 +1,4 @@
[
// Moved before Standard macOS bindings so that `cmd-w` is not the last binding for
// `workspace::CloseWindow` and displayed/intercepted by macOS
{
"context": "PromptLibrary",
"use_key_equivalents": true,
"bindings": {
"cmd-n": "rules_library::NewRule",
"cmd-shift-s": "rules_library::ToggleDefaultRule",
"cmd-w": "workspace::CloseWindow"
}
},
// Standard macOS bindings
{
"use_key_equivalents": true,
@@ -17,6 +6,7 @@
"f4": "debugger::Start",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"shift-cmd-f5": "debugger::Restart",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"f11": "debugger::StepInto",
@@ -379,6 +369,15 @@
"shift-backspace": "agent::RemoveSelectedThread"
}
},
{
"context": "PromptLibrary",
"use_key_equivalents": true,
"bindings": {
"cmd-n": "rules_library::NewRule",
"cmd-shift-s": "rules_library::ToggleDefaultRule",
"cmd-w": "workspace::CloseWindow"
}
},
{
"context": "BufferSearchBar",
"use_key_equivalents": true,
@@ -588,6 +587,7 @@
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
"alt-cmd-o": "projects::OpenRecent",
"ctrl-cmd-o": "projects::OpenRemote",
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true }],
"alt-cmd-b": "branches::OpenRecent",
"ctrl-~": "workspace::NewTerminal",
"cmd-s": "workspace::Save",
@@ -623,6 +623,7 @@
"cmd-shift-e": "project_panel::ToggleFocus",
"cmd-shift-b": "outline_panel::ToggleFocus",
"ctrl-shift-g": "git_panel::ToggleFocus",
"cmd-shift-d": "debug_panel::ToggleFocus",
"cmd-?": "agent::ToggleFocus",
"cmd-alt-s": "workspace::SaveAll",
"cmd-k m": "language_selector::Toggle",
@@ -928,6 +929,13 @@
"alt-tab": "git::GenerateCommitMessage"
}
},
{
"context": "DebugPanel",
"bindings": {
"cmd-t": "debugger::ToggleThreadPicker",
"cmd-i": "debugger::ToggleSessionPicker"
}
},
{
"context": "CollabPanel && not_editing",
"use_key_equivalents": true,
@@ -1011,7 +1019,7 @@
"alt-right": ["terminal::SendText", "\u001bf"],
"alt-b": ["terminal::SendText", "\u001bb"],
"alt-f": ["terminal::SendText", "\u001bf"],
"alt-.": ["terminal::SendText", "\u001b."],
"ctrl-delete": ["terminal::SendText", "\u001bd"],
// There are conflicting bindings for these keys in the global context.
// these bindings override them, remove at your own risk:
"up": ["terminal::SendKeystroke", "up"],
@@ -72,7 +72,9 @@
"alt-left": "editor::SelectToPreviousWordStart",
"alt-right": "editor::SelectToNextWordEnd",
"pagedown": "editor::SelectPageDown",
"ctrl-v": "editor::SelectPageDown",
"pageup": "editor::SelectPageUp",
"alt-v": "editor::SelectPageUp",
"ctrl-f": "editor::SelectRight",
"ctrl-b": "editor::SelectLeft",
"ctrl-n": "editor::SelectDown",

@@ -51,9 +51,7 @@
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd",
"f3": "editor::FindNextMatch",
"shift-f3": "editor::FindPreviousMatch"
"ctrl-delete": "editor::DeleteToNextWordEnd"
}
},
{

@@ -72,7 +72,9 @@
"alt-left": "editor::SelectToPreviousWordStart",
"alt-right": "editor::SelectToNextWordEnd",
"pagedown": "editor::SelectPageDown",
"ctrl-v": "editor::SelectPageDown",
"pageup": "editor::SelectPageUp",
"alt-v": "editor::SelectPageUp",
"ctrl-f": "editor::SelectRight",
"ctrl-b": "editor::SelectLeft",
"ctrl-n": "editor::SelectDown",

@@ -53,9 +53,7 @@
"cmd-shift-j": "editor::JoinLines",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd",
"cmd-g": "editor::FindNextMatch",
"cmd-shift-g": "editor::FindPreviousMatch"
"ctrl-delete": "editor::DeleteToNextWordEnd"
}
},
{
@@ -152,6 +152,7 @@
"g end": ["vim::EndOfLine", { "display_lines": true }],
"g 0": ["vim::StartOfLine", { "display_lines": true }],
"g home": ["vim::StartOfLine", { "display_lines": true }],
"g shift-m": ["vim::MiddleOfLine", { "display_lines": true }],
"g ^": ["vim::FirstNonWhitespace", { "display_lines": true }],
"g v": "vim::RestoreVisualSelection",
"g ]": "editor::GoToDiagnostic",
@@ -845,13 +846,5 @@
// and Windows.
"alt-l": "editor::AcceptEditPrediction"
}
},
{
// Fixes https://github.com/zed-industries/zed/issues/29095 by ensuring that
// the last binding for editor::ToggleComments is not ctrl-c.
"context": "hack_to_fix_ctrl-c",
"bindings": {
"g c": "editor::ToggleComments"
}
}
]
@@ -1715,6 +1715,8 @@
// }
// ]
"ssh_connections": [],
// Whether to read ~/.ssh/config for ssh connection sources.
"read_ssh_config": true,
// Configures context servers for use by the agent.
"context_servers": {},
"debugger": {
@@ -60,6 +60,7 @@ struct Content {
|
||||
message: String,
|
||||
on_click:
|
||||
Option<Arc<dyn Fn(&mut ActivityIndicator, &mut Window, &mut Context<ActivityIndicator>)>>,
|
||||
tooltip_message: Option<String>,
|
||||
}
|
||||
|
||||
impl ActivityIndicator {
|
||||
@@ -262,6 +263,7 @@ impl ActivityIndicator {
|
||||
});
|
||||
window.dispatch_action(Box::new(workspace::OpenLog), cx);
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
// Show any language server has pending activity.
|
||||
@@ -305,6 +307,7 @@ impl ActivityIndicator {
|
||||
),
|
||||
message,
|
||||
on_click: Some(Arc::new(Self::toggle_language_server_work_context_menu)),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -332,6 +335,7 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: job_info.message.into(),
|
||||
on_click: None,
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -374,6 +378,7 @@ impl ActivityIndicator {
|
||||
.retain(|status| !downloading.contains(&status.name));
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -402,6 +407,7 @@ impl ActivityIndicator {
|
||||
.retain(|status| !checking_for_update.contains(&status.name));
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -428,6 +434,7 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.show_error_message(&Default::default(), window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -446,6 +453,7 @@ impl ActivityIndicator {
|
||||
});
|
||||
window.dispatch_action(Box::new(workspace::OpenLog), cx);
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -462,6 +470,7 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
}),
|
||||
AutoUpdateStatus::Downloading => Some(Content {
|
||||
icon: Some(
|
||||
@@ -473,6 +482,7 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
}),
|
||||
AutoUpdateStatus::Installing => Some(Content {
|
||||
icon: Some(
|
||||
@@ -484,8 +494,12 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
}),
|
||||
AutoUpdateStatus::Updated { binary_path } => Some(Content {
|
||||
AutoUpdateStatus::Updated {
|
||||
binary_path,
|
||||
version,
|
||||
} => Some(Content {
|
||||
icon: None,
|
||||
message: "Click to restart and update Zed".to_string(),
|
||||
on_click: Some(Arc::new({
|
||||
@@ -494,6 +508,14 @@ impl ActivityIndicator {
|
||||
};
|
||||
move |_, _, cx| workspace::reload(&reload, cx)
|
||||
})),
|
||||
tooltip_message: Some(format!("Install version: {}", {
|
||||
match version {
|
||||
auto_update::VersionCheckType::Sha(sha) => sha.to_string(),
|
||||
auto_update::VersionCheckType::Semantic(semantic_version) => {
|
||||
semantic_version.to_string()
|
||||
}
|
||||
}
|
||||
})),
|
||||
}),
|
||||
AutoUpdateStatus::Errored => Some(Content {
|
||||
icon: Some(
|
||||
@@ -505,6 +527,7 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
}),
|
||||
AutoUpdateStatus::Idle => None,
|
||||
};
|
||||
@@ -524,6 +547,7 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -575,7 +599,14 @@ impl Render for ActivityIndicator {
|
||||
)
|
||||
.tooltip(Tooltip::text(content.message))
|
||||
} else {
|
||||
button.child(Label::new(content.message).size(LabelSize::Small))
|
||||
button
|
||||
.child(Label::new(content.message).size(LabelSize::Small))
|
||||
.when_some(
|
||||
content.tooltip_message,
|
||||
|this, tooltip_message| {
|
||||
this.tooltip(Tooltip::text(tooltip_message))
|
||||
},
|
||||
)
|
||||
}
|
||||
})
|
||||
.when_some(content.on_click, |this, handler| {
|
||||
|
||||
@@ -52,7 +52,6 @@ itertools.workspace = true
|
||||
jsonschema.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_model_selector.workspace = true
|
||||
log.workspace = true
|
||||
lsp.workspace = true
|
||||
markdown.workspace = true
|
||||
|
||||
@@ -333,7 +333,6 @@ fn tool_use_markdown_style(window: &Window, cx: &mut App) -> MarkdownStyle {
|
||||
}
|
||||
|
||||
const CODEBLOCK_CONTAINER_GROUP: &str = "codeblock_container";
|
||||
const MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK: usize = 10;
|
||||
|
||||
fn render_markdown_code_block(
|
||||
message_id: MessageId,
|
||||
@@ -346,17 +345,20 @@ fn render_markdown_code_block(
|
||||
_window: &Window,
|
||||
cx: &App,
|
||||
) -> Div {
|
||||
let label_size = rems(0.8125);
|
||||
|
||||
let label = match kind {
|
||||
CodeBlockKind::Indented => None,
|
||||
CodeBlockKind::Fenced => Some(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::new(IconName::Code)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::XSmall),
|
||||
)
|
||||
.child(Label::new("untitled").size(LabelSize::Small))
|
||||
.child(div().text_size(label_size).child("Plain Text"))
|
||||
.into_any_element(),
|
||||
),
|
||||
CodeBlockKind::FencedLang(raw_language_name) => Some(render_code_language(
|
||||
@@ -393,7 +395,7 @@ fn render_markdown_code_block(
|
||||
.id(("code-block-header-label", ix))
|
||||
.ml_1()
|
||||
.gap_1()
|
||||
.child(Label::new(file_name).size(LabelSize::Small))
|
||||
.child(div().text_size(label_size).child(file_name))
|
||||
.child(Label::new(path).color(Color::Muted).size(LabelSize::Small))
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::with_meta(
|
||||
@@ -406,9 +408,10 @@ fn render_markdown_code_block(
|
||||
})
|
||||
.into_any_element()
|
||||
} else {
|
||||
Label::new(path_range.path.to_string_lossy().to_string())
|
||||
.size(LabelSize::Small)
|
||||
div()
|
||||
.ml_1()
|
||||
.text_size(label_size)
|
||||
.child(path_range.path.to_string_lossy().to_string())
|
||||
.into_any_element()
|
||||
};
|
||||
|
||||
@@ -456,19 +459,13 @@ fn render_markdown_code_block(
|
||||
.copied_code_block_ids
|
||||
.contains(&(message_id, ix));
|
||||
|
||||
let can_expand = metadata.line_count >= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;
|
||||
|
||||
let is_expanded = if can_expand {
|
||||
active_thread.read(cx).is_codeblock_expanded(message_id, ix)
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let is_expanded = active_thread.read(cx).is_codeblock_expanded(message_id, ix);
|
||||
|
||||
let codeblock_header_bg = cx
|
||||
.theme()
|
||||
.colors()
|
||||
.element_background
|
||||
.blend(cx.theme().colors().editor_foreground.opacity(0.01));
|
||||
.blend(cx.theme().colors().editor_foreground.opacity(0.025));
|
||||
|
||||
let control_buttons = h_flex()
|
||||
.visible_on_hover(CODEBLOCK_CONTAINER_GROUP)
|
||||
@@ -519,44 +516,48 @@ fn render_markdown_code_block(
|
||||
}
|
||||
}),
|
||||
)
|
||||
.when(can_expand, |header| {
|
||||
header.child(
|
||||
IconButton::new(
|
||||
("expand-collapse-code", ix),
|
||||
if is_expanded {
|
||||
IconName::ChevronUp
|
||||
} else {
|
||||
IconName::ChevronDown
|
||||
},
|
||||
)
|
||||
.icon_color(Color::Muted)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text(if is_expanded {
|
||||
"Collapse Code"
|
||||
.child(
|
||||
IconButton::new(
|
||||
("expand-collapse-code", ix),
|
||||
if is_expanded {
|
||||
IconName::ChevronUp
|
||||
} else {
|
||||
"Expand Code"
|
||||
}))
|
||||
.on_click({
|
||||
let active_thread = active_thread.clone();
|
||||
move |_event, _window, cx| {
|
||||
active_thread.update(cx, |this, cx| {
|
||||
this.toggle_codeblock_expanded(message_id, ix);
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
IconName::ChevronDown
|
||||
},
|
||||
)
|
||||
});
|
||||
.icon_color(Color::Muted)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text(if is_expanded {
|
||||
"Collapse Code"
|
||||
} else {
|
||||
"Expand Code"
|
||||
}))
|
||||
.on_click({
|
||||
let active_thread = active_thread.clone();
|
||||
move |_event, _window, cx| {
|
||||
active_thread.update(cx, |this, cx| {
|
||||
this.toggle_codeblock_expanded(message_id, ix);
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
let codeblock_header = h_flex()
|
||||
.relative()
|
||||
.p_1()
|
||||
.gap_1()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.bg(codeblock_header_bg)
|
||||
.rounded_t_md()
|
||||
.map(|this| {
|
||||
if !is_expanded {
|
||||
this.rounded_md()
|
||||
} else {
|
||||
this.rounded_t_md()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
}
|
||||
})
|
||||
.children(label)
|
||||
.child(control_buttons);
|
||||
|
||||
@@ -564,12 +565,12 @@ fn render_markdown_code_block(
|
||||
.group(CODEBLOCK_CONTAINER_GROUP)
|
||||
.my_2()
|
||||
.overflow_hidden()
|
||||
.rounded_lg()
|
||||
.rounded_md()
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(codeblock_header)
|
||||
.when(can_expand && !is_expanded, |this| this.max_h_80())
|
||||
.when(!is_expanded, |this| this.h(rems_from_px(31.)))
|
||||
}
|
||||
|
||||
fn open_path(
|
||||
@@ -630,10 +631,13 @@ fn render_code_language(
|
||||
.map(|language| language.name().into())
|
||||
.unwrap_or(name_fallback);
|
||||
|
||||
let label_size = rems(0.8125);
|
||||
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.children(icon_path.map(|icon| icon.color(Color::Muted).size(IconSize::Small)))
|
||||
.child(Label::new(language_label).size(LabelSize::Small))
|
||||
.px_1()
|
||||
.gap_1p5()
|
||||
.children(icon_path.map(|icon| icon.color(Color::Muted).size(IconSize::XSmall)))
|
||||
.child(div().text_size(label_size).child(language_label))
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
@@ -1014,6 +1018,7 @@ impl ActiveThread {
|
||||
self.push_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1027,6 +1032,7 @@ impl ActiveThread {
|
||||
self.edited_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1540,11 +1546,15 @@ impl ActiveThread {
|
||||
let project = self.thread.read(cx).project().clone();
|
||||
let prompt_store = self.thread_store.read(cx).prompt_store().clone();
|
||||
|
||||
let git_store = project.read(cx).git_store().clone();
|
||||
let checkpoint = git_store.update(cx, |git_store, cx| git_store.checkpoint(cx));
|
||||
|
||||
let load_context_task =
|
||||
crate::context::load_context(new_context, &project, &prompt_store, cx);
|
||||
self._load_edited_message_context_task =
|
||||
Some(cx.spawn_in(window, async move |this, cx| {
|
||||
let context = load_context_task.await;
|
||||
let (context, checkpoint) =
|
||||
futures::future::join(load_context_task, checkpoint).await;
|
||||
let _ = this
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
@@ -1553,6 +1563,7 @@ impl ActiveThread {
|
||||
Role::User,
|
||||
vec![MessageSegment::Text(edited_text)],
|
||||
Some(context.loaded_context),
|
||||
checkpoint.ok(),
|
||||
cx,
|
||||
);
|
||||
for message_id in this.messages_after(message_id) {
|
||||
@@ -2362,41 +2373,17 @@ impl ActiveThread {
|
||||
}),
|
||||
transform: Some(Arc::new({
|
||||
let active_thread = cx.entity();
|
||||
let editor_bg = cx.theme().colors().editor_background;
|
||||
|
||||
move |el, range, metadata, _, cx| {
|
||||
let can_expand = metadata.line_count
|
||||
>= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;
|
||||
|
||||
if !can_expand {
|
||||
return el;
|
||||
}
|
||||
|
||||
move |element, range, _, _, cx| {
|
||||
let is_expanded = active_thread
|
||||
.read(cx)
|
||||
.is_codeblock_expanded(message_id, range.start);
|
||||
|
||||
if is_expanded {
|
||||
return el;
|
||||
return element;
|
||||
}
|
||||
|
||||
el.child(
|
||||
div()
|
||||
.absolute()
|
||||
.bottom_0()
|
||||
.left_0()
|
||||
.w_full()
|
||||
.h_1_4()
|
||||
.rounded_b_lg()
|
||||
.bg(linear_gradient(
|
||||
0.,
|
||||
linear_color_stop(editor_bg, 0.),
|
||||
linear_color_stop(
|
||||
editor_bg.opacity(0.),
|
||||
1.,
|
||||
),
|
||||
)),
|
||||
)
|
||||
element
|
||||
}
|
||||
})),
|
||||
},
|
||||
@@ -3408,6 +3395,11 @@ impl ActiveThread {
|
||||
.or_insert(true);
|
||||
*is_expanded = !*is_expanded;
|
||||
}
|
||||
|
||||
pub fn scroll_to_bottom(&mut self, cx: &mut Context<Self>) {
|
||||
self.list_state.reset(self.messages.len());
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ActiveThreadEvent {
|
||||
|
||||
@@ -85,6 +85,7 @@ actions!(
|
||||
KeepAll,
|
||||
Follow,
|
||||
ResetTrialUpsell,
|
||||
ResetTrialEndUpsell,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -216,7 +217,6 @@ fn register_slash_commands(cx: &mut App) {
|
||||
slash_command_registry.register_command(assistant_slash_commands::PromptSlashCommand, true);
|
||||
slash_command_registry.register_command(assistant_slash_commands::SelectionCommand, true);
|
||||
slash_command_registry.register_command(assistant_slash_commands::DefaultSlashCommand, false);
|
||||
slash_command_registry.register_command(assistant_slash_commands::TerminalSlashCommand, true);
|
||||
slash_command_registry.register_command(assistant_slash_commands::NowSlashCommand, false);
|
||||
slash_command_registry
|
||||
.register_command(assistant_slash_commands::DiagnosticsSlashCommand, true);
|
||||
|
||||
@@ -3,10 +3,10 @@ use fs::Fs;
|
||||
use gpui::{Entity, FocusHandle, SharedString};
|
||||
|
||||
use crate::Thread;
|
||||
use language_model::{ConfiguredModel, LanguageModelRegistry};
|
||||
use language_model_selector::{
|
||||
use assistant_context_editor::language_model_selector::{
|
||||
LanguageModelSelector, LanguageModelSelectorPopoverMenu, ToggleModelSelector,
|
||||
};
|
||||
use language_model::{ConfiguredModel, LanguageModelRegistry};
|
||||
use settings::update_settings_file;
|
||||
use std::sync::Arc;
|
||||
use ui::{PopoverMenuHandle, Tooltip, prelude::*};
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use markdown::Markdown;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -17,6 +17,7 @@ use assistant_settings::{AssistantDockPosition, AssistantSettings};
|
||||
use assistant_slash_command::SlashCommandWorkingSet;
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
|
||||
use assistant_context_editor::language_model_selector::ToggleModelSelector;
|
||||
use client::{UserStore, zed_urls};
|
||||
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
|
||||
use fs::Fs;
|
||||
@@ -30,7 +31,6 @@ use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
LanguageModelProviderTosView, LanguageModelRegistry, RequestUsage, ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use language_model_selector::ToggleModelSelector;
|
||||
use project::{Project, ProjectPath, Worktree};
|
||||
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
|
||||
use proto::Plan;
|
||||
@@ -66,8 +66,8 @@ use crate::ui::AgentOnboardingModal;
|
||||
use crate::{
|
||||
AddContextServer, AgentDiffPane, ContextStore, DeleteRecentlyOpenThread, ExpandMessageEditor,
|
||||
Follow, InlineAssistant, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff,
|
||||
OpenHistory, ResetTrialUpsell, TextThreadStore, ThreadEvent, ToggleContextPicker,
|
||||
ToggleNavigationMenu, ToggleOptionsMenu,
|
||||
OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, TextThreadStore, ThreadEvent,
|
||||
ToggleContextPicker, ToggleNavigationMenu, ToggleOptionsMenu,
|
||||
};
|
||||
|
||||
const AGENT_PANEL_KEY: &str = "agent_panel";
|
||||
@@ -157,7 +157,10 @@ pub fn init(cx: &mut App) {
|
||||
window.refresh();
|
||||
})
|
||||
.register_action(|_workspace, _: &ResetTrialUpsell, _window, cx| {
|
||||
set_trial_upsell_dismissed(false, cx);
|
||||
TrialUpsell::set_dismissed(false, cx);
|
||||
})
|
||||
.register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| {
|
||||
TrialEndUpsell::set_dismissed(false, cx);
|
||||
});
|
||||
},
|
||||
)
|
||||
@@ -567,6 +570,15 @@ impl AgentPanel {
|
||||
menu = menu.header("Recently Opened");
|
||||
|
||||
for entry in recently_opened.iter() {
|
||||
if let RecentEntry::Context(context) = entry {
|
||||
if context.read(cx).path().is_none() {
|
||||
log::error!(
|
||||
"bug: text thread in recent history list was never saved"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let summary = entry.summary(cx);
|
||||
|
||||
menu = menu.entry_with_end_slot_on_hover(
|
||||
@@ -1200,12 +1212,7 @@ impl AgentPanel {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(workspace) = self
|
||||
.workspace
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace dropped"))
|
||||
.log_err()
|
||||
else {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -1290,14 +1297,26 @@ impl AgentPanel {
|
||||
let new_is_history = matches!(new_view, ActiveView::History);
|
||||
|
||||
match &self.active_view {
|
||||
ActiveView::Thread { thread, .. } => self.history_store.update(cx, |store, cx| {
|
||||
ActiveView::Thread { thread, .. } => {
|
||||
if let Some(thread) = thread.upgrade() {
|
||||
if thread.read(cx).is_empty() {
|
||||
let id = thread.read(cx).id().clone();
|
||||
store.remove_recently_opened_thread(id, cx);
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_thread(id, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
}
|
||||
ActiveView::PromptEditor { context_editor, .. } => {
|
||||
let context = context_editor.read(cx).context();
|
||||
// When switching away from an unsaved text thread, delete its entry.
|
||||
if context.read(cx).path().is_none() {
|
||||
let context = context.clone();
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_entry(&RecentEntry::Context(context), cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -1911,12 +1930,23 @@ impl AgentPanel {
|
||||
}
|
||||
}
|
||||
|
||||
fn should_render_trial_end_upsell(&self, cx: &mut Context<Self>) -> bool {
|
||||
if TrialEndUpsell::dismissed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let plan = self.user_store.read(cx).current_plan();
|
||||
let has_previous_trial = self.user_store.read(cx).trial_started_at().is_some();
|
||||
|
||||
matches!(plan, Some(Plan::Free)) && has_previous_trial
|
||||
}
|
||||
|
||||
fn should_render_upsell(&self, cx: &mut Context<Self>) -> bool {
|
||||
if !matches!(self.active_view, ActiveView::Thread { .. }) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if self.hide_trial_upsell || dismissed_trial_upsell() {
|
||||
if self.hide_trial_upsell || TrialUpsell::dismissed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1962,125 +1992,115 @@ impl AgentPanel {
|
||||
move |toggle_state, _window, cx| {
|
||||
let toggle_state_bool = toggle_state.selected();
|
||||
|
||||
set_trial_upsell_dismissed(toggle_state_bool, cx);
|
||||
TrialUpsell::set_dismissed(toggle_state_bool, cx);
|
||||
},
|
||||
);
|
||||
|
||||
Some(
|
||||
div().p_2().child(
|
||||
v_flex()
|
||||
let contents = div()
|
||||
.size_full()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.child(Headline::new("Build better with Zed Pro").size(HeadlineSize::Small))
|
||||
.child(
|
||||
Label::new("Try Zed Pro for free for 14 days - no credit card required.")
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new(
|
||||
"Use your own API keys or enable usage-based billing once you hit the cap.",
|
||||
)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.elevation_2(cx)
|
||||
.rounded(px(8.))
|
||||
.bg(cx.theme().colors().background.alpha(0.5))
|
||||
.p(px(3.))
|
||||
|
||||
.px_neg_1()
|
||||
.justify_between()
|
||||
.items_center()
|
||||
.child(h_flex().items_center().gap_1().child(checkbox))
|
||||
.child(
|
||||
div()
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.border_1()
|
||||
.rounded(px(5.))
|
||||
.border_color(cx.theme().colors().text.alpha(0.1))
|
||||
.overflow_hidden()
|
||||
.relative()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.px_4()
|
||||
.py_3()
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right(px(-1.0))
|
||||
.w(px(441.))
|
||||
.h(px(167.))
|
||||
.child(
|
||||
Vector::new(VectorName::Grid, rems_from_px(441.), rems_from_px(167.)).color(ui::Color::Custom(cx.theme().colors().text.alpha(0.1)))
|
||||
)
|
||||
Button::new("dismiss-button", "Not Now")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.on_click({
|
||||
let agent_panel = cx.entity();
|
||||
move |_, _, cx| {
|
||||
agent_panel.update(cx, |this, cx| {
|
||||
this.hide_trial_upsell = true;
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top(px(-8.0))
|
||||
.right_0()
|
||||
.w(px(400.))
|
||||
.h(px(92.))
|
||||
.child(
|
||||
Vector::new(VectorName::AiGrid, rems_from_px(400.), rems_from_px(92.)).color(ui::Color::Custom(cx.theme().colors().text.alpha(0.32)))
|
||||
)
|
||||
)
|
||||
// .child(
|
||||
// div()
|
||||
// .absolute()
|
||||
// .top_0()
|
||||
// .right(px(360.))
|
||||
// .size(px(401.))
|
||||
// .overflow_hidden()
|
||||
// .bg(cx.theme().colors().panel_background)
|
||||
// )
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.w(px(660.))
|
||||
.h(px(401.))
|
||||
.overflow_hidden()
|
||||
.bg(linear_gradient(
|
||||
75.,
|
||||
linear_color_stop(cx.theme().colors().panel_background.alpha(0.01), 1.0),
|
||||
linear_color_stop(cx.theme().colors().panel_background, 0.45),
|
||||
))
|
||||
)
|
||||
.child(Headline::new("Build better with Zed Pro").size(HeadlineSize::Small))
|
||||
.child(Label::new("Try Zed Pro for free for 14 days - no credit card required.").size(LabelSize::Small))
|
||||
.child(Label::new("Use your own API keys or enable usage-based billing once you hit the cap.").color(Color::Muted))
|
||||
Button::new("cta-button", "Start Trial")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(|_, _, cx| cx.open_url(&zed_urls::account_url(cx))),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
Some(self.render_upsell_container(cx, contents))
|
||||
}
|
||||
|
||||
fn render_trial_end_upsell(
|
||||
&self,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<impl IntoElement> {
|
||||
if !self.should_render_trial_end_upsell(cx) {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
self.render_upsell_container(
|
||||
cx,
|
||||
div()
|
||||
.size_full()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.child(
|
||||
Headline::new("Your Zed Pro trial has expired.").size(HeadlineSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new("You've been automatically reset to the free plan.")
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.px_neg_1()
|
||||
.justify_between()
|
||||
.items_center()
|
||||
.child(div())
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.px_neg_1()
|
||||
.justify_between()
|
||||
.items_center()
|
||||
.child(h_flex().items_center().gap_1().child(checkbox))
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Button::new("dismiss-button", "Not Now")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.on_click({
|
||||
let agent_panel = cx.entity();
|
||||
move |_, _, cx| {
|
||||
agent_panel.update(
|
||||
cx,
|
||||
|this, cx| {
|
||||
let hidden =
|
||||
this.hide_trial_upsell;
|
||||
println!("hidden: {}", hidden);
|
||||
this.hide_trial_upsell = true;
|
||||
let new_hidden =
|
||||
this.hide_trial_upsell;
|
||||
println!(
|
||||
"new_hidden: {}",
|
||||
new_hidden
|
||||
);
|
||||
|
||||
cx.notify();
|
||||
},
|
||||
);
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("cta-button", "Start Trial")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(|_, _, cx| {
|
||||
cx.open_url(&zed_urls::account_url(cx))
|
||||
}),
|
||||
),
|
||||
Button::new("dismiss-button", "Stay on Free")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.on_click({
|
||||
let agent_panel = cx.entity();
|
||||
move |_, _, cx| {
|
||||
agent_panel.update(cx, |_this, cx| {
|
||||
TrialEndUpsell::set_dismissed(true, cx);
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("cta-button", "Upgrade to Zed Pro")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(|_, _, cx| {
|
||||
cx.open_url(&zed_urls::account_url(cx))
|
||||
}),
|
||||
),
|
||||
),
|
||||
),
|
||||
@@ -2088,6 +2108,91 @@ impl AgentPanel {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_upsell_container(&self, cx: &mut Context<Self>, content: Div) -> Div {
|
||||
div().p_2().child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.elevation_2(cx)
|
||||
.rounded(px(8.))
|
||||
.bg(cx.theme().colors().background.alpha(0.5))
|
||||
.p(px(3.))
|
||||
.child(
|
||||
div()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.border_1()
|
||||
.rounded(px(5.))
|
||||
.border_color(cx.theme().colors().text.alpha(0.1))
|
||||
.overflow_hidden()
|
||||
.relative()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.px_4()
|
||||
.py_3()
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right(px(-1.0))
|
||||
.w(px(441.))
|
||||
.h(px(167.))
|
||||
.child(
|
||||
Vector::new(
|
||||
VectorName::Grid,
|
||||
rems_from_px(441.),
|
||||
rems_from_px(167.),
|
||||
)
|
||||
.color(ui::Color::Custom(cx.theme().colors().text.alpha(0.1))),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top(px(-8.0))
|
||||
.right_0()
|
||||
.w(px(400.))
|
||||
.h(px(92.))
|
||||
.child(
|
||||
Vector::new(
|
||||
VectorName::AiGrid,
|
||||
rems_from_px(400.),
|
||||
rems_from_px(92.),
|
||||
)
|
||||
.color(ui::Color::Custom(cx.theme().colors().text.alpha(0.32))),
|
||||
),
|
||||
)
|
||||
// .child(
|
||||
// div()
|
||||
// .absolute()
|
||||
// .top_0()
|
||||
// .right(px(360.))
|
||||
// .size(px(401.))
|
||||
// .overflow_hidden()
|
||||
// .bg(cx.theme().colors().panel_background)
|
||||
// )
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.w(px(660.))
|
||||
.h(px(401.))
|
||||
.overflow_hidden()
|
||||
.bg(linear_gradient(
|
||||
75.,
|
||||
linear_color_stop(
|
||||
cx.theme().colors().panel_background.alpha(0.01),
|
||||
1.0,
|
||||
),
|
||||
linear_color_stop(cx.theme().colors().panel_background, 0.45),
|
||||
)),
|
||||
)
|
||||
.child(content),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_active_thread_or_empty_state(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
@@ -2806,6 +2911,7 @@ impl Render for AgentPanel {
|
||||
.on_action(cx.listener(Self::toggle_zoom))
|
||||
.child(self.render_toolbar(window, cx))
|
||||
.children(self.render_trial_upsell(window, cx))
|
||||
.children(self.render_trial_end_upsell(window, cx))
|
||||
.map(|parent| match &self.active_view {
|
||||
ActiveView::Thread { .. } => parent
|
||||
.relative()
|
||||
@@ -2993,25 +3099,14 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate {
}
}

const DISMISSED_TRIAL_UPSELL_KEY: &str = "dismissed-trial-upsell";
struct TrialUpsell;

fn dismissed_trial_upsell() -> bool {
db::kvp::KEY_VALUE_STORE
.read_kvp(DISMISSED_TRIAL_UPSELL_KEY)
.log_err()
.map_or(false, |s| s.is_some())
impl Dismissable for TrialUpsell {
const KEY: &'static str = "dismissed-trial-upsell";
}

fn set_trial_upsell_dismissed(is_dismissed: bool, cx: &mut App) {
db::write_and_log(cx, move || async move {
if is_dismissed {
db::kvp::KEY_VALUE_STORE
.write_kvp(DISMISSED_TRIAL_UPSELL_KEY.into(), "1".into())
.await
} else {
db::kvp::KEY_VALUE_STORE
.delete_kvp(DISMISSED_TRIAL_UPSELL_KEY.into())
.await
}
})
struct TrialEndUpsell;

impl Dismissable for TrialEndUpsell {
const KEY: &'static str = "dismissed-trial-end-upsell";
}
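The hunk above swaps the hand-rolled key-value helpers for the `db::kvp::Dismissable` trait. The trait definition itself is not part of this compare; judging from the removed helpers and the call sites (`TrialUpsell::dismissed()`, `TrialUpsell::set_dismissed(bool, cx)`), its shape is roughly the sketch below (an assumption, not taken from the diff).

```rust
// Assumed shape of db::kvp::Dismissable, inferred from the helpers it replaces
// in the hunk above; the real definition in the Zed codebase may differ.
use db::kvp::KEY_VALUE_STORE;
use gpui::App;
use util::ResultExt as _;

pub trait Dismissable {
    const KEY: &'static str;

    fn dismissed() -> bool {
        KEY_VALUE_STORE
            .read_kvp(Self::KEY)
            .log_err()
            .map_or(false, |value| value.is_some())
    }

    fn set_dismissed(is_dismissed: bool, cx: &mut App) {
        db::write_and_log(cx, move || async move {
            if is_dismissed {
                KEY_VALUE_STORE.write_kvp(Self::KEY.into(), "1".into()).await
            } else {
                KEY_VALUE_STORE.delete_kvp(Self::KEY.into()).await
            }
        })
    }
}

// Opting a new banner type in is then a two-liner; `OnboardingBanner` is a
// made-up example, not something from this compare.
struct OnboardingBanner;

impl Dismissable for OnboardingBanner {
    const KEY: &'static str = "dismissed-onboarding-banner";
}
```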
@@ -1,7 +1,7 @@
use crate::context::ContextLoadResult;
use crate::inline_prompt_editor::CodegenStatus;
use crate::{context::load_context, context_store::ContextStore};
use anyhow::Result;
use anyhow::{Context as _, Result};
use assistant_settings::AssistantSettings;
use client::telemetry::Telemetry;
use collections::HashSet;
@@ -419,16 +419,16 @@ impl CodegenAlternative {
if start_buffer.remote_id() == end_buffer.remote_id() {
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
} else {
return Err(anyhow::anyhow!("invalid transformation range"));
anyhow::bail!("invalid transformation range");
}
} else {
return Err(anyhow::anyhow!("invalid transformation range"));
anyhow::bail!("invalid transformation range");
};

let prompt = self
.builder
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
.context("generating content prompt")?;

let context_task = self.context_store.as_ref().map(|context_store| {
if let Some(project) = self.project.upgrade() {
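The changes in this hunk follow two standard `anyhow` idioms: `anyhow::bail!(...)` in place of `return Err(anyhow::anyhow!(...))`, and `.context(...)` in place of a manual `map_err(|e| anyhow!(...))`. A self-contained sketch of the same idioms (the function below is invented for illustration):

```rust
use anyhow::{Context as _, Result, bail};

// Hypothetical helper, only to show the two idioms used in the hunk above.
fn parse_port(input: &str) -> Result<u16> {
    if input.is_empty() {
        // Same as `return Err(anyhow::anyhow!("empty port"));`.
        bail!("empty port");
    }
    input
        .parse::<u16>()
        // Attaches context to the underlying ParseIntError instead of
        // rebuilding the error by hand with `map_err` + `anyhow!`.
        .context("parsing port number")
}

fn main() -> Result<()> {
    assert_eq!(parse_port("8080")?, 8080);
    assert!(parse_port("").is_err());
    Ok(())
}
```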
@@ -2,7 +2,7 @@ use std::ops::Range;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_context_editor::AssistantContext;
|
||||
use collections::{HashSet, IndexSet};
|
||||
use futures::{self, FutureExt};
|
||||
@@ -142,17 +142,12 @@ impl ContextStore {
|
||||
remove_if_exists: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<Option<AgentContextHandle>> {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return Err(anyhow!("failed to read project"));
|
||||
};
|
||||
|
||||
let Some(entry_id) = project
|
||||
let project = self.project.upgrade().context("failed to read project")?;
|
||||
let entry_id = project
|
||||
.read(cx)
|
||||
.entry_for_path(project_path, cx)
|
||||
.map(|entry| entry.id)
|
||||
else {
|
||||
return Err(anyhow!("no entry found for directory context"));
|
||||
};
|
||||
.context("no entry found for directory context")?;
|
||||
|
||||
let context_id = self.next_context_id.post_inc();
|
||||
let context = AgentContextHandle::Directory(DirectoryContextHandle {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{collections::VecDeque, path::Path, sync::Arc};
|
||||
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use anyhow::Context as _;
|
||||
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
|
||||
use chrono::{DateTime, Utc};
|
||||
use futures::future::{TryFutureExt as _, join_all};
|
||||
@@ -130,7 +130,10 @@ impl HistoryStore {
|
||||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async { Err(anyhow!("no thread store")) }.boxed()
|
||||
async {
|
||||
anyhow::bail!("no thread store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
SerializedRecentEntry::Context(id) => context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
@@ -140,7 +143,10 @@ impl HistoryStore {
|
||||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async { Err(anyhow!("no context store")) }.boxed()
|
||||
async {
|
||||
anyhow::bail!("no context store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
});
|
||||
let entries = join_all(entries)
|
||||
|
||||
@@ -9,8 +9,10 @@ use crate::terminal_codegen::TerminalCodegen;
|
||||
use crate::thread_store::{TextThreadStore, ThreadStore};
|
||||
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist};
|
||||
use crate::{RemoveAllContext, ToggleContextPicker};
|
||||
use assistant_context_editor::language_model_selector::ToggleModelSelector;
|
||||
use client::ErrorExt;
|
||||
use collections::VecDeque;
|
||||
use db::kvp::Dismissable;
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
@@ -23,7 +25,6 @@ use gpui::{
|
||||
Focusable, FontWeight, Subscription, TextStyle, WeakEntity, Window, anchored, deferred, point,
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use language_model_selector::ToggleModelSelector;
|
||||
use parking_lot::Mutex;
|
||||
use settings::Settings;
|
||||
use std::cmp;
|
||||
@@ -33,7 +34,6 @@ use ui::utils::WithRemSize;
|
||||
use ui::{
|
||||
CheckboxWithLabel, IconButtonShape, KeyBinding, Popover, PopoverMenuHandle, Tooltip, prelude::*,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct PromptEditor<T> {
|
||||
@@ -722,7 +722,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
.child(CheckboxWithLabel::new(
|
||||
"dont-show-again",
|
||||
Label::new("Don't show again"),
|
||||
if dismissed_rate_limit_notice() {
|
||||
if RateLimitNotice::dismissed() {
|
||||
ui::ToggleState::Selected
|
||||
} else {
|
||||
ui::ToggleState::Unselected
|
||||
@@ -734,7 +734,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
ui::ToggleState::Selected => true,
|
||||
};
|
||||
|
||||
set_rate_limit_notice_dismissed(is_dismissed, cx)
|
||||
RateLimitNotice::set_dismissed(is_dismissed, cx);
|
||||
},
|
||||
))
|
||||
.child(
|
||||
@@ -974,7 +974,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
CodegenStatus::Error(error) => {
|
||||
if cx.has_flag::<ZedProFeatureFlag>()
|
||||
&& error.error_code() == proto::ErrorCode::RateLimitExceeded
|
||||
&& !dismissed_rate_limit_notice()
|
||||
&& !RateLimitNotice::dismissed()
|
||||
{
|
||||
self.show_rate_limit_notice = true;
|
||||
cx.notify();
|
||||
@@ -1180,27 +1180,10 @@ impl PromptEditor<TerminalCodegen> {
|
||||
}
|
||||
}
|
||||
|
||||
const DISMISSED_RATE_LIMIT_NOTICE_KEY: &str = "dismissed-rate-limit-notice";
struct RateLimitNotice;

fn dismissed_rate_limit_notice() -> bool {
db::kvp::KEY_VALUE_STORE
.read_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY)
.log_err()
.map_or(false, |s| s.is_some())
}

fn set_rate_limit_notice_dismissed(is_dismissed: bool, cx: &mut App) {
db::write_and_log(cx, move || async move {
if is_dismissed {
db::kvp::KEY_VALUE_STORE
.write_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY.into(), "1".into())
.await
} else {
db::kvp::KEY_VALUE_STORE
.delete_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY.into())
.await
}
})
impl Dismissable for RateLimitNotice {
const KEY: &'static str = "dismissed-rate-limit-notice";
}

pub enum CodegenStatus {

@@ -8,6 +8,7 @@ use crate::ui::{
AnimatedLabel, MaxModeTooltip,
preview::{AgentPreview, UsageCallout},
};
use assistant_context_editor::language_model_selector::ToggleModelSelector;
use assistant_settings::{AssistantSettings, CompletionMode};
use buffer_diff::BufferDiff;
use client::UserStore;
@@ -30,7 +31,6 @@ use language_model::{
ConfiguredModel, LanguageModelRequestMessage, MessageContent, RequestUsage,
ZED_CLOUD_PROVIDER_ID,
};
use language_model_selector::ToggleModelSelector;
use multi_buffer;
use project::Project;
use prompt_store::PromptStore;

@@ -24,7 +24,7 @@ use language_model::{
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
LanguageModelToolResultContent, LanguageModelToolUseId, MessageContent,
ModelRequestLimitReachedError, PaymentRequiredError, RequestUsage, Role, SelectedModel,
StopReason, TokenUsage,
StopReason, TokenUsage, WrappedTextContent,
};
use postage::stream::Stream as _;
use project::Project;
@@ -214,7 +214,7 @@ pub struct GitState {
pub diff: Option<String>,
}

#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct ThreadCheckpoint {
message_id: MessageId,
git_checkpoint: GitStoreCheckpoint,
@@ -881,7 +881,10 @@ impl Thread {
|
||||
|
||||
pub fn output_for_tool(&self, id: &LanguageModelToolUseId) -> Option<&Arc<str>> {
|
||||
match &self.tool_use.tool_result(id)?.content {
|
||||
LanguageModelToolResultContent::Text(str) => Some(str),
|
||||
LanguageModelToolResultContent::Text(text)
|
||||
| LanguageModelToolResultContent::WrappedText(WrappedTextContent { text, .. }) => {
|
||||
Some(text)
|
||||
}
|
||||
LanguageModelToolResultContent::Image(_) => {
|
||||
// TODO: We should display image
|
||||
None
|
||||
@@ -996,6 +999,7 @@ impl Thread {
|
||||
new_role: Role,
|
||||
new_segments: Vec<MessageSegment>,
|
||||
loaded_context: Option<LoadedContext>,
|
||||
checkpoint: Option<GitStoreCheckpoint>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> bool {
|
||||
let Some(message) = self.messages.iter_mut().find(|message| message.id == id) else {
|
||||
@@ -1006,6 +1010,15 @@ impl Thread {
|
||||
if let Some(context) = loaded_context {
|
||||
message.loaded_context = context;
|
||||
}
|
||||
if let Some(git_checkpoint) = checkpoint {
|
||||
self.checkpoints_by_message.insert(
|
||||
id,
|
||||
ThreadCheckpoint {
|
||||
message_id: id,
|
||||
git_checkpoint,
|
||||
},
|
||||
);
|
||||
}
|
||||
self.touch_updated_at();
|
||||
cx.emit(ThreadEvent::MessageEdited(id));
|
||||
true
|
||||
@@ -1617,7 +1630,7 @@ impl Thread {
|
||||
CompletionRequestStatus::Failed {
|
||||
code, message, request_id
|
||||
} => {
|
||||
return Err(anyhow!("completion request failed. request_id: {request_id}, code: {code}, message: {message}"));
|
||||
anyhow::bail!("completion request failed. request_id: {request_id}, code: {code}, message: {message}");
|
||||
}
|
||||
CompletionRequestStatus::UsageUpdated {
|
||||
amount, limit
|
||||
@@ -2505,8 +2518,12 @@ impl Thread {

writeln!(markdown, "**\n")?;
match &tool_result.content {
LanguageModelToolResultContent::Text(str) => {
writeln!(markdown, "{}", str)?;
LanguageModelToolResultContent::Text(text)
| LanguageModelToolResultContent::WrappedText(WrappedTextContent {
text,
..
}) => {
writeln!(markdown, "{text}")?;
}
LanguageModelToolResultContent::Image(image) => {
writeln!(markdown, "", image.source)?;

@@ -419,7 +419,7 @@ impl ThreadStore {
|
||||
let thread = database
|
||||
.try_find_thread(id.clone())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
|
||||
.with_context(|| format!("no thread found with ID: {id:?}"))?;
|
||||
|
||||
let thread = this.update_in(cx, |this, window, cx| {
|
||||
cx.new(|cx| {
|
||||
@@ -699,20 +699,14 @@ impl SerializedThread {
|
||||
SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
|
||||
saved_thread_json,
|
||||
)?),
|
||||
_ => Err(anyhow!(
|
||||
"unrecognized serialized thread version: {}",
|
||||
version
|
||||
)),
|
||||
_ => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
|
||||
},
|
||||
None => {
|
||||
let saved_thread =
|
||||
serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
|
||||
Ok(saved_thread.upgrade())
|
||||
}
|
||||
version => Err(anyhow!(
|
||||
"unrecognized serialized thread version: {:?}",
|
||||
version
|
||||
)),
|
||||
version => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,6 +42,20 @@ pub enum Model {
|
||||
alias = "claude-3-7-sonnet-thinking-latest"
|
||||
)]
|
||||
Claude3_7SonnetThinking,
|
||||
#[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
|
||||
ClaudeOpus4,
|
||||
#[serde(
|
||||
rename = "claude-opus-4-thinking",
|
||||
alias = "claude-opus-4-thinking-latest"
|
||||
)]
|
||||
ClaudeOpus4Thinking,
|
||||
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
|
||||
ClaudeSonnet4,
|
||||
#[serde(
|
||||
rename = "claude-sonnet-4-thinking",
|
||||
alias = "claude-sonnet-4-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4Thinking,
|
||||
#[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
|
||||
Claude3_5Haiku,
|
||||
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
|
||||
@@ -89,13 +103,25 @@ impl Model {
|
||||
Ok(Self::Claude3Sonnet)
|
||||
} else if id.starts_with("claude-3-haiku") {
|
||||
Ok(Self::Claude3Haiku)
|
||||
} else if id.starts_with("claude-opus-4-thinking") {
|
||||
Ok(Self::ClaudeOpus4Thinking)
|
||||
} else if id.starts_with("claude-opus-4") {
|
||||
Ok(Self::ClaudeOpus4)
|
||||
} else if id.starts_with("claude-sonnet-4-thinking") {
|
||||
Ok(Self::ClaudeSonnet4Thinking)
|
||||
} else if id.starts_with("claude-sonnet-4") {
|
||||
Ok(Self::ClaudeSonnet4)
|
||||
} else {
|
||||
Err(anyhow!("invalid model id"))
|
||||
anyhow::bail!("invalid model id {id}");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> &str {
|
||||
match self {
|
||||
Model::ClaudeOpus4 => "claude-opus-4-latest",
|
||||
Model::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
|
||||
Model::ClaudeSonnet4 => "claude-sonnet-4-latest",
|
||||
Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
|
||||
Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
|
||||
@@ -110,6 +136,8 @@ impl Model {
|
||||
/// The id of the model that should be used for making API requests
|
||||
pub fn request_id(&self) -> &str {
|
||||
match self {
|
||||
Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => "claude-opus-4-20250514",
|
||||
Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
|
||||
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
@@ -122,6 +150,10 @@ impl Model {
|
||||
|
||||
pub fn display_name(&self) -> &str {
|
||||
match self {
|
||||
Model::ClaudeOpus4 => "Claude 4 Opus",
|
||||
Model::ClaudeOpus4Thinking => "Claude 4 Opus Thinking",
|
||||
Model::ClaudeSonnet4 => "Claude 4 Sonnet",
|
||||
Model::ClaudeSonnet4Thinking => "Claude 4 Sonnet Thinking",
|
||||
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
|
||||
@@ -137,7 +169,11 @@ impl Model {
|
||||
|
||||
pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
@@ -156,7 +192,11 @@ impl Model {
|
||||
|
||||
pub fn max_token_count(&self) -> usize {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
@@ -173,7 +213,11 @@ impl Model {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3_5Haiku => 8_192,
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking => 8_192,
|
||||
Self::Custom {
|
||||
max_output_tokens, ..
|
||||
} => max_output_tokens.unwrap_or(4_096),
|
||||
@@ -182,7 +226,11 @@ impl Model {
|
||||
|
||||
pub fn default_temperature(&self) -> f32 {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3_5Haiku
|
||||
@@ -201,10 +249,14 @@ impl Model {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => AnthropicModelMode::Default,
|
||||
Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
|
||||
Self::Claude3_7SonnetThinking
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4Thinking => AnthropicModelMode::Thinking {
|
||||
budget_tokens: Some(4_096),
|
||||
},
|
||||
Self::Custom { mode, .. } => mode.clone(),
|
||||
@@ -385,10 +437,10 @@ impl RateLimitInfo {
}
}

fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> anyhow::Result<&'a str> {
Ok(headers
.get(key)
.ok_or_else(|| anyhow!("missing header `{key}`"))?
.with_context(|| format!("missing header `{key}`"))?
.to_str()?)
}

@@ -1,6 +1,6 @@
// This crate was essentially pulled out verbatim from main `zed` crate to avoid having to run RustEmbed macro whenever zed has to be rebuilt. It saves a second or two on an incremental build.
use anyhow::anyhow;

use anyhow::Context as _;
use gpui::{App, AssetSource, Result, SharedString};
use rust_embed::RustEmbed;

@@ -21,7 +21,7 @@ impl AssetSource for Assets {
fn load(&self, path: &str) -> Result<Option<std::borrow::Cow<'static, [u8]>>> {
Self::get(path)
.map(|f| Some(f.data))
.ok_or_else(|| anyhow!("could not find asset at path \"{}\"", path))
.with_context(|| format!("loading asset at path {path:?}"))
}

fn list(&self, path: &str) -> Result<Vec<SharedString>> {
@@ -39,7 +39,7 @@ impl AssetSource for Assets {

impl Assets {
/// Populate the [`TextSystem`] of the given [`AppContext`] with all `.ttf` fonts in the `fonts` directory.
pub fn load_fonts(&self, cx: &App) -> gpui::Result<()> {
pub fn load_fonts(&self, cx: &App) -> anyhow::Result<()> {
let font_paths = self.list("fonts")?;
let mut embedded_fonts = Vec::new();
for font_path in font_paths {

@@ -22,6 +22,7 @@ clock.workspace = true
|
||||
collections.workspace = true
|
||||
context_server.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
@@ -29,15 +30,16 @@ gpui.workspace = true
|
||||
indexed_docs.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_model_selector.workspace = true
|
||||
log.workspace = true
|
||||
multi_buffer.workspace = true
|
||||
open_ai.workspace = true
|
||||
ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
picker.workspace = true
|
||||
project.workspace = true
|
||||
prompt_store.workspace = true
|
||||
proto.workspace = true
|
||||
regex.workspace = true
|
||||
rope.workspace = true
|
||||
rpc.workspace = true
|
||||
|
||||
@@ -2,6 +2,7 @@ mod context;
|
||||
mod context_editor;
|
||||
mod context_history;
|
||||
mod context_store;
|
||||
pub mod language_model_selector;
|
||||
mod slash_command;
|
||||
mod slash_command_picker;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#[cfg(test)]
|
||||
mod context_tests;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, bail};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::{
|
||||
SlashCommandContent, SlashCommandEvent, SlashCommandLine, SlashCommandOutputSection,
|
||||
@@ -3011,7 +3011,7 @@ impl SavedContext {
|
||||
let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
|
||||
match saved_context_json
|
||||
.get("version")
|
||||
.ok_or_else(|| anyhow!("version not found"))?
|
||||
.context("version not found")?
|
||||
{
|
||||
serde_json::Value::String(version) => match version.as_str() {
|
||||
SavedContext::VERSION => {
|
||||
@@ -3032,9 +3032,9 @@ impl SavedContext {
|
||||
serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
|
||||
Ok(saved_context.upgrade())
|
||||
}
|
||||
_ => Err(anyhow!("unrecognized saved context version: {}", version)),
|
||||
_ => anyhow::bail!("unrecognized saved context version: {version:?}"),
|
||||
},
|
||||
_ => Err(anyhow!("version not found on saved context")),
|
||||
_ => anyhow::bail!("version not found on saved context"),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
use crate::language_model_selector::{
|
||||
LanguageModelSelector, LanguageModelSelectorPopoverMenu, ToggleModelSelector,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection, SlashCommandWorkingSet};
|
||||
@@ -36,9 +39,6 @@ use language_model::{
|
||||
LanguageModelImage, LanguageModelProvider, LanguageModelProviderTosView, LanguageModelRegistry,
|
||||
Role,
|
||||
};
|
||||
use language_model_selector::{
|
||||
LanguageModelSelector, LanguageModelSelectorPopoverMenu, ToggleModelSelector,
|
||||
};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use picker::Picker;
|
||||
use project::{Project, Worktree};
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::{
|
||||
AssistantContext, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext,
|
||||
SavedContextMetadata,
|
||||
};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use assistant_slash_command::{SlashCommandId, SlashCommandWorkingSet};
|
||||
use client::{Client, TypedEnvelope, proto, telemetry::Telemetry};
|
||||
use clock::ReplicaId;
|
||||
@@ -164,16 +164,18 @@ impl ContextStore {
|
||||
) -> Result<proto::OpenContextResponse> {
|
||||
let context_id = ContextId::from_proto(envelope.payload.context_id);
|
||||
let operations = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("only the host contexts can be opened"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
"only the host contexts can be opened"
|
||||
);
|
||||
|
||||
let context = this
|
||||
.loaded_context_for_id(&context_id, cx)
|
||||
.context("context not found")?;
|
||||
if context.read(cx).replica_id() != ReplicaId::default() {
|
||||
return Err(anyhow!("context must be opened via the host"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
context.read(cx).replica_id() == ReplicaId::default(),
|
||||
"context must be opened via the host"
|
||||
);
|
||||
|
||||
anyhow::Ok(
|
||||
context
|
||||
@@ -193,9 +195,10 @@ impl ContextStore {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::CreateContextResponse> {
|
||||
let (context_id, operations) = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("can only create contexts as the host"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
"can only create contexts as the host"
|
||||
);
|
||||
|
||||
let context = this.create(cx);
|
||||
let context_id = context.read(cx).id().clone();
|
||||
@@ -237,9 +240,10 @@ impl ContextStore {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::SynchronizeContextsResponse> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("only the host can synchronize contexts"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
"only the host can synchronize contexts"
|
||||
);
|
||||
|
||||
let mut local_versions = Vec::new();
|
||||
for remote_version_proto in envelope.payload.contexts {
|
||||
@@ -370,7 +374,7 @@ impl ContextStore {
|
||||
) -> Task<Result<Entity<AssistantContext>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
return Task::ready(Err(anyhow::anyhow!("project was not remote")));
|
||||
};
|
||||
|
||||
let replica_id = project.replica_id();
|
||||
@@ -533,7 +537,7 @@ impl ContextStore {
|
||||
) -> Task<Result<Entity<AssistantContext>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
return Task::ready(Err(anyhow::anyhow!("project was not remote")));
|
||||
};
|
||||
|
||||
if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
|
||||
|
||||
@@ -23,6 +23,7 @@ log.workspace = true
|
||||
ollama = { workspace = true, features = ["schemars"] }
|
||||
open_ai = { workspace = true, features = ["schemars"] }
|
||||
deepseek = { workspace = true, features = ["schemars"] }
|
||||
mistral = { workspace = true, features = ["schemars"] }
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
settings.workspace = true
|
||||
|
||||
@@ -10,6 +10,7 @@ use deepseek::Model as DeepseekModel;
|
||||
use gpui::{App, Pixels, SharedString};
|
||||
use language_model::{CloudModel, LanguageModel};
|
||||
use lmstudio::Model as LmStudioModel;
|
||||
use mistral::Model as MistralModel;
|
||||
use ollama::Model as OllamaModel;
|
||||
use schemars::{JsonSchema, schema::Schema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -71,6 +72,11 @@ pub enum AssistantProviderContentV1 {
|
||||
default_model: Option<DeepseekModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
#[serde(rename = "mistral")]
|
||||
Mistral {
|
||||
default_model: Option<MistralModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
@@ -249,6 +255,12 @@ impl AssistantSettingsContent {
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::Mistral { default_model, .. } => {
|
||||
default_model.map(|model| LanguageModelSelection {
|
||||
provider: "mistral".into(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
}),
|
||||
inline_assistant_model: None,
|
||||
commit_message_model: None,
|
||||
@@ -700,6 +712,7 @@ impl JsonSchema for LanguageModelProviderSetting {
|
||||
"zed.dev".into(),
|
||||
"copilot_chat".into(),
|
||||
"deepseek".into(),
|
||||
"mistral".into(),
|
||||
]),
|
||||
..Default::default()
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ use anyhow::Result;
|
||||
use futures::StreamExt;
|
||||
use futures::stream::{self, BoxStream};
|
||||
use gpui::{App, SharedString, Task, WeakEntity, Window};
|
||||
use language::HighlightId;
|
||||
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
|
||||
pub use language_model::Role;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -16,6 +17,7 @@ use std::{
|
||||
ops::Range,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::ActiveTheme;
|
||||
use workspace::{Workspace, ui::IconName};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
@@ -325,6 +327,18 @@ impl SlashCommandLine {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_label_for_command(command_name: &str, arguments: &[&str], cx: &App) -> CodeLabel {
|
||||
let mut label = CodeLabel::default();
|
||||
label.push_str(command_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(
|
||||
&arguments.join(" "),
|
||||
cx.theme().syntax().highlight_id("comment").map(HighlightId),
|
||||
);
|
||||
label.filter_range = 0..command_name.len();
|
||||
label
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
@@ -35,7 +35,6 @@ rope.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
smol.workspace = true
|
||||
terminal_view.workspace = true
|
||||
text.workspace = true
|
||||
toml.workspace = true
|
||||
ui.workspace = true
|
||||
|
||||
@@ -12,11 +12,6 @@ mod selection_command;
|
||||
mod streaming_example_command;
|
||||
mod symbols_command;
|
||||
mod tab_command;
|
||||
mod terminal_command;
|
||||
|
||||
use gpui::App;
|
||||
use language::{CodeLabel, HighlightId};
|
||||
use ui::ActiveTheme as _;
|
||||
|
||||
pub use crate::cargo_workspace_command::*;
|
||||
pub use crate::context_server_command::*;
|
||||
@@ -32,16 +27,5 @@ pub use crate::selection_command::*;
|
||||
pub use crate::streaming_example_command::*;
|
||||
pub use crate::symbols_command::*;
|
||||
pub use crate::tab_command::*;
|
||||
pub use crate::terminal_command::*;
|
||||
|
||||
pub fn create_label_for_command(command_name: &str, arguments: &[&str], cx: &App) -> CodeLabel {
|
||||
let mut label = CodeLabel::default();
|
||||
label.push_str(command_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(
|
||||
&arguments.join(" "),
|
||||
cx.theme().syntax().highlight_id("comment").map(HighlightId),
|
||||
);
|
||||
label.filter_range = 0..command_name.len();
|
||||
label
|
||||
}
|
||||
use assistant_slash_command::create_label_for_command;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_slash_command::{
|
||||
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
|
||||
SlashCommandOutputSection, SlashCommandResult,
|
||||
@@ -84,9 +84,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
|
||||
if let Some(server) = self.store.read(cx).get_running_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let Some(protocol) = server.client() else {
|
||||
return Err(anyhow!("Context server not initialized"));
|
||||
};
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
|
||||
let completion_result = protocol
|
||||
.completion(
|
||||
@@ -139,21 +137,16 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
let store = self.store.read(cx);
|
||||
if let Some(server) = store.get_running_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let Some(protocol) = server.client() else {
|
||||
return Err(anyhow!("Context server not initialized"));
|
||||
};
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
|
||||
|
||||
// Check that there are only user roles
|
||||
if result
|
||||
.messages
|
||||
.iter()
|
||||
.any(|msg| !matches!(msg.role, context_server::types::Role::User))
|
||||
{
|
||||
return Err(anyhow!(
|
||||
"Prompt contains non-user roles, which is not supported"
|
||||
));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
result
|
||||
.messages
|
||||
.iter()
|
||||
.all(|msg| matches!(msg.role, context_server::types::Role::User)),
|
||||
"Prompt contains non-user roles, which is not supported"
|
||||
);
|
||||
|
||||
// Extract text from user messages into a single prompt string
|
||||
let mut prompt = result
|
||||
@@ -192,9 +185,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
}
|
||||
|
||||
fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String, String)> {
|
||||
if arguments.is_empty() {
|
||||
return Err(anyhow!("No arguments given"));
|
||||
}
|
||||
anyhow::ensure!(!arguments.is_empty(), "No arguments given");
|
||||
|
||||
match &prompt.arguments {
|
||||
Some(args) if args.len() == 1 => {
|
||||
@@ -202,16 +193,16 @@ fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String,
|
||||
let arg_value = arguments.join(" ");
|
||||
Ok((arg_name, arg_value))
|
||||
}
|
||||
Some(_) => Err(anyhow!("Prompt must have exactly one argument")),
|
||||
None => Err(anyhow!("Prompt has no arguments")),
|
||||
Some(_) => anyhow::bail!("Prompt must have exactly one argument"),
|
||||
None => anyhow::bail!("Prompt has no arguments"),
|
||||
}
|
||||
}
|
||||
|
||||
fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result<HashMap<String, String>> {
|
||||
match &prompt.arguments {
|
||||
Some(args) if args.len() > 1 => Err(anyhow!(
|
||||
"Prompt has more than one argument, which is not supported"
|
||||
)),
|
||||
Some(args) if args.len() > 1 => {
|
||||
anyhow::bail!("Prompt has more than one argument, which is not supported");
|
||||
}
|
||||
Some(args) if args.len() == 1 => {
|
||||
if !arguments.is_empty() {
|
||||
let mut map = HashMap::default();
|
||||
@@ -220,15 +211,15 @@ fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result<HashMap<Str
|
||||
} else if arguments.is_empty() && args[0].required == Some(false) {
|
||||
Ok(HashMap::default())
|
||||
} else {
|
||||
Err(anyhow!("Prompt expects argument but none given"))
|
||||
anyhow::bail!("Prompt expects argument but none given");
|
||||
}
|
||||
}
|
||||
Some(_) | None => {
|
||||
if arguments.is_empty() {
|
||||
Ok(HashMap::default())
|
||||
} else {
|
||||
Err(anyhow!("Prompt expects no arguments but some were given"))
|
||||
}
|
||||
anyhow::ensure!(
|
||||
arguments.is_empty(),
|
||||
"Prompt expects no arguments but some were given"
|
||||
);
|
||||
Ok(HashMap::default())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,10 +118,7 @@ impl SlashCommand for DeltaSlashCommand {
}
}

if !changes_detected {
return Err(anyhow!("no new changes detected"));
}

anyhow::ensure!(changes_detected, "no new changes detected");
Ok(output.to_event_stream())
})
}

@@ -1,4 +1,4 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
@@ -189,7 +189,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
window.spawn(cx, async move |_| {
|
||||
task.await?
|
||||
.map(|output| output.to_event_stream())
|
||||
.ok_or_else(|| anyhow!("No diagnostics found"))
|
||||
.context("No diagnostics found")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
@@ -52,15 +52,16 @@ impl DocsSlashCommand {
|
||||
.is_none()
|
||||
{
|
||||
let index_provider_deps = maybe!({
|
||||
let workspace = workspace.clone().ok_or_else(|| anyhow!("no workspace"))?;
|
||||
let workspace = workspace
|
||||
.as_ref()
|
||||
.context("no workspace")?
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace was dropped"))?;
|
||||
.context("workspace dropped")?;
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let cargo_workspace_root = Self::path_to_cargo_toml(project, cx)
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf()))
|
||||
.ok_or_else(|| anyhow!("no Cargo workspace root found"))?;
|
||||
.context("no Cargo workspace root found")?;
|
||||
|
||||
anyhow::Ok((fs, cargo_workspace_root))
|
||||
});
|
||||
@@ -78,10 +79,11 @@ impl DocsSlashCommand {
|
||||
.is_none()
|
||||
{
|
||||
let http_client = maybe!({
|
||||
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
|
||||
let workspace = workspace
|
||||
.as_ref()
|
||||
.context("no workspace")?
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace was dropped"))?;
|
||||
.context("workspace was dropped")?;
|
||||
let project = workspace.read(cx).project().clone();
|
||||
anyhow::Ok(project.read(cx).client().http_client())
|
||||
});
|
||||
@@ -174,7 +176,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
let args = DocsSlashCommandArgs::parse(arguments);
|
||||
let store = args
|
||||
.provider()
|
||||
.ok_or_else(|| anyhow!("no docs provider specified"))
|
||||
.context("no docs provider specified")
|
||||
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
|
||||
cx.background_spawn(async move {
|
||||
fn build_completions(items: Vec<String>) -> Vec<ArgumentCompletion> {
|
||||
@@ -287,7 +289,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
let task = cx.background_spawn({
|
||||
let store = args
|
||||
.provider()
|
||||
.ok_or_else(|| anyhow!("no docs provider specified"))
|
||||
.context("no docs provider specified")
|
||||
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
|
||||
async move {
|
||||
let (provider, key) = match args.clone() {
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use anyhow::{Context, Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
|
||||
@@ -230,7 +230,10 @@ fn collect_files(
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
|
||||
else {
|
||||
return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
|
||||
return futures::stream::once(async {
|
||||
anyhow::bail!("invalid path");
|
||||
})
|
||||
.boxed();
|
||||
};
|
||||
|
||||
let project_handle = project.downgrade();
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::ActionLog;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{AsyncApp, Entity};
|
||||
use language::{OutlineItem, ParseStatus};
|
||||
use project::Project;
|
||||
@@ -22,7 +22,7 @@ pub async fn file_outline(
|
||||
let project_path = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.find_project_path(&path, cx)
|
||||
.ok_or_else(|| anyhow!("Path {path} not found in project"))
|
||||
.with_context(|| format!("Path {path} not found in project"))
|
||||
})??;
|
||||
|
||||
project
|
||||
@@ -41,9 +41,9 @@ pub async fn file_outline(
|
||||
}
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
let Some(outline) = snapshot.outline(None) else {
|
||||
return Err(anyhow!("No outline information available for this file."));
|
||||
};
|
||||
let outline = snapshot
|
||||
.outline(None)
|
||||
.context("No outline information available for this file at path {path}")?;
|
||||
|
||||
render_outline(
|
||||
outline
|
||||
|
||||
@@ -27,12 +27,10 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
|
||||
const UNSUPPORTED_KEYS: [&str; 4] = ["if", "then", "else", "$ref"];
|
||||
|
||||
for key in UNSUPPORTED_KEYS {
|
||||
if obj.contains_key(key) {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Schema cannot be made compatible because it contains \"{}\" ",
|
||||
key
|
||||
));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!obj.contains_key(key),
|
||||
"Schema cannot be made compatible because it contains \"{key}\""
|
||||
);
|
||||
}
|
||||
|
||||
const KEYS_TO_REMOVE: [&str; 5] = [
|
||||
|
||||
@@ -41,6 +41,7 @@ open.workspace = true
|
||||
paths.workspace = true
|
||||
portable-pty.workspace = true
|
||||
project.workspace = true
|
||||
prompt_store.workspace = true
|
||||
regex.workspace = true
|
||||
rust-embed.workspace = true
|
||||
schemars.workspace = true
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::AnyWindowHandle;
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
@@ -107,17 +107,13 @@ impl Tool for CopyPathTool {
|
||||
});
|
||||
|
||||
cx.background_spawn(async move {
|
||||
match copy_task.await {
|
||||
Ok(_) => Ok(
|
||||
format!("Copied {} to {}", input.source_path, input.destination_path).into(),
|
||||
),
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to copy {} to {}: {}",
|
||||
input.source_path,
|
||||
input.destination_path,
|
||||
err
|
||||
)),
|
||||
}
|
||||
let _ = copy_task.await.with_context(|| {
|
||||
format!(
|
||||
"Copying {} to {}",
|
||||
input.source_path, input.destination_path
|
||||
)
|
||||
})?;
|
||||
Ok(format!("Copied {} to {}", input.source_path, input.destination_path).into())
|
||||
})
|
||||
.into()
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
use crate::schema::json_schema_for;
use anyhow::{Result, anyhow};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::AnyWindowHandle;
use gpui::{App, Entity, Task};
@@ -86,7 +86,7 @@ impl Tool for CreateDirectoryTool {
project.create_entry(project_path.clone(), true, cx)
})?
.await
.map_err(|err| anyhow!("Unable to create directory {destination_path}: {err}"))?;
.with_context(|| format!("Creating directory {destination_path}"))?;

Ok(format!("Created directory {destination_path}").into())
})

@@ -1,5 +1,5 @@
|
||||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use futures::{SinkExt, StreamExt, channel::mpsc};
|
||||
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
|
||||
@@ -122,19 +122,17 @@ impl Tool for DeletePathTool {
|
||||
}
|
||||
}
|
||||
|
||||
let delete = project.update(cx, |project, cx| {
|
||||
project.delete_file(project_path, false, cx)
|
||||
})?;
|
||||
|
||||
match delete {
|
||||
Some(deletion_task) => match deletion_task.await {
|
||||
Ok(()) => Ok(format!("Deleted {path_str}").into()),
|
||||
Err(err) => Err(anyhow!("Failed to delete {path_str}: {err}")),
|
||||
},
|
||||
None => Err(anyhow!(
|
||||
"Couldn't delete {path_str} because that path isn't in this project."
|
||||
)),
|
||||
}
|
||||
let deletion_task = project
|
||||
.update(cx, |project, cx| {
|
||||
project.delete_file(project_path, false, cx)
|
||||
})?
|
||||
.with_context(|| {
|
||||
format!("Couldn't delete {path_str} because that path isn't in this project.")
|
||||
})?;
|
||||
deletion_task
|
||||
.await
|
||||
.with_context(|| format!("Deleting {path_str}"))?;
|
||||
Ok(format!("Deleted {path_str}").into())
|
||||
})
|
||||
.into()
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp, iter, mem, ops::Range, path::PathBuf, sync::Arc, task::Poll};
|
||||
use streaming_diff::{CharOperation, StreamingDiff};
|
||||
use util::debug_panic;
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct CreateFilePromptTemplate {
|
||||
@@ -543,6 +544,11 @@ impl EditAgent {
|
||||
if last_message.content.is_empty() {
|
||||
conversation.messages.pop();
|
||||
}
|
||||
} else {
|
||||
debug_panic!(
|
||||
"Last message must be an Assistant tool calling! Got {:?}",
|
||||
last_message.content
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,9 +3,9 @@ use crate::{
|
||||
ReadFileToolInput,
|
||||
edit_file_tool::{EditFileMode, EditFileToolInput},
|
||||
grep_tool::GrepToolInput,
|
||||
list_directory_tool::ListDirectoryToolInput,
|
||||
};
|
||||
use Role::*;
|
||||
use anyhow::anyhow;
|
||||
use assistant_tool::ToolRegistry;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
@@ -18,6 +18,7 @@ use language_model::{
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, SelectedModel,
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::{ModelContext, ProjectContext, PromptBuilder, WorktreeContext};
|
||||
use rand::prelude::*;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use serde_json::json;
|
||||
@@ -33,21 +34,39 @@ use util::path;
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_extract_handle_command_output() {
|
||||
// Test how well agent generates multiple edit hunks.
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
// claude-3.7-sonnet | 0.98
|
||||
// gemini-2.5-pro | 0.86
|
||||
// gemini-2.5-flash | 0.11
|
||||
// gpt-4.1 | 1.00
|
||||
|
||||
let input_file_path = "root/blame.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/extract_handle_command_output/before.rs");
|
||||
let output_file_content = include_str!("evals/fixtures/extract_handle_command_output/after.rs");
|
||||
let possible_diffs = vec![
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-01.diff"),
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-02.diff"),
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-03.diff"),
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-04.diff"),
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-05.diff"),
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-06.diff"),
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-07.diff"),
|
||||
];
|
||||
let edit_description = "Extract `handle_command_output` method from `run_git_blame`.";
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
0.7, // Taking the lower bar for Gemini
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
Read the `{input_file_path}` file and extract a method in
|
||||
the final stanza of `run_git_blame` to deal with command failures,
|
||||
call it `handle_command_output` and take the std::process::Output as the only parameter.
|
||||
Do not document the method and do not add any comments.
|
||||
|
||||
Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`.
|
||||
"})],
|
||||
@@ -81,11 +100,9 @@ fn eval_extract_handle_command_output() {
|
||||
)],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::assert_eq(output_file_content),
|
||||
},
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::assert_diff_any(possible_diffs),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -99,8 +116,8 @@ fn eval_delete_run_git_blame() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
@@ -137,11 +154,9 @@ fn eval_delete_run_git_blame() {
|
||||
)],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::assert_eq(output_file_content),
|
||||
},
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::assert_eq(output_file_content),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -154,8 +169,8 @@ fn eval_translate_doc_comments() {
|
||||
eval(
|
||||
200,
|
||||
1.,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
@@ -192,11 +207,9 @@ fn eval_translate_doc_comments() {
|
||||
)],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::judge_diff("Doc comments were translated to Italian"),
|
||||
},
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff("Doc comments were translated to Italian"),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -210,8 +223,8 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
@@ -307,14 +320,12 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
)],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::judge_diff(indoc! {"
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff(indoc! {"
|
||||
- The compile_parser_to_wasm method has been changed to use wasi-sdk
|
||||
- ureq is used to download the SDK for current platform and architecture
|
||||
"}),
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -325,10 +336,10 @@ fn eval_disable_cursor_blinking() {
|
||||
let input_file_content = include_str!("evals/fixtures/disable_cursor_blinking/before.rs");
|
||||
let edit_description = "Comment out the call to `BlinkManager::enable`";
|
||||
eval(
|
||||
200,
|
||||
100,
|
||||
0.95,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Let's research how to cursor blinking works.")]),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -382,15 +393,13 @@ fn eval_disable_cursor_blinking() {
|
||||
)],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::judge_diff(indoc! {"
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff(indoc! {"
|
||||
- Calls to BlinkManager in `observe_window_activation` were commented out
|
||||
- The call to `blink_manager.enable` above the call to show_cursor_names was commented out
|
||||
- All the edits have valid indentation
|
||||
"}),
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -403,8 +412,8 @@ fn eval_from_pixels_constructor() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(indoc! {"
|
||||
@@ -576,14 +585,12 @@ fn eval_from_pixels_constructor() {
|
||||
)],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::judge_diff(indoc! {"
|
||||
- The diff contains a new `from_pixels` constructor
|
||||
- The diff contains new tests for the `from_pixels` constructor
|
||||
"}),
|
||||
},
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff(indoc! {"
|
||||
- The diff contains a new `from_pixels` constructor
|
||||
- The diff contains new tests for the `from_pixels` constructor
|
||||
"}),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -591,12 +598,13 @@ fn eval_from_pixels_constructor() {
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_zode() {
|
||||
let input_file_path = "root/zode.py";
|
||||
let input_content = None;
|
||||
let edit_description = "Create the main Zode CLI script";
|
||||
eval(
|
||||
200,
|
||||
1.,
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -654,14 +662,12 @@ fn eval_zode() {
|
||||
],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: None,
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::new(async move |sample, _, _cx| {
|
||||
input_content,
|
||||
EvalAssertion::new(async move |sample, _, _cx| {
|
||||
let invalid_starts = [' ', '`', '\n'];
|
||||
let mut message = String::new();
|
||||
for start in invalid_starts {
|
||||
if sample.text.starts_with(start) {
|
||||
if sample.text_after.starts_with(start) {
|
||||
message.push_str(&format!("The sample starts with a {:?}\n", start));
|
||||
break;
|
||||
}
|
||||
@@ -681,7 +687,7 @@ fn eval_zode() {
|
||||
})
|
||||
}
|
||||
}),
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -694,8 +700,8 @@ fn eval_add_overwrite_test() {
|
||||
eval(
|
||||
200,
|
||||
0.5, // TODO: make this eval better
|
||||
EvalInput {
|
||||
conversation: vec![
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(indoc! {"
|
||||
@@ -899,13 +905,93 @@ fn eval_add_overwrite_test() {
|
||||
],
|
||||
),
|
||||
],
|
||||
input_path: input_file_path.into(),
|
||||
input_content: Some(input_file_content.into()),
|
||||
edit_description: edit_description.into(),
|
||||
assertion: EvalAssertion::judge_diff(
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff(
|
||||
"A new test for overwritten files was created, without changing any previous test",
|
||||
),
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_create_empty_file() {
|
||||
// Check that Edit Agent can create a file without writing its
|
||||
// thoughts into it. This issue is not specific to empty files, but
|
||||
// it's easier to reproduce with them.
|
||||
//
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// --------------------------------------------
|
||||
// Prompt version: 2025-05-21
|
||||
// --------------------------------------------
|
||||
//
|
||||
// claude-3.7-sonnet | 1.00
|
||||
// gemini-2.5-pro-preview-03-25 | 1.00
|
||||
// gemini-2.5-flash-preview-04-17 | 1.00
|
||||
// gpt-4.1 | 1.00
|
||||
//
|
||||
//
|
||||
// TODO: gpt-4.1-mini errored 38 times:
|
||||
// "data did not match any variant of untagged enum ResponseStreamResult"
|
||||
//
|
||||
let input_file_content = None;
|
||||
let expected_output_content = String::new();
|
||||
eval(
|
||||
100,
|
||||
0.99,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Create a second empty todo file ")]),
|
||||
message(
|
||||
Assistant,
|
||||
[
|
||||
text(formatdoc! {"
|
||||
I'll help you create a second empty todo file.
|
||||
First, let me examine the project structure to see if there's already a todo file, which will help me determine the appropriate name and location for the second one.
|
||||
"}),
|
||||
tool_use(
|
||||
"toolu_01GAF8TtsgpjKxCr8fgQLDgR",
|
||||
"list_directory",
|
||||
ListDirectoryToolInput {
|
||||
path: "root".to_string(),
|
||||
},
|
||||
),
|
||||
],
|
||||
),
|
||||
message(
|
||||
User,
|
||||
[tool_result(
|
||||
"toolu_01GAF8TtsgpjKxCr8fgQLDgR",
|
||||
"list_directory",
|
||||
"root/TODO\nroot/TODO2\nroot/new.txt\n",
|
||||
)],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
[
|
||||
text(formatdoc! {"
|
||||
I can see there's already a `TODO` file in the `root` directory. Let me create a second empty todo file called `TODO3` in the same directory:
|
||||
"}),
|
||||
tool_use(
|
||||
"toolu_01Tb3iQ9griqSYMmVuykQPWU",
|
||||
"edit_file",
|
||||
EditFileToolInput {
|
||||
display_description: "Create empty TODO3 file".to_string(),
|
||||
mode: EditFileMode::Create,
|
||||
path: "root/TODO3".into(),
|
||||
},
|
||||
),
|
||||
],
|
||||
),
|
||||
],
|
||||
input_file_content,
|
||||
// Bad behavior is to write something like
|
||||
// "I'll create an empty TODO3 file as requested."
|
||||
EvalAssertion::assert_eq(expected_output_content),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -964,15 +1050,50 @@ fn tool_result(
|
||||
#[derive(Clone)]
|
||||
struct EvalInput {
|
||||
conversation: Vec<LanguageModelRequestMessage>,
|
||||
input_path: PathBuf,
|
||||
edit_file_input: EditFileToolInput,
|
||||
input_content: Option<String>,
|
||||
edit_description: String,
|
||||
assertion: EvalAssertion,
|
||||
}
|
||||
|
||||
impl EvalInput {
|
||||
fn from_conversation(
|
||||
conversation: Vec<LanguageModelRequestMessage>,
|
||||
input_content: Option<String>,
|
||||
assertion: EvalAssertion,
|
||||
) -> Self {
|
||||
let msg = conversation.last().expect("Conversation must not be empty");
|
||||
if msg.role != Role::Assistant {
|
||||
panic!("Conversation must end with an assistant message");
|
||||
}
|
||||
let tool_use = msg
|
||||
.content
|
||||
.iter()
|
||||
.flat_map(|content| match content {
|
||||
MessageContent::ToolUse(tool_use) if tool_use.name == "edit_file".into() => {
|
||||
Some(tool_use)
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.next()
|
||||
.expect("Conversation must end with an edit_file tool use")
|
||||
.clone();
|
||||
|
||||
let edit_file_input: EditFileToolInput =
|
||||
serde_json::from_value(tool_use.input.clone()).unwrap();
|
||||
|
||||
EvalInput {
|
||||
conversation,
|
||||
edit_file_input,
|
||||
input_content,
|
||||
assertion,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct EvalSample {
|
||||
text: String,
|
||||
text_before: String,
|
||||
text_after: String,
|
||||
edit_output: EditAgentOutput,
|
||||
diff: String,
|
||||
}
|
||||
@@ -1029,7 +1150,7 @@ impl EvalAssertion {
|
||||
let expected = expected.into();
|
||||
Self::new(async move |sample, _judge, _cx| {
|
||||
Ok(EvalAssertionOutcome {
|
||||
score: if strip_empty_lines(&sample.text) == strip_empty_lines(&expected) {
|
||||
score: if strip_empty_lines(&sample.text_after) == strip_empty_lines(&expected) {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
@@ -1039,6 +1160,22 @@ impl EvalAssertion {
|
||||
})
|
||||
}
|
||||
|
||||
fn assert_diff_any(expected_diffs: Vec<impl Into<String>>) -> Self {
|
||||
let expected_diffs: Vec<String> = expected_diffs.into_iter().map(Into::into).collect();
|
||||
Self::new(async move |sample, _judge, _cx| {
|
||||
let matches = expected_diffs.iter().any(|possible_diff| {
|
||||
let expected =
|
||||
language::apply_diff_patch(&sample.text_before, possible_diff).unwrap();
|
||||
strip_empty_lines(&expected) == strip_empty_lines(&sample.text_after)
|
||||
});
|
||||
|
||||
Ok(EvalAssertionOutcome {
|
||||
score: if matches { 100 } else { 0 },
|
||||
message: None,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn judge_diff(assertions: &'static str) -> Self {
|
||||
Self::new(async move |sample, judge, cx| {
|
||||
let prompt = DiffJudgeTemplate {
|
||||
@@ -1077,10 +1214,7 @@ impl EvalAssertion {
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"No score found in response. Raw output: {}",
|
||||
output
|
||||
))
|
||||
anyhow::bail!("No score found in response. Raw output: {output}");
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1126,7 +1260,7 @@ fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
|
||||
if output.assertion.score < 80 {
|
||||
failed_count += 1;
|
||||
failed_evals
|
||||
.entry(output.sample.text.clone())
|
||||
.entry(output.sample.text_after.clone())
|
||||
.or_insert(Vec::new())
|
||||
.push(output);
|
||||
}
|
||||
@@ -1308,7 +1442,7 @@ impl EditAgentTest {
|
||||
let path = self
|
||||
.project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path(eval.input_path, cx)
|
||||
project.find_project_path(eval.edit_file_input.path, cx)
|
||||
})
|
||||
.unwrap();
|
||||
let buffer = self
|
||||
@@ -1316,31 +1450,69 @@ impl EditAgentTest {
|
||||
.update(cx, |project, cx| project.open_buffer(path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let conversation = LanguageModelRequest {
|
||||
messages: eval.conversation,
|
||||
tools: cx.update(|cx| {
|
||||
ToolRegistry::default_global(cx)
|
||||
.tools()
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
let input_schema = tool
|
||||
.input_schema(self.agent.model.tool_input_format())
|
||||
.ok()?;
|
||||
Some(LanguageModelRequestTool {
|
||||
name: tool.name(),
|
||||
description: tool.description(),
|
||||
input_schema,
|
||||
})
|
||||
let tools = cx.update(|cx| {
|
||||
ToolRegistry::default_global(cx)
|
||||
.tools()
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
let input_schema = tool
|
||||
.input_schema(self.agent.model.tool_input_format())
|
||||
.ok()?;
|
||||
Some(LanguageModelRequestTool {
|
||||
name: tool.name(),
|
||||
description: tool.description(),
|
||||
input_schema,
|
||||
})
|
||||
.collect()
|
||||
}),
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
let tool_names = tools
|
||||
.iter()
|
||||
.map(|tool| tool.name.clone())
|
||||
.collect::<Vec<_>>();
|
||||
let worktrees = vec![WorktreeContext {
|
||||
root_name: "root".to_string(),
|
||||
rules_file: None,
|
||||
}];
|
||||
let prompt_builder = PromptBuilder::new(None)?;
|
||||
let project_context = ProjectContext::new(worktrees, Vec::default());
|
||||
let system_prompt = prompt_builder.generate_assistant_system_prompt(
|
||||
&project_context,
|
||||
&ModelContext {
|
||||
available_tools: tool_names,
|
||||
},
|
||||
)?;
|
||||
|
||||
let has_system_prompt = eval
|
||||
.conversation
|
||||
.first()
|
||||
.map_or(false, |msg| msg.role == Role::System);
|
||||
let messages = if has_system_prompt {
|
||||
eval.conversation
|
||||
} else {
|
||||
[LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: vec![MessageContent::Text(system_prompt)],
|
||||
cache: true,
|
||||
}]
|
||||
.into_iter()
|
||||
.chain(eval.conversation)
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let conversation = LanguageModelRequest {
|
||||
messages,
|
||||
tools,
|
||||
..Default::default()
|
||||
};
|
||||
let edit_output = if let Some(input_content) = eval.input_content.as_deref() {
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text(input_content, cx));
|
||||
|
||||
let edit_output = if matches!(eval.edit_file_input.mode, EditFileMode::Edit) {
|
||||
if let Some(input_content) = eval.input_content.as_deref() {
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text(input_content, cx));
|
||||
}
|
||||
let (edit_output, _) = self.agent.edit(
|
||||
buffer.clone(),
|
||||
eval.edit_description,
|
||||
eval.edit_file_input.display_description,
|
||||
&conversation,
|
||||
&mut cx.to_async(),
|
||||
);
|
||||
@@ -1348,7 +1520,7 @@ impl EditAgentTest {
|
||||
} else {
|
||||
let (edit_output, _) = self.agent.overwrite(
|
||||
buffer.clone(),
|
||||
eval.edit_description,
|
||||
eval.edit_file_input.display_description,
|
||||
&conversation,
|
||||
&mut cx.to_async(),
|
||||
);
|
||||
@@ -1362,7 +1534,8 @@ impl EditAgentTest {
|
||||
eval.input_content.as_deref().unwrap_or_default(),
|
||||
&buffer_text,
|
||||
),
|
||||
text: buffer_text,
|
||||
text_before: eval.input_content.unwrap_or_default(),
|
||||
text_after: buffer_text,
|
||||
};
|
||||
let assertion = eval
|
||||
.assertion
|
||||
|
||||
@@ -98,21 +98,21 @@ impl BlameEntry {
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
.ok_or_else(|| anyhow!("failed to parse sha"))?;
.with_context(|| format!("parsing sha from {line}"))?;

let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
.with_context(|| format!("parsing original line number from {line}"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
.with_context(|| format!("parsing final line number from {line}"))?;

let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
.with_context(|| format!("parsing line count from {line}"))?;

let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;

@@ -80,7 +80,7 @@ async fn run_git_blame(
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
.context("starting git blame process")?;
|
||||
|
||||
let stdin = child
|
||||
.stdin
|
||||
@@ -92,10 +92,7 @@ async fn run_git_blame(
|
||||
}
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
@@ -103,7 +100,7 @@ async fn run_git_blame(
|
||||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
return Ok(String::new());
|
||||
}
|
||||
return Err(anyhow!("git blame process failed: {}", stderr));
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
@@ -144,21 +141,21 @@ impl BlameEntry {
|
||||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
.with_context(|| format!("parsing sha from {line}"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
.with_context(|| format!("parsing original line number from {line}"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing final line number from {line}"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing line count from {line}"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
|
||||
@@ -5272,7 +5272,7 @@ impl Editor {
|
||||
task.await?;
|
||||
}
|
||||
|
||||
Ok::<_, anyhow::Error>(())
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
@@ -10369,8 +10369,8 @@ impl Editor {
|
||||
.map(|line| {
|
||||
line.strip_prefix(&line_prefix)
|
||||
.or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
|
||||
.ok_or_else(|| {
|
||||
anyhow!("line did not start with prefix {line_prefix:?}: {line:?}")
|
||||
.with_context(|| {
|
||||
format!("line did not start with prefix {line_prefix:?}: {line:?}")
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
@@ -16944,7 +16944,7 @@ impl Editor {
|
||||
Err(err) => {
|
||||
let message = format!("Failed to copy permalink: {err}");
|
||||
|
||||
Err::<(), anyhow::Error>(err).log_err();
|
||||
anyhow::Result::<()>::Err(err).log_err();
|
||||
|
||||
if let Some(workspace) = workspace {
|
||||
workspace
|
||||
@@ -16999,7 +16999,7 @@ impl Editor {
|
||||
Err(err) => {
|
||||
let message = format!("Failed to open permalink: {err}");
|
||||
|
||||
Err::<(), anyhow::Error>(err).log_err();
|
||||
anyhow::Result::<()>::Err(err).log_err();
|
||||
|
||||
if let Some(workspace) = workspace {
|
||||
workspace
|
||||
|
||||
@@ -1,378 +0,0 @@
|
||||
use crate::commit::get_messages;
|
||||
use crate::{GitRemote, Oid};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::AsyncWriteExt;
|
||||
use gpui::SharedString;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::Stdio;
|
||||
use std::{ops::Range, path::Path};
|
||||
use text::Rope;
|
||||
use time::OffsetDateTime;
|
||||
use time::UtcOffset;
|
||||
use time::macros::format_description;
|
||||
|
||||
pub use git2 as libgit;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Blame {
|
||||
pub entries: Vec<BlameEntry>,
|
||||
pub messages: HashMap<Oid, String>,
|
||||
pub remote_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct ParsedCommitMessage {
|
||||
pub message: SharedString,
|
||||
pub permalink: Option<url::Url>,
|
||||
pub pull_request: Option<crate::hosting_provider::PullRequest>,
|
||||
pub remote: Option<GitRemote>,
|
||||
}
|
||||
|
||||
impl Blame {
|
||||
pub async fn for_path(
|
||||
git_binary: &Path,
|
||||
working_directory: &Path,
|
||||
path: &Path,
|
||||
content: &Rope,
|
||||
remote_url: Option<String>,
|
||||
) -> Result<Self> {
|
||||
let output = run_git_blame(git_binary, working_directory, path, content).await?;
|
||||
let mut entries = parse_git_blame(&output)?;
|
||||
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
|
||||
|
||||
let mut unique_shas = HashSet::default();
|
||||
|
||||
for entry in entries.iter_mut() {
|
||||
unique_shas.insert(entry.sha);
|
||||
}
|
||||
|
||||
let shas = unique_shas.into_iter().collect::<Vec<_>>();
|
||||
let messages = get_messages(working_directory, &shas)
|
||||
.await
|
||||
.context("failed to get commit messages")?;
|
||||
|
||||
Ok(Self {
|
||||
entries,
|
||||
messages,
|
||||
remote_url,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD";
|
||||
const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
|
||||
|
||||
async fn run_git_blame(
|
||||
git_binary: &Path,
|
||||
working_directory: &Path,
|
||||
path: &Path,
|
||||
contents: &Rope,
|
||||
) -> Result<String> {
|
||||
let mut child = util::command::new_smol_command(git_binary)
|
||||
.current_dir(working_directory)
|
||||
.arg("blame")
|
||||
.arg("--incremental")
|
||||
.arg("--contents")
|
||||
.arg("-")
|
||||
.arg(path.as_os_str())
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
|
||||
let stdin = child
|
||||
.stdin
|
||||
.as_mut()
|
||||
.context("failed to get pipe to stdin of git blame command")?;
|
||||
|
||||
for chunk in contents.chunks() {
|
||||
stdin.write_all(chunk.as_bytes()).await?;
|
||||
}
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
|
||||
handle_command_output(output)
|
||||
}
|
||||
|
||||
fn handle_command_output(output: std::process::Output) -> Result<String> {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let trimmed = stderr.trim();
|
||||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
return Ok(String::new());
|
||||
}
|
||||
return Err(anyhow!("git blame process failed: {}", stderr));
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
pub struct BlameEntry {
|
||||
pub sha: Oid,
|
||||
|
||||
pub range: Range<u32>,
|
||||
|
||||
pub original_line_number: u32,
|
||||
|
||||
pub author: Option<String>,
|
||||
pub author_mail: Option<String>,
|
||||
pub author_time: Option<i64>,
|
||||
pub author_tz: Option<String>,
|
||||
|
||||
pub committer_name: Option<String>,
|
||||
pub committer_email: Option<String>,
|
||||
pub committer_time: Option<i64>,
|
||||
pub committer_tz: Option<String>,
|
||||
|
||||
pub summary: Option<String>,
|
||||
|
||||
pub previous: Option<String>,
|
||||
pub filename: String,
|
||||
}
|
||||
|
||||
impl BlameEntry {
|
||||
// Returns a BlameEntry by parsing the first line of a `git blame --incremental`
|
||||
// entry. The line MUST have this format:
|
||||
//
|
||||
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
|
||||
fn new_from_blame_line(line: &str) -> Result<BlameEntry> {
|
||||
let mut parts = line.split_whitespace();
|
||||
|
||||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
let range = start_line..end_line;
|
||||
|
||||
Ok(Self {
|
||||
sha,
|
||||
range,
|
||||
original_line_number,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn author_offset_date_time(&self) -> Result<time::OffsetDateTime> {
|
||||
if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) {
|
||||
let format = format_description!("[offset_hour][offset_minute]");
|
||||
let offset = UtcOffset::parse(author_tz, &format)?;
|
||||
let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?;
|
||||
|
||||
Ok(date_time_utc.to_offset(offset))
|
||||
} else {
|
||||
// Directly return current time in UTC if there's no committer time or timezone
|
||||
Ok(time::OffsetDateTime::now_utc())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parse_git_blame parses the output of `git blame --incremental`, which returns
|
||||
// all the blame-entries for a given path incrementally, as it finds them.
|
||||
//
|
||||
// Each entry *always* starts with:
|
||||
//
|
||||
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
|
||||
//
|
||||
// Each entry *always* ends with:
|
||||
//
|
||||
// filename <whitespace-quoted-filename-goes-here>
|
||||
//
|
||||
// Line numbers are 1-indexed.
|
||||
//
|
||||
// A `git blame --incremental` entry looks like this:
|
||||
//
|
||||
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1
|
||||
// author Joe Schmoe
|
||||
// author-mail <joe.schmoe@example.com>
|
||||
// author-time 1709741400
|
||||
// author-tz +0100
|
||||
// committer Joe Schmoe
|
||||
// committer-mail <joe.schmoe@example.com>
|
||||
// committer-time 1709741400
|
||||
// committer-tz +0100
|
||||
// summary Joe's cool commit
|
||||
// previous 486c2409237a2c627230589e567024a96751d475 index.js
|
||||
// filename index.js
|
||||
//
|
||||
// If the entry has the same SHA as an entry that was already printed then no
|
||||
// signature information is printed:
|
||||
//
|
||||
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1
|
||||
// previous 486c2409237a2c627230589e567024a96751d475 index.js
|
||||
// filename index.js
|
||||
//
|
||||
// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html
|
||||
fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
|
||||
let mut entries: Vec<BlameEntry> = Vec::new();
|
||||
let mut index: HashMap<Oid, usize> = HashMap::default();
|
||||
|
||||
let mut current_entry: Option<BlameEntry> = None;
|
||||
|
||||
for line in output.lines() {
|
||||
let mut done = false;
|
||||
|
||||
match &mut current_entry {
|
||||
None => {
|
||||
let mut new_entry = BlameEntry::new_from_blame_line(line)?;
|
||||
|
||||
if let Some(existing_entry) = index
|
||||
.get(&new_entry.sha)
|
||||
.and_then(|slot| entries.get(*slot))
|
||||
{
|
||||
new_entry.author.clone_from(&existing_entry.author);
|
||||
new_entry
|
||||
.author_mail
|
||||
.clone_from(&existing_entry.author_mail);
|
||||
new_entry.author_time = existing_entry.author_time;
|
||||
new_entry.author_tz.clone_from(&existing_entry.author_tz);
|
||||
new_entry
|
||||
.committer_name
|
||||
.clone_from(&existing_entry.committer_name);
|
||||
new_entry
|
||||
.committer_email
|
||||
.clone_from(&existing_entry.committer_email);
|
||||
new_entry.committer_time = existing_entry.committer_time;
|
||||
new_entry
|
||||
.committer_tz
|
||||
.clone_from(&existing_entry.committer_tz);
|
||||
new_entry.summary.clone_from(&existing_entry.summary);
|
||||
}
|
||||
|
||||
current_entry.replace(new_entry);
|
||||
}
|
||||
Some(entry) => {
|
||||
let Some((key, value)) = line.split_once(' ') else {
|
||||
continue;
|
||||
};
|
||||
let is_committed = !entry.sha.is_zero();
|
||||
match key {
|
||||
"filename" => {
|
||||
entry.filename = value.into();
|
||||
done = true;
|
||||
}
|
||||
"previous" => entry.previous = Some(value.into()),
|
||||
|
||||
"summary" if is_committed => entry.summary = Some(value.into()),
|
||||
"author" if is_committed => entry.author = Some(value.into()),
|
||||
"author-mail" if is_committed => entry.author_mail = Some(value.into()),
|
||||
"author-time" if is_committed => {
|
||||
entry.author_time = Some(value.parse::<i64>()?)
|
||||
}
|
||||
"author-tz" if is_committed => entry.author_tz = Some(value.into()),
|
||||
|
||||
"committer" if is_committed => entry.committer_name = Some(value.into()),
|
||||
"committer-mail" if is_committed => entry.committer_email = Some(value.into()),
|
||||
"committer-time" if is_committed => {
|
||||
entry.committer_time = Some(value.parse::<i64>()?)
|
||||
}
|
||||
"committer-tz" if is_committed => entry.committer_tz = Some(value.into()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if done {
|
||||
if let Some(entry) = current_entry.take() {
|
||||
index.insert(entry.sha, entries.len());
|
||||
|
||||
// We only want annotations that have a commit.
|
||||
if !entry.sha.is_zero() {
|
||||
entries.push(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::path::PathBuf;
|
||||
|
||||
use super::BlameEntry;
|
||||
use super::parse_git_blame;
|
||||
|
||||
fn read_test_data(filename: &str) -> String {
|
||||
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
|
||||
path.push("test_data");
|
||||
path.push(filename);
|
||||
|
||||
std::fs::read_to_string(&path)
|
||||
.unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path))
|
||||
}
|
||||
|
||||
fn assert_eq_golden(entries: &Vec<BlameEntry>, golden_filename: &str) {
|
||||
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
|
||||
path.push("test_data");
|
||||
path.push("golden");
|
||||
path.push(format!("{}.json", golden_filename));
|
||||
|
||||
let mut have_json =
|
||||
serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON");
|
||||
// We always want to save with a trailing newline.
|
||||
have_json.push('\n');
|
||||
|
||||
let update = std::env::var("UPDATE_GOLDEN")
|
||||
.map(|val| val.eq_ignore_ascii_case("true"))
|
||||
.unwrap_or(false);
|
||||
|
||||
if update {
|
||||
std::fs::create_dir_all(path.parent().unwrap())
|
||||
.expect("could not create golden test data directory");
|
||||
std::fs::write(&path, have_json).expect("could not write out golden data");
|
||||
} else {
|
||||
let want_json =
|
||||
std::fs::read_to_string(&path).unwrap_or_else(|_| {
|
||||
panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path);
|
||||
}).replace("\r\n", "\n");
|
||||
|
||||
pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_git_blame_not_committed() {
|
||||
let output = read_test_data("blame_incremental_not_committed");
|
||||
let entries = parse_git_blame(&output).unwrap();
|
||||
assert_eq_golden(&entries, "blame_incremental_not_committed");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_git_blame_simple() {
|
||||
let output = read_test_data("blame_incremental_simple");
|
||||
let entries = parse_git_blame(&output).unwrap();
|
||||
assert_eq_golden(&entries, "blame_incremental_simple");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_git_blame_complex() {
|
||||
let output = read_test_data("blame_incremental_complex");
|
||||
let entries = parse_git_blame(&output).unwrap();
|
||||
assert_eq_golden(&entries, "blame_incremental_complex");
|
||||
}
|
||||
}
|
||||
@@ -80,7 +80,7 @@ async fn run_git_blame(
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
.context("starting git blame process")?;
|
||||
|
||||
let stdin = child
|
||||
.stdin
|
||||
@@ -92,10 +92,7 @@ async fn run_git_blame(
|
||||
}
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
@@ -103,7 +100,7 @@ async fn run_git_blame(
|
||||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
return Ok(String::new());
|
||||
}
|
||||
return Err(anyhow!("git blame process failed: {}", stderr));
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
@@ -144,21 +141,21 @@ impl BlameEntry {
|
||||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
.with_context(|| format!("parsing sha from {line}"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
.with_context(|| format!("parsing original line number from {line}"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing final line number from {line}"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing line count from {line}"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
@@ -94,6 +94,10 @@
|
||||
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
+ handle_command_output(output)
|
||||
+}
|
||||
+
|
||||
+fn handle_command_output(output: std::process::Output) -> Result<String> {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let trimmed = stderr.trim();
|
||||
@@ -0,0 +1,26 @@
|
||||
@@ -95,15 +95,19 @@
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
- let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
- let trimmed = stderr.trim();
|
||||
- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
- return Ok(String::new());
|
||||
- }
|
||||
- anyhow::bail!("git blame process failed: {stderr}");
|
||||
+ return handle_command_output(output);
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
+}
|
||||
+
|
||||
+fn handle_command_output(output: std::process::Output) -> Result<String> {
|
||||
+ let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
+ let trimmed = stderr.trim();
|
||||
+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
+ return Ok(String::new());
|
||||
+ }
|
||||
+ anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
@@ -0,0 +1,11 @@
|
||||
@@ -93,7 +93,10 @@
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
+ handle_command_output(output)
|
||||
+}
|
||||
|
||||
+fn handle_command_output(output: std::process::Output) -> Result<String> {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let trimmed = stderr.trim();
|
||||
@@ -0,0 +1,24 @@
|
||||
@@ -93,17 +93,20 @@
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
+ handle_command_output(&output)?;
|
||||
+ Ok(String::from_utf8(output.stdout)?)
|
||||
+}
|
||||
|
||||
+fn handle_command_output(output: &std::process::Output) -> Result<()> {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let trimmed = stderr.trim();
|
||||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
- return Ok(String::new());
|
||||
+ return Ok(());
|
||||
}
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
-
|
||||
- Ok(String::from_utf8(output.stdout)?)
|
||||
+ Ok(())
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
@@ -0,0 +1,26 @@
|
||||
@@ -95,15 +95,19 @@
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
- let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
- let trimmed = stderr.trim();
|
||||
- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
- return Ok(String::new());
|
||||
- }
|
||||
- anyhow::bail!("git blame process failed: {stderr}");
|
||||
+ return handle_command_output(&output);
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
+}
|
||||
+
|
||||
+fn handle_command_output(output: &std::process::Output) -> Result<String> {
|
||||
+ let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
+ let trimmed = stderr.trim();
|
||||
+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
+ return Ok(String::new());
|
||||
+ }
|
||||
+ anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
@@ -0,0 +1,23 @@
|
||||
@@ -93,7 +93,12 @@
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
+ handle_command_output(&output)?;
|
||||
|
||||
+ Ok(String::from_utf8(output.stdout)?)
|
||||
+}
|
||||
+
|
||||
+fn handle_command_output(output: &std::process::Output) -> Result<String> {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let trimmed = stderr.trim();
|
||||
@@ -102,8 +107,7 @@
|
||||
}
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
-
|
||||
- Ok(String::from_utf8(output.stdout)?)
|
||||
+ Ok(String::from_utf8_lossy(&output.stdout).into_owned())
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
@@ -0,0 +1,26 @@
|
||||
@@ -95,15 +95,19 @@
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
- let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
- let trimmed = stderr.trim();
|
||||
- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
- return Ok(String::new());
|
||||
- }
|
||||
- anyhow::bail!("git blame process failed: {stderr}");
|
||||
+ return handle_command_output(output);
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
+}
|
||||
+
|
||||
+fn handle_command_output(output: std::process::Output) -> Result<String> {
|
||||
+ let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
+ let trimmed = stderr.trim();
|
||||
+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
+ return Ok(String::new());
|
||||
+ }
|
||||
+ anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
@@ -0,0 +1,26 @@
|
||||
@@ -95,15 +95,19 @@
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
- let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
- let trimmed = stderr.trim();
|
||||
- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
- return Ok(String::new());
|
||||
- }
|
||||
- anyhow::bail!("git blame process failed: {stderr}");
|
||||
+ return handle_command_output(output);
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
+}
|
||||
+
|
||||
+fn handle_command_output(output: std::process::Output) -> Result<String> {
|
||||
+ let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
+ let trimmed = stderr.trim();
|
||||
+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
+ return Ok(String::new());
|
||||
+ }
|
||||
+ anyhow::bail!("git blame process failed: {stderr}")
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
|
||||
@@ -20,7 +20,7 @@ use std::{
|
||||
|
||||
#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))]
|
||||
use anyhow::Error;
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use etcetera::BaseStrategy as _;
|
||||
use fs4::fs_std::FileExt;
|
||||
use indoc::indoc;
|
||||
@@ -875,16 +875,13 @@ impl Loader {
|
||||
|
||||
FileExt::unlock(lock_file)?;
|
||||
fs::remove_file(lock_path)?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
))
|
||||
}
|
||||
anyhow::ensure!(
|
||||
output.status.success(),
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
@@ -941,17 +938,13 @@ impl Loader {
|
||||
.map(|f| format!(" `{f}`"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
anyhow::bail!(format!(indoc! {"
|
||||
Missing required functions in the external scanner, parsing won't work without these!
|
||||
|
||||
return Err(anyhow!(format!(
|
||||
indoc! {"
|
||||
Missing required functions in the external scanner, parsing won't work without these!
|
||||
{missing}
|
||||
|
||||
{}
|
||||
|
||||
You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners
|
||||
"},
|
||||
missing,
|
||||
)));
|
||||
You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners
|
||||
"}));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1008,9 +1001,9 @@ impl Loader {
|
||||
{
|
||||
EmccSource::Podman
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
anyhow::bail!(
|
||||
"You must have either emcc, docker, or podman on your PATH to run this command"
|
||||
));
|
||||
);
|
||||
};
|
||||
|
||||
let mut command = match source {
|
||||
@@ -1103,12 +1096,11 @@ impl Loader {
|
||||
.spawn()
|
||||
.with_context(|| "Failed to run emcc command")?
|
||||
.wait()?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("emcc command failed"));
|
||||
}
|
||||
|
||||
fs::rename(src_path.join(output_name), output_path)
|
||||
.context("failed to rename wasm output file")?;
|
||||
anyhow::ensure!(status.success(), "emcc command failed");
|
||||
let source_path = src_path.join(output_name);
|
||||
fs::rename(&source_path, &output_path).with_context(|| {
|
||||
format!("failed to rename wasm output file from {source_path:?} to {output_path:?}")
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1185,11 +1177,8 @@ impl Loader {
|
||||
.map(|path| {
|
||||
let path = parser_path.join(path);
|
||||
// prevent p being above/outside of parser_path
|
||||
if path.starts_with(parser_path) {
|
||||
Ok(path)
|
||||
} else {
|
||||
Err(anyhow!("External file path {path:?} is outside of parser directory {parser_path:?}"))
|
||||
}
|
||||
anyhow::ensure!(path.starts_with(parser_path), "External file path {path:?} is outside of parser directory {parser_path:?}");
|
||||
Ok(path)
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()
|
||||
}).transpose()?,
|
||||
@@ -1324,11 +1313,8 @@ impl Loader {
|
||||
let name = GRAMMAR_NAME_REGEX
|
||||
.captures(&first_three_lines)
|
||||
.and_then(|c| c.get(1))
|
||||
.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Failed to parse the language name from grammar.json at {}",
|
||||
grammar_path.display()
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("Failed to parse the language name from grammar.json at {grammar_path:?}")
|
||||
})?;
|
||||
|
||||
Ok(name.as_str().to_string())
|
||||
@@ -1347,7 +1333,7 @@ impl Loader {
|
||||
{
|
||||
Ok(config.0)
|
||||
} else {
|
||||
Err(anyhow!("Unknown scope '{scope}'"))
|
||||
anyhow::bail!("Unknown scope '{scope}'")
|
||||
}
|
||||
} else if let Some((lang, _)) = self
|
||||
.language_configuration_for_file_name(path)
|
||||
@@ -1371,7 +1357,7 @@ impl Loader {
|
||||
} else if let Some(lang) = self.language_configuration_for_first_line_regex(path)? {
|
||||
Ok(lang.0)
|
||||
} else {
|
||||
Err(anyhow!("No language found"))
|
||||
anyhow::bail!("No language found");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::{
|
||||
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
|
||||
schema::json_schema_for,
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{
|
||||
ActionLog, AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput,
|
||||
ToolUseStatus,
|
||||
@@ -22,7 +22,7 @@ use language::{
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::Project;
|
||||
use project::{Project, ProjectPath};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
@@ -38,7 +38,7 @@ use workspace::Workspace;
|
||||
|
||||
pub struct EditFileTool;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct EditFileToolInput {
|
||||
/// A one-line, user-friendly markdown description of the edit. This will be
|
||||
/// shown in the UI and also passed to another model to perform the edit.
|
||||
@@ -86,7 +86,7 @@ pub struct EditFileToolInput {
|
||||
pub mode: EditFileMode,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum EditFileMode {
|
||||
Edit,
|
||||
@@ -171,12 +171,9 @@ impl Tool for EditFileTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"Path {} not found in project",
|
||||
input.path.display()
|
||||
)))
|
||||
.into();
|
||||
let project_path = match resolve_path(&input, project.clone(), cx) {
|
||||
Ok(path) => path,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let card = window.and_then(|window| {
|
||||
@@ -199,20 +196,6 @@ impl Tool for EditFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let exists = buffer.read_with(cx, |buffer, _| {
|
||||
buffer
|
||||
.file()
|
||||
.as_ref()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
})?;
|
||||
let create_or_overwrite = match input.mode {
|
||||
EditFileMode::Create | EditFileMode::Overwrite => true,
|
||||
_ => false,
|
||||
};
|
||||
if !create_or_overwrite && !exists {
|
||||
return Err(anyhow!("{} not found", input.path.display()));
|
||||
}
|
||||
|
||||
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let old_text = cx
|
||||
.background_spawn({
|
||||
@@ -221,15 +204,15 @@ impl Tool for EditFileTool {
|
||||
})
|
||||
.await;
|
||||
|
||||
let (output, mut events) = if create_or_overwrite {
|
||||
edit_agent.overwrite(
|
||||
let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) {
|
||||
edit_agent.edit(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
edit_agent.edit(
|
||||
edit_agent.overwrite(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
@@ -296,15 +279,15 @@ impl Tool for EditFileTool {
|
||||
|
||||
let input_path = input.path.display();
|
||||
if diff.is_empty() {
|
||||
if hallucinated_old_text {
|
||||
Err(anyhow!(formatdoc! {"
|
||||
Some edits were produced but none of them could be applied.
|
||||
Read the relevant sections of {input_path} again so that
|
||||
I can perform the requested edits.
|
||||
"}))
|
||||
} else {
|
||||
Ok("No edits were made.".to_string().into())
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!hallucinated_old_text,
|
||||
formatdoc! {"
|
||||
Some edits were produced but none of them could be applied.
|
||||
Read the relevant sections of {input_path} again so that
|
||||
I can perform the requested edits.
|
||||
"}
|
||||
);
|
||||
Ok("No edits were made.".to_string().into())
|
||||
} else {
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text(format!(
|
||||
@@ -349,6 +332,71 @@ impl Tool for EditFileTool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate that the file path is valid, meaning:
|
||||
///
|
||||
/// - For `edit` and `overwrite`, the path must point to an existing file.
|
||||
/// - For `create`, the file must not already exist, but it's parent dir must exist.
|
||||
fn resolve_path(
|
||||
input: &EditFileToolInput,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Result<ProjectPath> {
|
||||
let project = project.read(cx);
|
||||
|
||||
match input.mode {
|
||||
EditFileMode::Edit | EditFileMode::Overwrite => {
|
||||
let path = project
|
||||
.find_project_path(&input.path, cx)
|
||||
.context("Can't edit file: path not found")?;
|
||||
|
||||
let entry = project
|
||||
.entry_for_path(&path, cx)
|
||||
.context("Can't edit file: path not found")?;
|
||||
|
||||
anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory");
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
EditFileMode::Create => {
|
||||
if let Some(path) = project.find_project_path(&input.path, cx) {
|
||||
anyhow::ensure!(
|
||||
project.entry_for_path(&path, cx).is_none(),
|
||||
"Can't create file: file already exists"
|
||||
);
|
||||
}
|
||||
|
||||
let parent_path = input
|
||||
.path
|
||||
.parent()
|
||||
.context("Can't create file: incorrect path")?;
|
||||
|
||||
let parent_project_path = project.find_project_path(&parent_path, cx);
|
||||
|
||||
let parent_entry = parent_project_path
|
||||
.as_ref()
|
||||
.and_then(|path| project.entry_for_path(&path, cx))
|
||||
.context("Can't create file: parent directory doesn't exist")?;
|
||||
|
||||
anyhow::ensure!(
|
||||
parent_entry.is_dir(),
|
||||
"Can't create file: parent is not a directory"
|
||||
);
|
||||
|
||||
let file_name = input
|
||||
.path
|
||||
.file_name()
|
||||
.context("Can't create file: invalid filename")?;
|
||||
|
||||
let new_file_path = parent_project_path.map(|parent| ProjectPath {
|
||||
path: Arc::from(parent.path.join(file_name)),
|
||||
..parent
|
||||
});
|
||||
|
||||
new_file_path.context("Can't create file")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EditFileToolCard {
|
||||
path: PathBuf,
|
||||
editor: Entity<Editor>,
|
||||
@@ -400,7 +448,7 @@ impl EditFileToolCard {
|
||||
diff_task: None,
|
||||
preview_expanded: true,
|
||||
error_expanded: None,
|
||||
full_height_expanded: false,
|
||||
full_height_expanded: true,
|
||||
total_lines: None,
|
||||
}
|
||||
}
|
||||
@@ -869,6 +917,7 @@ async fn build_buffer_diff(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use client::TelemetrySettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -908,10 +957,102 @@ mod tests {
|
||||
.await;
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
"root/nonexistent_file.txt not found"
|
||||
"Can't edit file: path not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Create;
|
||||
|
||||
let result = test_resolve_path(mode, "root/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "dir/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "dir/new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: file already exists"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: parent directory doesn't exist"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Edit;
|
||||
|
||||
let path_with_root = "root/dir/subdir/existing.txt";
|
||||
let path_without_root = "dir/subdir/existing.txt";
|
||||
let result = test_resolve_path(mode, path_with_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, path_without_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path not found"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path is a directory"
|
||||
);
|
||||
}
|
||||
|
||||
async fn test_resolve_path(
|
||||
mode: &EditFileMode,
|
||||
path: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) -> anyhow::Result<ProjectPath> {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"dir": {
|
||||
"subdir": {
|
||||
"existing.txt": "hello"
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let input = EditFileToolInput {
|
||||
display_description: "Some edit".into(),
|
||||
path: path.into(),
|
||||
mode: mode.clone(),
|
||||
};
|
||||
|
||||
let result = cx.update(|cx| resolve_path(&input, project, cx));
|
||||
result
|
||||
}
|
||||
|
||||
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
|
||||
let actual = path
|
||||
.expect("Should return valid path")
|
||||
.path
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "/"); // Naive Windows paths normalization
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn still_streaming_ui_text_with_path() {
|
||||
let input = json!({
|
||||
@@ -984,6 +1125,7 @@ mod tests {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -109,7 +109,7 @@ impl Tool for GrepTool {
|
||||
let input = match serde_json::from_value::<GrepToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(error) => {
|
||||
return Task::ready(Err(anyhow!("Failed to parse input: {}", error))).into();
|
||||
return Task::ready(Err(anyhow!("Failed to parse input: {error}"))).into();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -122,7 +122,7 @@ impl Tool for GrepTool {
|
||||
) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(error) => {
|
||||
return Task::ready(Err(anyhow!("invalid include glob pattern: {}", error))).into();
|
||||
return Task::ready(Err(anyhow!("invalid include glob pattern: {error}"))).into();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
@@ -117,17 +117,10 @@ impl Tool for MovePathTool {
|
||||
});
|
||||
|
||||
cx.background_spawn(async move {
|
||||
match rename_task.await {
|
||||
Ok(_) => {
|
||||
Ok(format!("Moved {} to {}", input.source_path, input.destination_path).into())
|
||||
}
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to move {} to {}: {}",
|
||||
input.source_path,
|
||||
input.destination_path,
|
||||
err
|
||||
)),
|
||||
}
|
||||
let _ = rename_task.await.with_context(|| {
|
||||
format!("Moving {} to {}", input.source_path, input.destination_path)
|
||||
})?;
|
||||
Ok(format!("Moved {} to {}", input.source_path, input.destination_path).into())
|
||||
})
|
||||
.into()
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use assistant_tool::{ToolResultContent, outline};
|
||||
use gpui::{AnyWindowHandle, App, Entity, Task};
|
||||
@@ -129,7 +129,7 @@ impl Tool for ReadFileTool {
|
||||
let language_model_image = cx
|
||||
.update(|cx| LanguageModelImage::from_image(image, cx))?
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Failed to process image"))?;
|
||||
.context("processing image")?;
|
||||
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Image(language_model_image),
|
||||
@@ -152,7 +152,7 @@ impl Tool for ReadFileTool {
|
||||
.as_ref()
|
||||
.map_or(true, |file| !file.disk_state().exists())
|
||||
})? {
|
||||
return Err(anyhow!("{} not found", file_path));
|
||||
anyhow::bail!("{file_path} not found");
|
||||
}
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
You are an expert engineer and your task is to write a new file from scratch.
|
||||
|
||||
<file_to_edit>
|
||||
You MUST respond directly with the file's content, without explanations, additional text or triple backticks.
|
||||
The text you output will be saved verbatim as the content of the file.
|
||||
Tool calls have been disabled. You MUST start your response directly with the file's new content.
|
||||
|
||||
<file_path>
|
||||
{{path}}
|
||||
</file_to_edit>
|
||||
</file_path>
|
||||
|
||||
<edit_description>
|
||||
{{edit_description}}
|
||||
</edit_description>
|
||||
|
||||
You MUST respond directly with the file's content, without explanations, additional text or triple backticks.
|
||||
The text you output will be saved verbatim as the content of the file.
|
||||
|
||||
@@ -27,20 +27,57 @@ NEW TEXT 3 HERE
|
||||
</edits>
|
||||
```
|
||||
|
||||
Rules for editing:
|
||||
# File Editing Instructions
|
||||
|
||||
- Use `<old_text>` and `<new_text>` tags to replace content
|
||||
- `<old_text>` must exactly match existing file content, including indentation
|
||||
- `<old_text>` must come from the actual file, not an outline
|
||||
- `<old_text>` cannot be empty
|
||||
- Be minimal with replacements:
|
||||
- For unique lines, include only those lines
|
||||
- For non-unique lines, include enough context to identify them
|
||||
- Do not escape quotes, newlines, or other characters within tags
|
||||
- For multiple occurrences, repeat the same tag pair for each instance
|
||||
- Edits are sequential - each assumes previous edits are already applied
|
||||
- Only edit the specified file
|
||||
- Always close all tags properly
|
||||
|
||||
|
||||
{{!-- This example is important for Gemini 2.5 --}}
|
||||
<example>
|
||||
<edits>
|
||||
|
||||
<old_text>
|
||||
struct User {
|
||||
name: String,
|
||||
email: String,
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
struct User {
|
||||
name: String,
|
||||
email: String,
|
||||
active: bool,
|
||||
}
|
||||
</new_text>
|
||||
|
||||
<old_text>
|
||||
let user = User {
|
||||
name: String::from("John"),
|
||||
email: String::from("john@example.com"),
|
||||
};
|
||||
</old_text>
|
||||
<new_text>
|
||||
let user = User {
|
||||
name: String::from("John"),
|
||||
email: String::from("john@example.com"),
|
||||
active: true,
|
||||
};
|
||||
</new_text>
|
||||
|
||||
</edits>
|
||||
</example>
|
||||
|
||||
- `old_text` represents lines in the input file that will be replaced with `new_text`.
|
||||
- `old_text` MUST exactly match the existing file content, character for character, including indentation.
|
||||
- `old_text` MUST NEVER come from the outline, but from actual lines in the file.
|
||||
- Strive to be minimal in the lines you replace in `old_text`:
|
||||
- If the lines you want to replace are unique, you MUST include just those in the `old_text`.
|
||||
- If the lines you want to replace are NOT unique, you MUST include enough context around them in `old_text` to distinguish them from other lines.
|
||||
- If you want to replace many occurrences of the same text, repeat the same `old_text`/`new_text` pair multiple times and I will apply them sequentially, one occurrence at a time.
|
||||
- When reporting multiple edits, each edit assumes the previous one has already been applied! Therefore, you must ensure `old_text` doesn't reference text that has already been modified by a previous edit.
|
||||
- Don't explain the edits, just report them.
|
||||
- Only edit the file specified in `<file_to_edit>` and NEVER include edits to other files!
|
||||
- If you open an <old_text> tag, you MUST close it using </old_text>
|
||||
- If you open an <new_text> tag, you MUST close it using </new_text>
|
||||
|
||||
<file_to_edit>
|
||||
{{path}}
|
||||
|
||||
@@ -382,13 +382,11 @@ fn working_dir(
|
||||
|
||||
match worktrees.next() {
|
||||
Some(worktree) => {
|
||||
if worktrees.next().is_none() {
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.",
|
||||
))
|
||||
}
|
||||
anyhow::ensure!(
|
||||
worktrees.next().is_none(),
|
||||
"'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.",
|
||||
);
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
@@ -409,9 +407,7 @@ fn working_dir(
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"`cd` directory {cd:?} was not in any of the project's worktrees."
|
||||
))
|
||||
anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{io::Cursor, sync::Arc};
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AssetSource, Global};
|
||||
use rodio::{
|
||||
@@ -44,8 +44,8 @@ impl SoundRegistry {
|
||||
let bytes = self
|
||||
.assets
|
||||
.load(&path)?
|
||||
.map(Ok)
|
||||
.unwrap_or_else(|| Err(anyhow::anyhow!("No such asset available")))?
|
||||
.map(anyhow::Ok)
|
||||
.with_context(|| format!("No asset available for path {path}"))??
|
||||
.into_owned();
|
||||
let cursor = Cursor::new(bytes);
|
||||
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{Client, TelemetrySettings};
|
||||
use db::RELEASE_CHANNEL;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
@@ -39,13 +39,22 @@ struct UpdateRequestBody {
|
||||
destination: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum VersionCheckType {
|
||||
Sha(String),
|
||||
Semantic(SemanticVersion),
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub enum AutoUpdateStatus {
|
||||
Idle,
|
||||
Checking,
|
||||
Downloading,
|
||||
Installing,
|
||||
Updated { binary_path: PathBuf },
|
||||
Updated {
|
||||
binary_path: PathBuf,
|
||||
version: VersionCheckType,
|
||||
},
|
||||
Errored,
|
||||
}
|
||||
|
||||
@@ -62,7 +71,7 @@ pub struct AutoUpdater {
|
||||
pending_poll: Option<Task<Option<()>>>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[derive(Deserialize, Clone, Debug)]
|
||||
pub struct JsonRelease {
|
||||
pub version: String,
|
||||
pub url: String,
|
||||
@@ -307,7 +316,7 @@ impl AutoUpdater {
|
||||
}
|
||||
|
||||
pub fn poll(&mut self, cx: &mut Context<Self>) {
|
||||
if self.pending_poll.is_some() || self.status.is_updated() {
|
||||
if self.pending_poll.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -358,7 +367,7 @@ impl AutoUpdater {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("auto-update not initialized"))
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
@@ -402,7 +411,7 @@ impl AutoUpdater {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("auto-update not initialized"))
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
@@ -456,12 +465,11 @@ impl AutoUpdater {
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!(
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
);
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
@@ -484,35 +492,43 @@ impl AutoUpdater {
|
||||
}
|
||||
|
||||
async fn update(this: Entity<Self>, mut cx: AsyncApp) -> Result<()> {
|
||||
let (client, current_version, release_channel) = this.update(&mut cx, |this, cx| {
|
||||
let (client, installed_version, previous_status, release_channel) =
|
||||
this.update(&mut cx, |this, cx| {
|
||||
(
|
||||
this.http_client.clone(),
|
||||
this.current_version,
|
||||
this.status.clone(),
|
||||
ReleaseChannel::try_global(cx),
|
||||
)
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Checking;
|
||||
cx.notify();
|
||||
(
|
||||
this.http_client.clone(),
|
||||
this.current_version,
|
||||
ReleaseChannel::try_global(cx),
|
||||
)
|
||||
})?;
|
||||
|
||||
let release =
|
||||
let fetched_release_data =
|
||||
Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
|
||||
let fetched_version = fetched_release_data.clone().version;
|
||||
let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.0));
|
||||
let newer_version = Self::check_for_newer_version(
|
||||
*RELEASE_CHANNEL,
|
||||
app_commit_sha,
|
||||
installed_version,
|
||||
previous_status.clone(),
|
||||
fetched_version,
|
||||
)?;
|
||||
|
||||
let should_download = match *RELEASE_CHANNEL {
|
||||
ReleaseChannel::Nightly => cx
|
||||
.update(|cx| AppCommitSha::try_global(cx).map(|sha| release.version != sha.0))
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or(true),
|
||||
_ => release.version.parse::<SemanticVersion>()? > current_version,
|
||||
};
|
||||
|
||||
if !should_download {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Idle;
|
||||
let Some(newer_version) = newer_version else {
|
||||
return this.update(&mut cx, |this, cx| {
|
||||
let status = match previous_status {
|
||||
AutoUpdateStatus::Updated { .. } => previous_status,
|
||||
_ => AutoUpdateStatus::Idle,
|
||||
};
|
||||
this.status = status;
|
||||
cx.notify();
|
||||
})?;
|
||||
return Ok(());
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Downloading;
|
||||
@@ -520,11 +536,76 @@ impl AutoUpdater {
|
||||
})?;
|
||||
|
||||
let installer_dir = InstallerDir::new().await?;
|
||||
let target_path = Self::target_path(&installer_dir).await?;
|
||||
download_release(&target_path, fetched_release_data, client, &cx).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Installing;
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let binary_path = Self::binary_path(installer_dir, target_path, &cx).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.set_should_show_update_notification(true, cx)
|
||||
.detach_and_log_err(cx);
|
||||
this.status = AutoUpdateStatus::Updated {
|
||||
binary_path,
|
||||
version: newer_version,
|
||||
};
|
||||
cx.notify();
|
||||
})
|
||||
}
|
||||
|
||||
fn check_for_newer_version(
|
||||
release_channel: ReleaseChannel,
|
||||
app_commit_sha: Result<Option<String>>,
|
||||
installed_version: SemanticVersion,
|
||||
status: AutoUpdateStatus,
|
||||
fetched_version: String,
|
||||
) -> Result<Option<VersionCheckType>> {
|
||||
let parsed_fetched_version = fetched_version.parse::<SemanticVersion>();
|
||||
|
||||
if let AutoUpdateStatus::Updated { version, .. } = status {
|
||||
match version {
|
||||
VersionCheckType::Sha(cached_version) => {
|
||||
let should_download = fetched_version != cached_version;
|
||||
let newer_version =
|
||||
should_download.then(|| VersionCheckType::Sha(fetched_version));
|
||||
return Ok(newer_version);
|
||||
}
|
||||
VersionCheckType::Semantic(cached_version) => {
|
||||
return Self::check_for_newer_version_non_nightly(
|
||||
cached_version,
|
||||
parsed_fetched_version?,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match release_channel {
|
||||
ReleaseChannel::Nightly => {
|
||||
let should_download = app_commit_sha
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|sha| fetched_version != sha)
|
||||
.unwrap_or(true);
|
||||
let newer_version = should_download.then(|| VersionCheckType::Sha(fetched_version));
|
||||
Ok(newer_version)
|
||||
}
|
||||
_ => Self::check_for_newer_version_non_nightly(
|
||||
installed_version,
|
||||
parsed_fetched_version?,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
async fn target_path(installer_dir: &InstallerDir) -> Result<PathBuf> {
    let filename = match OS {
        "macos" => Ok("Zed.dmg"),
        "macos" => anyhow::Ok("Zed.dmg"),
        "linux" => Ok("zed.tar.gz"),
        "windows" => Ok("ZedUpdateInstaller.exe"),
        _ => Err(anyhow!("not supported: {:?}", OS)),
        unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
    }?;

    #[cfg(not(target_os = "windows"))]
@@ -533,29 +614,29 @@ impl AutoUpdater {
        "Aborting. Could not find rsync which is required for auto-updates."
    );

    let downloaded_asset = installer_dir.path().join(filename);
    download_release(&downloaded_asset, release, client, &cx).await?;
    Ok(installer_dir.path().join(filename))
}

this.update(&mut cx, |this, cx| {
    this.status = AutoUpdateStatus::Installing;
    cx.notify();
})?;
async fn binary_path(
    installer_dir: InstallerDir,
    target_path: PathBuf,
    cx: &AsyncApp,
) -> Result<PathBuf> {
    match OS {
        "macos" => install_release_macos(&installer_dir, target_path, cx).await,
        "linux" => install_release_linux(&installer_dir, target_path, cx).await,
        "windows" => install_release_windows(target_path).await,
        unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
    }
}

let binary_path = match OS {
    "macos" => install_release_macos(&installer_dir, downloaded_asset, &cx).await,
    "linux" => install_release_linux(&installer_dir, downloaded_asset, &cx).await,
    "windows" => install_release_windows(downloaded_asset).await,
    _ => Err(anyhow!("not supported: {:?}", OS)),
}?;

this.update(&mut cx, |this, cx| {
    this.set_should_show_update_notification(true, cx)
        .detach_and_log_err(cx);
    this.status = AutoUpdateStatus::Updated { binary_path };
    cx.notify();
})?;

Ok(())
fn check_for_newer_version_non_nightly(
    installed_version: SemanticVersion,
    fetched_version: SemanticVersion,
) -> Result<Option<VersionCheckType>> {
    let should_download = fetched_version > installed_version;
    let newer_version = should_download.then(|| VersionCheckType::Semantic(fetched_version));
    Ok(newer_version)
}

pub fn set_should_show_update_notification(
@@ -601,12 +682,11 @@ async fn download_remote_server_binary(
    let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);

    let mut response = client.get(&release.url, request_body, true).await?;
    if !response.status().is_success() {
        return Err(anyhow!(
            "failed to download remote server release: {:?}",
            response.status()
        ));
    }
    anyhow::ensure!(
        response.status().is_success(),
        "failed to download remote server release: {:?}",
        response.status()
    );
    smol::io::copy(response.body_mut(), &mut temp_file).await?;
    smol::fs::rename(&temp, &target_path).await?;

@@ -753,7 +833,7 @@ async fn install_release_macos(
    let running_app_path = cx.update(|cx| cx.app_path())??;
    let running_app_filename = running_app_path
        .file_name()
        .ok_or_else(|| anyhow!("invalid running app path"))?;
        .with_context(|| format!("invalid running app path {running_app_path:?}"))?;

    let mount_path = temp_dir.path().join("Zed");
    let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
@@ -831,3 +911,255 @@ pub fn check_pending_installation() -> bool {
    }
    false
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_stable_does_not_update_when_fetched_version_is_not_higher() {
        let release_channel = ReleaseChannel::Stable;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Idle;
        let fetched_version = SemanticVersion::new(1, 0, 0);

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_version.to_string(),
        );

        assert_eq!(newer_version.unwrap(), None);
    }

    #[test]
    fn test_stable_does_update_when_fetched_version_is_higher() {
        let release_channel = ReleaseChannel::Stable;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Idle;
        let fetched_version = SemanticVersion::new(1, 0, 1);

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_version.to_string(),
        );

        assert_eq!(
            newer_version.unwrap(),
            Some(VersionCheckType::Semantic(fetched_version))
        );
    }

    #[test]
    fn test_stable_does_not_update_when_fetched_version_is_not_higher_than_cached() {
        let release_channel = ReleaseChannel::Stable;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Updated {
            binary_path: PathBuf::new(),
            version: VersionCheckType::Semantic(SemanticVersion::new(1, 0, 1)),
        };
        let fetched_version = SemanticVersion::new(1, 0, 1);

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_version.to_string(),
        );

        assert_eq!(newer_version.unwrap(), None);
    }

    #[test]
    fn test_stable_does_update_when_fetched_version_is_higher_than_cached() {
        let release_channel = ReleaseChannel::Stable;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Updated {
            binary_path: PathBuf::new(),
            version: VersionCheckType::Semantic(SemanticVersion::new(1, 0, 1)),
        };
        let fetched_version = SemanticVersion::new(1, 0, 2);

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_version.to_string(),
        );

        assert_eq!(
            newer_version.unwrap(),
            Some(VersionCheckType::Semantic(fetched_version))
        );
    }

    #[test]
    fn test_nightly_does_not_update_when_fetched_sha_is_same() {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Idle;
        let fetched_sha = "a".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha,
        );

        assert_eq!(newer_version.unwrap(), None);
    }

    #[test]
    fn test_nightly_does_update_when_fetched_sha_is_not_same() {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Idle;
        let fetched_sha = "b".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha.clone(),
        );

        assert_eq!(
            newer_version.unwrap(),
            Some(VersionCheckType::Sha(fetched_sha))
        );
    }

    #[test]
    fn test_nightly_does_not_update_when_fetched_sha_is_same_as_cached() {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Updated {
            binary_path: PathBuf::new(),
            version: VersionCheckType::Sha("b".to_string()),
        };
        let fetched_sha = "b".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha,
        );

        assert_eq!(newer_version.unwrap(), None);
    }

    #[test]
    fn test_nightly_does_update_when_fetched_sha_is_not_same_as_cached() {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(Some("a".to_string()));
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Updated {
            binary_path: PathBuf::new(),
            version: VersionCheckType::Sha("b".to_string()),
        };
        let fetched_sha = "c".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha.clone(),
        );

        assert_eq!(
            newer_version.unwrap(),
            Some(VersionCheckType::Sha(fetched_sha))
        );
    }

    #[test]
    fn test_nightly_does_update_when_installed_versions_sha_cannot_be_retrieved() {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(None);
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Idle;
        let fetched_sha = "a".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha.clone(),
        );

        assert_eq!(
            newer_version.unwrap(),
            Some(VersionCheckType::Sha(fetched_sha))
        );
    }

    #[test]
    fn test_nightly_does_not_update_when_cached_update_is_same_as_fetched_and_installed_versions_sha_cannot_be_retrieved()
    {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(None);
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Updated {
            binary_path: PathBuf::new(),
            version: VersionCheckType::Sha("b".to_string()),
        };
        let fetched_sha = "b".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha,
        );

        assert_eq!(newer_version.unwrap(), None);
    }

    #[test]
    fn test_nightly_does_update_when_cached_update_is_not_same_as_fetched_and_installed_versions_sha_cannot_be_retrieved()
    {
        let release_channel = ReleaseChannel::Nightly;
        let app_commit_sha = Ok(None);
        let installed_version = SemanticVersion::new(1, 0, 0);
        let status = AutoUpdateStatus::Updated {
            binary_path: PathBuf::new(),
            version: VersionCheckType::Sha("b".to_string()),
        };
        let fetched_sha = "c".to_string();

        let newer_version = AutoUpdater::check_for_newer_version(
            release_channel,
            app_commit_sha,
            installed_version,
            status,
            fetched_sha.clone(),
        );

        assert_eq!(
            newer_version.unwrap(),
            Some(VersionCheckType::Sha(fetched_sha))
        );
    }
}

@@ -22,7 +22,7 @@ mod windows_impl {
|
||||
|
||||
use super::dialog::create_dialog_window;
|
||||
use super::updater::perform_update;
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context as _, Result};
|
||||
use windows::{
|
||||
Win32::{
|
||||
Foundation::{HWND, LPARAM, WPARAM},
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context as _, Result};
|
||||
use windows::Win32::{
|
||||
Foundation::{HWND, LPARAM, WPARAM},
|
||||
System::Threading::CREATE_NEW_PROCESS_GROUP,
|
||||
@@ -124,9 +124,7 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>) -> Result<()>
|
||||
for job in JOBS.iter() {
|
||||
let start = Instant::now();
|
||||
loop {
|
||||
if start.elapsed().as_secs() > 2 {
|
||||
return Err(anyhow::anyhow!("Timed out"));
|
||||
}
|
||||
anyhow::ensure!(start.elapsed().as_secs() <= 2, "Timed out");
|
||||
match (*job)(app_dir) {
|
||||
Ok(_) => {
|
||||
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
|
||||
|
||||
@@ -3,7 +3,7 @@ mod models;
|
||||
use std::collections::HashMap;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{Error, Result, anyhow};
|
||||
use anyhow::{Context as _, Error, Result, anyhow};
|
||||
use aws_sdk_bedrockruntime as bedrock;
|
||||
pub use aws_sdk_bedrockruntime as bedrock_client;
|
||||
pub use aws_sdk_bedrockruntime::types::{
|
||||
@@ -97,7 +97,7 @@ pub async fn stream_completion(
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|err| anyhow!("failed to spawn task: {err:?}"))?
|
||||
.context("spawning a task")?
|
||||
}
|
||||
|
||||
pub fn aws_document_to_value(document: &Document) -> Value {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use anyhow::anyhow;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::EnumIter;
|
||||
|
||||
@@ -107,7 +106,7 @@ impl Model {
|
||||
} else if id.starts_with("claude-3-7-sonnet-thinking") {
|
||||
Ok(Self::Claude3_7SonnetThinking)
|
||||
} else {
|
||||
Err(anyhow!("invalid model id"))
|
||||
anyhow::bail!("invalid model id {id}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -294,7 +293,7 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cross_region_inference_id(&self, region: &str) -> Result<String, anyhow::Error> {
|
||||
pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result<String> {
|
||||
let region_group = if region.starts_with("us-gov-") {
|
||||
"us-gov"
|
||||
} else if region.starts_with("us-") {
|
||||
@@ -307,8 +306,7 @@ impl Model {
|
||||
// Canada and South America regions - default to US profiles
|
||||
"us"
|
||||
} else {
|
||||
// Unknown region
|
||||
return Err(anyhow!("Unsupported Region"));
|
||||
anyhow::bail!("Unsupported Region {region}");
|
||||
};
|
||||
|
||||
let model_id = self.id();
|
||||
|
||||
@@ -2,7 +2,7 @@ pub mod participant;
|
||||
pub mod room;
|
||||
|
||||
use crate::call_settings::CallSettings;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use audio::Audio;
|
||||
use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE, proto};
|
||||
use collections::HashSet;
|
||||
@@ -187,7 +187,7 @@ impl ActiveCall {
|
||||
|
||||
let invite = if let Some(room) = room {
|
||||
cx.spawn(async move |_, cx| {
|
||||
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
|
||||
let room = room.await.map_err(|err| anyhow!("{err:?}"))?;
|
||||
|
||||
let initial_project_id = if let Some(initial_project) = initial_project {
|
||||
Some(
|
||||
@@ -236,7 +236,7 @@ impl ActiveCall {
|
||||
.shared();
|
||||
self.pending_room_creation = Some(room.clone());
|
||||
cx.background_spawn(async move {
|
||||
room.await.map_err(|err| anyhow!("{:?}", err))?;
|
||||
room.await.map_err(|err| anyhow!("{err:?}"))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
};
|
||||
@@ -326,7 +326,7 @@ impl ActiveCall {
|
||||
.0
|
||||
.borrow_mut()
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("no incoming call"))?;
|
||||
.context("no incoming call")?;
|
||||
telemetry::event!("Incoming Call Declined", room_id = call.room_id);
|
||||
self.client.send(proto::DeclineCall {
|
||||
room_id: call.room_id,
|
||||
@@ -399,12 +399,9 @@ impl ActiveCall {
|
||||
project: Entity<Project>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<()> {
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
self.report_call_event("Project Unshared", cx);
|
||||
room.update(cx, |room, cx| room.unshare_project(project, cx))
|
||||
} else {
|
||||
Err(anyhow!("no active call"))
|
||||
}
|
||||
let (room, _) = self.room.as_ref().context("no active call")?;
|
||||
self.report_call_event("Project Unshared", cx);
|
||||
room.update(cx, |room, cx| room.unshare_project(project, cx))
|
||||
}
|
||||
|
||||
pub fn location(&self) -> Option<&WeakEntity<Project>> {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{ParticipantIndex, User, proto};
|
||||
use collections::HashMap;
|
||||
use gpui::WeakEntity;
|
||||
@@ -18,17 +18,17 @@ pub enum ParticipantLocation {
|
||||
|
||||
impl ParticipantLocation {
|
||||
pub fn from_proto(location: Option<proto::ParticipantLocation>) -> Result<Self> {
|
||||
match location.and_then(|l| l.variant) {
|
||||
Some(proto::participant_location::Variant::SharedProject(project)) => {
|
||||
match location
|
||||
.and_then(|l| l.variant)
|
||||
.context("participant location was not provided")?
|
||||
{
|
||||
proto::participant_location::Variant::SharedProject(project) => {
|
||||
Ok(Self::SharedProject {
|
||||
project_id: project.id,
|
||||
})
|
||||
}
|
||||
Some(proto::participant_location::Variant::UnsharedProject(_)) => {
|
||||
Ok(Self::UnsharedProject)
|
||||
}
|
||||
Some(proto::participant_location::Variant::External(_)) => Ok(Self::External),
|
||||
None => Err(anyhow!("participant location was not provided")),
|
||||
proto::participant_location::Variant::UnsharedProject(_) => Ok(Self::UnsharedProject),
|
||||
proto::participant_location::Variant::External(_) => Ok(Self::External),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::{
|
||||
call_settings::CallSettings,
|
||||
participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use audio::{Audio, Sound};
|
||||
use client::{
|
||||
ChannelId, Client, ParticipantIndex, TypedEnvelope, User, UserStore,
|
||||
@@ -165,7 +165,7 @@ impl Room {
|
||||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(async move |cx| {
|
||||
let response = client.request(proto::CreateRoom {}).await?;
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room_proto = response.room.context("invalid room")?;
|
||||
let room = cx.new(|cx| {
|
||||
let mut room = Self::new(
|
||||
room_proto.id,
|
||||
@@ -270,7 +270,7 @@ impl Room {
|
||||
user_store: Entity<UserStore>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<Entity<Self>> {
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room_proto = response.room.context("invalid room")?;
|
||||
let room = cx.new(|cx| {
|
||||
Self::new(
|
||||
room_proto.id,
|
||||
@@ -360,7 +360,7 @@ impl Room {
|
||||
log::info!("detected client disconnection");
|
||||
|
||||
this.upgrade()
|
||||
.ok_or_else(|| anyhow!("room was dropped"))?
|
||||
.context("room was dropped")?
|
||||
.update(cx, |this, cx| {
|
||||
this.status = RoomStatus::Rejoining;
|
||||
cx.notify();
|
||||
@@ -428,9 +428,7 @@ impl Room {
|
||||
log::info!("reconnection failed, leaving room");
|
||||
this.update(cx, |this, cx| this.leave(cx))?.await?;
|
||||
}
|
||||
Err(anyhow!(
|
||||
"can't reconnect to room: client failed to re-establish connection"
|
||||
))
|
||||
anyhow::bail!("can't reconnect to room: client failed to re-establish connection");
|
||||
}
|
||||
|
||||
fn rejoin(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
@@ -494,7 +492,7 @@ impl Room {
|
||||
let response = response.await?;
|
||||
let message_id = response.message_id;
|
||||
let response = response.payload;
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room_proto = response.room.context("invalid room")?;
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = RoomStatus::Online;
|
||||
this.apply_room_update(room_proto, cx)?;
|
||||
@@ -645,10 +643,7 @@ impl Room {
|
||||
envelope: TypedEnvelope<proto::RoomUpdated>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<()> {
|
||||
let room = envelope
|
||||
.payload
|
||||
.room
|
||||
.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room = envelope.payload.room.context("invalid room")?;
|
||||
this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
|
||||
}
|
||||
|
||||
@@ -937,12 +932,15 @@ impl Room {
|
||||
} => {
|
||||
let user_id = participant.identity().0.parse()?;
|
||||
let track_id = track.sid();
|
||||
let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
|
||||
anyhow!(
|
||||
"{:?} subscribed to track by unknown participant {user_id}",
|
||||
self.client.user_id()
|
||||
)
|
||||
})?;
|
||||
let participant =
|
||||
self.remote_participants
|
||||
.get_mut(&user_id)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"{:?} subscribed to track by unknown participant {user_id}",
|
||||
self.client.user_id()
|
||||
)
|
||||
})?;
|
||||
if self.live_kit.as_ref().map_or(true, |kit| kit.deafened) {
|
||||
if publication.is_audio() {
|
||||
publication.set_enabled(false, cx);
|
||||
@@ -972,12 +970,15 @@ impl Room {
|
||||
track, participant, ..
|
||||
} => {
|
||||
let user_id = participant.identity().0.parse()?;
|
||||
let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
|
||||
anyhow!(
|
||||
"{:?}, unsubscribed from track by unknown participant {user_id}",
|
||||
self.client.user_id()
|
||||
)
|
||||
})?;
|
||||
let participant =
|
||||
self.remote_participants
|
||||
.get_mut(&user_id)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"{:?}, unsubscribed from track by unknown participant {user_id}",
|
||||
self.client.user_id()
|
||||
)
|
||||
})?;
|
||||
match track {
|
||||
livekit_client::RemoteTrack::Audio(track) => {
|
||||
participant.audio_tracks.remove(&track.sid());
|
||||
@@ -1324,7 +1325,7 @@ impl Room {
|
||||
let live_kit = this
|
||||
.live_kit
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?;
|
||||
.context("live-kit was not initialized")?;
|
||||
|
||||
let canceled = if let LocalTrack::Pending {
|
||||
publish_id: cur_publish_id,
|
||||
@@ -1389,7 +1390,7 @@ impl Room {
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let sources = sources.await??;
|
||||
let source = sources.first().ok_or_else(|| anyhow!("no display found"))?;
|
||||
let source = sources.first().context("no display found")?;
|
||||
|
||||
let publication = participant.publish_screenshare_track(&**source, cx).await;
|
||||
|
||||
@@ -1397,7 +1398,7 @@ impl Room {
|
||||
let live_kit = this
|
||||
.live_kit
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?;
|
||||
.context("live-kit was not initialized")?;
|
||||
|
||||
let canceled = if let LocalTrack::Pending {
|
||||
publish_id: cur_publish_id,
|
||||
@@ -1485,16 +1486,14 @@ impl Room {
|
||||
}
|
||||
|
||||
pub fn unshare_screen(&mut self, cx: &mut Context<Self>) -> Result<()> {
|
||||
if self.status.is_offline() {
|
||||
return Err(anyhow!("room is offline"));
|
||||
}
|
||||
anyhow::ensure!(!self.status.is_offline(), "room is offline");
|
||||
|
||||
let live_kit = self
|
||||
.live_kit
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?;
|
||||
.context("live-kit was not initialized")?;
|
||||
match mem::take(&mut live_kit.screen_track) {
|
||||
LocalTrack::None => Err(anyhow!("screen was not shared")),
|
||||
LocalTrack::None => anyhow::bail!("screen was not shared"),
|
||||
LocalTrack::Pending { .. } => {
|
||||
cx.notify();
|
||||
Ok(())
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{Channel, ChannelStore};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{
|
||||
ChannelId, Client, Subscription, TypedEnvelope, UserId, proto,
|
||||
user::{User, UserStore},
|
||||
@@ -170,15 +170,16 @@ impl ChannelChat {
|
||||
message: MessageParams,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<Task<Result<u64>>> {
|
||||
if message.text.trim().is_empty() {
|
||||
Err(anyhow!("message body can't be empty"))?;
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!message.text.trim().is_empty(),
|
||||
"message body can't be empty"
|
||||
);
|
||||
|
||||
let current_user = self
|
||||
.user_store
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.ok_or_else(|| anyhow!("current_user is not present"))?;
|
||||
.context("current_user is not present")?;
|
||||
|
||||
let channel_id = self.channel_id;
|
||||
let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
|
||||
@@ -215,7 +216,7 @@ impl ChannelChat {
|
||||
});
|
||||
let response = request.await?;
|
||||
drop(outgoing_message_guard);
|
||||
let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
|
||||
let response = response.message.context("invalid message")?;
|
||||
let id = response.id;
|
||||
let message = ChannelMessage::from_proto(response, &user_store, cx).await?;
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -470,7 +471,7 @@ impl ChannelChat {
|
||||
});
|
||||
let response = request.await?;
|
||||
let message = ChannelMessage::from_proto(
|
||||
response.message.ok_or_else(|| anyhow!("invalid message"))?,
|
||||
response.message.context("invalid message")?,
|
||||
&user_store,
|
||||
cx,
|
||||
)
|
||||
@@ -531,10 +532,7 @@ impl ChannelChat {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<()> {
|
||||
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
|
||||
let message = message
|
||||
.payload
|
||||
.message
|
||||
.ok_or_else(|| anyhow!("empty message"))?;
|
||||
let message = message.payload.message.context("empty message")?;
|
||||
let message_id = message.id;
|
||||
|
||||
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
|
||||
@@ -566,10 +564,7 @@ impl ChannelChat {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<()> {
|
||||
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
|
||||
let message = message
|
||||
.payload
|
||||
.message
|
||||
.ok_or_else(|| anyhow!("empty message"))?;
|
||||
let message = message.payload.message.context("empty message")?;
|
||||
|
||||
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
|
||||
|
||||
@@ -753,10 +748,7 @@ impl ChannelMessage {
|
||||
.collect(),
|
||||
timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?,
|
||||
sender,
|
||||
nonce: message
|
||||
.nonce
|
||||
.ok_or_else(|| anyhow!("nonce is required"))?
|
||||
.into(),
|
||||
nonce: message.nonce.context("nonce is required")?.into(),
|
||||
reply_to_message_id: message.reply_to_message_id,
|
||||
edited_at,
|
||||
})
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
mod channel_index;
|
||||
|
||||
use crate::{ChannelMessage, channel_buffer::ChannelBuffer, channel_chat::ChannelChat};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
|
||||
use collections::{HashMap, HashSet, hash_map};
|
||||
@@ -332,9 +332,7 @@ impl ChannelStore {
|
||||
cx.spawn(async move |this, cx| {
|
||||
if let Some(request) = request {
|
||||
let response = request.await?;
|
||||
let this = this
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("channel store dropped"))?;
|
||||
let this = this.upgrade().context("channel store dropped")?;
|
||||
let user_store = this.update(cx, |this, _| this.user_store.clone())?;
|
||||
ChannelMessage::from_proto_vec(response.messages, &user_store, cx).await
|
||||
} else {
|
||||
@@ -482,7 +480,7 @@ impl ChannelStore {
|
||||
.spawn(async move |this, cx| {
|
||||
let channel = this.update(cx, |this, _| {
|
||||
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
|
||||
Arc::new(anyhow!("no channel for id: {}", channel_id))
|
||||
Arc::new(anyhow!("no channel for id: {channel_id}"))
|
||||
})
|
||||
})??;
|
||||
|
||||
@@ -514,7 +512,7 @@ impl ChannelStore {
|
||||
}
|
||||
}
|
||||
};
|
||||
cx.background_spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
|
||||
cx.background_spawn(async move { task.await.map_err(|error| anyhow!("{error}")) })
|
||||
}
|
||||
|
||||
pub fn is_channel_admin(&self, channel_id: ChannelId) -> bool {
|
||||
@@ -578,9 +576,7 @@ impl ChannelStore {
|
||||
})
|
||||
.await?;
|
||||
|
||||
let channel = response
|
||||
.channel
|
||||
.ok_or_else(|| anyhow!("missing channel in response"))?;
|
||||
let channel = response.channel.context("missing channel in response")?;
|
||||
let channel_id = ChannelId(channel.id);
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -752,7 +748,7 @@ impl ChannelStore {
|
||||
})
|
||||
.await?
|
||||
.channel
|
||||
.ok_or_else(|| anyhow!("missing channel in response"))?;
|
||||
.context("missing channel in response")?;
|
||||
this.update(cx, |this, cx| {
|
||||
let task = this.update_channels(
|
||||
proto::UpdateChannels {
|
||||
|
||||
@@ -169,7 +169,7 @@ fn main() -> Result<()> {
|
||||
"To retrieve the system specs on the command line, run the following command:",
|
||||
&format!("{} --system-specs", path.display()),
|
||||
];
|
||||
return Err(anyhow::anyhow!(msg.join("\n")));
|
||||
anyhow::bail!(msg.join("\n"));
|
||||
}
|
||||
|
||||
#[cfg(all(
|
||||
@@ -255,11 +255,10 @@ fn main() -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(_) = args.dev_server_token {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
|
||||
))?;
|
||||
}
|
||||
anyhow::ensure!(
|
||||
args.dev_server_token.is_none(),
|
||||
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
|
||||
);
|
||||
|
||||
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
|
||||
let exit_status = exit_status.clone();
|
||||
@@ -400,7 +399,7 @@ mod linux {
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
|
||||
use fork::Fork;
|
||||
|
||||
@@ -417,9 +416,7 @@ mod linux {
|
||||
path.to_path_buf().canonicalize()?
|
||||
} else {
|
||||
let cli = env::current_exe()?;
|
||||
let dir = cli
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow!("no parent path for cli"))?;
|
||||
let dir = cli.parent().context("no parent path for cli")?;
|
||||
|
||||
// libexec is the standard, lib/zed is for Arch (and other non-libexec distros),
|
||||
// ./zed is for the target directory in development builds.
|
||||
@@ -428,8 +425,8 @@ mod linux {
|
||||
possible_locations
|
||||
.iter()
|
||||
.find_map(|p| dir.join(p).canonicalize().ok().filter(|path| path != &cli))
|
||||
.ok_or_else(|| {
|
||||
anyhow!("could not find any of: {}", possible_locations.join(", "))
|
||||
.with_context(|| {
|
||||
format!("could not find any of: {}", possible_locations.join(", "))
|
||||
})?
|
||||
};
|
||||
|
||||
@@ -759,7 +756,7 @@ mod windows {
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod mac_os {
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use core_foundation::{
|
||||
array::{CFArray, CFIndex},
|
||||
base::TCFType as _,
|
||||
@@ -800,9 +797,10 @@ mod mac_os {
|
||||
let cli_path = std::env::current_exe()?.canonicalize()?;
|
||||
let mut app_path = cli_path.clone();
|
||||
while app_path.extension() != Some(OsStr::new("app")) {
|
||||
if !app_path.pop() {
|
||||
return Err(anyhow!("cannot find app bundle containing {:?}", cli_path));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
app_path.pop(),
|
||||
"cannot find app bundle containing {cli_path:?}"
|
||||
);
|
||||
}
|
||||
Ok(app_path)
|
||||
}
|
||||
|
||||
@@ -49,7 +49,6 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
tokio-native-tls = "0.3"
|
||||
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
@@ -72,3 +71,10 @@ windows.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
cocoa.workspace = true
|
||||
|
||||
[target.'cfg(any(target_os = "windows", target_os = "macos"))'.dependencies]
|
||||
tokio-native-tls = "0.3"
|
||||
|
||||
[target.'cfg(not(any(target_os = "windows", target_os = "macos")))'.dependencies]
|
||||
rustls-pki-types = "1.12"
|
||||
tokio-rustls = { version = "0.26", features = ["tls12", "ring"], default-features = false }
|
||||
|
||||
@@ -490,14 +490,14 @@ impl<T: 'static> Drop for PendingEntitySubscription<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, Deserialize, Debug)]
|
||||
pub struct TelemetrySettings {
|
||||
pub diagnostics: bool,
|
||||
pub metrics: bool,
|
||||
}
|
||||
|
||||
/// Control what info is collected by Zed.
|
||||
#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
pub struct TelemetrySettingsContent {
|
||||
/// Send debug info like crash reports.
|
||||
///
|
||||
@@ -515,25 +515,7 @@ impl settings::Settings for TelemetrySettings {
|
||||
type FileContent = TelemetrySettingsContent;
|
||||
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
|
||||
Ok(Self {
|
||||
diagnostics: sources
|
||||
.user
|
||||
.as_ref()
|
||||
.or(sources.server.as_ref())
|
||||
.and_then(|v| v.diagnostics)
|
||||
.unwrap_or(
|
||||
sources
|
||||
.default
|
||||
.diagnostics
|
||||
.ok_or_else(Self::missing_default)?,
|
||||
),
|
||||
metrics: sources
|
||||
.user
|
||||
.as_ref()
|
||||
.or(sources.server.as_ref())
|
||||
.and_then(|v| v.metrics)
|
||||
.unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?),
|
||||
})
|
||||
sources.json_merge()
|
||||
}
|
||||
|
||||
fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
|
||||
@@ -729,9 +711,10 @@ impl Client {
|
||||
let id = (TypeId::of::<T>(), remote_id);
|
||||
|
||||
let mut state = self.handler_set.lock();
|
||||
if state.entities_by_type_and_remote_id.contains_key(&id) {
|
||||
return Err(anyhow!("already subscribed to entity"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!state.entities_by_type_and_remote_id.contains_key(&id),
|
||||
"already subscribed to entity"
|
||||
);
|
||||
|
||||
state
|
||||
.entities_by_type_and_remote_id
|
||||
@@ -980,10 +963,7 @@ impl Client {
|
||||
hello_message_type_name
|
||||
)
|
||||
})?;
|
||||
let peer_id = hello
|
||||
.payload
|
||||
.peer_id
|
||||
.ok_or_else(|| anyhow!("invalid peer id"))?;
|
||||
let peer_id = hello.payload.peer_id.context("invalid peer id")?;
|
||||
Ok(peer_id)
|
||||
};
|
||||
|
||||
@@ -1093,22 +1073,19 @@ impl Client {
|
||||
}
|
||||
|
||||
let response = http.get(&url, Default::default(), false).await?;
|
||||
let collab_url = if response.status().is_redirection() {
|
||||
response
|
||||
.headers()
|
||||
.get("Location")
|
||||
.ok_or_else(|| anyhow!("missing location header in /rpc response"))?
|
||||
.to_str()
|
||||
.map_err(EstablishConnectionError::other)?
|
||||
.to_string()
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"unexpected /rpc response status {}",
|
||||
response.status()
|
||||
))?
|
||||
};
|
||||
|
||||
Url::parse(&collab_url).context("invalid rpc url")
|
||||
anyhow::ensure!(
|
||||
response.status().is_redirection(),
|
||||
"unexpected /rpc response status {}",
|
||||
response.status()
|
||||
);
|
||||
let collab_url = response
|
||||
.headers()
|
||||
.get("Location")
|
||||
.context("missing location header in /rpc response")?
|
||||
.to_str()
|
||||
.map_err(EstablishConnectionError::other)?
|
||||
.to_string();
|
||||
Url::parse(&collab_url).with_context(|| format!("parsing colab rpc url {collab_url}"))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1150,7 +1127,7 @@ impl Client {
|
||||
let rpc_host = rpc_url
|
||||
.host_str()
|
||||
.zip(rpc_url.port_or_known_default())
|
||||
.ok_or_else(|| anyhow!("missing host in rpc url"))?;
|
||||
.context("missing host in rpc url")?;
|
||||
|
||||
let stream = {
|
||||
let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
|
||||
@@ -1305,16 +1282,13 @@ impl Client {
|
||||
)
|
||||
.context("failed to respond to login http request")?;
|
||||
return Ok((
|
||||
user_id
|
||||
.ok_or_else(|| anyhow!("missing user_id parameter"))?,
|
||||
access_token.ok_or_else(|| {
|
||||
anyhow!("missing access_token parameter")
|
||||
})?,
|
||||
user_id.context("missing user_id parameter")?,
|
||||
access_token.context("missing access_token parameter")?,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!("didn't receive login redirect"))
|
||||
anyhow::bail!("didn't receive login redirect");
|
||||
})
|
||||
.await?;
|
||||
|
||||
@@ -1432,13 +1406,12 @@ impl Client {
|
||||
let mut response = http.send(request).await?;
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
if !response.status().is_success() {
|
||||
Err(anyhow!(
|
||||
"admin user request failed {} - {}",
|
||||
response.status().as_u16(),
|
||||
body,
|
||||
))?;
|
||||
}
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"admin user request failed {} - {}",
|
||||
response.status().as_u16(),
|
||||
body,
|
||||
);
|
||||
let response: AuthenticatedUserResponse = serde_json::from_str(&body)?;
|
||||
|
||||
// Use the admin API token to authenticate as the impersonated user.
|
||||
@@ -1475,7 +1448,7 @@ impl Client {
|
||||
if let Status::Connected { connection_id, .. } = *self.status().borrow() {
|
||||
Ok(connection_id)
|
||||
} else {
|
||||
Err(anyhow!("not connected"))
|
||||
anyhow::bail!("not connected");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
mod http_proxy;
|
||||
mod socks_proxy;
|
||||
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use http_client::Url;
|
||||
use http_proxy::{HttpProxyType, connect_http_proxy_stream, parse_http_proxy};
|
||||
use socks_proxy::{SocksVersion, connect_socks_proxy_stream, parse_socks_proxy};
|
||||
@@ -16,7 +16,7 @@ pub(crate) async fn connect_proxy_stream(
|
||||
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
// so any fallback could expose users to significant risks.
|
||||
return Err(anyhow!("Parsing proxy url failed"));
|
||||
anyhow::bail!("Parsing proxy url failed");
|
||||
};
|
||||
|
||||
// Connect to proxy and wrap protocol later
|
||||
|
||||
@@ -5,7 +5,10 @@ use tokio::{
|
||||
io::{AsyncBufReadExt, AsyncWriteExt, BufStream},
|
||||
net::TcpStream,
|
||||
};
|
||||
#[cfg(any(target_os = "windows", target_os = "macos"))]
|
||||
use tokio_native_tls::{TlsConnector, native_tls};
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
|
||||
use tokio_rustls::TlsConnector;
|
||||
use url::Url;
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
@@ -61,6 +64,7 @@ where
|
||||
Ok(Box::new(stream))
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "windows", target_os = "macos"))]
|
||||
async fn https_connect<T>(
|
||||
stream: T,
|
||||
target: (&str, u16),
|
||||
@@ -75,6 +79,24 @@ where
|
||||
http_connect(stream, target, auth).await
|
||||
}
|
||||
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
|
||||
async fn https_connect<T>(
|
||||
stream: T,
|
||||
target: (&str, u16),
|
||||
auth: Option<HttpProxyAuthorization<'_>>,
|
||||
proxy_domain: &str,
|
||||
) -> Result<Box<dyn AsyncReadWrite>>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let proxy_domain = rustls_pki_types::ServerName::try_from(proxy_domain)
|
||||
.context("Address resolution failed")?
|
||||
.to_owned();
|
||||
let tls_connector = TlsConnector::from(std::sync::Arc::new(http_client_tls::tls_config()));
|
||||
let stream = tls_connector.connect(proxy_domain, stream).await?;
|
||||
http_connect(stream, target, auth).await
|
||||
}
|
||||
|
||||
fn make_request(target: (&str, u16), auth: Option<HttpProxyAuthorization<'_>>) -> String {
|
||||
let (host, port) = target;
|
||||
let mut request = format!(
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
//! socks proxy
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context as _, Result};
|
||||
use http_client::Url;
|
||||
use tokio::net::TcpStream;
|
||||
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};
|
||||
use url::Url;
|
||||
use tokio_socks::{
|
||||
IntoTargetAddr, TargetAddr,
|
||||
tcp::{Socks4Stream, Socks5Stream},
|
||||
};
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
|
||||
@@ -23,8 +26,14 @@ pub(super) struct Socks5Authorization<'a> {
|
||||
/// V4 allows idenfication using a user_id
|
||||
/// V5 allows authorization using a username and password
|
||||
pub(super) enum SocksVersion<'a> {
|
||||
V4(Option<Socks4Identification<'a>>),
|
||||
V5(Option<Socks5Authorization<'a>>),
|
||||
V4 {
|
||||
local_dns: bool,
|
||||
identification: Option<Socks4Identification<'a>>,
|
||||
},
|
||||
V5 {
|
||||
local_dns: bool,
|
||||
authorization: Option<Socks5Authorization<'a>>,
|
||||
},
|
||||
}
|
||||
|
||||
pub(super) fn parse_socks_proxy<'t>(scheme: &str, proxy: &'t Url) -> SocksVersion<'t> {
|
||||
@@ -33,13 +42,19 @@ pub(super) fn parse_socks_proxy<'t>(scheme: &str, proxy: &'t Url) -> SocksVersio
|
||||
"" => None,
|
||||
username => Some(Socks4Identification { user_id: username }),
|
||||
};
|
||||
SocksVersion::V4(identification)
|
||||
SocksVersion::V4 {
|
||||
local_dns: scheme != "socks4a",
|
||||
identification,
|
||||
}
|
||||
} else {
|
||||
let authorization = proxy.password().map(|password| Socks5Authorization {
|
||||
username: proxy.username(),
|
||||
password,
|
||||
});
|
||||
SocksVersion::V5(authorization)
|
||||
SocksVersion::V5 {
|
||||
local_dns: scheme != "socks5h",
|
||||
authorization,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,26 +63,58 @@ pub(super) async fn connect_socks_proxy_stream(
|
||||
socks_version: SocksVersion<'_>,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
let rpc_host = rpc_host
|
||||
.into_target_addr()
|
||||
.context("Failed to parse target addr")?;
|
||||
|
||||
let local_dns = match &socks_version {
|
||||
SocksVersion::V4 { local_dns, .. } => local_dns,
|
||||
SocksVersion::V5 { local_dns, .. } => local_dns,
|
||||
};
|
||||
let rpc_host = match (rpc_host, local_dns) {
|
||||
(TargetAddr::Domain(domain, port), true) => {
|
||||
let ip_addr = tokio::net::lookup_host((domain.as_ref(), port))
|
||||
.await
|
||||
.with_context(|| format!("Failed to lookup domain {}", domain))?
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("Failed to lookup domain {}", domain))?;
|
||||
TargetAddr::Ip(ip_addr)
|
||||
}
|
||||
(rpc_host, _) => rpc_host,
|
||||
};
|
||||
|
||||
match socks_version {
|
||||
SocksVersion::V4(None) => {
|
||||
SocksVersion::V4 {
|
||||
identification: None,
|
||||
..
|
||||
} => {
|
||||
let socks = Socks4Stream::connect_with_socket(stream, rpc_host)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V4(Some(Socks4Identification { user_id })) => {
|
||||
SocksVersion::V4 {
|
||||
identification: Some(Socks4Identification { user_id }),
|
||||
..
|
||||
} => {
|
||||
let socks = Socks4Stream::connect_with_userid_and_socket(stream, rpc_host, user_id)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V5(None) => {
|
||||
SocksVersion::V5 {
|
||||
authorization: None,
|
||||
..
|
||||
} => {
|
||||
let socks = Socks5Stream::connect_with_socket(stream, rpc_host)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V5(Some(Socks5Authorization { username, password })) => {
|
||||
SocksVersion::V5 {
|
||||
authorization: Some(Socks5Authorization { username, password }),
|
||||
..
|
||||
} => {
|
||||
let socks = Socks5Stream::connect_with_password_and_socket(
|
||||
stream, rpc_host, username, password,
|
||||
)
|
||||
@@ -90,7 +137,13 @@ mod tests {
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, SocksVersion::V4(None)))
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V4 {
|
||||
local_dns: true,
|
||||
identification: None
|
||||
}
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -101,7 +154,25 @@ mod tests {
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V4(Some(Socks4Identification { user_id: "userid" }))
|
||||
SocksVersion::V4 {
|
||||
local_dns: true,
|
||||
identification: Some(Socks4Identification { user_id: "userid" })
|
||||
}
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks4_with_remote_dns() {
|
||||
let proxy = Url::parse("socks4a://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V4 {
|
||||
local_dns: false,
|
||||
identification: None
|
||||
}
|
||||
))
|
||||
}
|
||||
|
||||
@@ -111,7 +182,13 @@ mod tests {
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, SocksVersion::V5(None)))
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V5 {
|
||||
local_dns: true,
|
||||
authorization: None
|
||||
}
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -122,10 +199,28 @@ mod tests {
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V5(Some(Socks5Authorization {
|
||||
username: "username",
|
||||
password: "password"
|
||||
}))
|
||||
SocksVersion::V5 {
|
||||
local_dns: true,
|
||||
authorization: Some(Socks5Authorization {
|
||||
username: "username",
|
||||
password: "password"
|
||||
})
|
||||
}
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks5_with_remote_dns() {
|
||||
let proxy = Url::parse("socks5h://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V5 {
|
||||
local_dns: false,
|
||||
authorization: None
|
||||
}
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use chrono::Duration;
|
||||
use futures::{StreamExt, stream::BoxStream};
|
||||
use gpui::{AppContext as _, BackgroundExecutor, Entity, TestAppContext};
|
||||
@@ -45,7 +45,7 @@ impl FakeServer {
|
||||
move |cx| {
|
||||
let state = state.clone();
|
||||
cx.spawn(async move |_| {
|
||||
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
|
||||
let state = state.upgrade().context("server dropped")?;
|
||||
let mut state = state.lock();
|
||||
state.auth_count += 1;
|
||||
let access_token = state.access_token.to_string();
|
||||
@@ -64,8 +64,8 @@ impl FakeServer {
|
||||
let state = state.clone();
|
||||
let credentials = credentials.clone();
|
||||
cx.spawn(async move |cx| {
|
||||
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
|
||||
let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
|
||||
let state = state.upgrade().context("server dropped")?;
|
||||
let peer = peer.upgrade().context("server dropped")?;
|
||||
if state.lock().forbid_connections {
|
||||
Err(EstablishConnectionError::Other(anyhow!(
|
||||
"server is forbidding connections"
|
||||
@@ -155,7 +155,7 @@ impl FakeServer {
|
||||
.expect("not connected")
|
||||
.next()
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("other half hung up"))?;
|
||||
.context("other half hung up")?;
|
||||
self.executor.finish_waiting();
|
||||
let type_name = message.payload_type_name();
|
||||
let message = message.into_any();
|
||||
|
||||
@@ -388,9 +388,7 @@ impl UserStore {
|
||||
// Users are fetched in parallel above and cached in call to get_users
|
||||
// No need to parallelize here
|
||||
let mut updated_contacts = Vec::new();
|
||||
let this = this
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
|
||||
let this = this.upgrade().context("can't upgrade user store handle")?;
|
||||
for contact in message.contacts {
|
||||
updated_contacts
|
||||
.push(Arc::new(Contact::from_proto(contact, &this, cx).await?));
|
||||
@@ -574,7 +572,7 @@ impl UserStore {
|
||||
let client = self.client.upgrade();
|
||||
cx.spawn(async move |_, _| {
|
||||
client
|
||||
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
|
||||
.context("can't upgrade client reference")?
|
||||
.request(proto::RespondToContactRequest {
|
||||
requester_id,
|
||||
response: proto::ContactRequestResponse::Dismiss as i32,
|
||||
@@ -596,7 +594,7 @@ impl UserStore {
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = client
|
||||
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
|
||||
.context("can't upgrade client reference")?
|
||||
.request(request)
|
||||
.await;
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -663,7 +661,7 @@ impl UserStore {
|
||||
this.users
|
||||
.get(user_id)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("user {} not found", user_id))
|
||||
.with_context(|| format!("user {user_id} not found"))
|
||||
})
|
||||
.collect()
|
||||
})?
|
||||
@@ -703,7 +701,7 @@ impl UserStore {
|
||||
this.users
|
||||
.get(&user_id)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("server responded with no users"))
|
||||
.context("server responded with no users")
|
||||
})?
|
||||
})
|
||||
}
|
||||
@@ -765,20 +763,17 @@ impl UserStore {
|
||||
};
|
||||
|
||||
let client = self.client.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
if let Some(client) = client.upgrade() {
|
||||
let response = client
|
||||
.request(proto::AcceptTermsOfService {})
|
||||
.await
|
||||
.context("error accepting tos")?;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
|
||||
cx.emit(Event::PrivateUserInfoUpdated);
|
||||
})
|
||||
} else {
|
||||
Err(anyhow!("client not found"))
|
||||
}
|
||||
cx.spawn(async move |this, cx| -> anyhow::Result<()> {
|
||||
let client = client.upgrade().context("client not found")?;
|
||||
let response = client
|
||||
.request(proto::AcceptTermsOfService {})
|
||||
.await
|
||||
.context("error accepting tos")?;
|
||||
this.update(cx, |this, cx| {
|
||||
this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
|
||||
cx.emit(Event::PrivateUserInfoUpdated);
|
||||
})?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
@@ -897,7 +892,7 @@ impl Contact {
|
||||
impl Collaborator {
|
||||
pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
|
||||
Ok(Self {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
peer_id: message.peer_id.context("invalid peer id")?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
is_host: message.is_host,
|
||||
|
||||
@@ -92,6 +92,7 @@ command_palette_hooks.workspace = true
|
||||
context_server.workspace = true
|
||||
ctor.workspace = true
|
||||
dap = { workspace = true, features = ["test-support"] }
|
||||
dap_adapters = { workspace = true, features = ["test-support"] }
|
||||
debugger_ui = { workspace = true, features = ["test-support"] }
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
drop table monthly_usages;
|
||||
drop table lifetime_usages;
|
||||
@@ -0,0 +1 @@
|
||||
drop table billing_events;
|
||||
@@ -5,12 +5,13 @@ pub mod extensions;
|
||||
pub mod ips_file;
|
||||
pub mod slack;
|
||||
|
||||
use crate::db::Database;
|
||||
use crate::{
|
||||
AppState, Error, Result, auth,
|
||||
db::{User, UserId},
|
||||
rpc,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Context as _;
|
||||
use axum::{
|
||||
Extension, Json, Router,
|
||||
body::Body,
|
||||
@@ -97,6 +98,7 @@ impl std::fmt::Display for SystemIdHeader {
|
||||
pub fn routes(rpc_server: Arc<rpc::Server>) -> Router<(), Body> {
|
||||
Router::new()
|
||||
.route("/user", get(get_authenticated_user))
|
||||
.route("/users/look_up", get(look_up_user))
|
||||
.route("/users/:id/access_tokens", post(create_access_token))
|
||||
.route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
|
||||
.merge(billing::router())
|
||||
@@ -181,6 +183,87 @@ async fn get_authenticated_user(
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct LookUpUserParams {
|
||||
identifier: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct LookUpUserResponse {
|
||||
user: Option<User>,
|
||||
}
|
||||
|
||||
async fn look_up_user(
|
||||
Query(params): Query<LookUpUserParams>,
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
) -> Result<Json<LookUpUserResponse>> {
|
||||
let user = resolve_identifier_to_user(&app.db, ¶ms.identifier).await?;
|
||||
let user = if let Some(user) = user {
|
||||
match user {
|
||||
UserOrId::User(user) => Some(user),
|
||||
UserOrId::Id(id) => app.db.get_user_by_id(id).await?,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(Json(LookUpUserResponse { user }))
|
||||
}
|
||||
|
||||
enum UserOrId {
|
||||
User(User),
|
||||
Id(UserId),
|
||||
}
|
||||
|
||||
async fn resolve_identifier_to_user(
|
||||
db: &Arc<Database>,
|
||||
identifier: &str,
|
||||
) -> Result<Option<UserOrId>> {
|
||||
if let Some(identifier) = identifier.parse::<i32>().ok() {
|
||||
let user = db.get_user_by_id(UserId(identifier)).await?;
|
||||
|
||||
return Ok(user.map(UserOrId::User));
|
||||
}
|
||||
|
||||
if identifier.starts_with("cus_") {
|
||||
let billing_customer = db
|
||||
.get_billing_customer_by_stripe_customer_id(&identifier)
|
||||
.await?;
|
||||
|
||||
return Ok(billing_customer.map(|billing_customer| UserOrId::Id(billing_customer.user_id)));
|
||||
}
|
||||
|
||||
if identifier.starts_with("sub_") {
|
||||
let billing_subscription = db
|
||||
.get_billing_subscription_by_stripe_subscription_id(&identifier)
|
||||
.await?;
|
||||
|
||||
if let Some(billing_subscription) = billing_subscription {
|
||||
let billing_customer = db
|
||||
.get_billing_customer_by_id(billing_subscription.billing_customer_id)
|
||||
.await?;
|
||||
|
||||
return Ok(
|
||||
billing_customer.map(|billing_customer| UserOrId::Id(billing_customer.user_id))
|
||||
);
|
||||
} else {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
|
||||
if identifier.contains('@') {
|
||||
let user = db.get_user_by_email(identifier).await?;
|
||||
|
||||
return Ok(user.map(UserOrId::User));
|
||||
}
|
||||
|
||||
if let Some(user) = db.get_user_by_github_login(identifier).await? {
|
||||
return Ok(Some(UserOrId::User(user)));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
struct CreateUserParams {
|
||||
github_user_id: i32,
|
||||
@@ -220,7 +303,7 @@ async fn create_access_token(
|
||||
.db
|
||||
.get_user_by_id(user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let mut impersonated_user_id = None;
|
||||
if let Some(impersonate) = params.impersonate {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.